language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/io/TempDirFactory.java
|
{
"start": 2997,
"end": 3403
}
|
class ____ implements TempDirFactory {
public static final TempDirFactory INSTANCE = new Standard();
private static final String TEMP_DIR_PREFIX = "junit-";
public Standard() {
}
@Override
public Path createTempDirectory(AnnotatedElementContext elementContext, ExtensionContext extensionContext)
throws IOException {
return Files.createTempDirectory(TEMP_DIR_PREFIX);
}
}
}
|
Standard
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/core/retry/Retryable.java
|
{
"start": 1507,
"end": 1666
}
|
class ____ of the implementation class.
* @return the name of this retryable operation
*/
default String getName() {
return getClass().getName();
}
}
|
name
|
java
|
google__guice
|
extensions/servlet/test/com/google/inject/servlet/ServletTestUtils.java
|
{
"start": 734,
"end": 810
}
|
class ____ {
private ServletTestUtils() {}
private static
|
ServletTestUtils
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/AutoCloseTests.java
|
{
"start": 16481,
"end": 16603
}
|
class ____ implements TestInterface {
@AutoClose
final AutoCloseable field = null;
}
static
|
NullCloseableFieldTestCase
|
java
|
apache__flink
|
flink-dstl/flink-dstl-dfs/src/test/java/org/apache/flink/changelog/fs/ChangelogStorageMetricsTest.java
|
{
"start": 16404,
"end": 17942
}
|
class ____ implements StateChangeUploader {
private final Map<UploadTask, Integer> attemptsPerTask;
private final int maxAttempts;
public MaxAttemptUploader(int maxAttempts) {
this.maxAttempts = maxAttempts;
this.attemptsPerTask = new HashMap<>();
}
@Override
public UploadTasksResult upload(Collection<UploadTask> tasks) throws IOException {
Map<UploadTask, Map<StateChangeSet, Tuple2<Long, Long>>> map = new HashMap<>();
for (UploadTask uploadTask : tasks) {
int currentAttempt = 1 + attemptsPerTask.getOrDefault(uploadTask, 0);
if (currentAttempt == maxAttempts) {
attemptsPerTask.remove(uploadTask);
map.put(
uploadTask,
uploadTask.changeSets.stream()
.collect(
Collectors.toMap(
Function.identity(),
ign -> Tuple2.of(0L, 0L))));
} else {
attemptsPerTask.put(uploadTask, currentAttempt);
throw new IOException();
}
}
return new UploadTasksResult(map, new EmptyStreamStateHandle());
}
@Override
public void close() {
attemptsPerTask.clear();
}
}
private static
|
MaxAttemptUploader
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/appender/AsyncAppenderEventDispatcher.java
|
{
"start": 1412,
"end": 6724
}
|
class ____ extends Log4jThread {
private static final LogEvent STOP_EVENT = new Log4jLogEvent();
private static final AtomicLong THREAD_COUNTER = new AtomicLong(0);
private static final Logger LOGGER = StatusLogger.getLogger();
private final AppenderControl errorAppender;
private final List<AppenderControl> appenders;
private final BlockingQueue<LogEvent> queue;
private final AtomicBoolean stoppedRef;
AsyncAppenderEventDispatcher(
final String name,
final AppenderControl errorAppender,
final List<AppenderControl> appenders,
final BlockingQueue<LogEvent> queue) {
super("AsyncAppenderEventDispatcher-" + THREAD_COUNTER.incrementAndGet() + "-" + name);
this.setDaemon(true);
this.errorAppender = errorAppender;
this.appenders = appenders;
this.queue = queue;
this.stoppedRef = new AtomicBoolean();
}
/**
* Gets all Appenders.
*
* @return a list of Appenders.
*/
List<Appender> getAppenders() {
return appenders.stream().map(AppenderControl::getAppender).collect(Collectors.toList());
}
@Override
public void run() {
LOGGER.trace("{} has started.", getName());
dispatchAll();
dispatchRemaining();
}
private void dispatchAll() {
while (!stoppedRef.get()) {
LogEvent event;
try {
event = queue.take();
} catch (final InterruptedException ignored) {
// Restore the interrupted flag cleared when the exception is caught.
interrupt();
break;
}
if (event == STOP_EVENT) {
break;
}
event.setEndOfBatch(queue.isEmpty());
dispatch(event);
}
LOGGER.trace("{} has stopped.", getName());
}
private void dispatchRemaining() {
int eventCount = 0;
while (true) {
// Note the non-blocking Queue#poll() method!
final LogEvent event = queue.poll();
if (event == null) {
break;
}
// Allow events that managed to be submitted after the sentinel.
if (event == STOP_EVENT) {
continue;
}
event.setEndOfBatch(queue.isEmpty());
dispatch(event);
eventCount++;
}
LOGGER.trace("{} has processed the last {} remaining event(s).", getName(), eventCount);
}
/**
* Dispatches the given {@code event} to the registered appenders <b>in the
* current thread</b>.
*/
void dispatch(final LogEvent event) {
// Dispatch the event to all registered appenders.
boolean succeeded = false;
// noinspection ForLoopReplaceableByForEach (avoid iterator instantion)
for (int appenderIndex = 0; appenderIndex < appenders.size(); appenderIndex++) {
final AppenderControl control = appenders.get(appenderIndex);
try {
control.callAppender(event);
succeeded = true;
} catch (final Throwable error) {
// If no appender is successful, the error appender will get it.
// It is okay to simply log it here.
LOGGER.trace("{} has failed to call appender {}", getName(), control.getAppenderName(), error);
}
}
// Fallback to the error appender if none has succeeded so far.
if (!succeeded && errorAppender != null) {
try {
errorAppender.callAppender(event);
} catch (final Throwable error) {
// If the error appender also fails, there is nothing further
// we can do about it.
LOGGER.trace(
"{} has failed to call the error appender {}",
getName(),
errorAppender.getAppenderName(),
error);
}
}
}
void stop(final long timeoutMillis) throws InterruptedException {
// Mark the completion, if necessary.
final boolean stopped = stoppedRef.compareAndSet(false, true);
if (stopped) {
LOGGER.trace("{} is signaled to stop.", getName());
}
// There is a slight chance that the thread is not started yet, wait for
// it to run. Otherwise, interrupt+join might block.
// noinspection StatementWithEmptyBody
while (Thread.State.NEW.equals(getState()))
;
// Enqueue the stop event, if there is sufficient room; otherwise,
// fallback to interruption. (We should avoid interrupting the thread if
// at all possible due to the subtleties of Java interruption, which
// will actually close sockets if any blocking operations are in
// progress! This means a socket appender may surprisingly fail to
// deliver final events. I recall some oddities with file I/O as well.
// — ckozak)
final boolean added = queue.offer(STOP_EVENT);
if (!added) {
interrupt();
}
// Wait for the completion.
join(timeoutMillis);
}
}
|
AsyncAppenderEventDispatcher
|
java
|
spring-projects__spring-security
|
access/src/main/java/org/springframework/security/access/method/MapBasedMethodSecurityMetadataSource.java
|
{
"start": 9900,
"end": 10105
}
|
class ____
* inherits but does not redeclare a method, the registered Class will be the Class
* we're invoking against and the Method will provide details of the declared class.
*/
private static
|
merely
|
java
|
google__dagger
|
hilt-compiler/main/java/dagger/hilt/processor/internal/ComponentDescriptor.java
|
{
"start": 915,
"end": 1078
}
|
class ____ it doesn't conflict with
// dagger.internal.codegen.ComponentDescriptor
/** Represents a single component in the hierarchy. */
@AutoValue
public abstract
|
so
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/MultisetAssert_contains_Test.java
|
{
"start": 1073,
"end": 3770
}
|
class ____ {
@Test
public void should_fail_if_actual_is_null() {
// GIVEN
Multiset<String> actual = null;
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(actual).contains(1, "test"));
// THEN
assertThat(throwable).isInstanceOf(AssertionError.class)
.hasMessage(actualIsNull());
}
@Test
public void should_fail_if_expected_is_negative() {
// GIVEN
Multiset<String> actual = HashMultiset.create();
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(actual).contains(-1, "test"));
// THEN
assertThat(throwable).isInstanceOf(IllegalArgumentException.class)
.hasMessage("The expected count should not be negative.");
}
@Test
public void should_fail_if_actual_contains_value_fewer_times_than_expected() {
// GIVEN
Multiset<String> actual = HashMultiset.create();
actual.add("test", 2);
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(actual).contains(3, "test"));
// THEN
assertThat(throwable).isInstanceOf(AssertionError.class)
.hasMessage(format("%nExpecting:%n" +
" [\"test\", \"test\"]%n" +
"to contain:%n" +
" \"test\"%n" +
"exactly 3 times but was found 2 times."));
}
@Test
public void should_pass_if_actual_contains_value_number_of_times_expected() {
// GIVEN
Multiset<String> actual = HashMultiset.create();
actual.add("test", 2);
// THEN
assertThat(actual).contains(2, "test");
}
@Test
public void should_fail_if_actual_contains_value_more_times_than_expected() {
// GIVEN
Multiset<String> actual = HashMultiset.create();
actual.add("test", 2);
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(actual).contains(1, "test"));
// THEN
assertThat(throwable).isInstanceOf(AssertionError.class)
.hasMessage(format("%nExpecting:%n" +
" [\"test\", \"test\"]%n" +
"to contain:%n" +
" \"test\"%n" +
"exactly 1 times but was found 2 times."));
}
@Test
public void should_work_with_filtering() {
// GIVEN
Multiset<String> actual = HashMultiset.create();
actual.add("test", 2);
// THEN
assertThat(actual).filteredOn(s -> s.startsWith("t"))
.contains(2, "test");
}
}
|
MultisetAssert_contains_Test
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/method/annotation/HandlerMethodValidationException.java
|
{
"start": 6781,
"end": 9801
}
|
interface ____ {
/**
* Handle results for {@code @CookieValue} method parameters.
* @param cookieValue the annotation declared on the parameter
* @param result the validation result
*/
void cookieValue(CookieValue cookieValue, ParameterValidationResult result);
/**
* Handle results for {@code @MatrixVariable} method parameters.
* @param matrixVariable the annotation declared on the parameter
* @param result the validation result
*/
void matrixVariable(MatrixVariable matrixVariable, ParameterValidationResult result);
/**
* Handle results for {@code @ModelAttribute} method parameters.
* @param modelAttribute the optional {@code ModelAttribute} annotation,
* possibly {@code null} if the method parameter is declared without it.
* @param errors the validation errors
*/
void modelAttribute(@Nullable ModelAttribute modelAttribute, ParameterErrors errors);
/**
* Handle results for {@code @PathVariable} method parameters.
* @param pathVariable the annotation declared on the parameter
* @param result the validation result
*/
void pathVariable(PathVariable pathVariable, ParameterValidationResult result);
/**
* Handle results for {@code @RequestBody} method parameters.
* @param requestBody the annotation declared on the parameter
* @param errors the validation error
*/
void requestBody(RequestBody requestBody, ParameterErrors errors);
/**
* An additional {@code @RequestBody} callback for validation failures
* for constraints on the method parameter. For example:
* <pre class="code">
* @RequestBody List<@NotEmpty String> ids
* </pre>
* Handle results for {@code @RequestBody} method parameters.
* @param requestBody the annotation declared on the parameter
* @param result the validation result
* @since 6.2.4
*/
default void requestBodyValidationResult(RequestBody requestBody, ParameterValidationResult result) {
}
/**
* Handle results for {@code @RequestHeader} method parameters.
* @param requestHeader the annotation declared on the parameter
* @param result the validation result
*/
void requestHeader(RequestHeader requestHeader, ParameterValidationResult result);
/**
* Handle results for {@code @RequestParam} method parameters.
* @param requestParam the optional {@code RequestParam} annotation,
* possibly {@code null} if the method parameter is declared without it.
* @param result the validation result
*/
void requestParam(@Nullable RequestParam requestParam, ParameterValidationResult result);
/**
* Handle results for {@code @RequestPart} method parameters.
* @param requestPart the annotation declared on the parameter
* @param errors the validation errors
*/
void requestPart(RequestPart requestPart, ParameterErrors errors);
/**
* Handle other results that aren't any of the above.
* @param result the validation result
*/
void other(ParameterValidationResult result);
}
}
|
Visitor
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/leaderelection/LeaderElectionTest.java
|
{
"start": 8175,
"end": 8914
}
|
class ____ implements ServiceClass {
private EmbeddedLeaderService embeddedLeaderService;
@Override
public void setup(FatalErrorHandler fatalErrorHandler) {
embeddedLeaderService = new EmbeddedLeaderService(EXECUTOR_RESOURCE.getExecutor());
}
@Override
public void teardown() {
if (embeddedLeaderService != null) {
embeddedLeaderService.shutdown();
embeddedLeaderService = null;
}
}
@Override
public LeaderElection createLeaderElection() {
return embeddedLeaderService.createLeaderElectionService("embedded_leader_election");
}
}
private static final
|
EmbeddedServiceClass
|
java
|
resilience4j__resilience4j
|
resilience4j-core/src/main/java/io/github/resilience4j/core/functions/CheckedRunnable.java
|
{
"start": 883,
"end": 1281
}
|
interface ____ {
void run() throws Throwable;
default Runnable unchecked() {
return () -> {
try {
run();
} catch(Throwable x) {
sneakyThrow(x);
}
};
}
@SuppressWarnings("unchecked")
static <T extends Throwable, R> R sneakyThrow(Throwable t) throws T {
throw (T) t;
}
}
|
CheckedRunnable
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/processors/ReplayProcessor.java
|
{
"start": 27041,
"end": 31346
}
|
class ____<T>
implements ReplayBuffer<T> {
final List<T> buffer;
Throwable error;
volatile boolean done;
volatile int size;
UnboundedReplayBuffer(int capacityHint) {
this.buffer = new ArrayList<>(capacityHint);
}
@Override
public void next(T value) {
buffer.add(value);
size++;
}
@Override
public void error(Throwable ex) {
error = ex;
done = true;
}
@Override
public void complete() {
done = true;
}
@Override
public void trimHead() {
// not applicable for an unbounded buffer
}
@Override
@Nullable
public T getValue() {
int s = size;
if (s == 0) {
return null;
}
return buffer.get(s - 1);
}
@Override
@SuppressWarnings("unchecked")
public T[] getValues(T[] array) {
int s = size;
if (s == 0) {
if (array.length != 0) {
array[0] = null;
}
return array;
}
List<T> b = buffer;
if (array.length < s) {
array = (T[])Array.newInstance(array.getClass().getComponentType(), s);
}
for (int i = 0; i < s; i++) {
array[i] = b.get(i);
}
if (array.length > s) {
array[s] = null;
}
return array;
}
@Override
public void replay(ReplaySubscription<T> rs) {
if (rs.getAndIncrement() != 0) {
return;
}
int missed = 1;
final List<T> b = buffer;
final Subscriber<? super T> a = rs.downstream;
Integer indexObject = (Integer)rs.index;
int index;
if (indexObject != null) {
index = indexObject;
} else {
index = 0;
rs.index = 0;
}
long e = rs.emitted;
for (;;) {
long r = rs.requested.get();
while (e != r) {
if (rs.cancelled) {
rs.index = null;
return;
}
boolean d = done;
int s = size;
if (d && index == s) {
rs.index = null;
rs.cancelled = true;
Throwable ex = error;
if (ex == null) {
a.onComplete();
} else {
a.onError(ex);
}
return;
}
if (index == s) {
break;
}
a.onNext(b.get(index));
index++;
e++;
}
if (e == r) {
if (rs.cancelled) {
rs.index = null;
return;
}
boolean d = done;
int s = size;
if (d && index == s) {
rs.index = null;
rs.cancelled = true;
Throwable ex = error;
if (ex == null) {
a.onComplete();
} else {
a.onError(ex);
}
return;
}
}
rs.index = index;
rs.emitted = e;
missed = rs.addAndGet(-missed);
if (missed == 0) {
break;
}
}
}
@Override
public int size() {
return size;
}
@Override
public boolean isDone() {
return done;
}
@Override
public Throwable getError() {
return error;
}
}
static final
|
UnboundedReplayBuffer
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
|
{
"start": 11620,
"end": 12087
}
|
class ____ extends SqlExpressionTranslator<StringQueryPredicate> {
@Override
protected QueryTranslation asQuery(StringQueryPredicate q, boolean onAggs, TranslatorHandler handler) {
Check.isTrue(onAggs == false, "Like not supported within an aggregation context");
return new QueryTranslation(org.elasticsearch.xpack.ql.planner.ExpressionTranslators.StringQueries.doTranslate(q, handler));
}
}
static
|
StringQueries
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/annotations/FilterDef.java
|
{
"start": 1383,
"end": 2186
}
|
class ____ {
* @Id @GeneratedValue Long id;
* @Enumerated(STRING) Status status;
* ...
* }
* </pre>
* <p>
* At runtime, a filter may be enabled in a particular session by
* calling {@link org.hibernate.Session#enableFilter(String)},
* passing the name of the filter, and then supplying arguments to
* its parameters.
* <pre>
* session.enableFilter("Current");
* </pre>
* <p>
* A filter has no effect unless:
* <ul>
* <li>it is explicitly enabled by calling {@code enableFilter}, or
* <li>it is declared {@link #autoEnabled autoEnabled = true}.
* </ul>
*
* @author Matthew Inger
* @author Emmanuel Bernard
*
* @see org.hibernate.Filter
* @see DialectOverride.FilterDefs
*/
@Target({TYPE, PACKAGE})
@Retention(RUNTIME)
@Repeatable(FilterDefs.class)
public @
|
Record
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java
|
{
"start": 15257,
"end": 19630
}
|
interface ____ {
ReturnsPrimitiveBoolean newInstance();
}
public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("returnsprimitiveboolean", Factory.class);
public static final String[] PARAMETERS = new String[] {};
public abstract boolean execute();
}
public void testReturnsPrimitiveBoolean() throws Exception {
assertTrue(
scriptEngine.compile("testReturnsPrimitiveBoolean0", "true", ReturnsPrimitiveBoolean.CONTEXT, emptyMap())
.newInstance()
.execute()
);
assertFalse(
scriptEngine.compile("testReturnsPrimitiveBoolean1", "false", ReturnsPrimitiveBoolean.CONTEXT, emptyMap())
.newInstance()
.execute()
);
assertTrue(
scriptEngine.compile("testReturnsPrimitiveBoolean2", "Boolean.TRUE", ReturnsPrimitiveBoolean.CONTEXT, emptyMap())
.newInstance()
.execute()
);
assertFalse(
scriptEngine.compile("testReturnsPrimitiveBoolean3", "Boolean.FALSE", ReturnsPrimitiveBoolean.CONTEXT, emptyMap())
.newInstance()
.execute()
);
assertTrue(
scriptEngine.compile("testReturnsPrimitiveBoolean4", "def i = true; i", ReturnsPrimitiveBoolean.CONTEXT, emptyMap())
.newInstance()
.execute()
);
assertTrue(
scriptEngine.compile("testReturnsPrimitiveBoolean5", "def i = Boolean.TRUE; i", ReturnsPrimitiveBoolean.CONTEXT, emptyMap())
.newInstance()
.execute()
);
assertTrue(
scriptEngine.compile("testReturnsPrimitiveBoolean6", "true || false", ReturnsPrimitiveBoolean.CONTEXT, emptyMap())
.newInstance()
.execute()
);
String debug = Debugger.toString(ReturnsPrimitiveBoolean.class, "false", new CompilerSettings(), PAINLESS_BASE_WHITELIST);
assertThat(debug, containsString("ICONST_0"));
// The important thing here is that we have the bytecode for returning an integer instead of an object. booleans are integers.
assertThat(debug, containsString("IRETURN"));
Exception e = expectScriptThrows(
ClassCastException.class,
() -> scriptEngine.compile("testReturnsPrimitiveBoolean7", "1L", ReturnsPrimitiveBoolean.CONTEXT, emptyMap())
.newInstance()
.execute()
);
assertEquals("Cannot cast from [long] to [boolean].", e.getMessage());
e = expectScriptThrows(
ClassCastException.class,
() -> scriptEngine.compile("testReturnsPrimitiveBoolean8", "1.1f", ReturnsPrimitiveBoolean.CONTEXT, emptyMap())
.newInstance()
.execute()
);
assertEquals("Cannot cast from [float] to [boolean].", e.getMessage());
e = expectScriptThrows(
ClassCastException.class,
() -> scriptEngine.compile("testReturnsPrimitiveBoolean9", "1.1d", ReturnsPrimitiveBoolean.CONTEXT, emptyMap())
.newInstance()
.execute()
);
assertEquals("Cannot cast from [double] to [boolean].", e.getMessage());
expectScriptThrows(
ClassCastException.class,
() -> scriptEngine.compile("testReturnsPrimitiveBoolean10", "def i = 1L; i", ReturnsPrimitiveBoolean.CONTEXT, emptyMap())
.newInstance()
.execute()
);
expectScriptThrows(
ClassCastException.class,
() -> scriptEngine.compile("testReturnsPrimitiveBoolean11", "def i = 1.1f; i", ReturnsPrimitiveBoolean.CONTEXT, emptyMap())
.newInstance()
.execute()
);
expectScriptThrows(
ClassCastException.class,
() -> scriptEngine.compile("testReturnsPrimitiveBoolean12", "def i = 1.1d; i", ReturnsPrimitiveBoolean.CONTEXT, emptyMap())
.newInstance()
.execute()
);
assertFalse(
scriptEngine.compile("testReturnsPrimitiveBoolean13", "int i = 0", ReturnsPrimitiveBoolean.CONTEXT, emptyMap())
.newInstance()
.execute()
);
}
public abstract static
|
Factory
|
java
|
apache__spark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/GeometryExecutionSuite.java
|
{
"start": 1109,
"end": 7871
}
|
class ____ {
// A sample Geometry byte array for testing purposes, representing a POINT(1 2) with SRID 4326.
private final byte[] testGeometryVal = new byte[] {
(byte)0xE6, 0x10, 0x00, 0x00,
0x01, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, (byte)0xF0,
0x3F, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x40
};
/** Tests for Geometry factory methods and getters. */
@Test
void testFromBytes() {
Geometry geometry = Geometry.fromBytes(testGeometryVal);
assertNotNull(geometry);
assertArrayEquals(testGeometryVal, geometry.getBytes());
}
@Test
void testFromValue() {
GeometryVal value = GeometryVal.fromBytes(testGeometryVal);
Geometry geometry = Geometry.fromValue(value);
assertNotNull(geometry);
assertEquals(value, geometry.getValue());
}
@Test
void testGetBytes() {
Geometry geometry = Geometry.fromBytes(testGeometryVal);
assertArrayEquals(testGeometryVal, geometry.getBytes());
}
@Test
void testCopy() {
Geometry geometry = Geometry.fromBytes(testGeometryVal);
Geometry geometryCopy = geometry.copy();
assertNotNull(geometryCopy);
assertArrayEquals(geometry.getBytes(), geometryCopy.getBytes());
}
/** Tests for Geometry constants. */
@Test
void testDefaultSrid() {
assertEquals(0, Geometry.DEFAULT_SRID);
}
/** Tests for Geometry WKB parsing. */
@Test
void testFromWkbWithSridRudimentary() {
byte[] wkb = new byte[]{1, 2, 3};
// Note: This is a rudimentary WKB handling test; actual WKB parsing is not yet implemented.
// Once we implement the appropriate parsing logic, this test should be updated accordingly.
Geometry geometry = Geometry.fromWkb(wkb, 4326);
assertNotNull(geometry);
assertArrayEquals(wkb, geometry.toWkb());
assertEquals(4326, geometry.srid());
}
@Test
void testFromWkbNoSridRudimentary() {
byte[] wkb = new byte[]{1, 2, 3};
// Note: This is a rudimentary WKB handling test; actual WKB parsing is not yet implemented.
// Once we implement the appropriate parsing logic, this test should be updated accordingly.
Geometry geometry = Geometry.fromWkb(wkb);
assertNotNull(geometry);
assertArrayEquals(wkb, geometry.toWkb());
assertEquals(0, geometry.srid());
}
/** Tests for Geometry EWKB parsing. */
@Test
void testFromEwkbUnsupported() {
byte[] ewkb = new byte[]{1, 2, 3};
UnsupportedOperationException exception = assertThrows(
UnsupportedOperationException.class,
() -> Geometry.fromEwkb(ewkb)
);
assertEquals("Geometry EWKB parsing is not yet supported.", exception.getMessage());
}
/** Tests for Geometry WKT parsing. */
@Test
void testFromWktWithSridUnsupported() {
byte[] wkt = new byte[]{4, 5, 5};
UnsupportedOperationException exception = assertThrows(
UnsupportedOperationException.class,
() -> Geometry.fromWkt(wkt, 0)
);
assertEquals("Geometry WKT parsing is not yet supported.", exception.getMessage());
}
@Test
void testFromWktNoSridUnsupported() {
byte[] wkt = new byte[]{4, 5, 5};
UnsupportedOperationException exception = assertThrows(
UnsupportedOperationException.class,
() -> Geometry.fromWkt(wkt)
);
assertEquals("Geometry WKT parsing is not yet supported.", exception.getMessage());
}
/** Tests for Geometry EWKT parsing. */
@Test
void testFromEwktUnsupported() {
byte[] ewkt = new byte[]{4, 5, 5};
UnsupportedOperationException exception = assertThrows(
UnsupportedOperationException.class,
() -> Geometry.fromEwkt(ewkt)
);
assertEquals("Geometry EWKT parsing is not yet supported.", exception.getMessage());
}
/** Tests for Geometry WKB and EWKB converters. */
@Test
void testToWkb() {
Geometry geometry = Geometry.fromBytes(testGeometryVal);
// WKB value (endianness: NDR) corresponding to WKT: POINT(1 2).
byte[] wkb = HexFormat.of().parseHex("0101000000000000000000f03f0000000000000040");
assertArrayEquals(wkb, geometry.toWkb());
}
@Test
void testToWkbEndiannessNDR() {
Geometry geometry = Geometry.fromBytes(testGeometryVal);
// WKB value (endianness: NDR) corresponding to WKT: POINT(1 2).
byte[] wkb = HexFormat.of().parseHex("0101000000000000000000f03f0000000000000040");
assertArrayEquals(wkb, geometry.toWkb(ByteOrder.LITTLE_ENDIAN));
}
@Test
void testToWkbEndiannessXDR() {
Geometry geometry = Geometry.fromBytes(testGeometryVal);
UnsupportedOperationException exception = assertThrows(
UnsupportedOperationException.class,
() -> geometry.toWkb(ByteOrder.BIG_ENDIAN)
);
assertEquals("Geometry WKB endianness is not yet supported.", exception.getMessage());
}
@Test
void testToEwkbUnsupported() {
Geometry geometry = Geometry.fromBytes(testGeometryVal);
UnsupportedOperationException exception = assertThrows(
UnsupportedOperationException.class,
geometry::toEwkb
);
assertEquals("Geometry EWKB conversion is not yet supported.", exception.getMessage());
}
@Test
void testToEwkbEndiannessXDRUnsupported() {
Geometry geometry = Geometry.fromBytes(testGeometryVal);
UnsupportedOperationException exception = assertThrows(
UnsupportedOperationException.class,
() -> geometry.toEwkb(ByteOrder.BIG_ENDIAN)
);
assertEquals("Geometry EWKB endianness is not yet supported.", exception.getMessage());
}
@Test
void testToEwkbEndiannessNDRUnsupported() {
Geometry geometry = Geometry.fromBytes(testGeometryVal);
UnsupportedOperationException exception = assertThrows(
UnsupportedOperationException.class,
() -> geometry.toEwkb(ByteOrder.LITTLE_ENDIAN)
);
assertEquals("Geometry EWKB endianness is not yet supported.", exception.getMessage());
}
/** Tests for Geometry WKT and EWKT converters. */
@Test
void testToWktUnsupported() {
Geometry geometry = Geometry.fromBytes(testGeometryVal);
UnsupportedOperationException exception = assertThrows(
UnsupportedOperationException.class,
geometry::toWkt
);
assertEquals("Geometry WKT conversion is not yet supported.", exception.getMessage());
}
@Test
void testToEwktUnsupported() {
Geometry geometry = Geometry.fromBytes(testGeometryVal);
UnsupportedOperationException exception = assertThrows(
UnsupportedOperationException.class,
geometry::toEwkt
);
assertEquals("Geometry EWKT conversion is not yet supported.", exception.getMessage());
}
/** Tests for other Geometry methods. */
@Test
void testSrid() {
Geometry geometry = Geometry.fromBytes(testGeometryVal);
assertEquals(4326, geometry.srid());
}
}
|
GeometryExecutionSuite
|
java
|
apache__thrift
|
lib/javame/src/org/apache/thrift/protocol/TField.java
|
{
"start": 910,
"end": 1127
}
|
class ____ {
public TField() {}
public TField(String n, byte t, short i) {
name = n;
type = t;
id = i;
}
public String name = "";
public byte type = TType.STOP;
public short id = 0;
}
|
TField
|
java
|
quarkusio__quarkus
|
core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ProvidedExtensionDepsTestModeTest.java
|
{
"start": 672,
"end": 6916
}
|
class ____ extends BootstrapFromOriginalJarTestBase {
@Override
protected QuarkusBootstrap.Mode getBootstrapMode() {
return QuarkusBootstrap.Mode.TEST;
}
@Override
protected TsArtifact composeApplication() {
final TsArtifact extADep = TsArtifact.jar("ext-a-dep");
addToExpectedLib(extADep);
final TsArtifact depC1 = TsArtifact.jar("dep-c");
//addToExpectedLib(depC1);
extADep.addDependency(depC1);
final TsArtifact extAProvidedDep = TsArtifact.jar("ext-a-provided-dep");
final TsArtifact extADeploymentDep = TsArtifact.jar("ext-a-deployment-dep");
final TsArtifact extAOptionalDeploymentDep = TsArtifact.jar("ext-a-provided-deployment-dep");
final TsQuarkusExt extA = new TsQuarkusExt("ext-a");
addToExpectedLib(extA.getRuntime());
extA.getRuntime()
.addDependency(extADep)
.addDependency(new TsDependency(extAProvidedDep, JavaScopes.PROVIDED));
extA.getDeployment()
.addDependency(extADeploymentDep)
.addDependency(new TsDependency(extAOptionalDeploymentDep, JavaScopes.PROVIDED));
final TsQuarkusExt extB = new TsQuarkusExt("ext-b");
addToExpectedLib(extB.getRuntime());
this.install(extB);
final TsArtifact directProvidedDep = TsArtifact.jar("direct-provided-dep");
addToExpectedLib(directProvidedDep);
final TsArtifact depC2 = TsArtifact.jar("dep-c", "2");
// here provided dependencies will override compile/runtime ones during version convergence
addToExpectedLib(depC2);
directProvidedDep.addDependency(depC2);
final TsArtifact transitiveProvidedDep = TsArtifact.jar("transitive-provided-dep");
addToExpectedLib(transitiveProvidedDep);
directProvidedDep.addDependency(transitiveProvidedDep);
return TsArtifact.jar("app")
.addManagedDependency(platformDescriptor())
.addManagedDependency(platformProperties())
.addDependency(extA)
.addDependency(extB, JavaScopes.PROVIDED)
.addDependency(new TsDependency(directProvidedDep, JavaScopes.PROVIDED));
}
@Override
protected void assertAppModel(ApplicationModel model) throws Exception {
Set<Dependency> expected = new HashSet<>();
expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-a-deployment", "1"),
DependencyFlags.DEPLOYMENT_CP));
expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-a-deployment-dep", "1"),
DependencyFlags.DEPLOYMENT_CP));
expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-b-deployment", "1"),
JavaScopes.PROVIDED,
DependencyFlags.DEPLOYMENT_CP));
assertEquals(expected, getDeploymentOnlyDeps(model));
expected = new HashSet<>();
expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-a", "1"),
DependencyFlags.RUNTIME_CP,
DependencyFlags.DEPLOYMENT_CP,
DependencyFlags.DIRECT,
DependencyFlags.RUNTIME_EXTENSION_ARTIFACT,
DependencyFlags.TOP_LEVEL_RUNTIME_EXTENSION_ARTIFACT));
expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-a-dep", "1"),
DependencyFlags.RUNTIME_CP,
DependencyFlags.DEPLOYMENT_CP));
expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "dep-c", "2"),
DependencyFlags.RUNTIME_CP,
DependencyFlags.DEPLOYMENT_CP));
expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-b", "1"),
JavaScopes.PROVIDED,
DependencyFlags.RUNTIME_EXTENSION_ARTIFACT,
DependencyFlags.DIRECT,
DependencyFlags.TOP_LEVEL_RUNTIME_EXTENSION_ARTIFACT,
DependencyFlags.RUNTIME_CP,
DependencyFlags.DEPLOYMENT_CP,
DependencyFlags.COMPILE_ONLY));
expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "direct-provided-dep", "1"),
JavaScopes.PROVIDED,
DependencyFlags.DIRECT,
DependencyFlags.RUNTIME_CP,
DependencyFlags.DEPLOYMENT_CP,
DependencyFlags.COMPILE_ONLY));
expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "transitive-provided-dep", "1"),
JavaScopes.PROVIDED,
DependencyFlags.RUNTIME_CP,
DependencyFlags.DEPLOYMENT_CP,
DependencyFlags.COMPILE_ONLY));
assertEquals(expected, getDependenciesWithFlag(model, DependencyFlags.RUNTIME_CP));
expected = new HashSet<>();
expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-b", "1"),
JavaScopes.PROVIDED,
DependencyFlags.RUNTIME_EXTENSION_ARTIFACT,
DependencyFlags.DIRECT,
DependencyFlags.TOP_LEVEL_RUNTIME_EXTENSION_ARTIFACT,
DependencyFlags.RUNTIME_CP,
DependencyFlags.DEPLOYMENT_CP,
DependencyFlags.COMPILE_ONLY));
expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "direct-provided-dep", "1"),
JavaScopes.PROVIDED,
DependencyFlags.DIRECT,
DependencyFlags.RUNTIME_CP,
DependencyFlags.DEPLOYMENT_CP,
DependencyFlags.COMPILE_ONLY));
expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "transitive-provided-dep", "1"),
JavaScopes.PROVIDED,
DependencyFlags.RUNTIME_CP,
DependencyFlags.DEPLOYMENT_CP,
DependencyFlags.COMPILE_ONLY));
assertEquals(expected, getDependenciesWithFlag(model, DependencyFlags.COMPILE_ONLY));
}
}
|
ProvidedExtensionDepsTestModeTest
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/ModuleTest.java
|
{
"start": 392,
"end": 988
}
|
class ____ extends TestCase {
public void test_for_module() throws Exception {
ParserConfig config = new ParserConfig();
config.register(new MyModuel2());
config.register(new MyModuel());
assertSame(MiscCodec.instance, config.getDeserializer(A.class));
}
public void test_for_module_1() throws Exception {
SerializeConfig config = new SerializeConfig();
config.register(new MyModuel2());
config.register(new MyModuel());
assertSame(MiscCodec.instance, config.getObjectWriter(A.class));
}
public static
|
ModuleTest
|
java
|
hibernate__hibernate-orm
|
hibernate-spatial/src/test/java/org/hibernate/spatial/dialect/hana/TestHANASpatialFunctions.java
|
{
"start": 1262,
"end": 41207
}
|
class ____ extends SpatialFunctionalTestCase {
private static final HSMessageLogger LOG = Logger.getMessageLogger(
MethodHandles.lookup(),
HSMessageLogger.class,
TestHANASpatialFunctions.class.getName()
);
protected HANAExpectationsFactory hanaExpectationsFactory;
@Override
protected HSMessageLogger getLogger() {
return LOG;
}
@Test
public void test_alphashape_on_jts(SessionFactoryScope scope) throws SQLException {
alphashape( JTS, scope );
}
@Test
public void test_alphashape_on_geolatte(SessionFactoryScope scope) throws SQLException {
alphashape( GEOLATTE, scope );
}
public void alphashape(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getAlphaShape( 1 );
String hql = format(
Locale.ENGLISH,
"SELECT id, alphashape(geom, 1) FROM %s where geometrytype(geom) in ('ST_Point', 'ST_MultiPoint')",
entityName( pckg )
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_area_on_jts(SessionFactoryScope scope) throws SQLException {
area( JTS, scope );
}
@Test
public void test_area_on_geolatte(SessionFactoryScope scope) throws SQLException {
area( GEOLATTE, scope );
}
public void area(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getArea();
String hql = format(
"SELECT id, area(geom) FROM %s where geometrytype(geom) in ('ST_Polygon', 'ST_MultiPolygon')",
entityName( pckg )
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_asewkb_on_jts(SessionFactoryScope scope) throws SQLException {
asewkb( JTS, scope );
}
@Test
public void test_asewkb_on_geolatte(SessionFactoryScope scope) throws SQLException {
asewkb( GEOLATTE, scope );
}
public void asewkb(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, byte[]> dbexpected = hanaExpectationsFactory.getAsEWKB();
String hql = format( "SELECT id, asewkb(geom) FROM %s", entityName( pckg ) );
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_asewkt_on_jts(SessionFactoryScope scope) throws SQLException {
asewkt( JTS, scope );
}
@Test
public void test_asewkt_on_geolatte(SessionFactoryScope scope) throws SQLException {
asewkt( GEOLATTE, scope );
}
public void asewkt(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, String> dbexpected = hanaExpectationsFactory.getAsEWKT();
String hql = format( "SELECT id, asewkt(geom) FROM %s", entityName( pckg ) );
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_asgeojson_on_jts(SessionFactoryScope scope) throws SQLException {
asgeojson( JTS, scope );
}
@Test
public void test_asgeojson_on_geolatte(SessionFactoryScope scope) throws SQLException {
asgeojson( GEOLATTE, scope );
}
public void asgeojson(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, String> dbexpected = hanaExpectationsFactory.getAsGeoJSON();
String hql = format( "SELECT id, asgeojson(geom) FROM %s", entityName( pckg ) );
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_assvg_on_jts(SessionFactoryScope scope) throws SQLException {
assvg( JTS, scope );
}
@Test
public void test_assvg_on_geolatte(SessionFactoryScope scope) throws SQLException {
assvg( GEOLATTE, scope );
}
public void assvg(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, String> dbexpected = hanaExpectationsFactory.getAsSVG();
String hql = format( "SELECT id, assvg(geom) FROM %s", entityName( pckg ) );
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_assvgaggr_on_jts(SessionFactoryScope scope) throws SQLException {
assvgaggr( JTS, scope );
}
@Test
public void test_assvgaggr_on_geolatte(SessionFactoryScope scope) throws SQLException {
assvgaggr( GEOLATTE, scope );
}
public void assvgaggr(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, String> dbexpected = hanaExpectationsFactory.getAsSVGAggr();
String hql = format(
"SELECT cast(count(g) as int), assvgaggr(geom) FROM %s g",
entityName( pckg )
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_aswkb_on_jts(SessionFactoryScope scope) throws SQLException {
aswkb( JTS, scope );
}
@Test
public void test_aswkb_on_geolatte(SessionFactoryScope scope) throws SQLException {
aswkb( GEOLATTE, scope );
}
public void aswkb(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, byte[]> dbexpected = hanaExpectationsFactory.getAsWKB();
String hql = format( "SELECT id, aswkb(geom) FROM %s", entityName( pckg ) );
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_aswkt_on_jts(SessionFactoryScope scope) throws SQLException {
aswkt( JTS, scope );
}
@Test
public void test_aswkt_on_geolatte(SessionFactoryScope scope) throws SQLException {
aswkt( GEOLATTE, scope );
}
public void aswkt(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, String> dbexpected = hanaExpectationsFactory.getAsWKT();
String hql = format( "SELECT id, aswkt(geom) FROM %s", entityName( pckg ) );
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_convexhullaggr_on_jts(SessionFactoryScope scope) throws SQLException {
convexhullaggr( JTS, scope );
}
@Test
public void test_convexhullaggr_on_geolatte(SessionFactoryScope scope) throws SQLException {
convexhullaggr( GEOLATTE, scope );
}
public void convexhullaggr(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getConvexHullAggr();
String hql = format(
"SELECT cast(count(g) as int), convexhullaggr(geom) FROM %s g",
entityName( pckg )
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_centroid_on_jts(SessionFactoryScope scope) throws SQLException {
centroid( JTS, scope );
}
@Test
public void test_centroid_on_geolatte(SessionFactoryScope scope) throws SQLException {
centroid( GEOLATTE, scope );
}
public void centroid(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getCentroid();
String hql = format(
"SELECT id, centroid(geom) FROM %s g where geometrytype(geom) = 'ST_Polygon'",
entityName( pckg )
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_coorddim_on_jts(SessionFactoryScope scope) throws SQLException {
coorddim( JTS, scope );
}
@Test
public void test_coorddim_on_geolatte(SessionFactoryScope scope) throws SQLException {
coorddim( GEOLATTE, scope );
}
public void coorddim(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Integer> dbexpected = hanaExpectationsFactory.getCoordDim();
String hql = format( "SELECT id, coorddim(geom) FROM %s", entityName( pckg ) );
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_coveredby_on_jts(SessionFactoryScope scope) throws SQLException {
coveredby( JTS, scope );
}
@Test
public void test_coveredby_on_geolatte(SessionFactoryScope scope) throws SQLException {
coveredby( GEOLATTE, scope );
}
public void coveredby(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getCoveredBy( expectationsFactory.getTestPolygon() );
String hql = format(
"SELECT id, coveredby(geom, :filter) FROM %s where coveredby(geom, :filter) = true and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
Map<String, Object> params = createQueryParams( "filter", expectationsFactory.getTestPolygon() );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg, scope );
}
@Test
public void test_covers_on_jts(SessionFactoryScope scope) throws SQLException {
covers( JTS, scope );
}
@Test
public void test_covers_on_geolatte(SessionFactoryScope scope) throws SQLException {
covers( GEOLATTE, scope );
}
public void covers(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getCovers( expectationsFactory.getTestPolygon() );
String hql = format(
"SELECT id, covers(geom, :filter) FROM %s where covers(geom, :filter) = true and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
Map<String, Object> params = createQueryParams( "filter", expectationsFactory.getTestPolygon() );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg, scope );
}
@Test
public void test_endpoint_on_jts(SessionFactoryScope scope) throws SQLException {
endpoint( JTS, scope );
}
@Test
public void test_endpoint_on_geolatte(SessionFactoryScope scope) throws SQLException {
endpoint( GEOLATTE, scope );
}
public void endpoint(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getEndPoint();
String hql = format(
"SELECT id, endpoint(geom) FROM %s g where geometrytype(geom) = 'ST_LineString'",
entityName( pckg )
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_envelopeaggr_on_jts(SessionFactoryScope scope) throws SQLException {
envelopeaggr( JTS, scope );
}
@Test
public void test_envelopeaggr_on_geolatte(SessionFactoryScope scope) throws SQLException {
envelopeaggr( GEOLATTE, scope );
}
public void envelopeaggr(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getEnvelopeAggr();
String hql = format(
"SELECT cast(count(g) as int), envelopeaggr(geom) FROM %s g",
entityName( pckg )
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_exteriorring_on_jts(SessionFactoryScope scope) throws SQLException {
exteriorring( JTS, scope );
}
@Test
public void test_exteriorring_on_geolatte(SessionFactoryScope scope) throws SQLException {
exteriorring( GEOLATTE, scope );
}
public void exteriorring(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getExteriorRing();
String hql = format(
"SELECT id, exteriorring(geom) FROM %s g where geometrytype(geom) = 'ST_Polygon'",
entityName( pckg )
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_geomfromewkb_on_jts(SessionFactoryScope scope) throws SQLException {
geomfromewkb( JTS, scope );
}
@Test
public void test_geomfromewkb_on_geolatte(SessionFactoryScope scope) throws SQLException {
geomfromewkb( GEOLATTE, scope );
}
public void geomfromewkb(String pckg, SessionFactoryScope scope) throws SQLException {
WKBWriter writer = new WKBWriter( 2, true );
byte[] ewkb = writer.write( expectationsFactory.getTestPolygon() );
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getGeomFromEWKB( ewkb );
String hql = format(
"SELECT 1, cast(geomfromewkb(:param) as %s) FROM %s g",
getGeometryTypeFromPackage( pckg ),
entityName( pckg )
);
Map<String, Object> params = createQueryParams( "param", ewkb );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg, scope );
}
@Test
public void test_geomfromewkt_on_jts(SessionFactoryScope scope) throws SQLException {
geomfromewkt( JTS, scope );
}
@Test
public void test_geomfromewkt_on_geolatte(SessionFactoryScope scope) throws SQLException {
geomfromewkt( GEOLATTE, scope );
}
public void geomfromewkt(String pckg, SessionFactoryScope scope) throws SQLException {
WKTWriter writer = new WKTWriter();
String ewkt = "SRID=" + expectationsFactory.getTestSrid() + ";" + writer.write(
expectationsFactory.getTestPolygon() );
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getGeomFromEWKT( ewkt );
String hql = format(
"SELECT 1, cast(geomfromewkt(:param) as %s) FROM %s g",
getGeometryTypeFromPackage( pckg ),
entityName( pckg )
);
Map<String, Object> params = createQueryParams( "param", ewkt );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg, scope );
}
@Test
public void test_geomfromtext_on_jts(SessionFactoryScope scope) throws SQLException {
geomfromtext( JTS, scope );
}
@Test
public void test_geomfromtext_on_geolatte(SessionFactoryScope scope) throws SQLException {
geomfromtext( GEOLATTE, scope );
}
public void geomfromtext(String pckg, SessionFactoryScope scope) throws SQLException {
String text = expectationsFactory.getTestPolygon().toText();
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getGeomFromText( text );
String hql = format(
"SELECT 1, cast(geomfromtext(:param) as %s) FROM %s g",
getGeometryTypeFromPackage( pckg ),
entityName( pckg )
);
Map<String, Object> params = createQueryParams( "param", text );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg, scope );
}
@Test
public void test_geomfromwkb_on_jts(SessionFactoryScope scope) throws SQLException {
geomfromwkb( JTS, scope );
}
@Test
public void test_geomfromwkb_on_geolatte(SessionFactoryScope scope) throws SQLException {
geomfromwkb( GEOLATTE, scope );
}
public void geomfromwkb(String pckg, SessionFactoryScope scope) throws SQLException {
WKBWriter writer = new WKBWriter( 2, false );
byte[] wkb = writer.write( expectationsFactory.getTestPolygon() );
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getGeomFromWKB( wkb );
String hql = format(
"SELECT 1, cast(geomfromwkb(:param) as %s) FROM %s g",
getGeometryTypeFromPackage( pckg ),
entityName( pckg )
);
Map<String, Object> params = createQueryParams( "param", wkb );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg, scope );
}
@Test
public void test_geomfromwkt_on_jts(SessionFactoryScope scope) throws SQLException {
geomfromwkt( JTS, scope );
}
@Test
public void test_geomfromwkt_on_geolatte(SessionFactoryScope scope) throws SQLException {
geomfromwkt( GEOLATTE, scope );
}
public void geomfromwkt(String pckg, SessionFactoryScope scope) throws SQLException {
WKTWriter writer = new WKTWriter();
String wkt = writer.write( expectationsFactory.getTestPolygon() );
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getGeomFromWKT( wkt );
String hql = format(
"SELECT 1, cast(geomfromwkt(:param) as %s) FROM %s g",
getGeometryTypeFromPackage( pckg ),
entityName( pckg )
);
Map<String, Object> params = createQueryParams( "param", wkt );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg, scope );
}
@Test
public void test_geometryn_on_jts(SessionFactoryScope scope) throws SQLException {
geometryn( JTS, scope );
}
@Test
public void test_geometryn_on_geolatte(SessionFactoryScope scope) throws SQLException {
geometryn( GEOLATTE, scope );
}
public void geometryn(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getGeometryN( 1 );
String hql = format(
"SELECT id, cast(geometryn(geom, :n) as %s) FROM %s g where geometrytype(geom) = 'ST_GeometryCollection'",
getGeometryTypeFromPackage( pckg ),
entityName( pckg )
);
Map<String, Object> params = createQueryParams( "n", 1 );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg, scope );
}
@Test
public void test_interiorringn_on_jts(SessionFactoryScope scope) throws SQLException {
interiorringn( JTS, scope );
}
@Test
public void test_interiorringn_on_geolatte(SessionFactoryScope scope) throws SQLException {
interiorringn( GEOLATTE, scope );
}
public void interiorringn(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getInteriorRingN( 1 );
String hql = format(
"SELECT id, cast(interiorringn(geom, :n) as %s) FROM %s g where geometrytype(geom) = 'ST_Polygon'",
getGeometryTypeFromPackage( pckg ),
entityName( pckg )
);
Map<String, Object> params = createQueryParams( "n", 1 );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg, scope );
}
@Test
public void test_intersectionaggr_on_jts(SessionFactoryScope scope) throws SQLException {
intersectionaggr( JTS, scope );
}
@Test
public void test_intersectionaggr_on_geolatte(SessionFactoryScope scope) throws SQLException {
intersectionaggr( GEOLATTE, scope );
}
public void intersectionaggr(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getIntersectionAggr();
String hql = format(
"SELECT cast(count(g) as int), intersectionaggr(geom) FROM %s g",
entityName( pckg )
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_intersectsrect_on_jts(SessionFactoryScope scope) throws SQLException {
intersectsrect( JTS, scope );
}
@Test
public void test_intersectsrect_on_geolatte(SessionFactoryScope scope) throws SQLException {
intersectsrect( GEOLATTE, scope );
}
public void intersectsrect(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIntersectsRect(
(Point) expectationsFactory.getTestPoint().reverse(),
expectationsFactory.getTestPoint()
);
String hql = format(
"SELECT id, intersectsrect(geom, :pmin, :pmax) FROM %s where intersectsrect(geom, :pmin, :pmax) = true and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
Map<String, Object> params = createQueryParams( "pmin", expectationsFactory.getTestPoint().reverse() );
params.put( "pmax", expectationsFactory.getTestPoint() );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg, scope );
}
@Test
public void test_is3d_on_jts(SessionFactoryScope scope) throws SQLException {
is3d( JTS, scope );
}
@Test
public void test_is3d_on_geolatte(SessionFactoryScope scope) throws SQLException {
is3d( GEOLATTE, scope );
}
public void is3d(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIs3D();
String hql = format(
"SELECT id, is3d(geom) FROM %s where is3d(geom) = true and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_isclosed_on_jts(SessionFactoryScope scope) throws SQLException {
isclosed( JTS, scope );
}
@Test
public void test_isclosed_on_geolatte(SessionFactoryScope scope) throws SQLException {
isclosed( GEOLATTE, scope );
}
public void isclosed(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIsClosed();
String hql = format(
"SELECT id, isclosed(geom) FROM %s where geometrytype(geom) in ('ST_LineString', 'ST_MultiLineString') and isclosed(geom) = true and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_ismeasured_on_jts(SessionFactoryScope scope) throws SQLException {
ismeasured( JTS, scope );
}
@Test
public void test_ismeasured_on_geolatte(SessionFactoryScope scope) throws SQLException {
ismeasured( GEOLATTE, scope );
}
public void ismeasured(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIsMeasured();
String hql = format(
"SELECT id, ismeasured(geom) FROM %s where ismeasured(geom) = true and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_isring_on_jts(SessionFactoryScope scope) throws SQLException {
isring( JTS, scope );
}
@Test
public void test_isring_on_geolatte(SessionFactoryScope scope) throws SQLException {
isring( GEOLATTE, scope );
}
public void isring(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIsRing();
String hql = format(
"SELECT id, isring(geom) FROM %s where geometrytype(geom) in ('ST_LineString') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_isvalid_on_jts(SessionFactoryScope scope) throws SQLException {
isvalid( JTS, scope );
}
@Test
public void test_isvalid_on_geolatte(SessionFactoryScope scope) throws SQLException {
isvalid( GEOLATTE, scope );
}
public void isvalid(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIsValid();
String hql = format(
"SELECT id, isvalid(geom) FROM %s where isvalid(geom) = true and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_length_on_jts(SessionFactoryScope scope) throws SQLException {
length( JTS, scope );
}
@Test
public void test_length_on_geolatte(SessionFactoryScope scope) throws SQLException {
length( GEOLATTE, scope );
}
public void length(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getLength();
String hql = format(
"SELECT id, length(geom) FROM %s where geometrytype(geom) in ('ST_LineString', 'ST_MultiLineString') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_m_on_jts(SessionFactoryScope scope) throws SQLException {
m( JTS, scope );
}
@Test
public void test_m_on_geolatte(SessionFactoryScope scope) throws SQLException {
m( GEOLATTE, scope );
}
public void m(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getM();
String hql = format(
"SELECT id, m(geom) FROM %s where geometrytype(geom) in ('ST_Point') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_mmax_on_jts(SessionFactoryScope scope) throws SQLException {
mmax( JTS, scope );
}
@Test
public void test_mmax_on_geolatte(SessionFactoryScope scope) throws SQLException {
mmax( GEOLATTE, scope );
}
public void mmax(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getMMax();
String hql = format(
"SELECT id, mmax(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_mmin_on_jts(SessionFactoryScope scope) throws SQLException {
mmin( JTS, scope );
}
@Test
public void test_mmin_on_geolatte(SessionFactoryScope scope) throws SQLException {
mmin( GEOLATTE, scope );
}
public void mmin(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getMMin();
String hql = format(
"SELECT id, mmin(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_numgeometries_on_jts(SessionFactoryScope scope) throws SQLException {
numgeometries( JTS, scope );
}
@Test
public void test_numgeometries_on_geolatte(SessionFactoryScope scope) throws SQLException {
numgeometries( GEOLATTE, scope );
}
public void numgeometries(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Integer> dbexpected = hanaExpectationsFactory.getNumGeometries();
String hql = format(
"SELECT id, numgeometries(geom) FROM %s where geometrytype(geom) in ('ST_GeometryCollection') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_numinteriorring_on_jts(SessionFactoryScope scope) throws SQLException {
numinteriorring( JTS, scope );
}
@Test
public void test_numnuminteriorring_on_geolatte(SessionFactoryScope scope) throws SQLException {
numinteriorring( GEOLATTE, scope );
}
public void numinteriorring(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Integer> dbexpected = hanaExpectationsFactory.getNumInteriorRing();
String hql = format(
"SELECT id, numinteriorring(geom) FROM %s where geometrytype(geom) in ('ST_Polygon') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_numinteriorrings_on_jts(SessionFactoryScope scope) throws SQLException {
numinteriorrings( JTS, scope );
}
@Test
public void test_numnuminteriorrings_on_geolatte(SessionFactoryScope scope) throws SQLException {
numinteriorrings( GEOLATTE, scope );
}
public void numinteriorrings(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Integer> dbexpected = hanaExpectationsFactory.getNumInteriorRings();
String hql = format(
"SELECT id, numinteriorrings(geom) FROM %s where geometrytype(geom) in ('ST_Polygon') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_numpoints_on_jts(SessionFactoryScope scope) throws SQLException {
numpoints( JTS, scope );
}
@Test
public void test_numpoints_on_geolatte(SessionFactoryScope scope) throws SQLException {
numpoints( GEOLATTE, scope );
}
public void numpoints(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Integer> dbexpected = hanaExpectationsFactory.getNumPoints();
String hql = format(
"SELECT id, numpoints(geom) FROM %s where geometrytype(geom) in ('ST_LineString') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_orderingequals_on_jts(SessionFactoryScope scope) throws SQLException {
orderingequals( JTS, scope );
}
@Test
public void test_orderingequals_on_geolatte(SessionFactoryScope scope) throws SQLException {
orderingequals( GEOLATTE, scope );
}
public void orderingequals(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getOrderingEquals(
expectationsFactory.getTestPolygon() );
String hql = format(
"SELECT id, orderingequals(geom, :filter) FROM %s where orderingequals(geom, :filter) = true and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
Map<String, Object> params = createQueryParams( "filter", expectationsFactory.getTestPolygon() );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg, scope );
}
@Test
public void test_perimeter_on_jts(SessionFactoryScope scope) throws SQLException {
perimeter( JTS, scope );
}
@Test
public void test_perimeter_on_geolatte(SessionFactoryScope scope) throws SQLException {
perimeter( GEOLATTE, scope );
}
public void perimeter(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getPerimeter();
String hql = format(
"SELECT id, perimeter(geom) FROM %s where geometrytype(geom) in ('ST_Polygon', 'ST_MultiPolygon') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_pointonsurface_on_jts(SessionFactoryScope scope) throws SQLException {
pointonsurface( JTS, scope );
}
@Test
public void test_pointonsurface_on_geolatte(SessionFactoryScope scope) throws SQLException {
pointonsurface( GEOLATTE, scope );
}
public void pointonsurface(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getPointOnSurface();
String hql = format(
"SELECT id, pointonsurface(geom) FROM %s where geometrytype(geom) in ('ST_Polygon', 'ST_MultiPolygon') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_pointn_on_jts(SessionFactoryScope scope) throws SQLException {
pointn( JTS, scope );
}
@Test
public void test_pointn_on_geolatte(SessionFactoryScope scope) throws SQLException {
pointn( GEOLATTE, scope );
}
public void pointn(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getPointN( 1 );
String hql = format(
"SELECT id, pointn(geom, :n) FROM %s where geometrytype(geom) in ('ST_LineString') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
Map<String, Object> params = createQueryParams( "n", 1 );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg, scope );
}
// ST_GEOMETRY columns are not supported
@Test
public void test_snaptogrid_on_jts(SessionFactoryScope scope) {
assertThrows( SQLException.class, () -> snaptogrid( JTS, scope ) );
}
// ST_GEOMETRY columns are not supported
@Test
public void test_snaptogrid_on_geolatte(SessionFactoryScope scope) {
assertThrows( SQLException.class, () -> snaptogrid( GEOLATTE, scope ) );
}
public void snaptogrid(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getSnapToGrid();
String hql = format(
"SELECT id, snaptogrid(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_startpoint_on_jts(SessionFactoryScope scope) throws SQLException {
startpoint( JTS, scope );
}
@Test
public void test_startpoint_on_geolatte(SessionFactoryScope scope) throws SQLException {
startpoint( GEOLATTE, scope );
}
public void startpoint(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getStartPoint();
String hql = format(
"SELECT id, startpoint(geom) FROM %s g where geometrytype(geom) = 'ST_LineString'",
entityName( pckg )
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_unionaggr_on_jts(SessionFactoryScope scope) throws SQLException {
unionaggr( JTS, scope );
}
@Test
public void test_unionaggr_on_geolatte(SessionFactoryScope scope) throws SQLException {
unionaggr( GEOLATTE, scope );
}
public void unionaggr(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getUnionAggr();
String hql = format(
"SELECT cast(count(g) as int), unionaggr(geom) FROM %s g",
entityName( pckg )
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_x_on_jts(SessionFactoryScope scope) throws SQLException {
x( JTS, scope );
}
@Test
public void test_x_on_geolatte(SessionFactoryScope scope) throws SQLException {
x( GEOLATTE, scope );
}
public void x(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getX();
String hql = format(
"SELECT id, x(geom) FROM %s where geometrytype(geom) in ('ST_Point') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_xmax_on_jts(SessionFactoryScope scope) throws SQLException {
xmax( JTS, scope );
}
@Test
public void test_xmax_on_geolatte(SessionFactoryScope scope) throws SQLException {
xmax( GEOLATTE, scope );
}
public void xmax(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getXMax();
String hql = format(
"SELECT id, xmax(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_xmin_on_jts(SessionFactoryScope scope) throws SQLException {
xmin( JTS, scope );
}
@Test
public void test_xmin_on_geolatte(SessionFactoryScope scope) throws SQLException {
xmin( GEOLATTE, scope );
}
public void xmin(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getXMin();
String hql = format(
"SELECT id, xmin(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_y_on_jts(SessionFactoryScope scope) throws SQLException {
y( JTS, scope );
}
@Test
public void test_y_on_geolatte(SessionFactoryScope scope) throws SQLException {
y( GEOLATTE, scope );
}
public void y(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getY();
String hql = format(
"SELECT id, y(geom) FROM %s where geometrytype(geom) in ('ST_Point') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_ymax_on_jts(SessionFactoryScope scope) throws SQLException {
ymax( JTS, scope );
}
@Test
public void test_ymax_on_geolatte(SessionFactoryScope scope) throws SQLException {
ymax( GEOLATTE, scope );
}
public void ymax(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getYMax();
String hql = format(
"SELECT id, ymax(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_ymin_on_jts(SessionFactoryScope scope) throws SQLException {
ymin( JTS, scope );
}
@Test
public void test_ymin_on_geolatte(SessionFactoryScope scope) throws SQLException {
ymin( GEOLATTE, scope );
}
public void ymin(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getYMin();
String hql = format(
"SELECT id, ymin(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_z_on_jts(SessionFactoryScope scope) throws SQLException {
z( JTS, scope );
}
@Test
public void test_z_on_geolatte(SessionFactoryScope scope) throws SQLException {
z( GEOLATTE, scope );
}
public void z(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getZ();
String hql = format(
"SELECT id, z(geom) FROM %s where geometrytype(geom) in ('ST_Point') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_zmax_on_jts(SessionFactoryScope scope) throws SQLException {
zmax( JTS, scope );
}
@Test
public void test_zmax_on_geolatte(SessionFactoryScope scope) throws SQLException {
zmax( GEOLATTE, scope );
}
public void zmax(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getZMax();
String hql = format(
"SELECT id, zmax(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_zmin_on_jts(SessionFactoryScope scope) throws SQLException {
zmin( JTS, scope );
}
@Test
public void test_zmin_on_geolatte(SessionFactoryScope scope) throws SQLException {
zmin( GEOLATTE, scope );
}
public void zmin(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getZMin();
String hql = format(
"SELECT id, zmin(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg, scope );
}
@Test
public void test_nestedfunction_on_jts(SessionFactoryScope scope) throws SQLException {
nestedfunction( JTS, scope );
}
@Test
public void test_nestedfunction_on_geolatte(SessionFactoryScope scope) throws SQLException {
nestedfunction( GEOLATTE, scope );
}
public void nestedfunction(String pckg, SessionFactoryScope scope) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getNestedFunctionInner(
expectationsFactory.getTestPolygon() );
String hql = format(
"SELECT id, geom FROM %s g where dwithin(geom, srid(:filter, 0), 1) = true",
entityName( pckg )
);
Map<String, Object> params = createQueryParams( "filter", expectationsFactory.getTestPolygon() );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg, scope );
dbexpected = hanaExpectationsFactory.getNestedFunctionOuter( expectationsFactory.getTestPolygon() );
hql = format(
"SELECT id, geom FROM %s g where dwithin(:filter, srid(geom, 0), 1) = true",
entityName( pckg )
);
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg, scope );
}
private String getGeometryTypeFromPackage(String pckg) {
switch ( pckg ) {
case GEOLATTE:
return org.geolatte.geom.Geometry.class.getName();
case JTS:
return Geometry.class.getName();
default:
throw new IllegalArgumentException( "Invalid package: " + pckg );
}
}
private Map<String, Object> createQueryParams(String filterParamName, Object value) {
Map<String, Object> params = new HashMap<String, Object>();
params.put( filterParamName, value );
return params;
}
public <T> void retrieveHQLResultsAndCompare(Map<Integer, T> dbexpected, String hql, String geometryType, SessionFactoryScope scope) {
retrieveHQLResultsAndCompare( dbexpected, hql, null, geometryType, scope );
}
protected <T> void retrieveHQLResultsAndCompare(
Map<Integer, T> dbexpected,
String hql,
Map<String, Object> params,
String geometryType,
SessionFactoryScope scope) {
Map<Integer, T> hsreceived = new HashMap<>();
scope.inTransaction(
session -> {
Query query = session.createQuery( hql );
setParameters( params, query );
addQueryResults( hsreceived, query );
}
);
compare( dbexpected, hsreceived, geometryType );
}
private void setParameters(Map<String, Object> params, Query query) {
if ( params == null ) {
return;
}
for ( Map.Entry<String, Object> entry : params.entrySet() ) {
query.setParameter( entry.getKey(), entry.getValue() );
}
}
}
|
TestHANASpatialFunctions
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/builditem/GeneratedResourceBuildItem.java
|
{
"start": 170,
"end": 1591
}
|
class ____ extends MultiBuildItem {
final String name;
final byte[] data;
/**
* This option is only meant to be set by extensions that also generated the resource on the file system
* and must rely on Quarkus not getting in the way of loading that resource.
* It is currently used by Kogito to get serving of static resources in Dev Mode by Vert.x
* <br>
*
* @deprecated If you want to serve static resources use
* {@link io.quarkus.vertx.http.deployment.spi.GeneratedStaticResourceBuildItem}
* instead.
*/
@Deprecated
final boolean excludeFromDevCL;
public GeneratedResourceBuildItem(String name, byte[] data) {
this.name = name;
this.data = data;
this.excludeFromDevCL = false;
}
public GeneratedResourceBuildItem(String name, byte[] data, boolean excludeFromDevCL) {
this.name = name;
this.data = data;
this.excludeFromDevCL = excludeFromDevCL;
}
public String getName() {
return name;
}
public byte[] getData() {
return data;
}
/**
* @deprecated use {@link GeneratedResourceBuildItem#getData} instead
*/
@Deprecated(forRemoval = true)
public byte[] getClassData() {
return getData();
}
public boolean isExcludeFromDevCL() {
return excludeFromDevCL;
}
}
|
GeneratedResourceBuildItem
|
java
|
apache__avro
|
lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestIDLMojo.java
|
{
"start": 1215,
"end": 5251
}
|
class ____ extends AbstractAvroMojoTest {
private File testPom = new File(getBasedir(), "src/test/resources/unit/idl/pom.xml");
private File injectingVelocityToolsTestPom = new File(getBasedir(),
"src/test/resources/unit/idl/pom-injecting-velocity-tools.xml");
private File incrementalCompilationTestPom = new File(getBasedir(),
"src/test/resources/unit/idl/pom-incremental-compilation.xml");
@Test
public void testIdlProtocolMojo() throws Exception {
// Clear output directory to ensure files are recompiled.
final File outputDir = new File(getBasedir(), "target/test-harness/idl/test/");
FileUtils.deleteDirectory(outputDir);
final IDLMojo mojo = (IDLMojo) lookupMojo("idl", testPom);
final TestLog log = new TestLog();
mojo.setLog(log);
assertNotNull(mojo);
mojo.execute();
final Set<String> generatedFiles = new HashSet<>(
asList("IdlPrivacy.java", "IdlTest.java", "IdlUser.java", "IdlUserWrapper.java"));
assertFilesExist(outputDir, generatedFiles);
final String idlUserContent = FileUtils.fileRead(new File(outputDir, "IdlUser.java"));
assertTrue(idlUserContent.contains("@org.jetbrains.annotations.Nullable\n public java.lang.String getId"));
assertTrue(idlUserContent.contains("@org.jetbrains.annotations.NotNull\n public java.time.Instant getModifiedOn"));
assertEquals(Collections.singletonList("[WARN] Line 22, char 1: Ignoring out-of-place documentation comment.\n"
+ "Did you mean to use a multiline comment ( /* ... */ ) instead?"), log.getLogEntries());
}
@Test
public void testSetCompilerVelocityAdditionalTools() throws Exception {
// Clear output directory to ensure files are recompiled.
final File outputDir = new File(getBasedir(), "target/test-harness/idl-inject/test/");
FileUtils.deleteDirectory(outputDir);
final IDLProtocolMojo mojo = (IDLProtocolMojo) lookupMojo("idl-protocol", injectingVelocityToolsTestPom);
final TestLog log = new TestLog();
mojo.setLog(log);
assertNotNull(mojo);
mojo.execute();
final Set<String> generatedFiles = new HashSet<>(
asList("IdlPrivacy.java", "IdlTest.java", "IdlUser.java", "IdlUserWrapper.java"));
assertFilesExist(outputDir, generatedFiles);
final String schemaUserContent = FileUtils.fileRead(new File(outputDir, "IdlUser.java"));
assertTrue(schemaUserContent.contains("It works!"));
// The previous test already verifies the warnings.
assertFalse(log.getLogEntries().isEmpty());
}
@Test
public void testIDLProtocolMojoSupportsIncrementalCompilation() throws Exception {
// Ensure that the IDL files have already been compiled once.
final IDLMojo mojo = (IDLMojo) lookupMojo("idl", incrementalCompilationTestPom);
final TestLog log = new TestLog();
mojo.setLog(log);
assertNotNull(mojo);
mojo.execute();
// Remove one file to ensure it is recreated and the others are not.
final Path outputDirPath = Paths.get(getBasedir(), "target/test-harness/idl-incremental/test/");
final File outputDir = outputDirPath.toFile();
final Path idlPrivacyFilePath = outputDirPath.resolve("IdlPrivacy.java");
final FileTime idpPrivacyModificationTime = Files.getLastModifiedTime(idlPrivacyFilePath);
Files.delete(idlPrivacyFilePath);
final Path idlUserFilePath = outputDirPath.resolve("IdlUser.java");
final FileTime idlUserModificationTime = Files.getLastModifiedTime(idlUserFilePath);
mojo.execute();
// Asserting contents is done in previous tests so just assert existence.
final Set<String> generatedFiles = new HashSet<>(
asList("IdlPrivacy.java", "IdlTest.java", "IdlUser.java", "IdlUserWrapper.java"));
assertFilesExist(outputDir, generatedFiles);
assertTrue(idlPrivacyFilePath.toFile().exists());
assertEquals(Files.getLastModifiedTime(idlUserFilePath), idlUserModificationTime);
assertTrue(Files.getLastModifiedTime(idlPrivacyFilePath).compareTo(idpPrivacyModificationTime) > 0);
}
}
|
TestIDLMojo
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionRequestTests.java
|
{
"start": 417,
"end": 1035
}
|
class ____ extends ESTestCase {
public void testCannotSetQueryStringTwice() {
final SamlInvalidateSessionRequest samlInvalidateSessionRequest = new SamlInvalidateSessionRequest();
samlInvalidateSessionRequest.setQueryString("query_string");
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> samlInvalidateSessionRequest.setQueryString("queryString")
);
assertThat(e.getMessage(), containsString("Must use either [query_string] or [queryString], not both at the same time"));
}
}
|
SamlInvalidateSessionRequestTests
|
java
|
elastic__elasticsearch
|
modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianNormalizationFilterFactory.java
|
{
"start": 910,
"end": 1323
}
|
class ____ extends AbstractTokenFilterFactory implements NormalizingTokenFilterFactory {
PersianNormalizationFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(name);
}
@Override
public TokenStream create(TokenStream tokenStream) {
return new PersianNormalizationFilter(tokenStream);
}
}
|
PersianNormalizationFilterFactory
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/authorization/JwkSetTests.java
|
{
"start": 5685,
"end": 6381
}
|
class ____ {
@Bean
OAuth2AuthorizationService authorizationService(JdbcOperations jdbcOperations,
RegisteredClientRepository registeredClientRepository) {
return new JdbcOAuth2AuthorizationService(jdbcOperations, registeredClientRepository);
}
@Bean
RegisteredClientRepository registeredClientRepository(JdbcOperations jdbcOperations) {
return new JdbcRegisteredClientRepository(jdbcOperations);
}
@Bean
JdbcOperations jdbcOperations() {
return new JdbcTemplate(db);
}
@Bean
JWKSource<SecurityContext> jwkSource() {
return jwkSource;
}
}
@EnableWebSecurity
@Import(OAuth2AuthorizationServerConfiguration.class)
static
|
AuthorizationServerConfiguration
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/decorators/validation/DecoratorWithProducerFieldTest.java
|
{
"start": 1249,
"end": 1524
}
|
class ____ implements Converter<Number> {
@Inject
@Delegate
Converter<Number> delegate;
@Override
public Number convert(String value) {
return null;
}
@Produces
String produce = "";
}
}
|
MyDecorator
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/sqlite/SQLite_SelectTest_1.java
|
{
"start": 1082,
"end": 2524
}
|
class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "SELECT xiaojie,event_cnt,country_code,area_code,carrier,country,province,city,type,subtype,displayname,slogan,logo,source,state,priority,is_change FROM ktv_xiaojie_20170921 WHERE day_modify > 20170921 AND priority >= 4499000 ORDER BY priority DESC LIMIT 500000";
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.SQLITE);
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
SchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(18, visitor.getColumns().size());
assertEquals(2, visitor.getConditions().size());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("ktv_xiaojie_20170921")));
assertTrue(visitor.getColumns().contains(new Column("ktv_xiaojie_20170921", "xiaojie")));
assertTrue(visitor.getColumns().contains(new Column("ktv_xiaojie_20170921", "event_cnt")));
}
}
|
SQLite_SelectTest_1
|
java
|
apache__camel
|
core/camel-management-api/src/main/java/org/apache/camel/api/management/mbean/ManagedDoTryMBean.java
|
{
"start": 964,
"end": 1198
}
|
interface ____ extends ManagedProcessorMBean, ManagedExtendedInformation {
@Override
@ManagedOperation(description = "Statistics of the doTry for each caught exception")
TabularData extendedInformation();
}
|
ManagedDoTryMBean
|
java
|
elastic__elasticsearch
|
x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTriggerEvent.java
|
{
"start": 4749,
"end": 4872
}
|
interface ____ extends TriggerEvent.Field {
ParseField SCHEDULED_TIME = new ParseField("scheduled_time");
}
}
|
Field
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/configuration/BeanMethodQualificationTests.java
|
{
"start": 12282,
"end": 12630
}
|
class ____ {
@InterestingBean
public TestBean testBean1() {
return new TestBean("interesting");
}
@Bean(defaultCandidate=false) @Qualifier("boring") @Lazy
public TestBean testBean2(@Lazy TestBean testBean1) {
TestBean tb = new TestBean("boring");
tb.setSpouse(testBean1);
return tb;
}
}
@Configuration
static
|
CustomConfig
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/expression/function/scalar/FunctionTestUtils.java
|
{
"start": 960,
"end": 1772
}
|
class ____ {
public static Literal l(Object value) {
return new Literal(EMPTY, value, DataTypes.fromJava(value));
}
public static Literal l(Object value, DataType type) {
return new Literal(EMPTY, value, type);
}
public static Literal randomStringLiteral() {
return l(ESTestCase.randomRealisticUnicodeOfLength(10), KEYWORD);
}
public static Literal randomIntLiteral() {
return l(ESTestCase.randomInt(), INTEGER);
}
public static Literal randomBooleanLiteral() {
return l(ESTestCase.randomBoolean(), BOOLEAN);
}
public static Literal randomDatetimeLiteral() {
return l(ZonedDateTime.ofInstant(Instant.ofEpochMilli(ESTestCase.randomLong()), ESTestCase.randomZone()), DATETIME);
}
public static
|
FunctionTestUtils
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/query/hhh12225/HQLTypeTest.java
|
{
"start": 737,
"end": 2870
}
|
class ____ {
@Test
public void smokeTest(SessionFactoryScope scope) {
// scope.inTransaction( (session) -> {
// final Vehicle vehicle = new Vehicle();
// final VehicleContract contract = new VehicleContract();
// vehicle.setContract( contract );
//
// session.persist( contract );
// session.persist( vehicle );
// } );
scope.inTransaction( (session) -> {
// final Vehicle vehicle = new Vehicle();
final VehicleTrackContract contract = new VehicleTrackContract();
// vehicle.setContract( contract );
session.persist( contract );
// session.persist( vehicle );
} );
}
@Test
public void test(SessionFactoryScope scope) {
VehicleContract contract = scope.fromTransaction( session -> {
VehicleContract firstCotract = null;
for ( long i = 0; i < 10; i++ ) {
VehicleContract vehicleContract = new VehicleContract();
Vehicle vehicle1 = new Vehicle();
vehicle1.setContract( vehicleContract );
VehicleTrackContract vehicleTrackContract = new VehicleTrackContract();
Vehicle vehicle2 = new Vehicle();
vehicle2.setContract( vehicleTrackContract );
session.persist( vehicle1 );
session.persist( vehicle2 );
session.persist( vehicleContract );
session.persist( vehicleTrackContract );
if ( i == 0 ) {
firstCotract = vehicleContract;
}
}
return firstCotract;
} );
scope.inTransaction( session -> {
List workingResults = session.createQuery(
"select rootAlias.id from Contract as rootAlias where rootAlias.id = :id" )
.setParameter( "id", contract.getId() )
.getResultList();
assertFalse( workingResults.isEmpty() );
Long workingId = (Long) workingResults.get( 0 );
assertEquals( contract.getId(), workingId );
List failingResults = session.createQuery(
"select rootAlias.id, type(rootAlias) from Contract as rootAlias where rootAlias.id = :id" )
.setParameter( "id", contract.getId() )
.getResultList();
assertFalse( failingResults.isEmpty() );
Long failingId = (Long) ( (Object[]) failingResults.get( 0 ) )[0];
assertEquals( contract.getId(), failingId );
} );
}
}
|
HQLTypeTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/LongLiteralLowerCaseSuffixTest.java
|
{
"start": 4632,
"end": 6149
}
|
class ____ {
public void positiveUpperCase() {
long value = 123432L;
}
public void zeroUpperCase() {
long value = 0L;
}
public void negativeUpperCase() {
long value = -3L;
}
public void notLong() {
String value = "0l";
}
public void variableEndingInEllIsNotALongLiteral() {
long ell = 0L;
long value = ell;
}
public void positiveNoSuffix() {
long value = 3;
}
public void negativeNoSuffix() {
long value = -3;
}
public void positiveHexUpperCase() {
long value = 0x80L;
}
public void zeroHexUpperCase() {
long value = 0x0L;
}
public void negativeHexUpperCase() {
long value = -0x80L;
}
}\
""")
.doTest();
}
@Test
public void disableable() {
compilationHelper
.setArgs(ImmutableList.of("-Xep:LongLiteralLowerCaseSuffix:OFF"))
.expectNoDiagnostics()
.addSourceLines(
"LongLiteralLowerCaseSuffixPositiveCase1.java",
"""
package com.google.errorprone.bugpatterns.testdata;
/** Positive cases for {@link LongLiteralLowerCaseSuffix}. */
public
|
LongLiteralLowerCaseSuffixNegativeCases
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/dates/Dates_assertHasTime_Test.java
|
{
"start": 1427,
"end": 2236
}
|
class ____ extends DatesBaseTest {
@Override
protected void initActualDate() {
actual = new Date(42);
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> dates.assertHasTime(someInfo(), null, 1))
.withMessage(actualIsNull());
}
@Test
void should_pass_if_actual_has_same_time() {
dates.assertHasTime(someInfo(), actual, 42L);
}
@Test
void should_fail_if_actual_has_not_same_time() {
AssertionInfo info = someInfo();
Throwable error = catchThrowable(() -> dates.assertHasTime(someInfo(), actual, 24L));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldHaveTime(actual, 24L));
}
}
|
Dates_assertHasTime_Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/onetomany/Asset.java
|
{
"start": 272,
"end": 1012
}
|
class ____ implements Serializable {
@Id
@Column(name = "id_asset")
private final Integer idAsset;
@Id
@Column(name = "id_test")
private final Integer test;
@ManyToOne(cascade = { CascadeType.ALL })
@JoinColumn(nullable = false)
private Employee employee;
public Asset() {
this.idAsset = 0;
this.test = 1;
}
/**
* @param idAsset
*/
public Asset(Integer idAsset) {
this.idAsset = idAsset;
this.test = 1;
}
/**
* @return the id
*/
public Integer getIdAsset() {
return idAsset;
}
/**
* @return the employee
*/
public Employee getEmployee() {
return employee;
}
/**
* @param employee the employee to set
*/
public void setEmployee(Employee employee) {
this.employee = employee;
}
}
|
Asset
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ComponentProcessorTest.java
|
{
"start": 45551,
"end": 45923
}
|
class ____ {",
" @Inject SomeInjectableType(GeneratedInjectType generatedInjectType) {}",
"}");
Source componentFile =
CompilerTests.javaSource(
"test.SimpleComponent",
"package test;",
"",
"import dagger.Component;",
"",
"@Component",
"
|
SomeInjectableType
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/joincolumn/embedded/StringToCharArrayInEmbeddedMultipleJoinColumnTest.java
|
{
"start": 4117,
"end": 5034
}
|
class ____ implements Serializable {
@Id
private Long id;
@Column(name = "char_array_col", nullable = false)
private char[] charArrayProp;
@Column(name = "int_col", nullable = false)
private int intProp;
@OneToMany(mappedBy = "vehicle")
private List<VehicleInvoice> invoices;
public Vehicle() {
this.invoices = new ArrayList<>();
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public char[] getCharArrayProp() {
return charArrayProp;
}
public void setCharArrayProp(char[] charArrayProp) {
this.charArrayProp = charArrayProp;
}
public int getIntProp() {
return intProp;
}
public void setIntProp(int intProp) {
this.intProp = intProp;
}
public List<VehicleInvoice> getInvoices() {
return invoices;
}
public void setInvoices(List<VehicleInvoice> invoices) {
this.invoices = invoices;
}
}
}
|
Vehicle
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/query/VersionedKeyQuery.java
|
{
"start": 1266,
"end": 3431
}
|
class ____<K, V> implements Query<VersionedRecord<V>> {
private final K key;
private final Optional<Instant> asOfTimestamp;
private VersionedKeyQuery(final K key, final Optional<Instant> asOfTimestamp) {
this.key = key;
this.asOfTimestamp = asOfTimestamp;
}
/**
* Creates a query that will retrieve the record from a versioned state store identified by {@code key} if it exists
* (or {@code null} otherwise).
* <p>
* While the query by default returns the latest value of the specified {@code key}, setting
* the {@code asOfTimestamp} (by calling the {@link #asOf(Instant)} method), makes the query
* to return the value associated to the specified {@code asOfTimestamp}.
*
* @param key The key to retrieve
* @param <K> The type of the key
* @param <V> The type of the value that will be retrieved
* @throws NullPointerException if {@code key} is null
*/
public static <K, V> VersionedKeyQuery<K, V> withKey(final K key) {
Objects.requireNonNull(key, "key cannot be null.");
return new VersionedKeyQuery<>(key, Optional.empty());
}
/**
* Specifies the timestamp for the key query. The key query returns the record's version for the specified timestamp.
* (To be more precise: The key query returns the record with the greatest timestamp <= asOfTimestamp)
*
* @param asOfTimestamp The timestamp of the query.
* @throws NullPointerException if {@code asOfTimestamp} is null
*/
public VersionedKeyQuery<K, V> asOf(final Instant asOfTimestamp) {
Objects.requireNonNull(asOfTimestamp, "asOf timestamp cannot be null.");
return new VersionedKeyQuery<>(key, Optional.of(asOfTimestamp));
}
/**
* The key that was specified for this query.
* @return The specified {@code key} of the query.
*/
public K key() {
return key;
}
/**
* The timestamp of the query, if specified.
* @return The specified {@code asOfTimestamp} of the query.
*/
public Optional<Instant> asOfTimestamp() {
return asOfTimestamp;
}
}
|
VersionedKeyQuery
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallerContext.java
|
{
"start": 1605,
"end": 1697
}
|
class ____ immutable.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public final
|
is
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/issues/AmahDeadLetterTest.java
|
{
"start": 1092,
"end": 1912
}
|
class ____ extends ContextTestSupport {
@Test
public void testException() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:err");
mock.expectedMessageCount(1);
template.sendBody("seda:a", "Test message");
mock.assertIsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
errorHandler(deadLetterChannel("mock:err").redeliveryDelay(0));
from("seda:a").process(new Processor() {
public void process(Exchange exchange) throws Exception {
throw new Exception("Test exception");
}
});
}
};
}
}
|
AmahDeadLetterTest
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/StringHelper.java
|
{
"start": 1331,
"end": 6439
}
|
class ____ {
// Common joiners to avoid per join creation of joiners
public static final Joiner SSV_JOINER = Joiner.on(' ');
public static final Joiner CSV_JOINER = Joiner.on(',');
public static final Joiner JOINER = Joiner.on("");
public static final Joiner _JOINER = Joiner.on('_');
public static final Joiner PATH_JOINER = Joiner.on('/');
public static final Joiner PATH_ARG_JOINER = Joiner.on("/:");
public static final Joiner DOT_JOINER = Joiner.on('.');
public static final Splitter SSV_SPLITTER =
Splitter.on(' ').omitEmptyStrings().trimResults();
public static final Splitter _SPLITTER = Splitter.on('_').trimResults();
private static final Pattern ABS_URL_RE =Pattern.compile("^(?:\\w+:)?//");
/**
* Join on space.
* @param args to join
* @return args joined by space
*/
public static String sjoin(Object... args) {
return SSV_JOINER.join(args);
}
/**
* Join on comma.
* @param args to join
* @return args joined by comma
*/
public static String cjoin(Object... args) {
return CSV_JOINER.join(args);
}
/**
* Join on dot.
* @param args to join
* @return args joined by dot
*/
public static String djoin(Object... args) {
return DOT_JOINER.join(args);
}
/**
* Join on underscore.
* @param args to join
* @return args joined underscore
*/
public static String _join(Object... args) {
return _JOINER.join(args);
}
/**
* Join on slash.
* @param args to join
* @return args joined with slash
*/
public static String pjoin(Object... args) {
return PATH_JOINER.join(args);
}
/**
* Join on slash and colon (e.g., path args in routing spec)
* @param args to join
* @return args joined with /:
*/
public static String pajoin(Object... args) {
return PATH_ARG_JOINER.join(args);
}
/**
* Join without separator.
* @param args to join.
* @return joined args with no separator
*/
public static String join(Object... args) {
return JOINER.join(args);
}
/**
* Join with a separator
* @param sep the separator
* @param args to join
* @return args joined with a separator
*/
public static String joins(String sep, Object...args) {
return Joiner.on(sep).join(args);
}
/**
* Split on space and trim results.
* @param s the string to split
* @return an iterable of strings
*/
public static Iterable<String> split(CharSequence s) {
return SSV_SPLITTER.split(s);
}
/**
* Split on _ and trim results.
* @param s the string to split
* @return an iterable of strings
*/
public static Iterable<String> _split(CharSequence s) {
return _SPLITTER.split(s);
}
/**
* Check whether a url is absolute or note.
* @param url to check
* @return true if url starts with scheme:// or //
*/
public static boolean isAbsUrl(CharSequence url) {
return ABS_URL_RE.matcher(url).find();
}
/**
* Join url components.
* @param pathPrefix for relative urls
* @param args url components to join
* @return an url string
*/
public static String ujoin(String pathPrefix, String... args) {
StringBuilder sb = new StringBuilder();
boolean first = true;
for (String part : args) {
if (first) {
first = false;
if (part.startsWith("#") || isAbsUrl(part)) {
sb.append(part);
} else {
uappend(sb, pathPrefix);
uappend(sb, part);
}
} else {
uappend(sb, part);
}
}
return sb.toString();
}
private static void uappend(StringBuilder sb, String part) {
if((sb.length() <= 0 || sb.charAt(sb.length() - 1) != '/')
&& !part.startsWith("/")) {
sb.append('/');
}
sb.append(part);
}
public static String getResourceSecondsString(Map<String, Long> targetMap) {
List<String> strings = new ArrayList<>(targetMap.size());
//completed app report in the timeline server doesn't have usage report
Long memorySeconds = 0L;
Long vcoreSeconds = 0L;
if (targetMap.containsKey(ResourceInformation.MEMORY_MB.getName())) {
memorySeconds = targetMap.get(ResourceInformation.MEMORY_MB.getName());
}
if (targetMap.containsKey(ResourceInformation.VCORES.getName())) {
vcoreSeconds = targetMap.get(ResourceInformation.VCORES.getName());
}
strings.add(memorySeconds + " MB-seconds");
strings.add(vcoreSeconds + " vcore-seconds");
Map<String, ResourceInformation> tmp = ResourceUtils.getResourceTypes();
if (targetMap.size() > 2) {
for (Map.Entry<String, Long> entry : targetMap.entrySet()) {
if (!entry.getKey().equals(ResourceInformation.MEMORY_MB.getName())
&& !entry.getKey().equals(ResourceInformation.VCORES.getName())) {
String units = "";
if (tmp.containsKey(entry.getKey())) {
units = tmp.get(entry.getKey()).getUnits();
}
strings.add(entry.getValue() + " " + entry.getKey() + "-" + units
+ "seconds");
}
}
}
return String.join(", ", strings);
}
}
|
StringHelper
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestProfileServlet.java
|
{
"start": 1194,
"end": 3221
}
|
class ____ extends HttpServerFunctionalTest {
private static HttpServer2 server;
private static URL baseUrl;
private static final Logger LOG = LoggerFactory.getLogger(TestProfileServlet.class);
@BeforeAll
public static void setup() throws Exception {
ProfileServlet.setIsTestRun(true);
System.setProperty("async.profiler.home", UUID.randomUUID().toString());
server = createTestServer();
server.start();
baseUrl = getServerURL(server);
}
@AfterAll
public static void cleanup() throws Exception {
ProfileServlet.setIsTestRun(false);
System.clearProperty("async.profiler.home");
server.stop();
}
@Test
public void testQuery() throws Exception {
String output = readOutput(new URL(baseUrl, "/prof"));
LOG.info("/prof output: {}", output);
assertTrue(output.startsWith(
"Started [cpu] profiling. This page will automatically redirect to /prof-output-hadoop/"));
assertTrue(output.contains(
"If empty diagram and Linux 4.6+, see 'Basic Usage' section on the Async Profiler Home"
+ " Page, https://github.com/jvm-profiling-tools/async-profiler."));
HttpURLConnection conn =
(HttpURLConnection) new URL(baseUrl, "/prof").openConnection();
assertEquals("GET", conn.getHeaderField(ProfileServlet.ACCESS_CONTROL_ALLOW_METHODS));
assertEquals(HttpURLConnection.HTTP_ACCEPTED, conn.getResponseCode());
assertNotNull(conn.getHeaderField(ProfileServlet.ACCESS_CONTROL_ALLOW_ORIGIN));
assertTrue(conn.getHeaderField("Refresh").startsWith("10;/prof-output-hadoop/async-prof-pid"));
String redirectOutput = readOutput(new URL(baseUrl, "/prof-output-hadoop"));
LOG.info("/prof-output-hadoop output: {}", redirectOutput);
HttpURLConnection redirectedConn =
(HttpURLConnection) new URL(baseUrl, "/prof-output-hadoop").openConnection();
assertEquals(HttpURLConnection.HTTP_OK, redirectedConn.getResponseCode());
redirectedConn.disconnect();
conn.disconnect();
}
}
|
TestProfileServlet
|
java
|
resilience4j__resilience4j
|
resilience4j-core/src/main/java/io/github/resilience4j/core/registry/EntryAddedEvent.java
|
{
"start": 676,
"end": 989
}
|
class ____<E> extends AbstractRegistryEvent {
private E addedEntry;
EntryAddedEvent(E addedEntry) {
this.addedEntry = addedEntry;
}
@Override
public Type getEventType() {
return Type.ADDED;
}
public E getAddedEntry() {
return addedEntry;
}
}
|
EntryAddedEvent
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/manytoone/referencedcolumnname/ManyToOneReferencedColumnNameTest.java
|
{
"start": 1120,
"end": 1813
}
|
class ____ {
@AfterEach
void tearDown(SessionFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
public void testRecoverableExceptionInFkOrdering(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (session) -> {
var v = new Vendor();
var i = new Item();
var ic = new ZItemCost();
ic.setCost( new BigDecimal( 2 ) );
ic.setItem( i );
ic.setVendor( v );
var wi = new WarehouseItem();
wi.setDefaultCost( ic );
wi.setItem( i );
wi.setVendor( v );
wi.setQtyInStock( new BigDecimal( 2 ) );
session.persist( i );
session.persist( v );
session.persist( ic );
session.persist( wi );
} );
}
}
|
ManyToOneReferencedColumnNameTest
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/event/RefreshRoutesEvent.java
|
{
"start": 841,
"end": 1729
}
|
class ____ extends ApplicationEvent {
private final Map<String, Object> metadata;
/**
* Create a new ApplicationEvent.
* @param source the object on which the event initially occurred (never {@code null})
*/
public RefreshRoutesEvent(Object source) {
super(source);
metadata = Map.of();
}
/**
* Create a new ApplicationEvent that should refresh filtering by {@link #metadata}.
* @param source the object on which the event initially occurred (never {@code null})
* @param metadata map of metadata the routes should match ({code null} is considered
* a global refresh)
*/
public RefreshRoutesEvent(Object source, Map<String, Object> metadata) {
super(source);
this.metadata = metadata;
}
public boolean isScoped() {
return !CollectionUtils.isEmpty(getMetadata());
}
public Map<String, Object> getMetadata() {
return metadata;
}
}
|
RefreshRoutesEvent
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/nfs3/TestFileHandle.java
|
{
"start": 983,
"end": 1430
}
|
class ____ {
@Test
public void testConstructor() {
FileHandle handle = new FileHandle(1024);
XDR xdr = new XDR();
handle.serialize(xdr);
assertThat(handle.getFileId()).isEqualTo(1024);
// Deserialize it back
FileHandle handle2 = new FileHandle();
handle2.deserialize(xdr.asReadOnlyWrap());
assertThat(handle.getFileId())
.withFailMessage("Failed: Assert 1024 is id ").isEqualTo(1024);
}
}
|
TestFileHandle
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/requests/DescribeTransactionsResponse.java
|
{
"start": 1209,
"end": 2488
}
|
class ____ extends AbstractResponse {
private final DescribeTransactionsResponseData data;
public DescribeTransactionsResponse(DescribeTransactionsResponseData data) {
super(ApiKeys.DESCRIBE_TRANSACTIONS);
this.data = data;
}
@Override
public DescribeTransactionsResponseData data() {
return data;
}
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
for (TransactionState transactionState : data.transactionStates()) {
Errors error = Errors.forCode(transactionState.errorCode());
updateErrorCounts(errorCounts, error);
}
return errorCounts;
}
public static DescribeTransactionsResponse parse(Readable readable, short version) {
return new DescribeTransactionsResponse(new DescribeTransactionsResponseData(
readable, version));
}
@Override
public String toString() {
return data.toString();
}
@Override
public int throttleTimeMs() {
return data.throttleTimeMs();
}
@Override
public void maybeSetThrottleTimeMs(int throttleTimeMs) {
data.setThrottleTimeMs(throttleTimeMs);
}
}
|
DescribeTransactionsResponse
|
java
|
apache__camel
|
components/camel-fhir/camel-fhir-component/src/test/java/org/apache/camel/component/fhir/Hl7v2PatientToFhirPatientIT.java
|
{
"start": 1420,
"end": 4788
}
|
class ____ extends AbstractFhirTestSupport {
/*
Segment Purpose FHIR Resource
MSH Message header MessageHeader
PID Patient Identification Patient
PV1 Patient Visit Not used in this example
PV2 Patient Visit – Additional data Not used in this example
ORC Common Order Not used in this example
OBR Observation Request Observation
OBX Observation ObservationProvider
See https://fhirblog.com/2014/10/05/mapping-hl7-version-2-to-fhir-messages for more information
*/
private static final String HL7_MESSAGE = "MSH|^~\\&|Amalga HIS|BUM|New Tester|MS|20111121103141||ORU^R01|2847970-2"
+ "01111211031|P|2.4|||AL|NE|764|ASCII|||\r"
+ "PID||100005056|100005056||Freeman^Vincent^\"\"^^\"\"|\"\"|19810813000000|F||CA|Street 1^\"\"^\"\"^\"\"^34000^SGP^^"
+ "\"\"~\"\"^\"\"^\"\"^\"\"^Danling Street 5th^THA^^\"\"||326-2275^PRN^PH^^66^675~476-5059^ORN^CP^^66^359~(123)"
+ "456-7890^ORN^FX^^66^222~^NET^X.400^a@a.a~^NET^X.400^dummy@hotmail.com|(123)456-7890^WPN^PH^^66|UNK|S|BUD||BP000111899|"
+ "D99999^\"\"||CA|Bangkok|||THA||THA|\"\"|N\r"
+ "PV1||OPD ||||\"\"^\"\"^\"\"||||CNSLT|||||C|VIP|||6262618|PB1||||||||||||||||||||||||20101208134638\r"
+ "PV2|||^Unknown|\"\"^\"\"||||\"\"|\"\"|0||\"\"|||||||||||||||||||||||||||||HP1\r"
+ "ORC|NW|\"\"|BMC1102771601|\"\"|CM||^^^^^\"\"|||||||||\"\"^\"\"^^^\"\"\r"
+ "OBR|1|\"\"|BMC1102771601|\"\"^Brain (CT)||20111028124215||||||||||||||||||CTSCAN|F||^^^^^ROUTINE|||\"\"||||||\"\"|||||||||||^\"\"\r"
+ "OBX|1|FT|\"\"^Brain (CT)||++++ text of report goes here +++|||REQAT|||FINAL|||20111121103040||75929^Gosselin^Angelina";
@Test
public void testUnmarshalWithExplicitUTF16Charset() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.setExpectedMessageCount(1);
// Message with explicit encoding in MSH
String charset = "ASCII";
byte[] body = HL7_MESSAGE.getBytes(Charset.forName(charset));
template.sendBodyAndHeader("direct:input", new ByteArrayInputStream(body), Exchange.CHARSET_NAME, charset);
mock.assertIsSatisfied();
MethodOutcome result = mock.getExchanges().get(0).getIn().getBody(MethodOutcome.class);
assertNotNull(result, "resource result");
assertTrue(result.getCreated());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
Processor patientProcessor = new PatientProcessor();
from("direct:input")
.unmarshal().hl7()
.process(patientProcessor)
.to("fhir://create/resource?inBody=resource")
.to("mock:result");
}
};
}
}
|
Hl7v2PatientToFhirPatientIT
|
java
|
grpc__grpc-java
|
benchmarks/src/generated/main/grpc/io/grpc/benchmarks/proto/BenchmarkServiceGrpc.java
|
{
"start": 15809,
"end": 15909
}
|
class ____ the server implementation of the service BenchmarkService.
*/
public static abstract
|
for
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestUTF8ByteArrayUtils.java
|
{
"start": 931,
"end": 2283
}
|
class ____ extends HadoopTestBase {
@Test
public void testFindByte() {
byte[] data = "Hello, world!".getBytes();
assertEquals(-1, UTF8ByteArrayUtils.findByte(data, 0, data.length, (byte) 'a'),
"Character 'a' does not exist in string");
assertEquals(4, UTF8ByteArrayUtils.findByte(data, 0, data.length, (byte) 'o'),
"Did not find first occurrence of character 'o'");
}
@Test
public void testFindBytes() {
byte[] data = "Hello, world!".getBytes();
assertEquals(1, UTF8ByteArrayUtils.findBytes(data, 0, data.length, "ello".getBytes()),
"Did not find first occurrence of pattern 'ello'");
assertEquals(-1, UTF8ByteArrayUtils.findBytes(data, 2, data.length, "ello".getBytes()),
"Substring starting at position 2 does not contain pattern 'ello'");
}
@Test
public void testFindNthByte() {
byte[] data = "Hello, world!".getBytes();
assertEquals(3, UTF8ByteArrayUtils.findNthByte(data, 0, data.length, (byte) 'l', 2),
"Did not find 2nd occurrence of character 'l'");
assertEquals(-1, UTF8ByteArrayUtils.findNthByte(data, 0, data.length, (byte) 'l', 4),
"4th occurrence of character 'l' does not exist");
assertEquals(10, UTF8ByteArrayUtils.findNthByte(data, (byte) 'l', 3),
"Did not find 3rd occurrence of character 'l'");
}
}
|
TestUTF8ByteArrayUtils
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/client/AmazonBedrockMockRequestSender.java
|
{
"start": 1483,
"end": 4474
}
|
class ____ extends AmazonBedrockRequestSender.Factory {
private final Sender sender;
public Factory(ServiceComponents serviceComponents, ClusterService clusterService) {
super(serviceComponents, clusterService);
this.sender = new AmazonBedrockMockRequestSender();
}
public Sender createSender() {
return sender;
}
}
private Queue<Object> results = new ConcurrentLinkedQueue<>();
private Queue<List<String>> inputs = new ConcurrentLinkedQueue<>();
private Queue<InputType> inputTypes = new ConcurrentLinkedQueue<>();
private int sendCounter = 0;
public void enqueue(Object result) {
results.add(result);
}
public int sendCount() {
return sendCounter;
}
public List<String> getInputs() {
return inputs.remove();
}
public InputType getInputType() {
return inputTypes.remove();
}
@Override
public void startSynchronously() {
// do nothing
}
@Override
public void startAsynchronously(ActionListener<Void> listener) {
throw new UnsupportedOperationException("not supported");
}
@Override
public void send(
RequestManager requestCreator,
InferenceInputs inferenceInputs,
TimeValue timeout,
ActionListener<InferenceServiceResults> listener
) {
sendCounter++;
if (inferenceInputs instanceof EmbeddingsInput embeddingsInput) {
inputs.add(embeddingsInput.getTextInputs());
if (embeddingsInput.getInputType() != null) {
inputTypes.add(embeddingsInput.getInputType());
}
} else if (inferenceInputs instanceof ChatCompletionInput chatCompletionInput) {
inputs.add(chatCompletionInput.getInputs());
} else {
throw new IllegalArgumentException(
"Invalid inference inputs received in mock sender: " + inferenceInputs.getClass().getSimpleName()
);
}
if (results.isEmpty()) {
listener.onFailure(new ElasticsearchException("No results found"));
} else {
var resultObject = results.remove();
if (resultObject instanceof InferenceServiceResults inferenceResult) {
listener.onResponse(inferenceResult);
} else if (resultObject instanceof Exception e) {
listener.onFailure(e);
} else {
throw new RuntimeException("Unknown result type: " + resultObject.getClass());
}
}
}
@Override
public void sendWithoutQueuing(
Logger logger,
Request request,
ResponseHandler responseHandler,
TimeValue timeout,
ActionListener<InferenceServiceResults> listener
) {
throw new UnsupportedOperationException("not supported");
}
@Override
public void close() throws IOException {
// do nothing
}
}
|
Factory
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/jdk/UntypedDeserializationTest.java
|
{
"start": 1805,
"end": 2685
}
|
class ____ extends StdDeserializer<List<Object>>
{
public ListDeserializer() { super(List.class); }
@Override
public List<Object> deserialize(JsonParser p, DeserializationContext ctxt)
{
ArrayList<Object> list = new ArrayList<Object>();
while (p.nextValue() != JsonToken.END_ARRAY) {
list.add("X"+p.getString());
}
return list;
}
@Override
public ValueDeserializer<?> createContextual(DeserializationContext ctxt,
BeanProperty property)
{
// For now, we just need to access "untyped" deserializer; not use it.
/*ValueDeserializer<Object> ob = */
ctxt.findContextualValueDeserializer(ctxt.constructType(Object.class), property);
return this;
}
}
static
|
ListDeserializer
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/request/AmazonBedrockTitanEmbeddingsRequestEntityTests.java
|
{
"start": 575,
"end": 997
}
|
class ____ extends ESTestCase {
public void testRequestEntity_GeneratesExpectedJsonBody() throws IOException {
var entity = new AmazonBedrockTitanEmbeddingsRequestEntity("test input");
var builder = new AmazonBedrockJsonBuilder(entity);
var result = builder.getStringContent();
assertThat(result, is("{\"inputText\":\"test input\"}"));
}
}
|
AmazonBedrockTitanEmbeddingsRequestEntityTests
|
java
|
quarkusio__quarkus
|
extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcServerProcessor.java
|
{
"start": 8590,
"end": 12908
}
|
class ____ the impl base
continue;
}
// Finally, exclude some packages
boolean excluded = false;
for (String excludedPackage : excludedPackages) {
if (mutinyImplBaseName.startsWith(excludedPackage)) {
excluded = true;
break;
}
}
if (!excluded) {
log.debugf("Registering generated gRPC bean %s that will delegate to %s", generatedBean, userDefinedBean);
delegatingBeans.produce(new DelegatingGrpcBeanBuildItem(generatedBean, userDefinedBean));
Set<String> blockingMethods = gatherBlockingOrVirtualMethodNames(userDefinedBean, index.getIndex(), false);
Set<String> virtualMethods = gatherBlockingOrVirtualMethodNames(userDefinedBean, index.getIndex(), true);
generatedBeans.put(generatedBean.name(), blockingMethods);
if (!virtualMethods.isEmpty()) {
virtuals.put(generatedBean.name(), virtualMethods);
}
}
}
if (!generatedBeans.isEmpty() || !virtuals.isEmpty()) {
// For every suitable bean we must:
// (a) add @Singleton and @GrpcService
// (b) register a BindableServiceBuildItem, incl. all blocking methods (derived from the user-defined impl)
Set<DotName> names = new HashSet<>(generatedBeans.keySet());
names.addAll(virtuals.keySet());
for (DotName name : names) {
BindableServiceBuildItem bindableService = new BindableServiceBuildItem(name);
var blocking = generatedBeans.get(name);
var rovt = virtuals.get(name);
if (blocking != null) {
for (String blockingMethod : blocking) {
bindableService.registerBlockingMethod(blockingMethod);
}
}
if (rovt != null) {
for (String virtualMethod : rovt) {
bindableService.registerVirtualMethod(virtualMethod);
}
}
bindables.produce(bindableService);
}
transformers.produce(new AnnotationsTransformerBuildItem(new AnnotationsTransformer() {
@Override
public boolean appliesTo(Kind kind) {
return kind == Kind.CLASS;
}
@Override
public void transform(TransformationContext context) {
if (generatedBeans.containsKey(context.getTarget().asClass().name())) {
context.transform()
.add(BuiltinScope.SINGLETON.getName())
.add(GrpcDotNames.GRPC_SERVICE)
.done();
}
}
}));
}
}
@BuildStep
void discoverBindableServices(BuildProducer<BindableServiceBuildItem> bindables,
CombinedIndexBuildItem combinedIndexBuildItem) {
IndexView index = combinedIndexBuildItem.getIndex();
Collection<ClassInfo> bindableServices = index.getAllKnownImplementors(GrpcDotNames.BINDABLE_SERVICE);
for (ClassInfo service : bindableServices) {
if (service.interfaceNames().contains(GrpcDotNames.MUTINY_BEAN)) {
// Ignore the generated beans
continue;
}
if (Modifier.isAbstract(service.flags())) {
continue;
}
BindableServiceBuildItem item = new BindableServiceBuildItem(service.name());
Set<String> blockingMethods = gatherBlockingOrVirtualMethodNames(service, index, false);
Set<String> virtualMethods = gatherBlockingOrVirtualMethodNames(service, index, true);
for (String method : blockingMethods) {
item.registerBlockingMethod(method);
}
for (String method : virtualMethods) {
item.registerVirtualMethod(method);
}
bindables.produce(item);
}
}
/**
* Generate list of {@link ClassInfo} with {@code service} as the first element and the
|
extends
|
java
|
apache__camel
|
components/camel-quartz/src/test/java/org/apache/camel/component/quartz/QuartzCronRouteTest.java
|
{
"start": 1282,
"end": 2443
}
|
class ____ extends BaseQuartzTest {
@Test
public void testQuartzCronRoute() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(3);
MockEndpoint.assertIsSatisfied(context);
Trigger trigger = mock.getReceivedExchanges().get(0).getIn().getHeader("trigger", Trigger.class);
assertIsInstanceOf(CronTrigger.class, trigger);
JobDetail detail = mock.getReceivedExchanges().get(0).getIn().getHeader("jobDetail", JobDetail.class);
assertEquals(CamelJob.class, detail.getJobClass());
assertEquals("cron", detail.getJobDataMap().get(QuartzConstants.QUARTZ_TRIGGER_TYPE));
assertEquals("0/1 * * * * ?", detail.getJobDataMap().get(QuartzConstants.QUARTZ_TRIGGER_CRON_EXPRESSION));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// triggers every 1th second at precise 00,01,02,03..59
from("quartz://myGroup/myTimerName?cron=0/1 * * * * ?").to("mock:result");
}
};
}
}
|
QuartzCronRouteTest
|
java
|
apache__dubbo
|
dubbo-remoting/dubbo-remoting-http3/src/main/java/org/apache/dubbo/remoting/http3/netty4/NettyHttp3FrameCodec.java
|
{
"start": 2369,
"end": 7090
}
|
class ____ extends Http3RequestStreamInboundHandler implements ChannelOutboundHandler {
public static final NettyHttp3FrameCodec INSTANCE = new NettyHttp3FrameCodec();
@Override
protected void channelRead(ChannelHandlerContext ctx, Http3HeadersFrame frame) {
Http3Headers headers = frame.headers();
if (headers.contains(TRI_PING)) {
pingReceived(ctx);
return;
}
ctx.fireChannelRead(new Http2MetadataFrame(getStreamId(ctx), new DefaultHttpHeaders(headers), false));
}
private void pingReceived(ChannelHandlerContext ctx) {
Http3Headers pongHeader = new DefaultHttp3Headers(false);
pongHeader.set(TRI_PING, "0");
pongHeader.set(PseudoHeaderName.STATUS.value(), HttpStatus.OK.getStatusString());
ChannelFuture future = ctx.write(new DefaultHttp3HeadersFrame(pongHeader), ctx.newPromise());
if (future.isDone()) {
ctx.close();
} else {
future.addListener((ChannelFutureListener) f -> ctx.close());
}
}
@Override
protected void channelRead(ChannelHandlerContext ctx, Http3DataFrame frame) {
ctx.fireChannelRead(
new Http2InputMessageFrame(getStreamId(ctx), new ByteBufInputStream(frame.content(), true), false));
}
private static long getStreamId(ChannelHandlerContext ctx) {
return ((QuicStreamChannel) ctx.channel()).streamId();
}
@Override
protected void channelInputClosed(ChannelHandlerContext ctx) {
ctx.fireChannelRead(new Http2InputMessageFrame(getStreamId(ctx), StreamUtils.EMPTY, true));
}
@Override
@SuppressWarnings("unchecked")
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
if (msg instanceof Http2Header) {
Http2Header headers = (Http2Header) msg;
if (headers.isEndStream()) {
ChannelFuture future = ctx.write(
new DefaultHttp3HeadersFrame(((NettyHttpHeaders<Http3Headers>) headers.headers()).getHeaders()),
ctx.newPromise());
if (future.isDone()) {
ctx.close(promise);
} else {
future.addListener((ChannelFutureListener) f -> ctx.close(promise));
}
return;
}
ctx.write(
new DefaultHttp3HeadersFrame(((NettyHttpHeaders<Http3Headers>) headers.headers()).getHeaders()),
promise);
} else if (msg instanceof Http2OutputMessage) {
Http2OutputMessage message = (Http2OutputMessage) msg;
OutputStream body = message.getBody();
assert body instanceof ByteBufOutputStream || body == null;
if (message.isEndStream()) {
if (body == null) {
ctx.close(promise);
return;
}
ChannelFuture future =
ctx.write(new DefaultHttp3DataFrame(((ByteBufOutputStream) body).buffer()), ctx.newPromise());
if (future.isDone()) {
ctx.close(promise);
} else {
future.addListener((ChannelFutureListener) f -> ctx.close(promise));
}
return;
}
if (body == null) {
promise.trySuccess();
return;
}
ctx.write(new DefaultHttp3DataFrame(((ByteBufOutputStream) body).buffer()), promise);
} else {
ctx.write(msg, promise);
}
}
@Override
public void bind(ChannelHandlerContext ctx, SocketAddress localAddress, ChannelPromise promise) throws Exception {
ctx.bind(localAddress, promise);
}
@Override
public void connect(
ChannelHandlerContext ctx,
SocketAddress remoteAddress,
SocketAddress localAddress,
ChannelPromise promise) {
ctx.connect(remoteAddress, localAddress, promise);
}
@Override
public void disconnect(ChannelHandlerContext ctx, ChannelPromise promise) {
ctx.disconnect(promise);
}
@Override
public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception {
ctx.close(promise);
}
@Override
public void deregister(ChannelHandlerContext ctx, ChannelPromise promise) {
ctx.deregister(promise);
}
@Override
public void read(ChannelHandlerContext ctx) throws Exception {
ctx.read();
}
@Override
public void flush(ChannelHandlerContext ctx) {
ctx.flush();
}
}
|
NettyHttp3FrameCodec
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/boot/conditional1/XmlReferenceBeanConditionalTest.java
|
{
"start": 1883,
"end": 2760
}
|
class ____ {
@BeforeAll
public static void beforeAll() {
DubboBootstrap.reset();
}
@AfterAll
public static void afterAll() {
DubboBootstrap.reset();
}
@Autowired
private HelloService helloService;
@Autowired
private ApplicationContext applicationContext;
@Test
void testConsumer() {
Map<String, HelloService> helloServiceMap = applicationContext.getBeansOfType(HelloService.class);
Assertions.assertEquals(1, helloServiceMap.size());
Assertions.assertNotNull(helloServiceMap.get("helloService"));
Assertions.assertNull(helloServiceMap.get("myHelloService"));
}
@Order(Integer.MAX_VALUE - 2)
@Configuration
@ImportResource("classpath:/org/apache/dubbo/config/spring/boot/conditional1/consumer/dubbo-consumer.xml")
public static
|
XmlReferenceBeanConditionalTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
|
{
"start": 3167,
"end": 4123
}
|
class ____ extends FsVolumeImpl {
/**
* Get a suffix of the full path, excluding the given prefix.
*
* @param prefix a prefix of the path.
* @param fullPath the full path whose suffix is needed.
* @return the suffix of the path, which when resolved against {@code prefix}
* gets back the {@code fullPath}.
*/
@VisibleForTesting
protected static String getSuffix(final Path prefix, final Path fullPath) {
String prefixStr = prefix.toString();
String pathStr = fullPath.toString();
if (!pathStr.startsWith(prefixStr)) {
LOG.debug("Path {} is not a prefix of the path {}", prefix, fullPath);
return pathStr;
}
String suffix = pathStr.replaceFirst("^" + prefixStr, "");
if (suffix.startsWith("/")) {
suffix = suffix.substring(1);
}
return suffix;
}
/**
* Class to keep track of the capacity usage statistics for provided volumes.
*/
public static
|
ProvidedVolumeImpl
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/SamlPrepareAuthenticationRequestTests.java
|
{
"start": 592,
"end": 2022
}
|
class ____ extends SamlTestCase {
public void testSerialiseNonNullCriteria() throws IOException {
final SamlPrepareAuthenticationRequest req = new SamlPrepareAuthenticationRequest();
req.setRealmName("saml1");
req.setAssertionConsumerServiceURL("https://sp.example.com/sso/saml2/post");
req.setRelayState("the_relay_state");
serialiseAndValidate(req);
}
public void testSerialiseNullCriteria() throws IOException {
final SamlPrepareAuthenticationRequest req = new SamlPrepareAuthenticationRequest();
req.setRealmName(null);
req.setAssertionConsumerServiceURL(null);
req.setRelayState(null);
serialiseAndValidate(req);
}
private void serialiseAndValidate(SamlPrepareAuthenticationRequest req1) throws IOException {
final BytesStreamOutput out = new BytesStreamOutput();
req1.writeTo(out);
final SamlPrepareAuthenticationRequest req2 = new SamlPrepareAuthenticationRequest(out.bytes().streamInput());
assertThat(req2.getRealmName(), Matchers.equalTo(req1.getRealmName()));
assertThat(req2.getAssertionConsumerServiceURL(), Matchers.equalTo(req1.getAssertionConsumerServiceURL()));
assertThat(req2.getRelayState(), Matchers.equalTo(req1.getRelayState()));
assertThat(req2.getParentTask(), Matchers.equalTo(req1.getParentTask()));
}
}
|
SamlPrepareAuthenticationRequestTests
|
java
|
spring-projects__spring-framework
|
spring-beans/src/testFixtures/java/org/springframework/beans/testfixture/beans/TestBeanWithPrivateConstructor.java
|
{
"start": 687,
"end": 777
}
|
class ____ {
private TestBeanWithPrivateConstructor() {
}
}
|
TestBeanWithPrivateConstructor
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/file/SyncableFileOutputStream.java
|
{
"start": 1345,
"end": 3484
}
|
class ____ extends FileOutputStream implements Syncable {
/**
* Creates an instance of {@linkplain SyncableFileOutputStream} with the given
* name.
*
* @param name - the full file name.
* @throws FileNotFoundException - if the file cannot be created or opened.
*/
public SyncableFileOutputStream(String name) throws FileNotFoundException {
super(name);
}
/**
* Creates an instance of {@linkplain SyncableFileOutputStream} using the given
* {@linkplain File} instance.
*
* @param file - The file to use to create the output stream.
*
* @throws FileNotFoundException - if the file cannot be created or opened.
*/
public SyncableFileOutputStream(File file) throws FileNotFoundException {
super(file);
}
/**
* Creates an instance of {@linkplain SyncableFileOutputStream} with the given
* name and optionally append to the file if it already exists.
*
* @param name - the full file name.
* @param append - true if the file is to be appended to
*
* @throws FileNotFoundException - if the file cannot be created or opened.
*/
public SyncableFileOutputStream(String name, boolean append) throws FileNotFoundException {
super(name, append);
}
/**
* Creates an instance of {@linkplain SyncableFileOutputStream} that writes to
* the file represented by the given {@linkplain File} instance and optionally
* append to the file if it already exists.
*
* @param file - the file instance to use to create the stream.
* @param append - true if the file is to be appended to
*
* @throws FileNotFoundException - if the file cannot be created or opened.
*/
public SyncableFileOutputStream(File file, boolean append) throws FileNotFoundException {
super(file, append);
}
/**
* Creates an instance of {@linkplain SyncableFileOutputStream} using the given
* {@linkplain FileDescriptor} instance.
*/
public SyncableFileOutputStream(FileDescriptor fdObj) {
super(fdObj);
}
/**
* {@inheritDoc}
*/
@Override
public void sync() throws IOException {
getFD().sync();
}
}
|
SyncableFileOutputStream
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/sql/exec/internal/JdbcOperationQueryMutationNative.java
|
{
"start": 643,
"end": 1700
}
|
class ____ implements JdbcOperationQueryMutation {
private final String sql;
private final List<JdbcParameterBinder> parameterBinders;
private final Set<String> affectedTableNames;
public JdbcOperationQueryMutationNative(
String sql,
List<JdbcParameterBinder> parameterBinders,
Set<String> affectedTableNames) {
this.sql = sql;
this.parameterBinders = parameterBinders;
this.affectedTableNames = affectedTableNames;
}
@Override
public String getSqlString() {
return sql;
}
@Override
public List<JdbcParameterBinder> getParameterBinders() {
return parameterBinders;
}
@Override
public Set<String> getAffectedTableNames() {
return affectedTableNames;
}
@Override
public boolean dependsOnParameterBindings() {
return false;
}
@Override
public Map<JdbcParameter, JdbcParameterBinding> getAppliedParameters() {
return Collections.emptyMap();
}
@Override
public boolean isCompatibleWith(JdbcParameterBindings jdbcParameterBindings, QueryOptions queryOptions) {
return true;
}
}
|
JdbcOperationQueryMutationNative
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/properties/OptionalPropertyPlaceholderEipTest.java
|
{
"start": 1052,
"end": 2906
}
|
/**
 * Tests the optional property placeholder syntax {@code {{?key}}} when used in an
 * EIP attribute (the Splitter's delimiter): when the property is not defined the
 * placeholder resolves to nothing, and when it is defined its value is used.
 */
class ____ extends ContextTestSupport {
    @Override
    public boolean isUseRouteBuilder() {
        // Routes are added per test method so each test controls the property state
        // before the context is started.
        return false;
    }
    @Test
    public void testQueryOptionalNotPresent() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() {
                from("direct:start")
                        // {{?myDelim}} is optional; myDelim is NOT set in this test
                        .split(body()).delimiter("{{?myDelim}}")
                        .to("mock:line");
            }
        });
        context.start();
        // "A,B" splits into 2 messages — presumably the splitter falls back to its
        // default comma delimiter when the optional placeholder is unresolved
        // (TODO confirm against the Splitter defaults).
        getMockEndpoint("mock:line").expectedMessageCount(2);
        template.sendBody("direct:start", "A,B");
        assertMockEndpointsSatisfied();
    }
    @Test
    public void testQueryOptionalPresent() throws Exception {
        // Define myDelim = ";" before the context starts, so the optional
        // placeholder resolves and overrides the delimiter.
        Properties prop = new Properties();
        prop.put("myDelim", ";");
        context.getPropertiesComponent().setInitialProperties(prop);
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() {
                from("direct:start")
                        .split(body()).delimiter("{{?myDelim}}")
                        .to("mock:line");
            }
        });
        context.start();
        // Comma no longer splits: the whole body arrives as a single message.
        getMockEndpoint("mock:line").expectedMessageCount(1);
        template.sendBody("direct:start", "A,B");
        assertMockEndpointsSatisfied();
        resetMocks();
        // Semicolon is now the delimiter, so three parts are produced.
        getMockEndpoint("mock:line").expectedMessageCount(3);
        template.sendBody("direct:start", "A;B;C");
        assertMockEndpointsSatisfied();
    }
    @Override
    protected CamelContext createCamelContext() throws Exception {
        CamelContext context = super.createCamelContext();
        // Base properties file for the properties component; individual tests may
        // add initial properties on top of it.
        context.getPropertiesComponent().setLocation("classpath:org/apache/camel/component/properties/myproperties.properties");
        return context;
    }
}
|
OptionalPropertyPlaceholderEipTest
|
java
|
apache__camel
|
test-infra/camel-test-infra-chatscript/src/test/java/org/apache/camel/test/infra/chatscript/services/ChatScriptServiceFactory.java
|
{
"start": 1605,
"end": 1715
}
|
// Adapter binding the remote (externally managed) ChatScript infra service to the
// ChatScriptService test-infra contract; adds no behavior of its own.
class ____ extends ChatScriptRemoteInfraService implements ChatScriptService {
}
}
|
ChatScriptRemoteTestService
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Configuration.java
|
{
"start": 1188,
"end": 12739
}
|
/**
 * Immutable, per-request configuration of an ESQL query execution. Carries the
 * session environment (time zone, locale, user, cluster name), the query pragmas,
 * result-truncation limits (separate regular and time-series variants), the
 * original query text and any request-supplied lookup tables. Serialized to data
 * nodes via {@link Writeable}; note that {@code projectRouting} is deliberately
 * not serialized (it is not needed on data nodes) and therefore takes part in
 * neither {@code equals} nor {@code hashCode}.
 */
class ____ implements Writeable {

    /** Queries longer than this many chars are compressed before serialization. */
    public static final int QUERY_COMPRESS_THRESHOLD_CHARS = KB.toIntBytes(5);

    private static final TransportVersion TIMESERIES_DEFAULT_LIMIT = TransportVersion.fromName("timeseries_default_limit");
    private static final TransportVersion ESQL_SUPPORT_PARTIAL_RESULTS = TransportVersion.fromName("esql_support_partial_results");

    private final String clusterName;
    private final String username;
    private final ZonedDateTime now;
    private final ZoneId zoneId;
    private final QueryPragmas pragmas;
    private final int resultTruncationMaxSizeRegular;
    private final int resultTruncationDefaultSizeRegular;
    private final int resultTruncationMaxSizeTimeseries;
    private final int resultTruncationDefaultSizeTimeseries;
    private final Locale locale;
    private final String query;
    private final boolean profile;
    private final boolean allowPartialResults;
    private final Map<String, Map<String, Column>> tables;
    private final long queryStartTimeNanos;
    private final String projectRouting;

    public Configuration(
        ZoneId zi,
        Locale locale,
        String username,
        String clusterName,
        QueryPragmas pragmas,
        int resultTruncationMaxSizeRegular,
        int resultTruncationDefaultSizeRegular,
        String query,
        boolean profile,
        Map<String, Map<String, Column>> tables,
        long queryStartTimeNanos,
        boolean allowPartialResults,
        int resultTruncationMaxSizeTimeseries,
        int resultTruncationDefaultSizeTimeseries,
        String projectRouting
    ) {
        this.zoneId = zi.normalized();
        // "now" is fixed at construction so every node involved in the request
        // observes the same wall-clock value; tick granularity is 1ns.
        this.now = ZonedDateTime.now(Clock.tick(Clock.system(zoneId), Duration.ofNanos(1)));
        this.username = username;
        this.clusterName = clusterName;
        this.locale = locale;
        this.pragmas = pragmas;
        this.resultTruncationMaxSizeRegular = resultTruncationMaxSizeRegular;
        this.resultTruncationDefaultSizeRegular = resultTruncationDefaultSizeRegular;
        this.resultTruncationMaxSizeTimeseries = resultTruncationMaxSizeTimeseries;
        this.resultTruncationDefaultSizeTimeseries = resultTruncationDefaultSizeTimeseries;
        this.query = query;
        this.profile = profile;
        this.tables = tables;
        assert tables != null;
        this.queryStartTimeNanos = queryStartTimeNanos;
        this.allowPartialResults = allowPartialResults;
        this.projectRouting = projectRouting;
    }

    /**
     * Deserializing constructor; must read fields in the exact order
     * {@link #writeTo(StreamOutput)} writes them, with per-version fallbacks
     * for fields older nodes do not send.
     */
    public Configuration(BlockStreamInput in) throws IOException {
        this.zoneId = in.readZoneId();
        this.now = Instant.ofEpochSecond(in.readVLong(), in.readVInt()).atZone(zoneId);
        this.username = in.readOptionalString();
        this.clusterName = in.readOptionalString();
        locale = Locale.forLanguageTag(in.readString());
        this.pragmas = new QueryPragmas(in);
        this.resultTruncationMaxSizeRegular = in.readVInt();
        this.resultTruncationDefaultSizeRegular = in.readVInt();
        this.query = readQuery(in);
        this.profile = in.readBoolean();
        this.tables = in.readImmutableMap(i1 -> i1.readImmutableMap(i2 -> new Column((BlockStreamInput) i2)));
        this.queryStartTimeNanos = in.readLong();
        if (in.getTransportVersion().supports(ESQL_SUPPORT_PARTIAL_RESULTS)) {
            this.allowPartialResults = in.readBoolean();
        } else {
            this.allowPartialResults = false;
        }
        if (in.getTransportVersion().supports(TIMESERIES_DEFAULT_LIMIT)) {
            this.resultTruncationMaxSizeTimeseries = in.readVInt();
            this.resultTruncationDefaultSizeTimeseries = in.readVInt();
        } else {
            // Older senders have no separate time-series limits; fall back to regular.
            this.resultTruncationMaxSizeTimeseries = this.resultTruncationMaxSizeRegular;
            this.resultTruncationDefaultSizeTimeseries = this.resultTruncationDefaultSizeRegular;
        }
        // not needed on the data nodes for now
        this.projectRouting = null;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeZoneId(zoneId);
        var instant = now.toInstant();
        out.writeVLong(instant.getEpochSecond());
        out.writeVInt(instant.getNano());
        out.writeOptionalString(username); // TODO this one is always null
        out.writeOptionalString(clusterName); // TODO this one is never null so maybe not optional
        out.writeString(locale.toLanguageTag());
        pragmas.writeTo(out);
        out.writeVInt(resultTruncationMaxSizeRegular);
        out.writeVInt(resultTruncationDefaultSizeRegular);
        writeQuery(out, query);
        out.writeBoolean(profile);
        out.writeMap(tables, (o1, columns) -> o1.writeMap(columns, StreamOutput::writeWriteable));
        out.writeLong(queryStartTimeNanos);
        if (out.getTransportVersion().supports(ESQL_SUPPORT_PARTIAL_RESULTS)) {
            out.writeBoolean(allowPartialResults);
        }
        if (out.getTransportVersion().supports(TIMESERIES_DEFAULT_LIMIT)) {
            out.writeVInt(resultTruncationMaxSizeTimeseries);
            out.writeVInt(resultTruncationDefaultSizeTimeseries);
        }
    }

    public ZoneId zoneId() {
        return zoneId;
    }

    public ZonedDateTime now() {
        return now;
    }

    public String clusterName() {
        return clusterName;
    }

    public String username() {
        return username;
    }

    public QueryPragmas pragmas() {
        return pragmas;
    }

    /** Maximum result-truncation size, using the time-series variant when requested. */
    public int resultTruncationMaxSize(boolean isTimeseries) {
        if (isTimeseries) {
            return resultTruncationMaxSizeTimeseries;
        }
        return resultTruncationMaxSizeRegular;
    }

    /** Default result-truncation size, using the time-series variant when requested. */
    public int resultTruncationDefaultSize(boolean isTimeseries) {
        if (isTimeseries) {
            return resultTruncationDefaultSizeTimeseries;
        }
        return resultTruncationDefaultSizeRegular;
    }

    public Locale locale() {
        return locale;
    }

    public String query() {
        return query;
    }

    /**
     * Returns the current time in milliseconds from the time epoch for the execution of this request.
     * It ensures consistency by using the same value on all nodes involved in the search request.
     */
    public long absoluteStartedTimeInMillis() {
        return now.toInstant().toEpochMilli();
    }

    /**
     * @return Start time of the ESQL query in nanos
     */
    public long queryStartTimeNanos() {
        return queryStartTimeNanos;
    }

    /**
     * Create a new {@link FoldContext} with the limit configured in the {@link QueryPragmas}.
     */
    public FoldContext newFoldContext() {
        return new FoldContext(pragmas.foldLimit().getBytes());
    }

    /**
     * Tables specified in the request.
     */
    public Map<String, Map<String, Column>> tables() {
        return tables;
    }

    /** Copy of this configuration with the request-supplied tables dropped. */
    public Configuration withoutTables() {
        return new Configuration(
            zoneId,
            locale,
            username,
            clusterName,
            pragmas,
            resultTruncationMaxSizeRegular,
            resultTruncationDefaultSizeRegular,
            query,
            profile,
            Map.of(),
            queryStartTimeNanos,
            allowPartialResults,
            resultTruncationMaxSizeTimeseries,
            resultTruncationDefaultSizeTimeseries,
            projectRouting
        );
    }

    /**
     * Enable profiling, sacrificing performance to return information about
     * what operations are taking the most time.
     */
    public boolean profile() {
        return profile;
    }

    /**
     * Whether this request can return partial results instead of failing fast on failures
     */
    public boolean allowPartialResults() {
        return allowPartialResults;
    }

    public String projectRouting() {
        return projectRouting;
    }

    private static void writeQuery(StreamOutput out, String query) throws IOException {
        if (query.length() > QUERY_COMPRESS_THRESHOLD_CHARS) { // compare on chars to avoid UTF-8 encoding unless actually required
            out.writeBoolean(true);
            var bytesArray = new BytesArray(query.getBytes(StandardCharsets.UTF_8));
            var bytesRef = CompressorFactory.COMPRESSOR.compress(bytesArray);
            out.writeByteArray(bytesRef.array());
        } else {
            out.writeBoolean(false);
            out.writeString(query);
        }
    }

    private static String readQuery(StreamInput in) throws IOException {
        boolean compressed = in.readBoolean();
        if (compressed) {
            byte[] bytes = in.readByteArray();
            var bytesRef = CompressorFactory.uncompress(new BytesArray(bytes));
            return new String(bytesRef.array(), StandardCharsets.UTF_8);
        } else {
            return in.readString();
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Configuration that = (Configuration) o;
        return Objects.equals(zoneId, that.zoneId)
            && Objects.equals(now, that.now)
            && Objects.equals(username, that.username)
            && Objects.equals(clusterName, that.clusterName)
            && resultTruncationMaxSizeRegular == that.resultTruncationMaxSizeRegular
            && resultTruncationDefaultSizeRegular == that.resultTruncationDefaultSizeRegular
            // Fix: the time-series limits are part of hashCode() and so must be part of
            // equals() as well, or the equals/hashCode contract is violated (objects
            // differing only in these fields would compare equal but hash differently).
            && resultTruncationMaxSizeTimeseries == that.resultTruncationMaxSizeTimeseries
            && resultTruncationDefaultSizeTimeseries == that.resultTruncationDefaultSizeTimeseries
            && Objects.equals(pragmas, that.pragmas)
            && Objects.equals(locale, that.locale)
            && Objects.equals(that.query, query)
            && profile == that.profile
            && tables.equals(that.tables)
            && allowPartialResults == that.allowPartialResults;
    }

    @Override
    public int hashCode() {
        return Objects.hash(
            zoneId,
            now,
            username,
            clusterName,
            pragmas,
            resultTruncationMaxSizeRegular,
            resultTruncationDefaultSizeRegular,
            locale,
            query,
            profile,
            tables,
            allowPartialResults,
            resultTruncationMaxSizeTimeseries,
            resultTruncationDefaultSizeTimeseries
        );
    }

    @Override
    public String toString() {
        return "EsqlConfiguration{"
            + "pragmas="
            + pragmas
            + ", resultTruncationMaxSize="
            + "[regular="
            + resultTruncationMaxSize(false)
            + ",timeseries="
            + resultTruncationMaxSize(true)
            + "]"
            + ", resultTruncationDefaultSize="
            + "[regular="
            + resultTruncationDefaultSize(false)
            + ",timeseries="
            + resultTruncationDefaultSize(true)
            + "]"
            + ", zoneId="
            + zoneId
            + ", locale="
            + locale
            + ", query='"
            + query
            + '\''
            + ", profile="
            + profile
            + ", tables="
            + tables
            // Fix: separator was missing before this key, producing "...tables=Xallow_partial_result=Y".
            + ", allow_partial_result="
            + allowPartialResults
            + '}';
    }

    /**
     * Reads a {@link Configuration} that doesn't contain any {@link Configuration#tables()}.
     */
    public static Configuration readWithoutTables(StreamInput in) throws IOException {
        BlockStreamInput blockStreamInput = new BlockStreamInput(in, null);
        return new Configuration(blockStreamInput);
    }
}
|
Configuration
|
java
|
apache__camel
|
components/camel-netty/src/test/java/org/apache/camel/component/netty/NettySSLConsumerClientModeTest.java
|
{
"start": 6059,
"end": 8452
}
|
/**
 * Netty channel initializer for the SSL test server: builds an SSLContext from the
 * test keystore at construction time and wires an SSL + line-based text pipeline
 * for each accepted connection.
 */
class ____ extends ChannelInitializer<SocketChannel> {
    // Shareable codec/handler instances reused across all child channels.
    private static final StringDecoder DECODER = new StringDecoder();
    private static final StringEncoder ENCODER = new StringEncoder();
    private static final ServerHandler SERVERHANDLER = new ServerHandler();
    private SSLContext sslContext;
    ServerInitializer() {
        try {
            // create the SSLContext that will be used to create SSLEngine
            // instances
            char[] pass = "changeit".toCharArray();
            KeyManagerFactory kmf;
            kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
            TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
            KeyStore ks = KeyStore.getInstance("JKS");
            try (InputStream ksStream = new FileInputStream(new File("src/test/resources/keystore.jks"))) {
                ks.load(ksStream, pass);
            }
            kmf.init(ks, pass);
            tmf.init(ks);
            sslContext = SSLContext.getInstance("TLS");
            sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
        } catch (NoSuchAlgorithmException | KeyStoreException | CertificateException | IOException
                | UnrecoverableKeyException | KeyManagementException e) {
            // NOTE(review): failure is only logged, leaving sslContext null — a later
            // initChannel() would then NPE on createSSLEngine(). Presumably acceptable
            // for test code, but worth confirming.
            LOG.warn("Failed to initialize server: {}", e.getMessage(), e);
        }
    }
    @Override
    public void initChannel(SocketChannel ch) {
        ChannelPipeline pipeline = ch.pipeline();
        // create a new SslHandler to add at the start of the pipeline;
        // server mode with mandatory client certificate authentication.
        SSLEngine engine = sslContext.createSSLEngine();
        engine.setUseClientMode(false);
        engine.setNeedClientAuth(true);
        pipeline.addLast("ssl", new SslHandler(engine));
        // Add the text line codec combination,
        pipeline.addLast("framer", new DelimiterBasedFrameDecoder(8192, Delimiters.lineDelimiter()));
        // the encoder and decoder are static as these are sharable
        pipeline.addLast("decoder", DECODER);
        pipeline.addLast("encoder", ENCODER);
        // and then business logic.
        pipeline.addLast("handler", SERVERHANDLER);
    }
}
}
|
ServerInitializer
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/nullness/UnnecessaryCheckNotNullTest.java
|
{
"start": 1590,
"end": 2094
}
|
class ____ extends CompilerBasedAbstractTest {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(UnnecessaryCheckNotNull.class, getClass());
@Test
public void positive_newClass() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.common.base.Preconditions;
import com.google.common.base.Verify;
import java.util.Objects;
|
UnnecessaryCheckNotNullTest
|
java
|
apache__camel
|
components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/integration/FtpConsumerAutoCreateIT.java
|
{
"start": 1449,
"end": 2834
}
|
/**
 * Integration test verifying the FTP endpoint's {@code autoCreate} option: with
 * autoCreate=true missing remote directory trees are created on connect (and by
 * the producer), with autoCreate=false the consumer fails with FTP reply 550.
 */
class ____ extends FtpServerTestSupport {
    protected String getFtpUrl() {
        // Deeply nested path that does not exist on the server initially.
        return "ftp://admin@localhost:{{ftp.server.port}}///foo/bar/baz/xxx?password=admin";
    }
    @BeforeEach
    void forceRemove() {
        // Start each test from an empty FTP root so directory creation is observable.
        FileUtils.deleteQuietly(service.getFtpRootDir().toFile());
    }
    @Test
    public void testAutoCreate() {
        FtpEndpoint<?> endpoint = (FtpEndpoint<?>) this.getMandatoryEndpoint(getFtpUrl() + "&autoCreate=true");
        endpoint.start();
        endpoint.getExchanges();
        assertDirectoryExists(service.ftpFile("foo/bar/baz/xxx"));
        // producer should create necessary subdirs
        sendFile(getFtpUrl(), "Hello World", "sub1/sub2/hello.txt");
        assertDirectoryExists(service.ftpFile("foo/bar/baz/xxx/sub1/sub2"));
        // to see if another connect causes problems with autoCreate=true
        endpoint.stop();
        endpoint.start();
        endpoint.getExchanges();
    }
    @Test
    public void testNoAutoCreate() {
        FtpEndpoint<?> endpoint = (FtpEndpoint<?>) this.getMandatoryEndpoint(getFtpUrl() + "&autoCreate=false");
        endpoint.start();
        try {
            endpoint.getExchanges();
            fail("Should fail with 550 No such directory.");
        } catch (GenericFileOperationFailedException e) {
            // 550 is the FTP "file unavailable / no such directory" reply code.
            assertThat(e.getCode(), equalTo(550));
        }
    }
}
|
FtpConsumerAutoCreateIT
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/XContentFormattedException.java
|
{
"start": 1158,
"end": 3164
}
|
/**
 * An {@link ElasticsearchException} that can render itself as chunked XContent in
 * the standard failure format ({@code {"error": ..., "status": N}}), preserving
 * the wrapped cause and HTTP status. The {@code detailedErrorsEnabled} param
 * controls whether full error details are emitted.
 */
class ____ extends ElasticsearchException implements ChunkedToXContent {
    // Param name toggling detailed error rendering in generateFailureXContent.
    public static final String X_CONTENT_PARAM = "detailedErrorsEnabled";
    private final RestStatus status;
    private final Throwable cause;
    public XContentFormattedException(String message, RestStatus status) {
        super(message);
        this.status = Objects.requireNonNull(status);
        this.cause = null;
    }
    public XContentFormattedException(Throwable cause, RestStatus status) {
        super(cause);
        this.status = Objects.requireNonNull(status);
        this.cause = cause;
    }
    public XContentFormattedException(String message, Throwable cause, RestStatus status) {
        super(message, cause);
        this.status = Objects.requireNonNull(status);
        this.cause = cause;
    }
    @Override
    public RestStatus status() {
        return status;
    }
    @Override
    public Iterator<? extends ToXContent> toXContentChunked(Params params) {
        // Emits: { <standard failure body for the cause (or this)>, "status": <code> }.
        // When the cause is an Exception it is rendered directly; otherwise this
        // exception itself is rendered.
        return Iterators.concat(
            ChunkedToXContentHelper.startObject(),
            Iterators.single(
                (b, p) -> ElasticsearchException.generateFailureXContent(
                    b,
                    p,
                    cause instanceof Exception e ? e : this,
                    params.paramAsBoolean(X_CONTENT_PARAM, false)
                )
            ),
            Iterators.single((b, p) -> b.field("status", status.getStatus())),
            ChunkedToXContentHelper.endObject()
        );
    }
    // The delegating overrides below disambiguate between the ChunkedToXContent
    // default methods and any ElasticsearchException variants; behavior is the
    // interface default.
    @Override
    public Iterator<? extends ToXContent> toXContentChunked(RestApiVersion restApiVersion, Params params) {
        return ChunkedToXContent.super.toXContentChunked(restApiVersion, params);
    }
    @Override
    public Iterator<? extends ToXContent> toXContentChunkedV8(Params params) {
        return ChunkedToXContent.super.toXContentChunkedV8(params);
    }
    @Override
    public boolean isFragment() {
        return super.isFragment();
    }
}
|
XContentFormattedException
|
java
|
quarkusio__quarkus
|
extensions/openshift-client/runtime/src/main/java/io/quarkus/it/openshift/client/runtime/graal/MiscellaneousSubstitutions.java
|
{
"start": 3930,
"end": 4143
}
|
// Holder for the error message used by the GraalVM substitutions when the
// OpenShift miscellaneous model is absent from the classpath (see the enclosing
// MiscellaneousSubstitutions file).
class ____ {
    private static final String ERROR_MESSAGE = "OpenShift Miscellaneous API is not available, please add the openshift-model-miscellaneous module to your classpath";
}
static final
|
Constants
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/SuppressWarningsWithoutExplanationTest.java
|
{
"start": 2409,
"end": 2726
}
|
class ____ {
@SuppressWarnings("someotherwarning")
void test() {}
}
""")
.expectUnchanged()
.doTest(TEXT_MATCH);
}
@Test
public void hasInlineComment() {
helper
.addInputLines(
"Test.java",
"""
|
Test
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/MiloClientEndpointBuilderFactory.java
|
{
"start": 78430,
"end": 80596
}
|
// Generated endpoint-DSL entry points for the milo-client component: a header-name
// builder plus endpoint builders with default and custom component names.
interface ____ {
    /**
     * OPC UA Client (camel-milo)
     * Connect to OPC UA servers using the binary protocol for acquiring
     * telemetry data.
     *
     * Category: iot
     * Since: 2.19
     * Maven coordinates: org.apache.camel:camel-milo
     *
     * @return the dsl builder for the headers' name.
     */
    default MiloClientHeaderNameBuilder miloClient() {
        // Header names are static, so a shared singleton builder is returned.
        return MiloClientHeaderNameBuilder.INSTANCE;
    }
    /**
     * OPC UA Client (camel-milo)
     * Connect to OPC UA servers using the binary protocol for acquiring
     * telemetry data.
     *
     * Category: iot
     * Since: 2.19
     * Maven coordinates: org.apache.camel:camel-milo
     *
     * Syntax: <code>milo-client:endpointUri</code>
     *
     * Path parameter: endpointUri (required)
     * The OPC UA server endpoint
     *
     * @param path endpointUri
     * @return the dsl builder
     */
    default MiloClientEndpointBuilder miloClient(String path) {
        return MiloClientEndpointBuilderFactory.endpointBuilder("milo-client", path);
    }
    /**
     * OPC UA Client (camel-milo)
     * Connect to OPC UA servers using the binary protocol for acquiring
     * telemetry data.
     *
     * Category: iot
     * Since: 2.19
     * Maven coordinates: org.apache.camel:camel-milo
     *
     * Syntax: <code>milo-client:endpointUri</code>
     *
     * Path parameter: endpointUri (required)
     * The OPC UA server endpoint
     *
     * @param componentName to use a custom component name for the endpoint
     * instead of the default name
     * @param path endpointUri
     * @return the dsl builder
     */
    default MiloClientEndpointBuilder miloClient(String componentName, String path) {
        return MiloClientEndpointBuilderFactory.endpointBuilder(componentName, path);
    }
}
/**
* The builder of headers' name for the OPC UA Client component.
*/
public static
|
MiloClientBuilders
|
java
|
google__guice
|
core/test/com/google/inject/TypeConversionTest.java
|
{
"start": 14129,
"end": 14415
}
|
// Test module combining the two ambiguous converter modules and binding a
// non-numeric constant for NumericValue, used to provoke a type-conversion
// error when DateHolder is injected.
class ____ extends AbstractModule {
    @Override
    protected void configure() {
        install(new Ambiguous1Module());
        install(new Ambiguous2Module());
        bindConstant().annotatedWith(NumericValue.class).to("foo");
        bind(DateHolder.class);
    }
}
|
InnerAmbiguousModule
|
java
|
spring-projects__spring-boot
|
core/spring-boot-properties-migrator/src/main/java/org/springframework/boot/context/properties/migrator/PropertiesMigrationReporter.java
|
{
"start": 2202,
"end": 9106
}
|
class ____ {
private final Map<String, ConfigurationMetadataProperty> allProperties;
private final ConfigurableEnvironment environment;
// Snapshots all known configuration metadata (as an unmodifiable map keyed by
// property id) and keeps the environment to scan and mutate.
PropertiesMigrationReporter(ConfigurationMetadataRepository metadataRepository,
        ConfigurableEnvironment environment) {
    this.allProperties = Collections.unmodifiableMap(metadataRepository.getAllProperties());
    this.environment = environment;
}
/**
 * Analyse the {@link ConfigurableEnvironment environment} and attempt to rename
 * legacy properties if a replacement exists.
 * <p>For each property source containing deprecated properties, a synthetic
 * "migrate-" property source with the renamed values is inserted just before
 * the original source so the new names take effect transparently.
 * @return a report of the migration
 */
PropertiesMigrationReport getReport() {
    PropertiesMigrationReport report = new PropertiesMigrationReport();
    Map<String, List<PropertyMigration>> properties = getPropertySourceMigrations(
            ConfigurationMetadataProperty::isDeprecated);
    if (properties.isEmpty()) {
        return report;
    }
    properties.forEach((name, candidates) -> {
        PropertySource<?> propertySource = mapPropertiesWithReplacement(report, name, candidates);
        if (propertySource != null) {
            this.environment.getPropertySources().addBefore(name, propertySource);
        }
    });
    return report;
}
// Filters the known metadata by the given predicate and computes migrations
// per property-source name.
private Map<String, List<PropertyMigration>> getPropertySourceMigrations(
        Predicate<ConfigurationMetadataProperty> filter) {
    return getPropertySourceMigrations(this.allProperties.values().stream().filter(filter).toList());
}
// Cross-product of every property source with every candidate metadata entry,
// collecting the migrations found in each source keyed by source name.
private Map<String, List<PropertyMigration>> getPropertySourceMigrations(
        List<ConfigurationMetadataProperty> metadataProperties) {
    MultiValueMap<String, PropertyMigration> result = new LinkedMultiValueMap<>();
    getPropertySourcesAsMap().forEach((propertySourceName, propertySource) -> {
        for (ConfigurationMetadataProperty metadataProperty : metadataProperties) {
            result.addAll(propertySourceName, getMigrations(propertySource, metadataProperty));
        }
    });
    return result;
}
// All configuration property sources of the environment, keyed by a stable
// name (insertion order preserved via LinkedHashMap).
private Map<String, ConfigurationPropertySource> getPropertySourcesAsMap() {
    Map<String, ConfigurationPropertySource> map = new LinkedHashMap<>();
    for (ConfigurationPropertySource source : ConfigurationPropertySources.get(this.environment)) {
        map.put(determinePropertySourceName(source), source);
    }
    return map;
}
// Prefer the underlying PropertySource's declared name; otherwise fall back to
// the underlying source's toString representation.
private String determinePropertySourceName(ConfigurationPropertySource source) {
    if (source.getUnderlyingSource() instanceof PropertySource<?> underlyingSource) {
        return underlyingSource.getName();
    }
    Object underlyingSource = source.getUnderlyingSource();
    Assert.state(underlyingSource != null, "'underlyingSource' must not be null");
    return underlyingSource.toString();
}
// Migrations of one metadata entry within one property source: the property
// itself, plus — for Map-typed properties in iterable sources — every key
// nested under it (descendants of the property name).
private List<PropertyMigration> getMigrations(ConfigurationPropertySource propertySource,
        ConfigurationMetadataProperty metadataProperty) {
    ConfigurationPropertyName propertyName = asConfigurationPropertyName(metadataProperty);
    List<PropertyMigration> migrations = new ArrayList<>();
    addMigration(propertySource, metadataProperty, propertyName, false, migrations);
    if (isMapType(metadataProperty) && propertySource instanceof IterableConfigurationPropertySource iterable) {
        iterable.stream()
            .filter(propertyName::isAncestorOf)
            .forEach((ancestorPropertyName) -> addMigration(propertySource, metadataProperty, ancestorPropertyName,
                    true, migrations));
    }
    return migrations;
}
/**
 * Converts a metadata id to a {@link ConfigurationPropertyName}, adapting ids
 * that are not already in canonical form.
 */
private ConfigurationPropertyName asConfigurationPropertyName(ConfigurationMetadataProperty metadataProperty) {
    String id = metadataProperty.getId();
    if (ConfigurationPropertyName.isValid(id)) {
        return ConfigurationPropertyName.of(id);
    }
    return ConfigurationPropertyName.adapt(id, '.');
}
// Records a migration when the source actually contains the property and the
// replacement (if any) is not just the same name resolved from the same origin.
private void addMigration(ConfigurationPropertySource propertySource,
        ConfigurationMetadataProperty metadataProperty, ConfigurationPropertyName propertyName,
        boolean mapMigration, List<PropertyMigration> migrations) {
    ConfigurationProperty property = propertySource.getConfigurationProperty(propertyName);
    if (property != null) {
        ConfigurationMetadataProperty replacement = determineReplacementMetadata(metadataProperty);
        if (replacement == null || !hasSameName(property, replacement)) {
            migrations.add(new PropertyMigration(property, metadataProperty, replacement, mapMigration));
        }
    }
}
// True when the property originates from a property source under exactly the
// replacement's id — i.e. the value is already defined under the new name.
private boolean hasSameName(ConfigurationProperty property, ConfigurationMetadataProperty replacement) {
    return (property.getOrigin() instanceof PropertySourceOrigin propertySourceOrigin)
            && Objects.equals(propertySourceOrigin.getPropertyName(), replacement.getId());
}
// Resolves the metadata of the deprecation replacement, falling back to a
// Map-valued parent when the replacement id points inside a map property.
private @Nullable ConfigurationMetadataProperty determineReplacementMetadata(
        ConfigurationMetadataProperty metadata) {
    String replacementId = metadata.getDeprecation().getReplacement();
    if (StringUtils.hasText(replacementId)) {
        ConfigurationMetadataProperty replacement = this.allProperties.get(replacementId);
        if (replacement != null) {
            return replacement;
        }
        return detectMapValueReplacement(replacementId);
    }
    return null;
}
// If "a.b.c" is not a known property, check whether "a.b" is a known Map-typed
// property; the last segment is then a key inside that map.
private @Nullable ConfigurationMetadataProperty detectMapValueReplacement(String fullId) {
    int lastDot = fullId.lastIndexOf('.');
    if (lastDot == -1) {
        return null;
    }
    ConfigurationMetadataProperty metadata = this.allProperties.get(fullId.substring(0, lastDot));
    if (metadata != null && isMapType(metadata)) {
        return metadata;
    }
    return null;
}
/**
 * Whether the metadata declares a {@code java.util.Map}-based type.
 */
private boolean isMapType(ConfigurationMetadataProperty property) {
    String declaredType = property.getType();
    if (declaredType == null) {
        return false;
    }
    return declaredType.startsWith(Map.class.getName());
}
// Builds the synthetic "migrate-<name>" property source containing the renamed,
// type-compatible properties. A NameTrackingPropertySource is temporarily added
// first so placeholder values that would resolve through the new name (circular
// reference) can be detected and skipped. Returns null when nothing is renamable.
private @Nullable PropertySource<?> mapPropertiesWithReplacement(PropertiesMigrationReport report, String name,
        List<PropertyMigration> properties) {
    report.add(name, properties);
    List<PropertyMigration> renamed = properties.stream().filter(PropertyMigration::isCompatibleType).toList();
    if (renamed.isEmpty()) {
        return null;
    }
    NameTrackingPropertySource nameTrackingPropertySource = new NameTrackingPropertySource();
    this.environment.getPropertySources().addFirst(nameTrackingPropertySource);
    try {
        String target = "migrate-" + name;
        Map<String, OriginTrackedValue> content = new LinkedHashMap<>();
        for (PropertyMigration candidate : renamed) {
            String newPropertyName = candidate.getNewPropertyName();
            Object value = candidate.getProperty().getValue();
            if (nameTrackingPropertySource.isPlaceholderThatAccessesName(value, newPropertyName)) {
                // Skip: the value's placeholder resolves via the new name itself,
                // which would create a circular reference.
                continue;
            }
            OriginTrackedValue originTrackedValue = OriginTrackedValue.of(value,
                    candidate.getProperty().getOrigin());
            content.put(newPropertyName, originTrackedValue);
        }
        return new OriginTrackedMapPropertySource(target, content);
    }
    finally {
        // Always remove the tracking source so the environment is left untouched.
        this.environment.getPropertySources().remove(nameTrackingPropertySource.getName());
    }
}
/**
* {@link PropertySource} used to track accessed properties to protect against
* circular references.
*/
private
|
PropertiesMigrationReporter
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/core/fs/UnsupportedSchemeFactory.java
|
{
"start": 1139,
"end": 2020
}
|
/**
 * A placeholder {@link FileSystemFactory} for URI schemes that cannot be served:
 * every {@link #create(URI)} call fails with an
 * {@link UnsupportedFileSystemSchemeException} carrying the configured message
 * and optional cause.
 */
class ____ implements FileSystemFactory {

    /** Message of the exception thrown by every {@link #create(URI)} call. */
    private final String exceptionMessage;

    /** Optional root cause attached to the thrown exception. */
    @Nullable private final Throwable exceptionCause;

    public UnsupportedSchemeFactory(String exceptionMessage) {
        this(exceptionMessage, null);
    }

    public UnsupportedSchemeFactory(String exceptionMessage, @Nullable Throwable exceptionCause) {
        this.exceptionMessage = checkNotNull(exceptionMessage);
        this.exceptionCause = exceptionCause;
    }

    @Override
    public String getScheme() {
        // This factory represents "no scheme supported"; the value is a placeholder.
        return "n/a";
    }

    @Override
    public FileSystem create(URI fsUri) throws IOException {
        throw (exceptionCause == null)
                ? new UnsupportedFileSystemSchemeException(exceptionMessage)
                : new UnsupportedFileSystemSchemeException(exceptionMessage, exceptionCause);
    }
}
|
UnsupportedSchemeFactory
|
java
|
elastic__elasticsearch
|
test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldMapper.java
|
{
"start": 2261,
"end": 3103
}
|
/**
 * Minimal {@link FieldMapper.Builder} for the mock mapper: no parameters, a
 * {@code FakeFieldType} named after the field, with helpers to add multi-fields
 * and copy-to targets for tests.
 */
class ____ extends FieldMapper.Builder {
    private final MappedFieldType fieldType;
    protected Builder(String name) {
        super(name);
        this.fieldType = new FakeFieldType(name);
    }
    @Override
    protected Parameter<?>[] getParameters() {
        // Mock mapper exposes no configurable parameters.
        return FieldMapper.EMPTY_PARAMETERS;
    }
    public Builder addMultiField(Builder builder) {
        this.multiFieldsBuilder.add(builder);
        return this;
    }
    public Builder copyTo(String field) {
        this.copyTo = copyTo.withAddedFields(List.of(field));
        return this;
    }
    @Override
    public MockFieldMapper build(MapperBuilderContext context) {
        return new MockFieldMapper(leafName(), fieldType, builderParams(this, context));
    }
}
}
|
Builder
|
java
|
apache__flink
|
flink-datastream/src/main/java/org/apache/flink/datastream/impl/operators/BaseKeyedTwoInputNonBroadcastProcessOperator.java
|
{
"start": 1635,
"end": 3501
}
|
/**
 * Keyed variant of the two-input (non-broadcast) process operator: tracks the set
 * of keys seen so far, optionally verifies that emitted records map back to the
 * current key via {@code outKeySelector}, and enables async key-ordered processing.
 */
class ____<KEY, IN1, IN2, OUT>
        extends TwoInputNonBroadcastProcessOperator<IN1, IN2, OUT> {
    // TODO Restore this keySet when task initialized from checkpoint.
    protected transient Set<Object> keySet;
    // When non-null, every output record's key (per this selector) is checked
    // against the operator's current key.
    @Nullable protected final KeySelector<OUT, KEY> outKeySelector;
    public BaseKeyedTwoInputNonBroadcastProcessOperator(
            TwoInputNonBroadcastStreamProcessFunction<IN1, IN2, OUT> userFunction) {
        this(userFunction, null);
    }
    public BaseKeyedTwoInputNonBroadcastProcessOperator(
            TwoInputNonBroadcastStreamProcessFunction<IN1, IN2, OUT> userFunction,
            @Nullable KeySelector<OUT, KEY> outKeySelector) {
        super(userFunction);
        this.outKeySelector = outKeySelector;
    }
    @Override
    public void open() throws Exception {
        this.keySet = new HashSet<>();
        super.open();
    }
    @Override
    protected TimestampCollector<OUT> getOutputCollector() {
        // NOTE(review): unchecked cast of getCurrentKey() to KEY — assumes the
        // runtime key type matches the operator's KEY parameter; confirm upstream.
        return outKeySelector == null
                ? new OutputCollector<>(output)
                : new KeyCheckedOutputCollector<>(
                        new OutputCollector<>(output), outKeySelector, () -> (KEY) getCurrentKey());
    }
    @Override
    protected Object currentKey() {
        return getCurrentKey();
    }
    @Override
    protected NonPartitionedContext<OUT> getNonPartitionedContext() {
        return new DefaultNonPartitionedContext<>(
                context,
                partitionedContext,
                collector,
                true,
                keySet,
                output,
                watermarkDeclarationMap);
    }
    @Override
    public void newKeySelected(Object newKey) {
        // Track every key observed so non-partitioned processing can iterate them.
        keySet.add(newKey);
    }
    @Override
    public boolean isAsyncKeyOrderedProcessingEnabled() {
        return true;
    }
}
|
BaseKeyedTwoInputNonBroadcastProcessOperator
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsFeatures.java
|
{
"start": 702,
"end": 888
}
|
// Declares the node features contributed by cluster-stats; currently only the
// source-modes mapping-stats feature.
class ____ implements FeatureSpecification {
    @Override
    public Set<NodeFeature> getFeatures() {
        return Set.of(MappingStats.SOURCE_MODES_FEATURE);
    }
}
|
ClusterStatsFeatures
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/config/LoggerConfigTest.java
|
{
"start": 1927,
"end": 7324
}
|
/**
 * Unit tests for LoggerConfig: property substitution behavior, level inheritance
 * from the parent config, single filter invocation, and levelAndRefs parsing.
 */
class ____ {
    private static final String FQCN = LoggerConfigTest.class.getName();
    // Convenience builder for a LoggerConfig with the given properties on a
    // NullConfiguration.
    private static LoggerConfig createForProperties(final Property[] properties) {
        return LoggerConfig.newBuilder()
                .setConfig(new NullConfiguration())
                .setAdditivity(true)
                .setLevel(Level.INFO)
                .setLoggerName("name")
                .setIncludeLocation("false")
                .setProperties(properties)
                .build();
    }
    @Test
    void testPropertiesWithoutSubstitution() {
        assertNull(createForProperties(null).getPropertyList(), "null propertiesList");
        final Property[] all = new Property[] {
            Property.createProperty("key1", "value1"), Property.createProperty("key2", "value2"),
        };
        final LoggerConfig loggerConfig = createForProperties(all);
        final List<Property> list = loggerConfig.getPropertyList();
        assertEquals(new HashSet<>(list), new HashSet<>(loggerConfig.getPropertyList()), "map and list contents equal");
        // Capture the property list handed to the log event factory during log().
        final AtomicReference<Object> actualList = new AtomicReference<>();
        loggerConfig.setLogEventFactory((loggerName, marker, fqcn, level, data, properties, t) -> {
            actualList.set(properties);
            return new Builder().setTimeMillis(System.currentTimeMillis()).build();
        });
        loggerConfig.log("name", "fqcn", null, Level.INFO, new SimpleMessage("msg"), null);
        // No ${...} lookups in the values, so the exact same list instance is reused.
        assertSame(list, actualList.get(), "propertiesList passed in as is if no substitutions required");
    }
    @Test
    void testPropertiesWithSubstitution() {
        final Property[] all = new Property[] {
            Property.createProperty("key1", "value1-${sys:user.name}"),
            Property.createProperty("key2", "value2-${sys:user.name}"),
        };
        final LoggerConfig loggerConfig = createForProperties(all);
        final List<Property> list = loggerConfig.getPropertyList();
        assertEquals(new HashSet<>(list), new HashSet<>(loggerConfig.getPropertyList()), "map and list contents equal");
        final AtomicReference<Object> actualListHolder = new AtomicReference<>();
        loggerConfig.setLogEventFactory((loggerName, marker, fqcn, level, data, properties, t) -> {
            actualListHolder.set(properties);
            return new Builder().setTimeMillis(System.currentTimeMillis()).build();
        });
        loggerConfig.log("name", "fqcn", null, Level.INFO, new SimpleMessage("msg"), null);
        // Substitution produces a fresh list whose values have lookups resolved.
        assertNotSame(list, actualListHolder.get(), "propertiesList with substitutions");
        @SuppressWarnings("unchecked")
        final List<Property> actualList = (List<Property>) actualListHolder.get();
        for (int i = 0; i < list.size(); i++) {
            assertEquals(list.get(i).getName(), actualList.get(i).getName(), "name[" + i + "]");
            final String value = list.get(i).getValue().replace("${sys:user.name}", System.getProperty("user.name"));
            assertEquals(value, actualList.get(i).getValue(), "value[" + i + "]");
        }
    }
    @Test
    void testLevel() {
        // A config without an explicit level inherits its effective level from the parent.
        final Configuration configuration = new DefaultConfiguration();
        final LoggerConfig config1 = LoggerConfig.newBuilder()
                .setLoggerName("org.apache.logging.log4j.test")
                .setLevel(Level.ERROR)
                .setAdditivity(false)
                .setConfig(configuration)
                .build();
        final LoggerConfig config2 = LoggerConfig.newBuilder()
                .setLoggerName("org.apache.logging.log4j")
                .setAdditivity(false)
                .setConfig(configuration)
                .build();
        config1.setParent(config2);
        assertEquals(Level.ERROR, config1.getLevel(), "Unexpected Level");
        assertEquals(Level.ERROR, config1.getExplicitLevel(), "Unexpected explicit level");
        assertEquals(Level.ERROR, config2.getLevel(), "Unexpected Level");
        assertNull(config2.getExplicitLevel(), "Unexpected explicit level");
    }
    @Test
    void testSingleFilterInvocation() {
        // A configured filter must be consulted exactly once per log call.
        final Configuration configuration = new NullConfiguration();
        final Filter filter = mock(Filter.class);
        final LoggerConfig config = LoggerConfig.newBuilder()
                .setLoggerName(FQCN)
                .setConfig(configuration)
                .setLevel(Level.INFO)
                .setFilter(filter)
                .build();
        final Appender appender = mock(Appender.class);
        when(appender.isStarted()).thenReturn(true);
        when(appender.getName()).thenReturn("test");
        config.addAppender(appender, null, null);
        config.log(FQCN, FQCN, null, Level.INFO, new SimpleMessage(), null);
        verify(appender, times(1)).append(any());
        verify(filter, times(1)).filter(any());
    }
    @Test
    void testLevelAndRefsWithoutAppenderRef() {
        // levelAndRefs with only a level yields an empty (non-null) ref list.
        final Configuration configuration = mock(PropertiesConfiguration.class);
        final LoggerConfig.Builder builder = LoggerConfig.newBuilder()
                .setLoggerName(FQCN)
                .setConfig(configuration)
                .setLevelAndRefs(Level.INFO.name());
        final LoggerConfig loggerConfig = builder.build();
        assertNotNull(loggerConfig.getAppenderRefs());
        assertTrue(loggerConfig.getAppenderRefs().isEmpty());
    }
}
|
LoggerConfigTest
|
java
|
google__error-prone
|
check_api/src/test/java/com/google/errorprone/util/ASTHelpersTest.java
|
{
"start": 39452,
"end": 39846
}
|
class ____ {
Object obj =
// BUG: Diagnostic contains: Target type of ArrayList<Integer> is null
new ArrayList<Integer>() {
int foo() {
return 0;
}
};
}
""")
.expectResult(Result.ERROR)
.doTest();
}
@Target(TYPE_USE)
@
|
Foo
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/fixes/SuggestedFixesTest.java
|
{
"start": 77672,
"end": 77732
}
|
class ____ {}
@Anno(b = 1, a = 2)
|
B
|
java
|
apache__camel
|
components/camel-dfdl/src/main/java/org/apache/camel/component/dfdl/DfdlEndpoint.java
|
{
"start": 2102,
"end": 6022
}
|
class ____ extends ProcessorEndpoint {
@UriPath
@Metadata(required = true, description = "The path to the DFDL schema file.")
private String schemaUri;
@UriParam
@Metadata(defaultValue = "PARSE", description = "Transform direction. Either PARSE or UNPARSE")
private ParseDirection parseDirection;
@UriParam(description = "The root element name of the schema to use. If not specified, the first root element in the schema will be used.",
label = "advanced", defaultValue = "")
private String rootElement = "";
@UriParam(description = "The root namespace of the schema to use.", label = "advanced", defaultValue = "")
private String rootNamespace = "";
private DataProcessor daffodilProcessor;
public DfdlEndpoint(String uri, DfdlComponent component, String schemaFile) {
super(uri, component);
this.schemaUri = schemaFile;
}
@Override
public void doInit() throws Exception {
super.doInit();
ProcessorFactory processorFactory;
Resource schemaResource = ResourceHelper.resolveMandatoryResource(getCamelContext(), getSchemaUri());
if (getRootElement() != null && !getRootElement().isEmpty() &&
getRootNamespace() != null && !getRootNamespace().isEmpty()) {
processorFactory
= Daffodil.compiler().compileSource(schemaResource.getURI(), getRootElement(), getRootNamespace());
} else {
processorFactory = Daffodil.compiler().compileSource(schemaResource.getURI());
}
if (processorFactory.isError()) {
StringBuilder buf = new StringBuilder("Failed to initialize dfdl endpoint: [");
for (Diagnostic d : processorFactory.getDiagnostics()) {
buf.append(d.getMessage()).append("; ");
}
buf.append("]");
throw new IOException(buf.toString());
}
this.daffodilProcessor = processorFactory.onPath("/");
}
@Override
protected void onExchange(Exchange exchange) throws Exception {
if (getParseDirection() == ParseDirection.UNPARSE) {
Document xmlDocument = exchange.getIn().getBody(Document.class);
W3CDOMInfosetInputter inputter = new W3CDOMInfosetInputter(xmlDocument);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
UnparseResult result = this.daffodilProcessor.unparse(inputter, Channels.newChannel(bos));
if (result.isError()) {
exchange.setException(new DfdlUnparseException(result));
return;
}
exchange.getMessage().setBody(bos);
} else {
byte[] binary = exchange.getIn().getBody(byte[].class);
var inputStream = new InputSourceDataInputStream(binary);
var outputter = new W3CDOMInfosetOutputter();
ParseResult result = this.daffodilProcessor.parse(inputStream, outputter);
if (result.isError()) {
exchange.setException(new DfdlParseException(result));
return;
}
exchange.getMessage().setBody(outputter.getResult());
}
}
public ParseDirection getParseDirection() {
return parseDirection;
}
public void setParseDirection(ParseDirection direction) {
this.parseDirection = direction;
}
public String getSchemaUri() {
return schemaUri;
}
public void setSchemaUri(String schemaUri) {
this.schemaUri = schemaUri;
}
public String getRootElement() {
return rootElement;
}
public void setRootElement(String rootElement) {
this.rootElement = rootElement;
}
public String getRootNamespace() {
return rootNamespace;
}
public void setRootNamespace(String rootNamespace) {
this.rootNamespace = rootNamespace;
}
}
|
DfdlEndpoint
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/RedeliveryErrorHandlerLogHandledTest.java
|
{
"start": 1215,
"end": 7221
}
|
class ____ extends ContextTestSupport {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testRedeliveryErrorHandlerOnExceptionLogHandledDefault() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
onException(IllegalArgumentException.class).maximumRedeliveries(3).redeliveryDelay(0).handled(true)
.to("mock:handled");
from("direct:foo").throwException(new IllegalArgumentException("Damn"));
}
});
context.start();
getMockEndpoint("mock:handled").expectedBodiesReceived("Hello World");
template.sendBody("direct:foo", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testRedeliveryErrorHandlerOnExceptionLogHandled() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
onException(IllegalArgumentException.class).maximumRedeliveries(3).redeliveryDelay(0).logHandled(true)
.handled(true).to("mock:handled");
from("direct:foo").throwException(new IllegalArgumentException("Damn"));
}
});
context.start();
getMockEndpoint("mock:handled").expectedBodiesReceived("Hello World");
template.sendBody("direct:foo", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testRedeliveryErrorHandlerOnExceptionLogRetryAttempted() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
onException(IllegalArgumentException.class).maximumRedeliveries(3).redeliveryDelay(0).logHandled(true)
.logRetryAttempted(true).handled(true).to("mock:handled");
from("direct:foo").throwException(new IllegalArgumentException("Damn"));
}
});
context.start();
getMockEndpoint("mock:handled").expectedBodiesReceived("Hello World");
template.sendBody("direct:foo", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testRedeliveryErrorHandlerDoNotLogExhausted() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
errorHandler(defaultErrorHandler().logExhausted(false));
from("direct:bar").throwException(new CamelException("Camel rocks"));
}
});
context.start();
getMockEndpoint("mock:handled").expectedMessageCount(0);
try {
template.sendBody("direct:bar", "Hello World");
fail("Should thrown an exception");
} catch (CamelExecutionException e) {
CamelException cause = assertIsInstanceOf(CamelException.class, e.getCause());
assertEquals("Camel rocks", cause.getMessage());
}
assertMockEndpointsSatisfied();
}
@Test
public void testRedeliveryErrorHandlerLogExhaustedDefault() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
errorHandler(defaultErrorHandler());
from("direct:bar").throwException(new CamelException("Camel rocks"));
}
});
context.start();
getMockEndpoint("mock:handled").expectedMessageCount(0);
try {
template.sendBody("direct:bar", "Hello World");
fail("Should thrown an exception");
} catch (CamelExecutionException e) {
CamelException cause = assertIsInstanceOf(CamelException.class, e.getCause());
assertEquals("Camel rocks", cause.getMessage());
}
assertMockEndpointsSatisfied();
}
@Test
public void testRedeliveryErrorHandlerAllOptions() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
errorHandler(defaultErrorHandler().redeliveryDelay(0).maximumRedeliveries(3).logExhausted(true).logHandled(true)
.logRetryStackTrace(true).logStackTrace(true)
.retryAttemptedLogLevel(LoggingLevel.WARN).retriesExhaustedLogLevel(LoggingLevel.ERROR));
from("direct:bar").throwException(new CamelException("Camel rocks"));
}
});
context.start();
getMockEndpoint("mock:handled").expectedMessageCount(0);
try {
template.sendBody("direct:bar", "Hello World");
fail("Should thrown an exception");
} catch (CamelExecutionException e) {
CamelException cause = assertIsInstanceOf(CamelException.class, e.getCause());
assertEquals("Camel rocks", cause.getMessage());
}
assertMockEndpointsSatisfied();
}
@Test
public void testRedeliveryErrorHandlerOnExceptionAllOptions() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
onException(IllegalArgumentException.class).redeliveryDelay(0).maximumRedeliveries(3).logHandled(true)
.logRetryAttempted(true).logRetryStackTrace(true)
.logExhausted(true).logStackTrace(true).handled(true).retryAttemptedLogLevel(LoggingLevel.WARN)
.retriesExhaustedLogLevel(LoggingLevel.ERROR).to("mock:handled");
from("direct:foo").throwException(new IllegalArgumentException("Damn"));
}
});
context.start();
getMockEndpoint("mock:handled").expectedBodiesReceived("Hello World");
template.sendBody("direct:foo", "Hello World");
assertMockEndpointsSatisfied();
}
}
|
RedeliveryErrorHandlerLogHandledTest
|
java
|
alibaba__nacos
|
istio/src/main/java/com/alibaba/nacos/istio/xds/ServiceEntryXdsGenerator.java
|
{
"start": 1547,
"end": 6204
}
|
class ____ implements ApiGenerator<Any> {
private static volatile ServiceEntryXdsGenerator singleton = null;
private List<ServiceEntryWrapper> serviceEntries;
public static ServiceEntryXdsGenerator getInstance() {
if (singleton == null) {
synchronized (ServiceEntryXdsGenerator.class) {
if (singleton == null) {
singleton = new ServiceEntryXdsGenerator();
}
}
}
return singleton;
}
@Override
public List<Any> generate(PushRequest pushRequest) {
List<Resource> resources = new ArrayList<>();
serviceEntries = new ArrayList<>(16);
IstioConfig istioConfig = pushRequest.getResourceSnapshot().getIstioConfig();
Map<String, IstioService> serviceInfoMap = pushRequest.getResourceSnapshot().getIstioResources().getIstioServiceMap();
for (Map.Entry<String, IstioService> entry : serviceInfoMap.entrySet()) {
String serviceName = entry.getKey();
ServiceEntryWrapper serviceEntryWrapper = buildServiceEntry(serviceName, serviceName + istioConfig.getDomainSuffix(), serviceInfoMap.get(serviceName));
if (serviceEntryWrapper != null) {
serviceEntries.add(serviceEntryWrapper);
}
}
for (ServiceEntryWrapper serviceEntryWrapper : serviceEntries) {
Metadata metadata = serviceEntryWrapper.getMetadata();
ServiceEntry serviceEntry = serviceEntryWrapper.getServiceEntry();
Any any = Any.newBuilder().setValue(serviceEntry.toByteString()).setTypeUrl(SERVICE_ENTRY_PROTO).build();
resources.add(Resource.newBuilder().setBody(any).setMetadata(metadata).build());
}
List<Any> result = new ArrayList<>();
for (Resource resource : resources) {
result.add(Any.newBuilder().setValue(resource.toByteString()).setTypeUrl(MCP_RESOURCE_PROTO).build());
}
return result;
}
@Override
public List<io.envoyproxy.envoy.service.discovery.v3.Resource> deltaGenerate(PushRequest pushRequest) {
if (pushRequest.isFull()) {
return null;
}
List<io.envoyproxy.envoy.service.discovery.v3.Resource> result = new ArrayList<>();
serviceEntries = new ArrayList<>();
Set<String> reason = pushRequest.getReason();
IstioConfig istioConfig = pushRequest.getResourceSnapshot().getIstioConfig();
Map<String, IstioService> istioServiceMap = pushRequest.getResourceSnapshot().getIstioResources().getIstioServiceMap();
if (pushRequest.getSubscribe().size() != 0) {
for (String subscribe : pushRequest.getSubscribe()) {
String serviceName = parseServiceEntryNameToServiceName(subscribe, istioConfig.getDomainSuffix());
if (reason.contains(serviceName)) {
if (istioServiceMap.containsKey(serviceName)) {
ServiceEntryWrapper serviceEntryWrapper = buildServiceEntry(serviceName, subscribe, istioServiceMap.get(serviceName));
if (serviceEntryWrapper != null) {
serviceEntries.add(serviceEntryWrapper);
} else {
pushRequest.addRemoved(subscribe);
}
} else {
pushRequest.addRemoved(subscribe);
}
}
}
} else {
for (Map.Entry<String, IstioService> entry : istioServiceMap.entrySet()) {
String hostName = entry.getKey() + "." + istioConfig.getDomainSuffix();
ServiceEntryWrapper serviceEntryWrapper = buildServiceEntry(entry.getKey(), hostName, entry.getValue());
if (serviceEntryWrapper != null) {
serviceEntries.add(serviceEntryWrapper);
} else {
pushRequest.addRemoved(hostName);
}
}
}
for (ServiceEntryWrapper serviceEntryWrapper : serviceEntries) {
ServiceEntryOuterClass.ServiceEntry serviceEntry = serviceEntryWrapper.getServiceEntry();
Any any = Any.newBuilder().setValue(serviceEntry.toByteString()).setTypeUrl(SERVICE_ENTRY_PROTO).build();
result.add(io.envoyproxy.envoy.service.discovery.v3.Resource.newBuilder().setResource(any).setVersion(
pushRequest.getResourceSnapshot().getVersion()).build());
}
return result;
}
}
|
ServiceEntryXdsGenerator
|
java
|
quarkusio__quarkus
|
extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/typesafe/getters/TypesafeGettersValidationTest.java
|
{
"start": 373,
"end": 1182
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot(root -> root
.addClasses(SomeBean.class, SomeInterface.class)
.addAsResource(new StringAsset("""
{@io.quarkus.qute.deployment.typesafe.getters.TypesafeGettersValidationTest$SomeBean some}
{some.image.length}::{some.hasImage}::{some.hasImage('bar')}::{some.png}::{some.hasPng('bar')}
"""), "templates/some.html"));
@Inject
Template some;
@Test
public void testValidation() {
assertEquals("3::true::true::ping::false", some.data("some", new SomeBean("bar")).render().strip());
}
public static
|
TypesafeGettersValidationTest
|
java
|
apache__camel
|
components/camel-debezium/camel-debezium-common/camel-debezium-maven-plugin/src/test/java/org/apache/camel/maven/GenerateConnectorConfigMojoTest.java
|
{
"start": 1391,
"end": 2798
}
|
class ____ {
@TempDir
public File configFolder;
@Test
void testIfGeneratedConfigFileCorrectly() throws MojoFailureException, IOException {
final GenerateConnectorConfigMojo generateConnectorConfigMojo = new GenerateConnectorConfigMojo();
final File connectorConfigFolder = new File(configFolder, "connector-configurations");
generateConnectorConfigMojo.setLog(new SystemStreamLog());
generateConnectorConfigMojo.setGeneratedSrcDir(connectorConfigFolder);
generateConnectorConfigMojo.setConnectorClassName(MySqlConnector.class.getName());
generateConnectorConfigMojo.setConnectorConfigClassName(MySqlConnectorConfig.class.getName());
generateConnectorConfigMojo.execute();
// check if we created the file correctly
final File connectorConfigFile = new File(
connectorConfigFolder,
"org/apache/camel/component/debezium/configuration/MySqlConnectorEmbeddedDebeziumConfiguration.java");
assertTrue(connectorConfigFile.exists());
// we check the file content
final String connectorConfigFileAsText = FileUtils.readFileToString(connectorConfigFile, StandardCharsets.UTF_8);
assertNotNull(connectorConfigFileAsText);
assertTrue(connectorConfigFileAsText.contains("MySqlConnectorEmbeddedDebeziumConfiguration"));
}
}
|
GenerateConnectorConfigMojoTest
|
java
|
apache__camel
|
dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java
|
{
"start": 961537,
"end": 964990
}
|
class ____ extends YamlDeserializerBase<RoutingSlipDefinition> {
public RoutingSlipDefinitionDeserializer() {
super(RoutingSlipDefinition.class);
}
@Override
protected RoutingSlipDefinition newInstance() {
return new RoutingSlipDefinition();
}
@Override
protected RoutingSlipDefinition newInstance(String value) {
return new RoutingSlipDefinition(value);
}
@Override
protected boolean setProperty(RoutingSlipDefinition target, String propertyKey,
String propertyName, Node node) {
propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey);
switch(propertyKey) {
case "cacheSize": {
String val = asText(node);
target.setCacheSize(val);
break;
}
case "disabled": {
String val = asText(node);
target.setDisabled(val);
break;
}
case "expression": {
org.apache.camel.model.language.ExpressionDefinition val = asType(node, org.apache.camel.model.language.ExpressionDefinition.class);
target.setExpression(val);
break;
}
case "ignoreInvalidEndpoints": {
String val = asText(node);
target.setIgnoreInvalidEndpoints(val);
break;
}
case "uriDelimiter": {
String val = asText(node);
target.setUriDelimiter(val);
break;
}
case "id": {
String val = asText(node);
target.setId(val);
break;
}
case "description": {
String val = asText(node);
target.setDescription(val);
break;
}
case "note": {
String val = asText(node);
target.setNote(val);
break;
}
default: {
ExpressionDefinition ed = target.getExpressionType();
if (ed != null) {
throw new org.apache.camel.dsl.yaml.common.exception.DuplicateFieldException(node, propertyName, "as an expression");
}
ed = ExpressionDeserializers.constructExpressionType(propertyKey, node);
if (ed != null) {
target.setExpressionType(ed);
} else {
return false;
}
}
}
return true;
}
}
@YamlType(
nodes = "rss",
types = org.apache.camel.model.dataformat.RssDataFormat.class,
order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1,
displayName = "RSS",
description = "Transform from ROME SyndFeed Java Objects to XML and vice-versa.",
deprecated = false,
properties = @YamlProperty(name = "id", type = "string", description = "The id of this node", displayName = "Id")
)
public static
|
RoutingSlipDefinitionDeserializer
|
java
|
spring-projects__spring-framework
|
spring-context/src/testFixtures/java/org/springframework/context/testfixture/jndi/SimpleNamingContext.java
|
{
"start": 1598,
"end": 2034
}
|
class ____ not intended for direct usage by applications, although it
* can be used for example to override JndiTemplate's {@code createInitialContext}
* method in unit tests. Typically, SimpleNamingContextBuilder will be used to
* set up a JVM-level JNDI environment.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @see SimpleNamingContextBuilder
* @see org.springframework.jndi.JndiTemplate#createInitialContext
*/
public
|
is
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/orm/domain/gambit/EntityOfBasics.java
|
{
"start": 7587,
"end": 7971
}
|
class ____ implements AttributeConverter<MutableValue,String> {
@Override
public String convertToDatabaseColumn(MutableValue attribute) {
return attribute == null ? null : attribute.getState();
}
@Override
public MutableValue convertToEntityAttribute(String dbData) {
return dbData == null ? null : new MutableValue( dbData );
}
}
public static
|
MutableValueConverter
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/Cut.java
|
{
"start": 6681,
"end": 8720
}
|
class ____ extends Cut<Comparable<?>> {
private static final AboveAll INSTANCE = new AboveAll();
private AboveAll() {
// For discussion of "", see BelowAll.
super("");
}
@Override
Comparable<?> endpoint() {
throw new IllegalStateException("range unbounded on this side");
}
@Override
boolean isLessThan(Comparable<?> value) {
return false;
}
@Override
BoundType typeAsLowerBound() {
throw new AssertionError("this statement should be unreachable");
}
@Override
BoundType typeAsUpperBound() {
throw new IllegalStateException();
}
@Override
Cut<Comparable<?>> withLowerBoundType(
BoundType boundType, DiscreteDomain<Comparable<?>> domain) {
throw new AssertionError("this statement should be unreachable");
}
@Override
Cut<Comparable<?>> withUpperBoundType(
BoundType boundType, DiscreteDomain<Comparable<?>> domain) {
throw new IllegalStateException();
}
@Override
void describeAsLowerBound(StringBuilder sb) {
throw new AssertionError();
}
@Override
void describeAsUpperBound(StringBuilder sb) {
sb.append("+\u221e)");
}
@Override
Comparable<?> leastValueAbove(DiscreteDomain<Comparable<?>> domain) {
throw new AssertionError();
}
@Override
Comparable<?> greatestValueBelow(DiscreteDomain<Comparable<?>> domain) {
return domain.maxValue();
}
@Override
public int compareTo(Cut<Comparable<?>> o) {
return (o == this) ? 0 : 1;
}
@Override
public int hashCode() {
return System.identityHashCode(this);
}
@Override
public String toString() {
return "+\u221e";
}
private Object readResolve() {
return INSTANCE;
}
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
}
static <C extends Comparable> Cut<C> belowValue(C endpoint) {
return new BelowValue<>(endpoint);
}
private static final
|
AboveAll
|
java
|
apache__maven
|
api/maven-api-core/src/main/java/org/apache/maven/api/plugin/Mojo.java
|
{
"start": 2067,
"end": 2240
}
|
interface ____
* be safe to invoke from multiple threads concurrently.</li>
* </ul>
*
* @since 4.0.0
*/
@Experimental
@FunctionalInterface
@Consumer
@ThreadSafe
public
|
must
|
java
|
apache__flink
|
flink-connectors/flink-file-sink-common/src/main/java/org/apache/flink/streaming/api/functions/sink/filesystem/bucketassigners/SimpleVersionedStringSerializer.java
|
{
"start": 1256,
"end": 2819
}
|
class ____ implements SimpleVersionedSerializer<String> {
private static final Charset CHARSET = StandardCharsets.UTF_8;
public static final SimpleVersionedStringSerializer INSTANCE =
new SimpleVersionedStringSerializer();
@Override
public int getVersion() {
return 1;
}
@Override
public byte[] serialize(String value) {
final byte[] serialized = value.getBytes(StandardCharsets.UTF_8);
final byte[] targetBytes = new byte[Integer.BYTES + serialized.length];
final ByteBuffer bb = ByteBuffer.wrap(targetBytes).order(ByteOrder.LITTLE_ENDIAN);
bb.putInt(serialized.length);
bb.put(serialized);
return targetBytes;
}
@Override
public String deserialize(int version, byte[] serialized) throws IOException {
switch (version) {
case 1:
return deserializeV1(serialized);
default:
throw new IOException("Unrecognized version or corrupt state: " + version);
}
}
private static String deserializeV1(byte[] serialized) {
final ByteBuffer bb = ByteBuffer.wrap(serialized).order(ByteOrder.LITTLE_ENDIAN);
final byte[] targetStringBytes = new byte[bb.getInt()];
bb.get(targetStringBytes);
return new String(targetStringBytes, CHARSET);
}
/**
* Private constructor to prevent instantiation. Access the serializer through the {@link
* #INSTANCE}.
*/
private SimpleVersionedStringSerializer() {}
}
|
SimpleVersionedStringSerializer
|
java
|
apache__camel
|
components/camel-sjms/src/main/java/org/apache/camel/component/sjms/ReplyToType.java
|
{
"start": 888,
"end": 940
}
|
enum ____ {
Temporary,
Exclusive
}
|
ReplyToType
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/FileReloadListenerTests.java
|
{
"start": 550,
"end": 1345
}
|
class ____ extends ESTestCase {
public void testCallback() {
final CountDownLatch latch = new CountDownLatch(2);
final FileReloadListener fileReloadListener = new FileReloadListener(PathUtils.get("foo", "bar"), latch::countDown);
Consumer<Path> consumer = randomFrom(
fileReloadListener::onFileCreated,
fileReloadListener::onFileChanged,
fileReloadListener::onFileDeleted
);
consumer.accept(PathUtils.get("foo", "bar"));
assertThat(latch.getCount(), equalTo(1L));
consumer.accept(PathUtils.get("fizz", "baz"));
assertThat(latch.getCount(), equalTo(1L));
consumer.accept(PathUtils.get("foo", "bar"));
assertThat(latch.getCount(), equalTo(0L));
}
}
|
FileReloadListenerTests
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockitousage/junitrunner/SilentRunnerTest.java
|
{
"start": 3103,
"end": 3394
}
|
class ____ {
@Mock List<?> list;
@SuppressWarnings({"MockitoUsage", "CheckReturnValue"})
@Test
public void unfinished_stubbing_test_method() {
when(list.get(0)); // unfinished stubbing
}
}
/**
* The test
|
UsesFrameworkIncorrectly
|
java
|
spring-projects__spring-security
|
webauthn/src/main/java/org/springframework/security/web/webauthn/api/AuthenticatorAttachment.java
|
{
"start": 1637,
"end": 2779
}
|
class ____ not removable from the platform.
*/
public static final AuthenticatorAttachment PLATFORM = new AuthenticatorAttachment("platform");
private final String value;
AuthenticatorAttachment(String value) {
this.value = value;
}
/**
* Gets the value.
* @return the value.
*/
public String getValue() {
return this.value;
}
@Override
public String toString() {
return "AuthenticatorAttachment [" + this.value + "]";
}
/**
* Gets an instance of {@link AuthenticatorAttachment} based upon the value passed in.
* @param value the value to obtain the {@link AuthenticatorAttachment}
* @return the {@link AuthenticatorAttachment}
*/
public static AuthenticatorAttachment valueOf(String value) {
switch (value) {
case "cross-platform":
return CROSS_PLATFORM;
case "platform":
return PLATFORM;
default:
return new AuthenticatorAttachment(value);
}
}
public static AuthenticatorAttachment[] values() {
return new AuthenticatorAttachment[] { CROSS_PLATFORM, PLATFORM };
}
@Serial
private Object readResolve() throws ObjectStreamException {
return valueOf(this.value);
}
}
|
are
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/embeddable/EmbeddableInheritanceAssciationsTest.java
|
{
"start": 11386,
"end": 11760
}
|
class ____ extends ParentEmbeddable {
@OneToMany( fetch = FetchType.EAGER )
@JoinColumn
private List<AssociatedEntity> oneToMany = new ArrayList<>();
public AssociationChildThree() {
}
public AssociationChildThree(String parentProp) {
super( parentProp );
}
public List<AssociatedEntity> getOneToMany() {
return oneToMany;
}
}
}
|
AssociationChildThree
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/isdirty/IsDirtyTest.java
|
{
"start": 2819,
"end": 2911
}
|
class ____ {
@Id @GeneratedValue
Long id;
@ManyToOne(fetch = FetchType.LAZY)
X x;
}
}
|
Y
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/pool/DruidDataSourceTest_initSql.java
|
{
"start": 251,
"end": 1029
}
|
class ____ extends TestCase {
private DruidDataSource dataSource;
protected void setUp() throws Exception {
dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:xxx");
List<Object> sqlList = new ArrayList<Object>();
sqlList.add("select 123");
sqlList.add(null);
sqlList.add("");
dataSource.setConnectionInitSqls(sqlList);
}
protected void tearDown() throws Exception {
dataSource.close();
}
public void testDefault() throws Exception {
Connection conn = dataSource.getConnection();
MockConnection mockConn = conn.unwrap(MockConnection.class);
assertEquals("select 123", mockConn.getLastSql());
conn.close();
}
}
|
DruidDataSourceTest_initSql
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.