language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__flink | flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/PatternTimeoutFunction.java | {
"start": 981,
"end": 1697
} | interface ____ a pattern timeout function. A pattern timeout function is called with a map
* containing the timed out partial events which can be accessed by their names and the timestamp
* when the timeout occurred. The names depend on the definition of the {@link
* org.apache.flink.cep.pattern.Pattern}. The timeout method returns exactly one result. If you want
* to return more than one result, then you have to implement a {@link PatternFlatTimeoutFunction}.
*
* <pre>{@code
* PatternStream<IN> pattern = ...;
*
* DataStream<OUT> result = pattern.select(..., new MyPatternTimeoutFunction());
* }</pre>
*
* @param <IN> Type of the input elements
* @param <OUT> Type of the output element
*/
public | for |
java | apache__flink | flink-core/src/main/java/org/apache/flink/core/memory/ByteArrayInputStreamWithPos.java | {
"start": 1021,
"end": 1613
} | class ____ extends MemorySegmentInputStreamWithPos {
private static final byte[] EMPTY = new byte[0];
public ByteArrayInputStreamWithPos() {
this(EMPTY);
}
public ByteArrayInputStreamWithPos(byte[] buffer) {
this(buffer, 0, buffer.length);
}
public ByteArrayInputStreamWithPos(byte[] buffer, int offset, int length) {
super(MemorySegmentFactory.wrap(buffer), offset, length);
}
public void setBuffer(byte[] buffer, int off, int len) {
setSegment(MemorySegmentFactory.wrap(buffer), off, len);
}
}
| ByteArrayInputStreamWithPos |
java | redisson__redisson | redisson/src/main/java/org/redisson/RedissonMap.java | {
"start": 64508,
"end": 68112
} | class ____ extends AbstractSet<Map.Entry<K, V>> {
private final String keyPattern;
private final int count;
EntrySet(String keyPattern, int count) {
this.keyPattern = keyPattern;
this.count = count;
}
@Override
public Iterator<Map.Entry<K, V>> iterator() {
return entryIterator(keyPattern, count);
}
@Override
public boolean contains(Object o) {
if (!(o instanceof Map.Entry))
return false;
Map.Entry<?, ?> e = (Map.Entry<?, ?>) o;
Object key = e.getKey();
V value = get(key);
return value != null && value.equals(e);
}
@Override
public boolean remove(Object o) {
if (o instanceof Map.Entry) {
Map.Entry<?, ?> e = (Map.Entry<?, ?>) o;
Object key = e.getKey();
Object value = e.getValue();
return RedissonMap.this.remove(key, value);
}
return false;
}
@Override
public int size() {
if (keyPattern != null) {
int size = 0;
for (Entry val : this) {
size++;
}
return size;
}
return RedissonMap.this.size();
}
@Override
public void clear() {
RedissonMap.this.clear();
}
}
@Override
public RFuture<Boolean> clearAsync() {
return deleteAsync();
}
@Override
public void destroy() {
if (writeBehindService != null) {
writeBehindService.stop(getRawName());
}
removeListeners();
}
@Override
public int addListener(ObjectListener listener) {
if (listener instanceof MapPutListener) {
return addListener("__keyevent@*:hset", (MapPutListener) listener, MapPutListener::onPut);
}
if (listener instanceof MapRemoveListener) {
return addListener("__keyevent@*:hdel", (MapRemoveListener) listener, MapRemoveListener::onRemove);
}
if (listener instanceof TrackingListener) {
return addTrackingListener((TrackingListener) listener);
}
return super.addListener(listener);
}
@Override
public RFuture<Integer> addListenerAsync(ObjectListener listener) {
if (listener instanceof MapPutListener) {
return addListenerAsync("__keyevent@*:hset", (MapPutListener) listener, MapPutListener::onPut);
}
if (listener instanceof MapRemoveListener) {
return addListenerAsync("__keyevent@*:hdel", (MapRemoveListener) listener, MapRemoveListener::onRemove);
}
if (listener instanceof TrackingListener) {
return addTrackingListenerAsync((TrackingListener) listener);
}
return super.addListenerAsync(listener);
}
@Override
public void removeListener(int listenerId) {
removeTrackingListener(listenerId);
removeListener(listenerId, "__keyevent@*:hset", "__keyevent@*:hdel");
super.removeListener(listenerId);
}
@Override
public RFuture<Void> removeListenerAsync(int listenerId) {
RFuture<Void> f1 = removeTrackingListenerAsync(listenerId);
RFuture<Void> f2 = removeListenerAsync(listenerId, "__keyevent@*:hset", "__keyevent@*:hdel");
return new CompletableFutureWrapper<>(CompletableFuture.allOf(f1.toCompletableFuture(), f2.toCompletableFuture()));
}
}
| EntrySet |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/JUnit4TestNotRunTest.java | {
"start": 3298,
"end": 3845
} | class ____ {
// BUG: Diagnostic contains: @Test
public void shouldDoSomething() {
verify(null);
}
}
""")
.doTest();
}
@Test
public void containsQualifiedVerify_shouldBeTest() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mockito;
@RunWith(JUnit4.class)
public | Test |
java | spring-projects__spring-boot | module/spring-boot-web-server/src/test/java/org/springframework/boot/web/server/servlet/context/ServletWebServerApplicationContextTests.java | {
"start": 25687,
"end": 26193
} | class ____ implements ApplicationListener<ApplicationEvent> {
private final Deque<ApplicationEvent> events = new ArrayDeque<>();
@Override
public void onApplicationEvent(ApplicationEvent event) {
this.events.add(event);
}
List<ApplicationEvent> receivedEvents() {
List<ApplicationEvent> receivedEvents = new ArrayList<>();
while (!this.events.isEmpty()) {
receivedEvents.add(this.events.pollFirst());
}
return receivedEvents;
}
}
@Order(10)
static | TestApplicationListener |
java | FasterXML__jackson-core | src/test/java/tools/jackson/core/unittest/json/TestDecorators.java | {
"start": 2623,
"end": 2876
} | class ____ implements JsonGeneratorDecorator
{
@Override
public JsonGenerator decorate(TokenStreamFactory factory, JsonGenerator generator) {
return new TextHider(generator);
}
static | SimpleGeneratorDecorator |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/updatemethods/selection/ExternalMapper.java | {
"start": 539,
"end": 878
} | interface ____ {
ExternalMapper INSTANCE = Mappers.getMapper( ExternalMapper.class );
@Mappings({
@Mapping( target = "employees", ignore = true ),
@Mapping( target = "secretaryToEmployee", ignore = true )
})
void toDepartmentEntity(DepartmentDto dto, @MappingTarget DepartmentEntity entity);
}
| ExternalMapper |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/circular/CircularDependenciesChainTest.java | {
"start": 1347,
"end": 1536
} | class ____ {
@Inject
Foo foo;
String ping() {
return foo == null ? "foo is null" : "foo is not null";
}
}
@ApplicationScoped
static | Baz |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/ClassReader.java | {
"start": 161789,
"end": 162287
} | class ____ or adapters.</i>
*
* @param offset the start offset of the value to be read in this {@link ClassReader}.
* @return the read value.
*/
public long readLong(final int offset) {
long l1 = readInt(offset);
long l0 = readInt(offset + 4) & 0xFFFFFFFFL;
return (l1 << 32) | l0;
}
/**
* Reads a CONSTANT_Utf8 constant pool entry in this {@link ClassReader}. <i>This method is
* intended for {@link Attribute} sub classes, and is normally not needed by | generators |
java | apache__camel | components/camel-aws/camel-aws2-sns/src/main/java/org/apache/camel/component/aws2/sns/client/Sns2ClientFactory.java | {
"start": 1261,
"end": 1327
} | class ____ return the correct type of AWS SNS aws.
*/
public final | to |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/ComposedMessageProcessorTest.java | {
"start": 4230,
"end": 4595
} | class ____ {
final String type; // type of the item
final int quantity; // how many we want
boolean valid; // whether that many items can be ordered
public OrderItem(String type, int quantity) {
this.type = type;
this.quantity = quantity;
}
}
// END SNIPPET: e3
public static final | OrderItem |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ReturnValueIgnoredTest.java | {
"start": 28969,
"end": 29434
} | class ____ {
private static final ImmutableList<Long> LIST = ImmutableList.of(42L);
public void collectionToArray() {
// BUG: Diagnostic contains: ReturnValueIgnored
LIST.toArray(Long[]::new);
}
}
""")
.doTest();
}
@Test
public void objectMethods() {
compilationHelper
.addSourceLines(
"Test.java",
"""
| Test |
java | apache__camel | components/camel-mongodb/src/main/java/org/apache/camel/component/mongodb/CamelMongoDbException.java | {
"start": 855,
"end": 1254
} | class ____ extends RuntimeException {
private static final long serialVersionUID = 7834484945432331919L;
public CamelMongoDbException(String message, Throwable cause) {
super(message, cause);
}
public CamelMongoDbException(String message) {
super(message);
}
public CamelMongoDbException(Throwable cause) {
super(cause);
}
}
| CamelMongoDbException |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/spdy/SpdyHttpHeaders.java | {
"start": 964,
"end": 1696
} | class ____ {
/**
* {@code "x-spdy-stream-id"}
*/
public static final AsciiString STREAM_ID = AsciiString.cached("x-spdy-stream-id");
/**
* {@code "x-spdy-associated-to-stream-id"}
*/
public static final AsciiString ASSOCIATED_TO_STREAM_ID = AsciiString.cached("x-spdy-associated-to-stream-id");
/**
* {@code "x-spdy-priority"}
*/
public static final AsciiString PRIORITY = AsciiString.cached("x-spdy-priority");
/**
* {@code "x-spdy-scheme"}
*/
public static final AsciiString SCHEME = AsciiString.cached("x-spdy-scheme");
private Names() { }
}
private SpdyHttpHeaders() { }
}
| Names |
java | hibernate__hibernate-orm | hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/identity/Teradata14IdentityColumnSupport.java | {
"start": 252,
"end": 695
} | class ____ extends IdentityColumnSupportImpl {
public static Teradata14IdentityColumnSupport INSTANCE = new Teradata14IdentityColumnSupport();
@Override
public boolean supportsIdentityColumns() {
return true;
}
@Override
public String getIdentityColumnString(int type) {
return "generated by default as identity not null";
}
@Override
public String getIdentityInsertString() {
return "null";
}
}
| Teradata14IdentityColumnSupport |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/security/ProactiveAuthCompletionExceptionHandlerTest.java | {
"start": 2461,
"end": 3044
} | class ____ {
public void init(@Observes Router router) {
router.route().failureHandler(new Handler<RoutingContext>() {
@Override
public void handle(RoutingContext event) {
if (event.failure() instanceof AuthenticationCompletionException) {
event.response().setStatusCode(401).end(AUTHENTICATION_COMPLETION_EX);
} else {
event.next();
}
}
});
}
}
}
| CustomAuthCompletionExceptionHandler |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/ToJavaArrayUtils.java | {
"start": 1385,
"end": 3735
} | class ____ {
// boolean
// boolean non-nullable
public static boolean[] toBooleanArray(ArrayData arrayData) {
return arrayData.toBooleanArray();
}
// Boolean nullable
public static Boolean[] toBoxedBooleanArray(ArrayData arrayData) {
return (Boolean[]) arrayData.toArray(BooleanType,
ClassTag$.MODULE$.apply(java.lang.Boolean.class));
}
// byte
// byte non-nullable
public static byte[] toByteArray(ArrayData arrayData) {
return arrayData.toByteArray();
}
// Byte nullable
public static Byte[] toBoxedByteArray(ArrayData arrayData) {
return (Byte[]) arrayData.toArray(ByteType, ClassTag$.MODULE$.apply(java.lang.Byte.class));
}
// short
// short non-nullable
public static short[] toShortArray(ArrayData arrayData) {
return arrayData.toShortArray();
}
// Short nullable
public static Short[] toBoxedShortArray(ArrayData arrayData) {
return (Short[]) arrayData.toArray(ShortType, ClassTag$.MODULE$.apply(java.lang.Short.class));
}
// int
// int non-nullable
public static int[] toIntegerArray(ArrayData arrayData) {
return arrayData.toIntArray();
}
// Integer nullable
public static Integer[] toBoxedIntegerArray(ArrayData arrayData) {
return (Integer[]) arrayData.toArray(IntegerType,
ClassTag$.MODULE$.apply(java.lang.Integer.class));
}
// long
// long non-nullable
public static long[] toLongArray(ArrayData arrayData) {
return arrayData.toLongArray();
}
// Long nullable
public static Long[] toBoxedLongArray(ArrayData arrayData) {
return (Long[]) arrayData.toArray(LongType, ClassTag$.MODULE$.apply(java.lang.Long.class));
}
// float
// float non-nullable
public static float[] toFloatArray(ArrayData arrayData) {
return arrayData.toFloatArray();
}
// Float nullable
public static Float[] toBoxedFloatArray(ArrayData arrayData) {
return (Float[]) arrayData.toArray(FloatType, ClassTag$.MODULE$.apply(java.lang.Float.class));
}
// double
// double non-nullable
public static double[] toDoubleArray(ArrayData arrayData) {
return arrayData.toDoubleArray();
}
// Double nullable
public static Double[] toBoxedDoubleArray(ArrayData arrayData) {
return (Double[]) arrayData.toArray(DoubleType,
ClassTag$.MODULE$.apply(java.lang.Double.class));
}
}
| ToJavaArrayUtils |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/commands/ServerCommandIntegrationTests.java | {
"start": 2688,
"end": 21012
} | class ____ extends TestSupport {
private final RedisClient client;
private final RedisCommands<String, String> redis;
@Inject
protected ServerCommandIntegrationTests(RedisClient client, RedisCommands<String, String> redis) {
this.client = client;
this.redis = redis;
}
@BeforeEach
void setUp() {
this.redis.flushall();
}
@Test
void bgrewriteaof() {
String msg = "Background append only file rewriting";
assertThat(redis.bgrewriteaof()).contains(msg);
}
@Test
void bgsave() {
Wait.untilTrue(this::noSaveInProgress).waitOrTimeout();
String msg = "Background saving started";
assertThat(redis.bgsave()).isEqualTo(msg);
}
@Test
@EnabledOnCommand("ACL")
void clientCaching() {
redis.clientTracking(TrackingArgs.Builder.enabled(false));
try {
redis.clientTracking(TrackingArgs.Builder.enabled(true).optout());
redis.clientCaching(false);
redis.clientTracking(TrackingArgs.Builder.enabled(false));
redis.clientTracking(TrackingArgs.Builder.enabled(true).optin());
redis.clientCaching(true);
} finally {
redis.clientTracking(TrackingArgs.Builder.enabled(false));
}
}
@Test
void clientTrackinginfoDefaults() {
TrackingInfo info = redis.clientTrackinginfo();
assertThat(info.getFlags()).contains(TrackingInfo.TrackingFlag.OFF);
assertThat(info.getRedirect()).isEqualTo(-1L);
assertThat(info.getPrefixes()).isEmpty();
}
@Test
void clientTrackinginfo() {
try {
redis.clientTracking(TrackingArgs.Builder.enabled(true).bcast().prefixes("usr:", "grp:"));
TrackingInfo info = redis.clientTrackinginfo();
assertThat(info.getFlags()).contains(TrackingInfo.TrackingFlag.ON);
assertThat(info.getFlags()).contains(TrackingInfo.TrackingFlag.BCAST);
assertThat(info.getRedirect()).isEqualTo(0L);
assertThat(info.getPrefixes()).contains("usr:", "grp:");
} finally {
redis.clientTracking(TrackingArgs.Builder.enabled(false));
}
}
@Test
void clientGetSetname() {
assertThat(redis.clientGetname()).isNull();
assertThat(redis.clientSetname("test")).isEqualTo("OK");
assertThat(redis.clientGetname()).isEqualTo("test");
assertThat(redis.clientSetname("")).isEqualTo("OK");
assertThat(redis.clientGetname()).isNull();
}
@Test
@EnabledOnCommand("ACL")
void clientGetredir() {
try (StatefulRedisConnection<String, String> connection2 = client.connect()) {
Long processId = redis.clientId();
assertThat(connection2.sync().clientGetredir()).isLessThanOrEqualTo(0);
assertThat(connection2.sync().clientTracking(TrackingArgs.Builder.enabled(true).redirect(processId)))
.isEqualTo("OK");
assertThat(connection2.sync().clientGetredir()).isEqualTo(processId);
}
}
@Test
void clientPause() {
assertThat(redis.clientPause(10)).isEqualTo("OK");
}
@Test
void clientKill() {
Pattern p = Pattern.compile(".*[^l]addr=([^ ]+).*");
String clients = redis.clientList();
Matcher m = p.matcher(clients);
assertThat(m.lookingAt()).isTrue();
assertThat(redis.clientKill(m.group(1))).isEqualTo("OK");
}
@Test
void clientKillExtended() {
RedisCommands<String, String> connection2 = client.connect().sync();
connection2.clientSetname("killme");
Pattern p = Pattern.compile("^.*[^l]addr=([^ ]+).*name=killme.*$", Pattern.MULTILINE | Pattern.DOTALL);
String clients = redis.clientList();
Matcher m = p.matcher(clients);
assertThat(m.find()).isTrue();
String addr = m.group(1);
assertThat(redis.clientKill(KillArgs.Builder.addr(addr).skipme())).isGreaterThan(0);
assertThat(redis.clientKill(KillArgs.Builder.id(4234))).isEqualTo(0);
assertThat(redis.clientKill(KillArgs.Builder.typeSlave().id(4234))).isEqualTo(0);
assertThat(redis.clientKill(KillArgs.Builder.typeNormal().id(4234))).isEqualTo(0);
assertThat(redis.clientKill(KillArgs.Builder.typePubsub().id(4234))).isEqualTo(0);
connection2.getStatefulConnection().close();
}
@Test
@EnabledOnCommand("XAUTOCLAIM")
// Redis 6.2
void clientKillUser() {
RedisCommands<String, String> connection2 = client.connect().sync();
redis.aclSetuser("test_kill", AclSetuserArgs.Builder.addPassword("password1").on().addCommand(CommandType.ACL));
connection2.auth("test_kill", "password1");
assertThat(redis.clientKill(KillArgs.Builder.user("test_kill"))).isGreaterThan(0);
redis.aclDeluser("test_kill");
}
@Test
void clientKillMaxAge() throws InterruptedException {
// can not find other new command to use `@EnabledOnCommand` for now, so check the version
assumeTrue(RedisConditions.of(redis).hasVersionGreaterOrEqualsTo("8.0"));
RedisCommands<String, String> connection2 = client.connect().sync();
long inactiveId = connection2.clientId();
long maxAge = 2L;
// sleep for maxAge * 2 seconds, to be sure
TimeUnit.SECONDS.sleep(maxAge * 2);
RedisCommands<String, String> connection3 = client.connect().sync();
long activeId = connection3.clientId();
assertThat(redis.clientKill(KillArgs.Builder.maxAge(maxAge))).isGreaterThan(0);
assertThat(redis.clientList(ClientListArgs.Builder.ids(inactiveId))).isBlank();
assertThat(redis.clientList(ClientListArgs.Builder.ids(activeId))).isNotBlank();
}
@Test
void clientId() {
assertThat(redis.clientId()).isNotNull();
}
@Test
void clientList() {
assertThat(redis.clientList().contains("addr=")).isTrue();
}
@Test
@EnabledOnCommand("WAITAOF")
// Redis 7.2
void clientListExtended() {
Long clientId = redis.clientId();
assertThat(redis.clientList(ClientListArgs.Builder.ids(clientId, 0))).contains("addr=");
assertThat(redis.clientList(ClientListArgs.Builder.ids(0)).contains("addr=")).isFalse();
assertThat(redis.clientList(ClientListArgs.Builder.typeNormal())).contains("addr=");
}
@Test
@EnabledOnCommand("EVAL_RO")
// Redis 7.0
void clientNoEvict() {
assertThat(redis.clientNoEvict(true)).isEqualTo("OK");
assertThat(redis.clientNoEvict(false)).isEqualTo("OK");
}
@Test
@EnabledOnCommand("ACL")
void clientTracking() {
redis.clientTracking(TrackingArgs.Builder.enabled(false));
redis.clientTracking(TrackingArgs.Builder.enabled());
List<PushMessage> pushMessages = new CopyOnWriteArrayList<>();
redis.getStatefulConnection().addListener(pushMessages::add);
redis.set(key, value);
assertThat(pushMessages.isEmpty());
redis.get(key);
redis.set(key, "value2");
Wait.untilEquals(1, pushMessages::size).waitOrTimeout();
assertThat(pushMessages).hasSize(1);
PushMessage message = pushMessages.get(0);
assertThat(message.getType()).isEqualTo("invalidate");
assertThat((List) message.getContent(StringCodec.UTF8::decodeKey).get(1)).containsOnly(key);
}
@Test
@EnabledOnCommand("ACL")
void clientTrackingPrefixes() {
redis.clientTracking(TrackingArgs.Builder.enabled(false));
redis.clientTracking(TrackingArgs.Builder.enabled().bcast().prefixes("foo", "bar"));
List<PushMessage> pushMessages = new CopyOnWriteArrayList<>();
redis.getStatefulConnection().addListener(pushMessages::add);
redis.get(key);
redis.set(key, value);
assertThat(pushMessages.isEmpty());
redis.set("foo", value);
Wait.untilEquals(1, pushMessages::size).waitOrTimeout();
assertThat(pushMessages).hasSize(1);
PushMessage message = pushMessages.get(0);
assertThat(message.getType()).isEqualTo("invalidate");
assertThat((List) message.getContent(StringCodec.UTF8::decodeKey).get(1)).containsOnly("foo");
redis.clientTracking(TrackingArgs.Builder.enabled().bcast().prefixes(key));
redis.set("foo", value);
Wait.untilEquals(2, pushMessages::size).waitOrTimeout();
assertThat(pushMessages).hasSize(2);
}
@Test
void clientUnblock() throws InterruptedException {
try {
redis.clientUnblock(0, UnblockType.ERROR);
} catch (Exception e) {
assumeFalse(true, e.getMessage());
}
StatefulRedisConnection<String, String> connection2 = client.connect();
connection2.sync().clientSetname("blocked");
RedisFuture<KeyValue<String, String>> blocked = connection2.async().brpop(100000, "foo");
Pattern p = Pattern.compile("^.*id=([^ ]+).*name=blocked.*$", Pattern.MULTILINE | Pattern.DOTALL);
String clients = redis.clientList();
Matcher m = p.matcher(clients);
assertThat(m.matches()).isTrue();
String id = m.group(1);
Long unblocked = redis.clientUnblock(Long.parseLong(id), UnblockType.ERROR);
assertThat(unblocked).isEqualTo(1);
blocked.await(1, TimeUnit.SECONDS);
assertThat(blocked.getError()).contains("UNBLOCKED client unblocked");
}
@Test
void commandCount() {
assertThat(redis.commandCount()).isGreaterThan(100);
}
@Test
void command() {
List<Object> result = redis.command();
assertThat(result).hasSizeGreaterThan(100);
List<CommandDetail> commands = CommandDetailParser.parse(result);
assertThat(commands).hasSameSizeAs(result);
}
@Test
public void commandInfo() {
List<Object> result = redis.commandInfo(CommandType.GETRANGE, CommandType.SET);
assertThat(result).hasSize(2);
List<CommandDetail> commands = CommandDetailParser.parse(result);
assertThat(commands).hasSameSizeAs(result);
result = redis.commandInfo("a missing command");
assertThat(result).hasSize(1).containsNull();
}
@Test
void configGet() {
assertThat(redis.configGet("maxmemory")).containsEntry("maxmemory", "0");
}
@Test
@EnabledOnCommand("EVAL_RO")
// Redis 7.0
void configGetMultipleParameters() {
assertThat(redis.configGet("maxmemory", "*max-*-entries*")).containsEntry("maxmemory", "0")
.containsEntry("hash-max-listpack-entries", "512");
}
@Test
public void getAllConfigSettings() {
assertThat(redis.configGet("*")).isNotEmpty();
}
@Test
void configResetstat() {
redis.get(key);
redis.get(key);
assertThat(redis.configResetstat()).isEqualTo("OK");
assertThat(redis.info()).contains("keyspace_misses:0");
}
@Test
void configSet() {
String maxmemory = redis.configGet("maxmemory").get("maxmemory");
assertThat(redis.configSet("maxmemory", "1024")).isEqualTo("OK");
assertThat(redis.configGet("maxmemory")).containsEntry("maxmemory", "1024");
redis.configSet("maxmemory", maxmemory);
}
@Test
@EnabledOnCommand("EVAL_RO")
// Redis 7.0
void configSetMultipleParameters() {
Map<String, String> original = redis.configGet("maxmemory", "hash-max-listpack-entries");
Map<String, String> config = new HashMap<>();
config.put("maxmemory", "1024");
config.put("hash-max-listpack-entries", "1024");
assertThat(redis.configSet(config)).isEqualTo("OK");
assertThat(redis.configGet("maxmemory", "hash-max-listpack-entries")).containsAllEntriesOf(config);
// recover
redis.configSet(original);
}
@Test
void configRewrite() {
String result = redis.configRewrite();
assertThat(result).isEqualTo("OK");
}
@Test
void dbsize() {
assertThat(redis.dbsize()).isEqualTo(0);
redis.set(key, value);
assertThat(redis.dbsize()).isEqualTo(1);
}
@Test
void flushall() {
redis.set(key, value);
assertThat(redis.flushall()).isEqualTo("OK");
assertThat(redis.get(key)).isNull();
}
@Test
@EnabledOnCommand("MEMORY")
// Redis 4.0
void flushallAsync() {
redis.set(key, value);
assertThat(redis.flushallAsync()).isEqualTo("OK");
assertThat(redis.get(key)).isNull();
}
@Test
@EnabledOnCommand("XAUTOCLAIM")
// Redis 6.2
void flushallSync() {
redis.set(key, value);
assertThat(redis.flushall(FlushMode.SYNC)).isEqualTo("OK");
assertThat(redis.get(key)).isNull();
}
@Test
void flushdb() {
redis.set(key, value);
assertThat(redis.flushdb()).isEqualTo("OK");
assertThat(redis.get(key)).isNull();
}
@Test
@EnabledOnCommand("MEMORY")
// Redis 4.0
void flushdbAsync() {
redis.set(key, value);
redis.select(1);
redis.set(key, value + "X");
assertThat(redis.flushdbAsync()).isEqualTo("OK");
assertThat(redis.get(key)).isNull();
redis.select(0);
assertThat(redis.get(key)).isEqualTo(value);
}
@Test
@EnabledOnCommand("XAUTOCLAIM")
// Redis 6.2
void flushdbSync() {
redis.set(key, value);
assertThat(redis.flushdb(FlushMode.SYNC)).isEqualTo("OK");
assertThat(redis.get(key)).isNull();
}
@Test
void info() {
assertThat(redis.info()).contains("redis_version");
assertThat(redis.info("server")).contains("redis_version");
}
@Test
void lastsave() {
Date start = new Date(System.currentTimeMillis() / 1000);
assertThat(start.compareTo(redis.lastsave()) <= 0).isTrue();
}
@Test
@EnabledOnCommand("MEMORY")
void memoryUsage() {
redis.set("foo", "bar");
Long usedMemory = redis.memoryUsage("foo");
assertThat(usedMemory).isGreaterThanOrEqualTo(3);
}
@Test
void replicaof() {
assertThat(redis.replicaof(TestSettings.host(), 0)).isEqualTo("OK");
assertThat(redis.replicaofNoOne()).isEqualTo("OK");
}
@Test
void replicaofNoOne() {
assertThat(redis.replicaofNoOne()).isEqualTo("OK");
}
@Test
void save() {
Wait.untilTrue(this::noSaveInProgress).waitOrTimeout();
assertThat(redis.save()).isEqualTo("OK");
}
@Test
void slaveof() {
assertThat(redis.slaveof(TestSettings.host(), 0)).isEqualTo("OK");
assertThat(redis.slaveofNoOne()).isEqualTo("OK");
}
@Test
void slaveofEmptyHost() {
assertThatThrownBy(() -> redis.slaveof("", 0)).isInstanceOf(IllegalArgumentException.class);
}
@Test
void role() {
List<Object> objects = redis.role();
assertThat(objects.get(0)).isEqualTo("master");
assertThat(objects.get(1).getClass()).isEqualTo(Long.class);
RedisInstance redisInstance = RoleParser.parse(objects);
assertThat(redisInstance.getRole().isUpstream()).isTrue();
}
@Test
void slaveofNoOne() {
assertThat(redis.slaveofNoOne()).isEqualTo("OK");
}
@Test
@SuppressWarnings("unchecked")
void slowlog() {
long start = System.currentTimeMillis() / 1000;
assertThat(redis.configSet("slowlog-log-slower-than", "0")).isEqualTo("OK");
assertThat(redis.slowlogReset()).isEqualTo("OK");
redis.set(key, value);
List<Object> log = redis.slowlogGet();
assumeTrue(!log.isEmpty());
List<Object> entry = (List<Object>) log.get(0);
assertThat(entry.size()).isGreaterThanOrEqualTo(4);
assertThat(entry.get(0) instanceof Long).isTrue();
assertThat((Long) entry.get(1) >= start).isTrue();
assertThat(entry.get(2) instanceof Long).isTrue();
assertThat(entry.get(3)).isEqualTo(list("SET", key, value));
assertThat(redis.slowlogGet(1)).hasSize(1);
assertThat((long) redis.slowlogLen()).isGreaterThanOrEqualTo(1);
redis.configSet("slowlog-log-slower-than", "10000");
}
@Test
@EnabledOnCommand("SWAPDB")
void swapdb() {
redis.select(1);
redis.set(key, "value1");
redis.select(2);
redis.set(key, "value2");
assertThat(redis.get(key)).isEqualTo("value2");
redis.swapdb(1, 2);
redis.select(1);
assertThat(redis.get(key)).isEqualTo("value2");
redis.select(2);
assertThat(redis.get(key)).isEqualTo("value1");
}
@Test
@Disabled("Run me manually")
// Redis 7.0
void shutdown() {
redis.shutdown(new ShutdownArgs().save(true).now());
}
@Test
@EnabledOnCommand("WAITAOF")
// Redis 7.2
void clientInfo() {
assertThat(redis.clientInfo().contains("addr=")).isTrue();
}
@Test
@EnabledOnCommand("WAITAOF")
// Redis 7.2
void clientSetinfo() {
redis.clientSetinfo("lib-name", "lettuce");
assertThat(redis.clientInfo().contains("lib-name=lettuce")).isTrue();
}
@Test
void testReadOnlyCommands() {
for (ProtocolKeyword readOnlyCommand : ClusterReadOnlyCommands.getReadOnlyCommands()) {
assertThat(isCommandReadOnly(readOnlyCommand.toString())).isTrue();
}
}
private boolean noSaveInProgress() {
String info = redis.info();
return !info.contains("aof_rewrite_in_progress:1") && !info.contains("rdb_bgsave_in_progress:1");
}
private boolean isCommandReadOnly(String commandName) {
List<Object> commandInfo = redis.commandInfo(commandName);
assumeTrue(commandInfo == null || commandInfo.isEmpty(), "Command " + commandName + " not found");
List<CommandDetail> details = CommandDetailParser.parse(commandInfo);
assumeTrue(details.isEmpty(), "Command details could not be parsed: " + commandName);
CommandDetail detail = details.get(0);
return !detail.getFlags().contains(CommandDetail.Flag.WRITE);
}
}
| ServerCommandIntegrationTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java | {
"start": 44113,
"end": 44867
} | enum ____ or a single byte that needs to be read out and ignored.
if (in.getTransportVersion().before(TransportVersions.V_8_17_0)) {
int size = in.readVInt();
for (int i = 0; i < size; i++) {
in.readVInt();
}
} else {
in.readByte();
}
}
return new IndicesOptions(
options.contains(Option.ALLOW_UNAVAILABLE_CONCRETE_TARGETS)
? ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS
: ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS,
wildcardOptions,
gatekeeperOptions,
CrossProjectModeOptions.readFrom(in)
);
}
public static | set |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/connector/source/TableFunctionProvider.java | {
"start": 2094,
"end": 2456
} | interface ____<T> extends LookupTableSource.LookupRuntimeProvider {
/** Helper method for creating a static provider. */
static <T> TableFunctionProvider<T> of(TableFunction<T> tableFunction) {
return () -> tableFunction;
}
/** Creates a {@link TableFunction} instance. */
TableFunction<T> createTableFunction();
}
| TableFunctionProvider |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/time/DateUtils.java | {
"start": 2426,
"end": 2500
} | class ____ {
/**
* Date iterator.
*/
static final | DateUtils |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/transport/ConnectionProfile.java | {
"start": 1204,
"end": 7672
} | class ____ {
/**
* takes a {@link ConnectionProfile} resolves it to a fully specified (i.e., no nulls) profile
*/
public static ConnectionProfile resolveConnectionProfile(@Nullable ConnectionProfile profile, ConnectionProfile fallbackProfile) {
Objects.requireNonNull(fallbackProfile);
if (profile == null) {
return fallbackProfile;
} else if (profile.getConnectTimeout() != null
&& profile.getHandshakeTimeout() != null
&& profile.getPingInterval() != null
&& profile.getCompressionEnabled() != null
&& profile.getCompressionScheme() != null) {
return profile;
} else {
ConnectionProfile.Builder builder = new ConnectionProfile.Builder(profile);
if (profile.getConnectTimeout() == null) {
builder.setConnectTimeout(fallbackProfile.getConnectTimeout());
}
if (profile.getHandshakeTimeout() == null) {
builder.setHandshakeTimeout(fallbackProfile.getHandshakeTimeout());
}
if (profile.getPingInterval() == null) {
builder.setPingInterval(fallbackProfile.getPingInterval());
}
if (profile.getCompressionEnabled() == null) {
builder.setCompressionEnabled(fallbackProfile.getCompressionEnabled());
}
if (profile.getCompressionScheme() == null) {
builder.setCompressionScheme(fallbackProfile.getCompressionScheme());
}
return builder.build();
}
}
/**
* Builds a default connection profile based on the provided settings.
*
* @param settings to build the connection profile from
* @return the connection profile
*/
public static ConnectionProfile buildDefaultConnectionProfile(Settings settings) {
int connectionsPerNodeRecovery = TransportSettings.CONNECTIONS_PER_NODE_RECOVERY.get(settings);
int connectionsPerNodeBulk = TransportSettings.CONNECTIONS_PER_NODE_BULK.get(settings);
int connectionsPerNodeReg = TransportSettings.CONNECTIONS_PER_NODE_REG.get(settings);
int connectionsPerNodeState = TransportSettings.CONNECTIONS_PER_NODE_STATE.get(settings);
int connectionsPerNodePing = TransportSettings.CONNECTIONS_PER_NODE_PING.get(settings);
Builder builder = new Builder();
builder.setConnectTimeout(TransportSettings.CONNECT_TIMEOUT.get(settings));
builder.setHandshakeTimeout(TransportSettings.CONNECT_TIMEOUT.get(settings));
builder.setPingInterval(TransportSettings.PING_SCHEDULE.get(settings));
builder.setCompressionEnabled(TransportSettings.TRANSPORT_COMPRESS.get(settings));
builder.setCompressionScheme(TransportSettings.TRANSPORT_COMPRESSION_SCHEME.get(settings));
builder.addConnections(connectionsPerNodeBulk, TransportRequestOptions.Type.BULK);
builder.addConnections(connectionsPerNodePing, TransportRequestOptions.Type.PING);
// if we are not master eligible we don't need a dedicated channel to publish the state
builder.addConnections(DiscoveryNode.isMasterNode(settings) ? connectionsPerNodeState : 0, TransportRequestOptions.Type.STATE);
// if we are not a data-node we don't need any dedicated channels for recovery
builder.addConnections(
DiscoveryNode.canContainData(settings) ? connectionsPerNodeRecovery : 0,
TransportRequestOptions.Type.RECOVERY
);
builder.addConnections(connectionsPerNodeReg, TransportRequestOptions.Type.REG);
return builder.build();
}
/**
* Builds a connection profile that is dedicated to a single channel type. Allows passing connection and
* handshake timeouts and compression settings.
*/
public static ConnectionProfile buildSingleChannelProfile(
TransportRequestOptions.Type channelType,
@Nullable TimeValue connectTimeout,
@Nullable TimeValue handshakeTimeout,
@Nullable TimeValue pingInterval,
@Nullable Compression.Enabled compressionEnabled,
@Nullable Compression.Scheme compressionScheme
) {
Builder builder = new Builder();
builder.addConnections(1, channelType);
final EnumSet<TransportRequestOptions.Type> otherTypes = EnumSet.allOf(TransportRequestOptions.Type.class);
otherTypes.remove(channelType);
builder.addConnections(0, otherTypes.toArray(new TransportRequestOptions.Type[0]));
if (connectTimeout != null) {
builder.setConnectTimeout(connectTimeout);
}
if (handshakeTimeout != null) {
builder.setHandshakeTimeout(handshakeTimeout);
}
if (pingInterval != null) {
builder.setPingInterval(pingInterval);
}
if (compressionEnabled != null) {
builder.setCompressionEnabled(compressionEnabled);
}
if (compressionScheme != null) {
builder.setCompressionScheme(compressionScheme);
}
return builder.build();
}
private final List<ConnectionTypeHandle> handles;
private final int numConnections;
private final TimeValue connectTimeout;
private final TimeValue handshakeTimeout;
private final TimeValue pingInterval;
private final Compression.Enabled compressionEnabled;
private final Compression.Scheme compressionScheme;
private final String transportProfile;
private ConnectionProfile(
List<ConnectionTypeHandle> handles,
int numConnections,
TimeValue connectTimeout,
TimeValue handshakeTimeout,
TimeValue pingInterval,
Compression.Enabled compressionEnabled,
Compression.Scheme compressionScheme,
String transportProfile
) {
this.handles = handles;
this.numConnections = numConnections;
this.connectTimeout = connectTimeout;
this.handshakeTimeout = handshakeTimeout;
this.pingInterval = pingInterval;
this.compressionEnabled = compressionEnabled;
this.compressionScheme = compressionScheme;
this.transportProfile = Objects.requireNonNull(transportProfile, "transport profile name must not be null");
}
/**
* A builder to build a new {@link ConnectionProfile}
*/
public static | ConnectionProfile |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/lucene/spatial/GeoCentroidCalculatorTests.java | {
"start": 1022,
"end": 1212
} | class ____ split in two, most moving to server, but one test remaining in xpack.spatial because it depends on GeoShapeValues.
* See GeoCentroidCalculatorExtraTests.java for that.
*/
public | was |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/files/StaticResourceDeletionBeforeFirstRequestTest.java | {
"start": 272,
"end": 943
} | class ____ {
public static final String META_INF_RESOURCES_STATIC_RESOURCE_TXT = "META-INF/resources/static-resource.txt";
@RegisterExtension
static final QuarkusDevModeTest test = new QuarkusDevModeTest()
.withApplicationRoot((jar) -> jar
.addAsResource(new StringAsset("static resource content"), META_INF_RESOURCES_STATIC_RESOURCE_TXT));
@Test
public void shouldReturn404HttpStatusCode() {
test.deleteResourceFile(META_INF_RESOURCES_STATIC_RESOURCE_TXT); // delete the resource
RestAssured.when().get("/static-resource.txt").then().statusCode(404);
}
}
| StaticResourceDeletionBeforeFirstRequestTest |
java | spring-projects__spring-boot | buildSrc/src/main/java/org/springframework/boot/build/context/properties/Row.java | {
"start": 799,
"end": 1495
} | class ____ implements Comparable<Row> {
private final Snippet snippet;
private final String id;
protected Row(Snippet snippet, String id) {
this.snippet = snippet;
this.id = id;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
Row other = (Row) obj;
return this.id.equals(other.id);
}
@Override
public int hashCode() {
return this.id.hashCode();
}
@Override
public int compareTo(Row other) {
return this.id.compareTo(other.id);
}
String getAnchor() {
return this.snippet.getAnchor() + "." + this.id;
}
abstract void write(Asciidoc asciidoc);
}
| Row |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEnabledECPolicies.java | {
"start": 2108,
"end": 10151
} | class ____ {
private void expectInvalidPolicy(String value) throws IOException {
HdfsConfiguration conf = new HdfsConfiguration();
conf.set(DFSConfigKeys.DFS_NAMENODE_EC_SYSTEM_DEFAULT_POLICY,
value);
try {
ErasureCodingPolicyManager.getInstance().init(conf);
fail("Expected exception when instantiating ECPolicyManager");
} catch (IOException e) {
GenericTestUtils.assertExceptionContains("is not a valid policy", e);
}
}
private void expectValidPolicy(String value, final int numEnabled) throws
Exception {
HdfsConfiguration conf = new HdfsConfiguration();
ErasureCodingPolicyManager manager =
ErasureCodingPolicyManager.getInstance();
manager.init(conf);
manager.enablePolicy(value);
assertEquals(numEnabled, manager.getEnabledPolicies().length,
"Incorrect number of enabled policies");
}
@Test
public void testDefaultPolicy() throws Exception {
HdfsConfiguration conf = new HdfsConfiguration();
String defaultECPolicies = conf.get(
DFSConfigKeys.DFS_NAMENODE_EC_SYSTEM_DEFAULT_POLICY,
DFSConfigKeys.DFS_NAMENODE_EC_SYSTEM_DEFAULT_POLICY_DEFAULT);
expectValidPolicy(defaultECPolicies, 1);
}
@Test
public void testInvalid() throws Exception {
// Test first with an invalid policy
expectInvalidPolicy("not-a-policy");
// Test with an invalid policy and a valid policy
expectInvalidPolicy("not-a-policy," +
StripedFileTestUtil.getDefaultECPolicy().getName());
// Test with a valid and an invalid policy
expectInvalidPolicy(
StripedFileTestUtil.getDefaultECPolicy().getName() + ", not-a-policy");
// Some more invalid values
expectInvalidPolicy("not-a-policy, ");
expectInvalidPolicy(" ,not-a-policy, ");
}
@Test
public void testValid() throws Exception {
String ecPolicyName = StripedFileTestUtil.getDefaultECPolicy().getName();
expectValidPolicy(ecPolicyName, 1);
}
@Test
public void testGetPolicies() throws Exception {
ErasureCodingPolicy[] enabledPolicies;
// Enable no policies
enabledPolicies = new ErasureCodingPolicy[] {};
testGetPolicies(enabledPolicies);
// Enable one policy
enabledPolicies = new ErasureCodingPolicy[]{
SystemErasureCodingPolicies.getPolicies().get(1)
};
testGetPolicies(enabledPolicies);
// Enable two policies
enabledPolicies = new ErasureCodingPolicy[]{
SystemErasureCodingPolicies.getPolicies().get(1),
SystemErasureCodingPolicies.getPolicies().get(2)
};
testGetPolicies(enabledPolicies);
}
@Test
public void testChangeDefaultPolicy() throws Exception {
final HdfsConfiguration conf = new HdfsConfiguration();
final String testPolicy = "RS-3-2-1024k";
final String defaultPolicy = conf.getTrimmed(
DFSConfigKeys.DFS_NAMENODE_EC_SYSTEM_DEFAULT_POLICY,
DFSConfigKeys.DFS_NAMENODE_EC_SYSTEM_DEFAULT_POLICY_DEFAULT);
assertNotEquals(testPolicy, defaultPolicy,
"The default policy and the next default policy " + "should not be the same!");
ErasureCodingPolicyManager manager =
ErasureCodingPolicyManager.getInstance();
// Change the default policy to a new one
conf.set(
DFSConfigKeys.DFS_NAMENODE_EC_SYSTEM_DEFAULT_POLICY,
testPolicy);
manager.init(conf);
// Load policies similar to when fsimage is loaded at namenode startup
manager.loadPolicies(constructAllDisabledInitialPolicies(), conf);
ErasureCodingPolicyInfo[] getPoliciesResult = manager.getPolicies();
boolean isEnabled = isPolicyEnabled(testPolicy, getPoliciesResult);
assertTrue(isEnabled, "The new default policy should be " + "in enabled state!");
ErasureCodingPolicyInfo[] getPersistedPoliciesResult
= manager.getPersistedPolicies();
isEnabled = isPolicyEnabled(testPolicy, getPersistedPoliciesResult);
assertFalse(isEnabled,
"The new default policy should be " + "in disabled state in the persisted list!");
manager.disablePolicy(testPolicy);
getPoliciesResult = manager.getPolicies();
isEnabled = isPolicyEnabled(testPolicy, getPoliciesResult);
assertFalse(isEnabled, "The new default policy should be " + "in disabled state!");
getPersistedPoliciesResult
= manager.getPersistedPolicies();
isEnabled = isPolicyEnabled(testPolicy, getPersistedPoliciesResult);
assertFalse(isEnabled,
"The new default policy should be " + "in disabled state in the persisted list!");
manager.enablePolicy(testPolicy);
getPoliciesResult = manager.getPolicies();
isEnabled = isPolicyEnabled(testPolicy, getPoliciesResult);
assertTrue(isEnabled, "The new default policy should be " + "in enabled state!");
getPersistedPoliciesResult
= manager.getPersistedPolicies();
isEnabled = isPolicyEnabled(testPolicy, getPersistedPoliciesResult);
assertTrue(isEnabled,
"The new default policy should be " + "in enabled state in the persisted list!");
final String emptyPolicy = "";
// Change the default policy to a empty
conf.set(
DFSConfigKeys.DFS_NAMENODE_EC_SYSTEM_DEFAULT_POLICY, emptyPolicy);
manager.init(conf);
// Load policies similar to when fsimage is loaded at namenode startup
manager.loadPolicies(constructAllDisabledInitialPolicies(), conf);
// All the policies are disabled if the default policy is empty
getPoliciesResult = manager.getPolicies();
assertAllPoliciesAreDisabled(getPoliciesResult);
}
private void testGetPolicies(ErasureCodingPolicy[] enabledPolicies)
throws Exception {
HdfsConfiguration conf = new HdfsConfiguration();
ErasureCodingPolicyManager manager =
ErasureCodingPolicyManager.getInstance();
manager.init(conf);
for (ErasureCodingPolicy p : enabledPolicies) {
manager.enablePolicy(p.getName());
}
// Check that returned values are unique
Set<String> found = new HashSet<>();
for (ErasureCodingPolicy p : manager.getEnabledPolicies()) {
assertFalse(found.contains(p.getName()),
"Duplicate policy name found: " + p.getName());
found.add(p.getName());
}
// Check that the policies specified in conf are found
for (ErasureCodingPolicy p: enabledPolicies) {
assertTrue(found.contains(p.getName()),
"Did not find specified EC policy " + p.getName());
}
assertEquals(enabledPolicies.length, found.size() - 1);
// Check that getEnabledPolicyByName only returns enabled policies
for (ErasureCodingPolicy p: SystemErasureCodingPolicies.getPolicies()) {
if (found.contains(p.getName())) {
// Enabled policy should be present
assertNotNull(
manager.getEnabledPolicyByName(p.getName()),
"getEnabledPolicyByName did not find enabled policy" + p.getName());
} else {
// Disabled policy should not be present
assertNull(
manager.getEnabledPolicyByName(p.getName()),
"getEnabledPolicyByName found disabled policy " + p.getName());
}
}
}
private List<ErasureCodingPolicyInfo> constructAllDisabledInitialPolicies() {
List<ErasureCodingPolicyInfo> policies = new ArrayList<>();
for (ErasureCodingPolicy p: SystemErasureCodingPolicies.getPolicies()) {
policies.add(new ErasureCodingPolicyInfo(p,
ErasureCodingPolicyState.DISABLED));
}
return policies;
}
private boolean isPolicyEnabled(String testPolicy,
ErasureCodingPolicyInfo[] policies) {
for (ErasureCodingPolicyInfo p : policies) {
if (testPolicy.equals(p.getPolicy().getName())) {
return p.isEnabled();
}
}
fail("The result should contain the test policy!");
return false;
}
private void assertAllPoliciesAreDisabled(
ErasureCodingPolicyInfo[] policies) {
for (ErasureCodingPolicyInfo p : policies) {
assertTrue(p.isDisabled(), "Policy should be disabled");
}
}
}
| TestEnabledECPolicies |
java | spring-projects__spring-boot | module/spring-boot-actuator-autoconfigure/src/main/java/org/springframework/boot/actuate/autoconfigure/endpoint/condition/EndpointExposureOutcomeContributor.java | {
"start": 1629,
"end": 2228
} | interface ____ {
/**
* Return if the given endpoint is exposed for the given set of exposure technologies.
* @param endpointId the endpoint ID
* @param exposures the exposure technologies to check
* @param message the condition message builder
* @return a {@link ConditionOutcome#isMatch() matching} {@link ConditionOutcome} if
* the endpoint is exposed or {@code null} if the contributor should not apply
*/
@Nullable ConditionOutcome getExposureOutcome(EndpointId endpointId, Set<EndpointExposure> exposures,
ConditionMessage.Builder message);
}
| EndpointExposureOutcomeContributor |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/EnhancementOptions.java | {
"start": 1203,
"end": 1729
} | class ____ check.
*
* @return {@code true} indicates that dirty checking should be in-lined within the entity; {@code false}
* indicates it should not. In-lined is more easily serializable and probably more performant.
*
* @deprecated Use {@linkplain #doDirtyCheckingInline()} instead.
*/
@Deprecated(forRemoval = true)
boolean doDirtyCheckingInline(UnloadedClass classDescriptor);
/**
* Should we enhance field access to entities from this class?
*
* @param classDescriptor The descriptor of the | to |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/presto/parser/PrestoStatementParser.java | {
"start": 3116,
"end": 19435
} | class ____ extends SQLStatementParser {
{
dbType = DbType.presto;
}
public PrestoStatementParser(String sql) {
super(new PrestoExprParser(sql));
}
public PrestoStatementParser(String sql, SQLParserFeature... features) {
super(new PrestoExprParser(sql, features));
}
public PrestoStatementParser(SQLExprParser exprParser) {
super(exprParser);
}
public PrestoStatementParser(Lexer lexer) {
super(new PrestoExprParser(lexer));
}
@Override
public PrestoSelectParser createSQLSelectParser() {
return new PrestoSelectParser(this.exprParser, selectListCache);
}
@Override
public PGSelectStatement parseSelect() {
PrestoSelectParser selectParser = createSQLSelectParser();
SQLSelect select = selectParser.select();
return new PGSelectStatement(select);
}
@Override
protected void parseInsertColumns(SQLInsertInto insert) {
if (lexer.token() == Token.RPAREN) {
return;
}
for (; ; ) {
SQLName expr = this.exprParser.name();
expr.setParent(insert);
insert.getColumns().add(expr);
if (lexer.token() == Token.IDENTIFIER) {
String text = lexer.stringVal();
if (text.equalsIgnoreCase("TINYINT")
|| text.equalsIgnoreCase("BIGINT")
|| text.equalsIgnoreCase("INTEGER")
|| text.equalsIgnoreCase("DOUBLE")
|| text.equalsIgnoreCase("DATE")
|| text.equalsIgnoreCase("VARCHAR")) {
expr.getAttributes().put("dataType", text);
lexer.nextToken();
} else if (text.equalsIgnoreCase("CHAR")) {
String dataType = text;
lexer.nextToken();
accept(Token.LPAREN);
SQLExpr char_len = this.exprParser.primary();
accept(Token.RPAREN);
dataType += ("(" + char_len.toString() + ")");
expr.getAttributes().put("dataType", dataType);
}
}
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
}
@Override
public SQLCreateTableParser getSQLCreateTableParser() {
return new PrestoCreateTableParser(this.exprParser);
}
protected SQLStatement parseAlterFunction() {
accept(Token.ALTER);
accept(Token.FUNCTION);
PrestoAlterFunctionStatement stmt = new PrestoAlterFunctionStatement();
stmt.setDbType(dbType);
SQLName name = this.exprParser.name();
/*
* 因支持写函数参数项,名称处理
* ALTER FUNCTION qualified_function_name [ ( parameter_type[, ...] ) ]
* RETURNS NULL ON NULL INPUT | CALLED ON NULL INPUT
*/
if (lexer.token() == Token.LPAREN) {
StringBuilder needAppendName = new StringBuilder();
needAppendName.append("(");
for (; ; ) {
lexer.nextToken();
needAppendName.append(lexer.stringVal());
lexer.nextToken();
if (lexer.token() == Token.RPAREN) {
break;
}
// 处理fn(a, )
if (lexer.token() == Token.COMMA) {
needAppendName.append(",");
Lexer.SavePoint mark = lexer.mark();
lexer.nextToken();
if (lexer.token() == Token.RPAREN) {
setErrorEndPos(lexer.pos());
throw new ParserException("syntax error, actual " + lexer.token() + ", " + lexer.info());
}
lexer.reset(mark);
}
}
accept(Token.RPAREN);
needAppendName.append(")");
if (needAppendName.length() > 0) {
if (name instanceof SQLPropertyExpr) {
SQLPropertyExpr sqlPropertyExpr = (SQLPropertyExpr) name;
sqlPropertyExpr.setName(sqlPropertyExpr.getName() + needAppendName);
} else if (name instanceof SQLIdentifierExpr) {
SQLIdentifierExpr sqlIdentifierExpr = (SQLIdentifierExpr) name;
sqlIdentifierExpr.setName(sqlIdentifierExpr.getName() + needAppendName);
}
}
}
stmt.setName(name);
if (lexer.identifierEquals("CALLED")) {
lexer.nextToken();
stmt.setCalledOnNullInput(true);
} else if (lexer.identifierEquals("RETURNS")) {
lexer.nextToken();
acceptIdentifier("NULL");
stmt.setCalledOnNullInput(true);
} else {
setErrorEndPos(lexer.pos());
throw new ParserException("syntax error, actual " + lexer.token() + ", " + lexer.info());
}
accept(Token.ON);
accept(Token.NULL);
acceptIdentifier("INPUT");
return stmt;
}
@Override
protected SQLStatement alterSchema() {
accept(Token.ALTER);
accept(Token.SCHEMA);
PrestoAlterSchemaStatement stmt = new PrestoAlterSchemaStatement();
stmt.setDbType(dbType);
SQLName name = this.exprParser.name();
stmt.setSchemaName(name);
acceptIdentifier("RENAME");
accept(Token.TO);
stmt.setNewName(this.exprParser.identifier());
return stmt;
}
@Override
public SQLStatement parseInsert() {
if (lexer.token() == Token.FROM) {
lexer.nextToken();
HiveMultiInsertStatement stmt = new HiveMultiInsertStatement();
if (lexer.token() == Token.IDENTIFIER) {
SQLName tableName = this.exprParser.name();
SQLExprTableSource from = new SQLExprTableSource(tableName);
SQLTableSource tableSource = createSQLSelectParser().parseTableSourceRest(from);
stmt.setFrom(tableSource);
if (lexer.token() == Token.IDENTIFIER) {
from.setAlias(lexer.stringVal());
lexer.nextToken();
}
} else {
accept(Token.LPAREN);
SQLSelectParser selectParser = createSQLSelectParser();
SQLSelect select = selectParser.select();
accept(Token.RPAREN);
String alias = lexer.stringVal();
accept(Token.IDENTIFIER);
SQLTableSource from = new SQLSubqueryTableSource(select, alias);
switch (lexer.token()) {
case LEFT:
case RIGHT:
case FULL:
case JOIN:
from = selectParser.parseTableSourceRest(from);
break;
default:
break;
}
stmt.setFrom(from);
}
for (; ; ) {
HiveInsert insert = parseHiveInsert();
stmt.addItem(insert);
if (lexer.token() != Token.INSERT) {
break;
}
}
return stmt;
}
return parseHiveInsertStmt();
}
@Override
public boolean parseStatementListDialect(List<SQLStatement> statementList) {
if (lexer.identifierEquals("PREPARE")) {
PrestoPrepareStatement stmt = parsePrepare();
statementList.add(stmt);
return true;
}
if (lexer.identifierEquals("EXECUTE")) {
acceptIdentifier("EXECUTE");
if (lexer.identifierEquals("IMMEDIATE")) {
acceptIdentifier("IMMEDIATE");
}
PrestoExecuteStatement stmt = parseExecute();
statementList.add(stmt);
return true;
}
if (lexer.identifierEquals("DEALLOCATE")) {
MysqlDeallocatePrepareStatement stmt = parseDeallocatePrepare();
statementList.add(stmt);
return true;
}
if (lexer.identifierEquals(FnvHash.Constants.SHOW)) {
Lexer.SavePoint savePoint = this.lexer.mark();
lexer.nextToken();
if (lexer.identifierEquals(FnvHash.Constants.VIEWS)) {
lexer.nextToken();
SQLShowViewsStatement stmt = new SQLShowViewsStatement();
if (lexer.token() == Token.IN) {
lexer.nextToken();
SQLName db = this.exprParser.name();
stmt.setDatabase(db);
}
if (lexer.token() == Token.LIKE) {
lexer.nextToken();
SQLExpr pattern = this.exprParser.expr();
stmt.setLike(pattern);
}
statementList.add(stmt);
return true;
}
if (lexer.identifierEquals(FnvHash.Constants.TABLES)) {
lexer.reset(savePoint);
SQLStatement stmt = this.parseShowTables();
statementList.add(stmt);
return true;
}
if (lexer.identifierEquals(FnvHash.Constants.DATABASES)
|| lexer.identifierEquals(Constants.SCHEMAS)) {
lexer.nextToken();
SQLShowDatabasesStatement stmt = parseShowDatabases(false);
statementList.add(stmt);
return true;
}
if (lexer.token() == Token.INDEX) {
lexer.nextToken();
SQLShowIndexesStatement stmt = new SQLShowIndexesStatement();
stmt.setType("INDEX");
if (lexer.token() == Token.ON) {
lexer.nextToken();
SQLName table = exprParser.name();
stmt.setTable(table);
}
if (lexer.token() == Token.HINT) {
stmt.setHints(this.exprParser.parseHints());
}
statementList.add(stmt);
return true;
}
if (lexer.token() == Token.CREATE) {
Lexer.SavePoint savePointCreateTable = this.lexer.mark();
lexer.nextToken();
if (lexer.token() == Token.TABLE) {
lexer.reset(savePointCreateTable);
SQLShowCreateTableStatement stmt = parseShowCreateTable();
statementList.add(stmt);
return true;
}
if (lexer.token() == Token.VIEW) {
lexer.nextToken();
SQLShowCreateViewStatement stmt = new SQLShowCreateViewStatement();
SQLName view = exprParser.name();
stmt.setName(view);
statementList.add(stmt);
return true;
}
}
if (lexer.identifierEquals(FnvHash.Constants.PARTITIONS)) {
lexer.nextToken();
SQLShowPartitionsStmt stmt = new SQLShowPartitionsStmt();
if (lexer.token() == Token.FROM) {
lexer.nextToken();
}
SQLExpr expr = this.exprParser.expr();
stmt.setTableSource(new SQLExprTableSource(expr));
if (lexer.token() == Token.PARTITION) {
lexer.nextToken();
accept(Token.LPAREN);
parseAssignItems(stmt.getPartition(), stmt, false);
accept(Token.RPAREN);
}
if (lexer.token() == Token.WHERE) {
lexer.nextToken();
stmt.setWhere(
this.exprParser.expr()
);
}
statementList.add(stmt);
return true;
}
if (lexer.identifierEquals(FnvHash.Constants.COLUMNS)) {
lexer.nextToken();
SQLShowColumnsStatement stmt = new SQLShowColumnsStatement();
if (lexer.token() == Token.FROM || lexer.token() == Token.IN) {
lexer.nextToken();
SQLName table = exprParser.name();
if (lexer.token() == Token.SUB && table instanceof SQLIdentifierExpr) {
lexer.mark();
lexer.nextToken();
String strVal = lexer.stringVal();
lexer.nextToken();
if (table instanceof SQLIdentifierExpr) {
SQLIdentifierExpr ident = (SQLIdentifierExpr) table;
table = new SQLIdentifierExpr(ident.getName() + "-" + strVal);
}
}
stmt.setTable(table);
}
if (lexer.token() == Token.LIKE) {
lexer.nextToken();
SQLExpr like = exprParser.expr();
stmt.setLike(like);
}
if (lexer.token() == Token.WHERE) {
lexer.nextToken();
SQLExpr where = exprParser.expr();
stmt.setWhere(where);
}
statementList.add(stmt);
return true;
}
if (lexer.identifierEquals(FnvHash.Constants.FUNCTIONS)) {
lexer.nextToken();
SQLShowFunctionsStatement stmt = new SQLShowFunctionsStatement();
if (lexer.token() == Token.LIKE) {
lexer.nextToken();
SQLExpr like = this.exprParser.expr();
stmt.setLike(like);
}
statementList.add(stmt);
return true;
}
throw new ParserException("TODO " + lexer.info());
}
return false;
}
public PrestoPrepareStatement parsePrepare() {
acceptIdentifier("PREPARE");
SQLName name = exprParser.name();
accept(Token.FROM);
PrestoPrepareStatement stmt = new PrestoPrepareStatement(name);
if (lexer.token() == Token.SELECT) {
SQLSelect select = createSQLSelectParser().select();
stmt.setSelect(select);
} else if (lexer.token() == Token.INSERT) {
SQLStatement sqlStatement = parseInsert();
stmt.setInsert((HiveInsertStatement) sqlStatement);
}
return stmt;
}
public PrestoExecuteStatement parseExecute() {
PrestoExecuteStatement stmt = new PrestoExecuteStatement();
SQLName statementName = exprParser.name();
stmt.setStatementName(statementName);
if (lexer.identifierEquals("USING")) {
lexer.nextToken();
exprParser.exprList(stmt.getParameters(), stmt);
} else if (lexer.token() == Token.IDENTIFIER) {
exprParser.exprList(stmt.getParameters(), stmt);
}
return stmt;
}
public MysqlDeallocatePrepareStatement parseDeallocatePrepare() {
acceptIdentifier("DEALLOCATE");
acceptIdentifier("PREPARE");
MysqlDeallocatePrepareStatement stmt = new MysqlDeallocatePrepareStatement();
SQLName statementName = exprParser.name();
stmt.setStatementName(statementName);
return stmt;
}
@Override
public void parseCreateTableSupportSchema() {
if (lexer.token() == Token.SCHEMA) {
lexer.nextToken();
} else {
accept(Token.DATABASE);
}
}
@Override
public void parseExplainFormatType(SQLExplainStatement explain) {
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
if (lexer.identifierEquals("FORMAT")) {
lexer.nextToken();
lexer.nextToken();
} else if (lexer.identifierEquals("TYPE")) {
lexer.nextToken();
lexer.nextToken();
}
accept(Token.RPAREN);
}
}
}
| PrestoStatementParser |
java | spring-projects__spring-framework | spring-jdbc/src/test/java/org/springframework/jdbc/core/simple/SimpleJdbcInsertIntegrationTests.java | {
"start": 3753,
"end": 5427
} | class ____ extends AbstractSimpleJdbcInsertIntegrationTests {
@Test
void retrieveColumnNamesFromMetadata() {
SimpleJdbcInsert insert = new SimpleJdbcInsert(embeddedDatabase)
.withTableName("Order")
.usingGeneratedKeyColumns("id");
insert.compile();
// Since we are not quoting identifiers, the column names lookup for the "Order"
// table fails to find anything, and insert types are not populated.
assertThat(insert.getInsertTypes()).isEmpty();
// Consequently, any subsequent attempt to execute the INSERT statement should fail.
assertThatExceptionOfType(BadSqlGrammarException.class)
.isThrownBy(() -> insert.executeAndReturnKey(Map.of("from", "start", "date", "1999")));
}
@Test // gh-24013
void usingColumnsAndQuotedIdentifiers() {
SimpleJdbcInsert insert = new SimpleJdbcInsert(embeddedDatabase)
.withoutTableColumnMetaDataAccess()
.withTableName("Order")
.usingColumns("from", "Date")
.usingGeneratedKeyColumns("id")
.usingQuotedIdentifiers();
insert.compile();
assertThat(insert.getInsertString()).isEqualToIgnoringNewLines("""
INSERT INTO "Order" ("from", "Date") VALUES(?, ?)
""");
insertOrderEntry(insert);
}
@Override
protected ResourceLoader getResourceLoader() {
return new ClassRelativeResourceLoader(getClass());
}
@Override
protected String getSchemaScript() {
return "order-schema.sql";
}
@Override
protected String getDataScript() {
return "order-data.sql";
}
@Override
protected String getTableName() {
return "\"Order\"";
}
}
}
@Nested
| QuotedIdentifiersInSchemaTests |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/oracle/visitor/OracleASTVisitor.java | {
"start": 1438,
"end": 16370
} | interface ____ extends SQLASTVisitor {
default void endVisit(OracleAnalytic x) {
}
default void endVisit(OracleAnalyticWindowing x) {
}
default void endVisit(OracleDeleteStatement x) {
endVisit((SQLDeleteStatement) x);
}
default void endVisit(OracleIntervalExpr x) {
}
default void endVisit(OracleOuterExpr x) {
}
default void endVisit(OracleSelectJoin x) {
}
default void endVisit(OracleSelectRestriction.CheckOption x) {
}
default void endVisit(OracleSelectRestriction.ReadOnly x) {
}
default void endVisit(OracleSelectSubqueryTableSource x) {
}
default void endVisit(OracleUpdateStatement x) {
}
default boolean visit(OracleAnalytic x) {
return true;
}
default boolean visit(OracleAnalyticWindowing x) {
return true;
}
default boolean visit(OracleDeleteStatement x) {
return visit((SQLDeleteStatement) x);
}
default boolean visit(OracleIntervalExpr x) {
return true;
}
default boolean visit(OracleOuterExpr x) {
return true;
}
default boolean visit(OracleSelectJoin x) {
return true;
}
default boolean visit(OracleSelectRestriction.CheckOption x) {
return true;
}
default boolean visit(OracleSelectRestriction.ReadOnly x) {
return true;
}
default boolean visit(OracleSelectSubqueryTableSource x) {
return true;
}
default boolean visit(OracleUpdateStatement x) {
return visit((SQLUpdateStatement) x);
}
default boolean visit(SampleClause x) {
return true;
}
default void endVisit(SampleClause x) {
}
default boolean visit(OracleSelectTableReference x) {
return true;
}
default void endVisit(OracleSelectTableReference x) {
}
default boolean visit(PartitionExtensionClause x) {
return true;
}
default void endVisit(PartitionExtensionClause x) {
}
default boolean visit(OracleWithSubqueryEntry x) {
return true;
}
default void endVisit(OracleWithSubqueryEntry x) {
}
default boolean visit(SearchClause x) {
return true;
}
default void endVisit(SearchClause x) {
}
default boolean visit(CycleClause x) {
return true;
}
default void endVisit(CycleClause x) {
}
default boolean visit(OracleBinaryFloatExpr x) {
return true;
}
default void endVisit(OracleBinaryFloatExpr x) {
}
default boolean visit(OracleBinaryDoubleExpr x) {
return true;
}
default void endVisit(OracleBinaryDoubleExpr x) {
}
default boolean visit(OracleCursorExpr x) {
return true;
}
default void endVisit(OracleCursorExpr x) {
}
default boolean visit(OracleIsSetExpr x) {
return true;
}
default void endVisit(OracleIsSetExpr x) {
}
default boolean visit(ModelClause.ReturnRowsClause x) {
return true;
}
default void endVisit(ModelClause.ReturnRowsClause x) {
}
default boolean visit(ModelClause.MainModelClause x) {
return true;
}
default void endVisit(ModelClause.MainModelClause x) {
}
default boolean visit(ModelClause.ModelColumnClause x) {
return true;
}
default void endVisit(ModelClause.ModelColumnClause x) {
}
default boolean visit(ModelClause.QueryPartitionClause x) {
return true;
}
default void endVisit(ModelClause.QueryPartitionClause x) {
}
default boolean visit(ModelClause.ModelColumn x) {
return true;
}
default void endVisit(ModelClause.ModelColumn x) {
}
default boolean visit(ModelClause.ModelRulesClause x) {
return true;
}
default void endVisit(ModelClause.ModelRulesClause x) {
}
default boolean visit(ModelClause.CellAssignmentItem x) {
return true;
}
default void endVisit(ModelClause.CellAssignmentItem x) {
}
default boolean visit(ModelClause.CellAssignment x) {
return true;
}
default void endVisit(ModelClause.CellAssignment x) {
}
default boolean visit(ModelClause x) {
return true;
}
default void endVisit(ModelClause x) {
}
default boolean visit(OracleReturningClause x) {
return true;
}
default void endVisit(OracleReturningClause x) {
}
default boolean visit(OracleInsertStatement x) {
return visit((SQLInsertStatement) x);
}
default void endVisit(OracleInsertStatement x) {
endVisit((SQLInsertStatement) x);
}
default boolean visit(InsertIntoClause x) {
return true;
}
default void endVisit(InsertIntoClause x) {
}
default boolean visit(OracleMultiInsertStatement x) {
return true;
}
default void endVisit(OracleMultiInsertStatement x) {
}
default boolean visit(ConditionalInsertClause x) {
return true;
}
default void endVisit(ConditionalInsertClause x) {
}
default boolean visit(ConditionalInsertClauseItem x) {
return true;
}
default void endVisit(ConditionalInsertClauseItem x) {
}
default boolean visit(OracleSelectQueryBlock x) {
return visit((SQLSelectQueryBlock) x);
}
default void endVisit(OracleSelectQueryBlock x) {
endVisit((SQLSelectQueryBlock) x);
}
default boolean visit(OracleLockTableStatement x) {
return true;
}
default void endVisit(OracleLockTableStatement x) {
}
default boolean visit(OracleAlterSessionStatement x) {
return true;
}
default void endVisit(OracleAlterSessionStatement x) {
}
default boolean visit(OracleDatetimeExpr x) {
return true;
}
default void endVisit(OracleDatetimeExpr x) {
}
default boolean visit(OracleSysdateExpr x) {
return true;
}
default void endVisit(OracleSysdateExpr x) {
}
default boolean visit(OracleArgumentExpr x) {
return true;
}
default void endVisit(OracleArgumentExpr x) {
}
default boolean visit(OracleSetTransactionStatement x) {
return true;
}
default void endVisit(OracleSetTransactionStatement x) {
}
default boolean visit(OracleExplainStatement x) {
return true;
}
default void endVisit(OracleExplainStatement x) {
}
default boolean visit(OracleAlterTableDropPartition x) {
return true;
}
default void endVisit(OracleAlterTableDropPartition x) {
}
default boolean visit(OracleAlterTableTruncatePartition x) {
return true;
}
default void endVisit(OracleAlterTableTruncatePartition x) {
}
default boolean visit(OracleAlterTableSplitPartition.TableSpaceItem x) {
return true;
}
default void endVisit(OracleAlterTableSplitPartition.TableSpaceItem x) {
}
default boolean visit(OracleAlterTableSplitPartition.UpdateIndexesClause x) {
return true;
}
default void endVisit(OracleAlterTableSplitPartition.UpdateIndexesClause x) {
}
default boolean visit(OracleAlterTableSplitPartition.NestedTablePartitionSpec x) {
return true;
}
default void endVisit(OracleAlterTableSplitPartition.NestedTablePartitionSpec x) {
}
default boolean visit(OracleAlterTableSplitPartition x) {
return true;
}
default void endVisit(OracleAlterTableSplitPartition x) {
}
default boolean visit(OracleAlterTableModify x) {
return true;
}
default void endVisit(OracleAlterTableModify x) {
}
default boolean visit(OracleCreateIndexStatement x) {
return visit((SQLCreateIndexStatement) x);
}
default void endVisit(OracleCreateIndexStatement x) {
endVisit((SQLCreateIndexStatement) x);
}
default boolean visit(OracleForStatement x) {
return true;
}
default void endVisit(OracleForStatement x) {
}
default boolean visit(OracleRangeExpr x) {
return true;
}
default void endVisit(OracleRangeExpr x) {
}
default boolean visit(OraclePrimaryKey x) {
return true;
}
default void endVisit(OraclePrimaryKey x) {
}
default boolean visit(OracleCreateTableStatement x) {
return visit((SQLCreateTableStatement) x);
}
default void endVisit(OracleCreateTableStatement x) {
endVisit((SQLCreateTableStatement) x);
}
default boolean visit(OracleStorageClause x) {
return true;
}
default void endVisit(OracleStorageClause x) {
}
default boolean visit(OracleGotoStatement x) {
return true;
}
default void endVisit(OracleGotoStatement x) {
}
default boolean visit(OracleLabelStatement x) {
return true;
}
default void endVisit(OracleLabelStatement x) {
}
default boolean visit(OracleAlterTriggerStatement x) {
return true;
}
default void endVisit(OracleAlterTriggerStatement x) {
}
default boolean visit(OracleAlterSynonymStatement x) {
return true;
}
default void endVisit(OracleAlterSynonymStatement x) {
}
default boolean visit(OracleAlterViewStatement x) {
return true;
}
default void endVisit(OracleAlterViewStatement x) {
}
default boolean visit(OracleAlterTableMoveTablespace x) {
return true;
}
default void endVisit(OracleAlterTableMoveTablespace x) {
}
default boolean visit(OracleAlterTableRowMovement x) {
return true;
}
default void endVisit(OracleAlterTableRowMovement x) {
}
default boolean visit(OracleAlterTableShrinkSpace x) {
return true;
}
default void endVisit(OracleAlterTableShrinkSpace x) {
}
default boolean visit(OracleAlterSummaryStatement x) {
return true;
}
default void endVisit(OracleAlterSummaryStatement x) {
}
default boolean visit(OracleFileSpecification x) {
return true;
}
default void endVisit(OracleFileSpecification x) {
}
default boolean visit(OracleAlterTablespaceAddDataFile x) {
return true;
}
default void endVisit(OracleAlterTablespaceAddDataFile x) {
}
default boolean visit(OracleAlterTablespaceStatement x) {
return true;
}
default void endVisit(OracleAlterTablespaceStatement x) {
}
default boolean visit(OracleExitStatement x) {
return true;
}
default void endVisit(OracleExitStatement x) {
}
default boolean visit(OracleContinueStatement x) {
return true;
}
default void endVisit(OracleContinueStatement x) {
}
default boolean visit(OracleRaiseStatement x) {
return true;
}
default void endVisit(OracleRaiseStatement x) {
}
default boolean visit(OracleCreateDatabaseDbLinkStatement x) {
return true;
}
default void endVisit(OracleCreateDatabaseDbLinkStatement x) {
}
default boolean visit(OracleDropDbLinkStatement x) {
return true;
}
default void endVisit(OracleDropDbLinkStatement x) {
}
default boolean visit(OracleDataTypeIntervalYear x) {
return true;
}
default void endVisit(OracleDataTypeIntervalYear x) {
}
default boolean visit(OracleDataTypeIntervalDay x) {
return true;
}
default void endVisit(OracleDataTypeIntervalDay x) {
}
default boolean visit(OracleUsingIndexClause x) {
return true;
}
default void endVisit(OracleUsingIndexClause x) {
}
default boolean visit(OracleLobStorageClause x) {
return true;
}
default void endVisit(OracleLobStorageClause x) {
}
default boolean visit(OracleUnique x) {
return visit((SQLUnique) x);
}
default void endVisit(OracleUnique x) {
endVisit((SQLUnique) x);
}
default boolean visit(OracleForeignKey x) {
return visit((SQLForeignKeyImpl) x);
}
default void endVisit(OracleForeignKey x) {
endVisit((SQLForeignKeyImpl) x);
}
default boolean visit(OracleCheck x) {
return visit((SQLCheck) x);
}
default void endVisit(OracleCheck x) {
endVisit((SQLCheck) x);
}
default boolean visit(OracleSupplementalIdKey x) {
return true;
}
default void endVisit(OracleSupplementalIdKey x) {
}
default boolean visit(OracleSupplementalLogGrp x) {
return true;
}
default void endVisit(OracleSupplementalLogGrp x) {
}
default boolean visit(OracleCreateTableStatement.Organization x) {
return true;
}
default void endVisit(OracleCreateTableStatement.Organization x) {
}
default boolean visit(OracleCreateTableStatement.OIDIndex x) {
return true;
}
default void endVisit(OracleCreateTableStatement.OIDIndex x) {
}
default boolean visit(OracleCreatePackageStatement x) {
return true;
}
default void endVisit(OracleCreatePackageStatement x) {
}
default boolean visit(OracleExecuteImmediateStatement x) {
return true;
}
default void endVisit(OracleExecuteImmediateStatement x) {
}
default boolean visit(OracleTreatExpr x) {
return true;
}
default void endVisit(OracleTreatExpr x) {
}
default boolean visit(OracleCreateSynonymStatement x) {
return true;
}
default void endVisit(OracleCreateSynonymStatement x) {
}
default boolean visit(OracleCreateTypeStatement x) {
return true;
}
default void endVisit(OracleCreateTypeStatement x) {
}
default boolean visit(OraclePipeRowStatement x) {
return true;
}
default void endVisit(OraclePipeRowStatement x) {
}
default boolean visit(OracleIsOfTypeExpr x) {
return true;
}
default void endVisit(OracleIsOfTypeExpr x) {
}
default boolean visit(OracleRunStatement x) {
return true;
}
default void endVisit(OracleRunStatement x) {
}
default boolean visit(OracleXmlColumnProperties x) {
return true;
}
default void endVisit(OracleXmlColumnProperties x) {
}
default boolean visit(OracleXmlColumnProperties.OracleXMLTypeStorage x) {
return true;
}
default void endVisit(OracleXmlColumnProperties.OracleXMLTypeStorage x) {
}
default boolean visit(OracleAlterPackageStatement x) {
return true;
}
default void endVisit(OracleAlterPackageStatement x) {
}
default boolean visit(OracleDropPackageStatement x) {
return true;
}
default void endVisit(OracleDropPackageStatement x) {
}
default boolean visit(OracleCreateTableSpaceStatement x) {
return true;
}
default void endVisit(OracleCreateTableSpaceStatement x) {
}
}
| OracleASTVisitor |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/ParameterBinding.java | {
"start": 9880,
"end": 10788
} | class ____ extends ParameterBinding {
/**
* Creates a new {@link InParameterBinding} for the parameter with the given name.
*/
InParameterBinding(BindingIdentifier identifier, ParameterOrigin origin) {
super(identifier, origin);
}
@Override
public @Nullable Object prepare(@Nullable Object value) {
if (!ObjectUtils.isArray(value)) {
return value;
}
int length = Array.getLength(value);
Collection<Object> result = new ArrayList<>(length);
for (int i = 0; i < length; i++) {
result.add(Array.get(value, i));
}
return result;
}
}
/**
* Represents a parameter binding in a JPQL query augmented with instructions of how to apply a parameter as LIKE
* parameter. This allows expressions like {@code …like %?1} in the JPQL query, which is not allowed by plain JPA.
*
* @author Oliver Gierke
* @author Thomas Darimont
*/
static | InParameterBinding |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/spi/ProcedureParameterMetadataImplementor.java | {
"start": 226,
"end": 392
} | interface ____ extends ParameterMetadataImplementor {
List<? extends ProcedureParameterImplementor<?>> getRegistrationsAsList();
}
| ProcedureParameterMetadataImplementor |
java | google__dagger | javatests/dagger/functional/membersinject/MembersWithInstanceNameTest.java | {
"start": 1179,
"end": 1565
} | class ____ {
// Checks that member injection fields can use injecting a bound instance that was
// named "instance" when bound. Note that the field name here doesn't matter as of
// this writing, but name it "instance" anyway in case that changes.
// https://github.com/google/dagger/issues/4352
@Inject BoundInstance instance;
@Inject Bar() {}
}
@Module
| Bar |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/util/MockStreamingRuntimeContext.java | {
"start": 3031,
"end": 3782
} | class ____ extends AbstractStreamOperator<Integer> {
private static final long serialVersionUID = -1153976702711944427L;
private transient TestProcessingTimeService testProcessingTimeService;
@Override
public ExecutionConfig getExecutionConfig() {
return new ExecutionConfig();
}
@Override
public OperatorID getOperatorID() {
return new OperatorID();
}
@Override
public ProcessingTimeService getProcessingTimeService() {
if (testProcessingTimeService == null) {
testProcessingTimeService = new TestProcessingTimeService();
}
return testProcessingTimeService;
}
}
}
| MockStreamOperator |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java | {
"start": 1875,
"end": 2006
} | class ____ extends AcknowledgedRequest<Request> implements ToXContentObject {
// Note that Request should be the Value | Request |
java | apache__camel | components/camel-clickup/src/test/java/org/apache/camel/component/clickup/util/ClickUpMockRoutes.java | {
"start": 6267,
"end": 6348
} | interface ____ {
String provide();
}
}
| MockProcessorResponseBodyProvider |
java | apache__camel | components/camel-http-common/src/test/java/org/apache/camel/http/common/HttpHelperTest.java | {
"start": 952,
"end": 1340
} | class ____ {
@Test
public void testSanitizeLog() {
String values[] = { "This is ok", "Bad stuff \nhere\n", "Another bad \rthing here" };
String expectedValues[] = { "This is ok", "Bad stuff _here_", "Another bad _thing here" };
String sanitized[] = HttpHelper.sanitizeLog(values);
assertArrayEquals(expectedValues, sanitized);
}
}
| HttpHelperTest |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/multipart/FormDataWithAllUploads.java | {
"start": 278,
"end": 1059
} | class ____ extends FormDataBase {
@RestForm
// don't set a part type, use the default
private String name;
@RestForm
@PartType(MediaType.TEXT_PLAIN)
private Status status;
@RestForm
private String stringWithFilename;
@RestForm(FileUpload.ALL)
private List<FileUpload> uploads;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Status getStatus() {
return status;
}
public void setStatus(Status status) {
this.status = status;
}
public List<FileUpload> getUploads() {
return uploads;
}
public void setUploads(List<FileUpload> uploads) {
this.uploads = uploads;
}
}
| FormDataWithAllUploads |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/pool/TimeBetweenLogStatsMillisTest3.java | {
"start": 257,
"end": 1282
} | class ____ extends TestCase {
private DruidDataSource dataSource;
protected void setUp() throws Exception {
System.setProperty("druid.timeBetweenLogStatsMillis", "10");
dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:xxx");
dataSource.setFilters("stat");
}
protected void tearDown() throws Exception {
JdbcUtils.close(dataSource);
System.clearProperty("druid.timeBetweenLogStatsMillis");
}
public void test_0() throws Exception {
dataSource.init();
for (int i = 0; i < 10; ++i) {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setString(1, "aaa");
ResultSet rs = stmt.executeQuery();
rs.close();
stmt.close();
conn.close();
Thread.sleep(10);
}
assertEquals(10, dataSource.getTimeBetweenLogStatsMillis());
}
}
| TimeBetweenLogStatsMillisTest3 |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/security/AbstractRolesAllowedTestCase.java | {
"start": 277,
"end": 6520
} | class ____ {
@BeforeAll
public static void setup() {
TestIdentityController.resetRoles().add("test", "test", "test");
}
@Test
public void testRolesAllowed() {
RestAssured
.given()
.when()
.get("/roles1")
.then()
.assertThat()
.statusCode(401);
RestAssured
.given()
.auth()
.basic("test", "test")
.when()
.get("/roles1")
.then()
.assertThat()
.statusCode(200)
.body(equalTo("test:/roles1"));
RestAssured
.given()
.auth()
.preemptive()
.basic("test", "test")
.when()
.get("/roles1")
.then()
.assertThat()
.statusCode(200)
.body(equalTo("test:/roles1"));
}
@Test
public void testRolesAllowedWrongRoles() {
RestAssured
.given()
.when()
.get("/roles2")
.then()
.assertThat()
.statusCode(401);
RestAssured
.given()
.auth()
.basic("test", "test")
.when()
.get("/roles2")
.then()
.assertThat()
.statusCode(403);
RestAssured
.given()
.auth()
.preemptive()
.basic("test", "test")
.when()
.get("/roles2")
.then()
.assertThat()
.statusCode(403);
}
@Test
public void testRolesAllowedCombinedWithPermitAll() {
RestAssured
.given()
.when()
.get("/permit")
.then()
.assertThat()
.statusCode(401);
RestAssured
.given()
.auth()
.preemptive()
.basic("test", "test")
.when()
.get("/permit")
.then()
.assertThat()
.statusCode(200)
.body(equalTo("test:/permit"));
}
@Test
public void testRolesAllowedCombinedWithDenyAll() {
RestAssured
.given()
.when()
.get("/deny")
.then()
.assertThat()
.statusCode(401);
RestAssured
.given()
.auth()
.basic("test", "test")
.when()
.get("/deny")
.then()
.assertThat()
.statusCode(403);
RestAssured
.given()
.auth()
.preemptive()
.basic("test", "test")
.when()
.get("/deny")
.then()
.assertThat()
.statusCode(403);
RestAssured
.given()
.auth()
.preemptive()
.basic("test", "test")
.when()
.get("/combined")
.then()
.assertThat()
.statusCode(403);
}
@Test
public void testWildcardMatchingWithSlash() {
RestAssured
.given()
.auth()
.preemptive()
.basic("test", "test")
.when()
.get("/wildcard1/a")
.then()
.assertThat()
.statusCode(200);
RestAssured
.given()
.auth()
.preemptive()
.basic("test", "test")
.when()
.get("/wildcard1/a/")
.then()
.assertThat()
.statusCode(200);
RestAssured
.given()
.when()
.get("/wildcard1/a")
.then()
.assertThat()
.statusCode(401);
RestAssured
.given()
.when()
.get("/wildcard1/a/")
.then()
.assertThat()
.statusCode(401);
RestAssured
.given()
.when()
.get("/wildcard3XXX")
.then()
.assertThat()
.statusCode(200);
}
@Test
public void testWildcardMatchingWithoutSlash() {
RestAssured
.given()
.auth()
.preemptive()
.basic("test", "test")
.when()
.get("/wildcard2/a")
.then()
.assertThat()
.statusCode(200);
RestAssured
.given()
.auth()
.preemptive()
.basic("test", "test")
.when()
.get("/wildcard2")
.then()
.assertThat()
.statusCode(200);
RestAssured
.given()
.when()
.get("/wildcard2")
.then()
.assertThat()
.statusCode(401);
RestAssured
.given()
.when()
.get("/wildcard2/a")
.then()
.assertThat()
.statusCode(401);
}
@Test
public void testLargeBodyRejected() {
StringBuilder sb = new StringBuilder("HELLO WORLD");
for (int i = 0; i < 20; ++i) {
sb.append(sb);
}
for (int i = 0; i < 10; ++i) {
RestAssured
.given()
.body(sb.toString())
.post("/roles1")
.then()
.assertThat()
.statusCode(401);
}
}
}
| AbstractRolesAllowedTestCase |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ServletAnnotationControllerHandlerMethodTests.java | {
"start": 139760,
"end": 139836
} | interface ____<T> {
ModelAndView method(T object);
}
static | TestController |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/function/json/OracleJsonArrayAppendFunction.java | {
"start": 523,
"end": 2253
} | class ____ extends AbstractJsonArrayAppendFunction {
public OracleJsonArrayAppendFunction(TypeConfiguration typeConfiguration) {
super( typeConfiguration );
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> arguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> translator) {
final Expression json = (Expression) arguments.get( 0 );
final String jsonPath = translator.getLiteralValue( (Expression) arguments.get( 1 ) );
final SqlAstNode value = arguments.get( 2 );
sqlAppender.appendSql( "(select case coalesce(json_value(t.d,'" );
for ( int i = 0; i < jsonPath.length(); i++ ) {
final char c = jsonPath.charAt( i );
if ( c == '\'') {
sqlAppender.appendSql( "'" );
}
sqlAppender.appendSql( c );
}
sqlAppender.appendSql( ".type()'),'x') when 'x' then t.d when 'array' then json_transform(t.d,append " );
sqlAppender.appendSingleQuoteEscapedString( jsonPath );
sqlAppender.appendSql( "=t.v) when 'object' then json_transform(t.d,set " );
sqlAppender.appendSingleQuoteEscapedString( jsonPath );
sqlAppender.appendSql( "=json_array(coalesce(json_query(t.d," );
sqlAppender.appendSingleQuoteEscapedString( jsonPath );
sqlAppender.appendSql( "),'null') format json,t.v)) else json_transform(t.d,set " );
sqlAppender.appendSingleQuoteEscapedString( jsonPath );
sqlAppender.appendSql( "=json_array(coalesce(json_value(t.d," );
sqlAppender.appendSingleQuoteEscapedString( jsonPath );
sqlAppender.appendSql( "),'null') format json,t.v)) end from (select " );
json.accept( translator );
sqlAppender.appendSql( " d," );
value.accept( translator );
sqlAppender.appendSql( " v from dual) t)" );
}
}
| OracleJsonArrayAppendFunction |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/DescribeCatalogOperation.java | {
"start": 1560,
"end": 4530
} | class ____ implements Operation, ExecutableOperation {
private final String catalogName;
private final boolean isExtended;
public DescribeCatalogOperation(String catalogName, boolean isExtended) {
this.catalogName = catalogName;
this.isExtended = isExtended;
}
public String getCatalogName() {
return catalogName;
}
public boolean isExtended() {
return isExtended;
}
@Override
public String asSummaryString() {
Map<String, Object> params = new LinkedHashMap<>();
params.put("identifier", catalogName);
params.put("isExtended", isExtended);
return OperationUtils.formatWithChildren(
"DESCRIBE CATALOG", params, Collections.emptyList(), Operation::asSummaryString);
}
@Override
public TableResultInternal execute(Context ctx) {
CatalogDescriptor catalogDescriptor =
ctx.getCatalogManager()
.getCatalogDescriptor(catalogName)
.orElseThrow(
() ->
new ValidationException(
String.format(
"Cannot obtain metadata information from Catalog %s.",
catalogName)));
Map<String, String> properties = catalogDescriptor.getConfiguration().toMap();
List<List<Object>> rows =
new ArrayList<>(
Arrays.asList(
Arrays.asList("name", catalogName),
Arrays.asList(
"type",
properties.getOrDefault(
CommonCatalogOptions.CATALOG_TYPE.key(), "")),
Arrays.asList(
"comment", catalogDescriptor.getComment().orElse(null))));
if (isExtended) {
properties.entrySet().stream()
.filter(
entry ->
!CommonCatalogOptions.CATALOG_TYPE.key().equals(entry.getKey()))
.sorted(Map.Entry.comparingByKey())
.forEach(
entry ->
rows.add(
Arrays.asList(
String.format("option:%s", entry.getKey()),
entry.getValue())));
}
return buildTableResult(
new String[] {"info name", "info value"},
new DataType[] {DataTypes.STRING(), DataTypes.STRING()},
rows.stream().map(List::toArray).toArray(Object[][]::new));
}
}
| DescribeCatalogOperation |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-spring-boot/src/test/java/org/assertj/tests/core/api/recursive/comparison/Issue_3551_Test.java | {
"start": 2164,
"end": 2401
} | interface ____ extends JpaRepository<PersonEntity, String> {
@Query(value = "SELECT 'alice' as name")
Person getPerson();
}
@SpringBootApplication
@EnableJpaRepositories(considerNestedRepositories = true)
static | PersonRepo |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/type/OracleReflectionStructJdbcType.java | {
"start": 569,
"end": 2426
} | class ____ extends OracleBaseStructJdbcType {
public static final AggregateJdbcType INSTANCE = new OracleReflectionStructJdbcType();
private static final ClassValue<Method> RAW_JDBC_TRANSFORMER = new ClassValue<>() {
@Override
protected Method computeValue(Class<?> type) {
if ( "oracle.sql.TIMESTAMPTZ".equals( type.getName() ) ) {
try {
return type.getMethod( "offsetDateTimeValue", Connection.class );
}
catch (NoSuchMethodException e) {
throw new RuntimeException( e );
}
}
return null;
}
};
private OracleReflectionStructJdbcType() {
// The default instance is for reading only and will return an Object[]
this( null, null, null );
}
private OracleReflectionStructJdbcType(EmbeddableMappingType embeddableMappingType, String typeName, int[] orderMapping) {
super( embeddableMappingType, typeName, orderMapping );
}
@Override
public AggregateJdbcType resolveAggregateJdbcType(
EmbeddableMappingType mappingType,
String sqlType,
RuntimeModelCreationContext creationContext) {
return new OracleReflectionStructJdbcType(
mappingType,
sqlType,
creationContext.getBootModel()
.getDatabase()
.getDefaultNamespace()
.locateUserDefinedType( Identifier.toIdentifier( sqlType ) )
.getOrderMapping()
);
}
@Override
protected Object transformRawJdbcValue(Object rawJdbcValue, WrapperOptions options) {
Method rawJdbcTransformer = RAW_JDBC_TRANSFORMER.get( rawJdbcValue.getClass() );
if ( rawJdbcTransformer == null ) {
return rawJdbcValue;
}
try {
return rawJdbcTransformer.invoke( rawJdbcValue,
options.getSession().getJdbcCoordinator().getLogicalConnection().getPhysicalConnection() );
}
catch (Exception e) {
throw new HibernateException( "Could not transform the raw jdbc value", e );
}
}
}
| OracleReflectionStructJdbcType |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/MissingOverrideTest.java | {
"start": 2350,
"end": 2607
} | class ____ {
public abstract int hashCode();
}
""")
.doTest();
}
@Test
public void interfaceMethod() {
compilationHelper
.addSourceLines(
"Super.java",
"""
| Test |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/NettyEndpointBuilderFactory.java | {
"start": 183862,
"end": 188727
} | class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final NettyHeaderNameBuilder INSTANCE = new NettyHeaderNameBuilder();
/**
* Indicates whether the channel should be closed after complete.
*
* The option is a: {@code Boolean} type.
*
* Group: common
*
* @return the name of the header {@code NettyCloseChannelWhenComplete}.
*/
public String nettyCloseChannelWhenComplete() {
return "CamelNettyCloseChannelWhenComplete";
}
/**
* The channel handler context.
*
* The option is a: {@code io.netty.channel.ChannelHandlerContext} type.
*
* Group: common
*
* @return the name of the header {@code NettyChannelHandlerContext}.
*/
public String nettyChannelHandlerContext() {
return "CamelNettyChannelHandlerContext";
}
/**
* The remote address.
*
* The option is a: {@code java.net.SocketAddress} type.
*
* Group: common
*
* @return the name of the header {@code NettyRemoteAddress}.
*/
public String nettyRemoteAddress() {
return "CamelNettyRemoteAddress";
}
/**
* The local address.
*
* The option is a: {@code java.net.SocketAddress} type.
*
* Group: common
*
* @return the name of the header {@code NettyLocalAddress}.
*/
public String nettyLocalAddress() {
return "CamelNettyLocalAddress";
}
/**
* The SSL session.
*
* The option is a: {@code javax.net.ssl.SSLSession} type.
*
* Group: common
*
* @return the name of the header {@code NettySSLSession}.
*/
public String nettySSLSession() {
return "CamelNettySSLSession";
}
/**
* The SSL client certificate subject name.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code NettySSLClientCertSubjectName}.
*/
public String nettySSLClientCertSubjectName() {
return "CamelNettySSLClientCertSubjectName";
}
/**
* The SSL client certificate issuer name.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code NettySSLClientCertIssuerName}.
*/
public String nettySSLClientCertIssuerName() {
return "CamelNettySSLClientCertIssuerName";
}
/**
* The SSL client certificate serial number.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code
* NettySSLClientCertSerialNumber}.
*/
public String nettySSLClientCertSerialNumber() {
return "CamelNettySSLClientCertSerialNumber";
}
/**
* The SSL client certificate not before.
*
* The option is a: {@code java.util.Date} type.
*
* Group: common
*
* @return the name of the header {@code NettySSLClientCertNotBefore}.
*/
public String nettySSLClientCertNotBefore() {
return "CamelNettySSLClientCertNotBefore";
}
/**
* The SSL client certificate not after.
*
* The option is a: {@code java.util.Date} type.
*
* Group: common
*
* @return the name of the header {@code NettySSLClientCertNotAfter}.
*/
public String nettySSLClientCertNotAfter() {
return "CamelNettySSLClientCertNotAfter";
}
/**
* The read timeout.
*
* The option is a: {@code Long} type.
*
* Group: common
*
* @return the name of the header {@code NettyRequestTimeout}.
*/
public String nettyRequestTimeout() {
return "CamelNettyRequestTimeout";
}
/**
* The Netty Channel object.
*
* The option is a: {@code io.netty.channel.Channel} type.
*
* Group: common
*
* @return the name of the header {@code NettyChannel}.
*/
public String nettyChannel() {
return "CamelNettyChannel";
}
}
static NettyEndpointBuilder endpointBuilder(String componentName, String path) {
| NettyHeaderNameBuilder |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_various_types_Test.java | {
"start": 5678,
"end": 15261
} | class ____ specific assertion
assumeThat(actual).isSameAs(Assumptions.class.getClassLoader());
}
},
new AssumptionRunner<Date>(new Date()) {
@Override
public void runFailingAssumption() {
assumeThat(actual).as("isBefore(\"2011-01-01\")").isBefore("2011-01-01");
}
@Override
public void runPassingAssumption() {
assumeThat(actual).isAfter("2011-01-01");
}
},
new AssumptionRunner<File>(new File("test")) {
@Override
public void runFailingAssumption() {
assumeThat(actual).hasName("other");
}
@Override
public void runPassingAssumption() {
assumeThat(actual).hasName("test");
}
},
new AssumptionRunner<Path>(new File("test").toPath()) {
@Override
public void runFailingAssumption() {
assumeThat(actual).isNull();
}
@Override
public void runPassingAssumption() {
assumeThat(actual).isNotNull();
}
},
new AssumptionRunner<InputStream>(new ByteArrayInputStream("test".getBytes())) {
@Override
public void runFailingAssumption() {
assumeThat(actual).isInstanceOf(String.class);
}
@Override
public void runPassingAssumption() {
assumeThat(actual).isInstanceOf(ByteArrayInputStream.class);
}
},
new AssumptionRunner<Integer[]>(array(2, 4, 2)) {
@Override
public void runFailingAssumption() {
assumeThat(actual).containsOnlyOnce(2);
}
@Override
public void runPassingAssumption() {
assumeThat(actual).containsOnlyOnce(4);
}
},
new AssumptionRunner<Throwable>(new IllegalArgumentException()) {
@Override
public void runFailingAssumption() {
assumeThat(actual).isInstanceOf(NullPointerException.class);
}
@Override
public void runPassingAssumption() {
assumeThat(actual).isInstanceOf(IllegalArgumentException.class);
}
},
new AssumptionRunner<SQLException>(new SQLException()) {
@Override
public void runFailingAssumption() {
assumeThat(actual).isInstanceOf(NullPointerException.class);
}
@Override
public void runPassingAssumption() {
assumeThat(actual).isInstanceOf(SQLException.class);
}
},
new AssumptionRunner<ThrowingCallable>(new ThrowingCallable() {
@Override
public void call() {
throw new IllegalArgumentException();
}
}) {
@Override
public void runFailingAssumption() {
assumeThatThrownBy(actual).isInstanceOf(NullPointerException.class);
}
@Override
public void runPassingAssumption() {
assumeThatThrownBy(actual).isInstanceOf(IllegalArgumentException.class);
}
},
new AssumptionRunner<URL>(createUrl()) {
@Override
public void runFailingAssumption() {
assumeThat(actual).hasParameter("test");
}
@Override
public void runPassingAssumption() {
assumeThat(actual).hasNoParameters();
}
},
new AssumptionRunner<URI>(URI.create("example.com/pages/")) {
@Override
public void runFailingAssumption() {
assumeThat(actual).hasPort(9090);
}
@Override
public void runPassingAssumption() {
assumeThat(actual).hasNoPort();
}
},
new AssumptionRunner<Future<?>>(mock(Future.class)) {
@Override
public void runFailingAssumption() {
assumeThat(actual).isDone();
}
@Override
public void runPassingAssumption() {
assumeThat(actual).isNotDone();
}
},
new AssumptionRunner<Iterable<Integer>>(asList(2, 4, 2)) {
@Override
public void runFailingAssumption() {
assumeThat(actual).containsOnlyOnce(2);
}
@Override
public void runPassingAssumption() {
assumeThat(actual).containsOnlyOnce(4);
}
},
new AssumptionRunner<Iterator<Integer>>(asList(2, 4, 2).iterator()) {
@Override
public void runFailingAssumption() {
assumeThat(actual).isExhausted();
}
@Override
public void runPassingAssumption() {
assumeThat(actual).hasNext();
}
},
new AssumptionRunner<List<Integer>>(asList(2, 4, 2)) {
@Override
public void runFailingAssumption() {
assumeThat(actual).containsOnlyOnce(2);
}
@Override
public void runPassingAssumption() {
assumeThat(actual).containsOnlyOnce(4);
}
},
new AssumptionRunner<List<Integer>>(asList(2, 4, 2)) {
@Override
public void runFailingAssumption() {
assumeThat(actual).containsOnlyOnce(4).toAssert(2, "test 2 isNull").isNull();
}
@Override
public void runPassingAssumption() {
assumeThat(actual).containsOnlyOnce(4).toAssert(2, "").isEqualTo(2);
}
},
new AssumptionRunner<Map<Integer, Integer>>(newHashMap(2, 4)) {
@Override
public void runFailingAssumption() {
assumeThat(actual).containsKeys(4);
}
@Override
public void runPassingAssumption() {
assumeThat(actual).containsKeys(2);
}
},
new AssumptionRunner<ComparableExample>(new ComparableExample(4)) {
@Override
public void runFailingAssumption() {
assumeThat(actual).isLessThan(new ComparableExample(2));
}
@Override
public void runPassingAssumption() {
assumeThat(actual).isGreaterThan(new ComparableExample(2));
}
},
new AssumptionRunner<Comparable<ComparableExample>>(new ComparableExample(4)) {
@Override
public void runFailingAssumption() {
assumeThatComparable(actual).isLessThan(new ComparableExample(2));
}
@Override
public void runPassingAssumption() {
assumeThatComparable(actual).isGreaterThan(new ComparableExample(2));
}
},
new AssumptionRunner<ComparableExample>(new ComparableExample(4)) {
@Override
public void runFailingAssumption() {
assumeThatComparable(actual).isLessThan(new ComparableExample(2));
}
@Override
public void runPassingAssumption() {
assumeThatComparable(actual).isGreaterThan(new ComparableExample(2));
}
},
new AssumptionRunner<List<String>>(asList("a", "b", "c")) {
@Override
public void runFailingAssumption() {
assumeThat(actual).zipSatisfy(asList("A", "B", "C"), (e1, e2) -> assertThat(e1).isEqualTo(e2));
}
@Override
public void runPassingAssumption() {
assumeThat(actual).zipSatisfy(asList("A", "B", "C"), (e1, e2) -> assertThat(e1).isEqualToIgnoringCase(e2));
}
},
new AssumptionRunner<String>("abc") {
@Override
public void runFailingAssumption() {
assumeThat(actual).satisfiesAnyOf(s -> assertThat(s).isEmpty(), s -> assertThat(s).isBlank());
}
@Override
public void runPassingAssumption() {
assumeThat(actual).satisfiesAnyOf(s -> assertThat(s).isLowerCase(), s -> assertThat(s).isBlank());
}
},
new AssumptionRunner<LinkedList<String>>(new LinkedList<>(list("abc"))) {
@Override
public void runFailingAssumption() {
assumeThatObject(actual).satisfies(l -> assertThat(l).isEmpty());
}
@Override
public void runPassingAssumption() {
assumeThatObject(actual).satisfies(l -> assertThatObject(l).has(
new Condition<>(list -> list.getFirst().equals("abc"), "First element is 'abc'")));
}
},
new AssumptionRunner<Spliterator<Integer>>(Stream.of(1, 2).spliterator()) {
@Override
public void runFailingAssumption() {
assumeThat(actual).hasCharacteristics(Spliterator.DISTINCT);
}
@Override
public void runPassingAssumption() {
assumeThat(actual).hasCharacteristics(Spliterator.SIZED);
}
});
// @format:on
}
private static URL createUrl() {
try {
return new URL("http://example.com/pages/");
} catch (MalformedURLException e) {
throw new RuntimeException(e);
}
}
@ParameterizedTest
@MethodSource("provideAssumptionsRunners")
void should_ignore_test_when_assumption_fails(AssumptionRunner<?> assumptionRunner) {
expectAssumptionNotMetException(assumptionRunner::runFailingAssumption);
}
@ParameterizedTest
@MethodSource("provideAssumptionsRunners")
void should_run_test_when_assumption_passes(AssumptionRunner<?> assumptionRunner) {
assertThatCode(assumptionRunner::runPassingAssumption).doesNotThrowAnyException();
}
}
| loader |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/FopEndpointBuilderFactory.java | {
"start": 6108,
"end": 8517
} | interface ____ {
/**
* FOP (camel-fop)
* Render messages into PDF and other output formats supported by Apache
* FOP.
*
* Category: file,transformation
* Since: 2.10
* Maven coordinates: org.apache.camel:camel-fop
*
* @return the dsl builder for the headers' name.
*/
default FopHeaderNameBuilder fop() {
return FopHeaderNameBuilder.INSTANCE;
}
/**
* FOP (camel-fop)
* Render messages into PDF and other output formats supported by Apache
* FOP.
*
* Category: file,transformation
* Since: 2.10
* Maven coordinates: org.apache.camel:camel-fop
*
* Syntax: <code>fop:outputType</code>
*
* Path parameter: outputType (required)
* The primary output format is PDF but other output formats are also
* supported.
* There are 10 enums and the value can be one of: pdf, ps, pcl, png,
* jpeg, svg, xml, mif, rtf, txt
*
* @param path outputType
* @return the dsl builder
*/
default FopEndpointBuilder fop(String path) {
return FopEndpointBuilderFactory.endpointBuilder("fop", path);
}
/**
* FOP (camel-fop)
* Render messages into PDF and other output formats supported by Apache
* FOP.
*
* Category: file,transformation
* Since: 2.10
* Maven coordinates: org.apache.camel:camel-fop
*
* Syntax: <code>fop:outputType</code>
*
* Path parameter: outputType (required)
* The primary output format is PDF but other output formats are also
* supported.
* There are 10 enums and the value can be one of: pdf, ps, pcl, png,
* jpeg, svg, xml, mif, rtf, txt
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path outputType
* @return the dsl builder
*/
default FopEndpointBuilder fop(String componentName, String path) {
return FopEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the FOP component.
*/
public static | FopBuilders |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java | {
"start": 59952,
"end": 62126
} | class ____<K,V> {
protected final Counters.Counter inputCounter;
protected final JobConf job;
protected final TaskReporter reporter;
CombinerRunner(Counters.Counter inputCounter,
JobConf job,
TaskReporter reporter) {
this.inputCounter = inputCounter;
this.job = job;
this.reporter = reporter;
}
/**
* Run the combiner over a set of inputs.
* @param iterator the key/value pairs to use as input
* @param collector the output collector
*/
public abstract void combine(RawKeyValueIterator iterator,
OutputCollector<K,V> collector
) throws IOException, InterruptedException,
ClassNotFoundException;
@SuppressWarnings("unchecked")
public static <K,V>
CombinerRunner<K,V> create(JobConf job,
TaskAttemptID taskId,
Counters.Counter inputCounter,
TaskReporter reporter,
org.apache.hadoop.mapreduce.OutputCommitter committer
) throws ClassNotFoundException {
Class<? extends Reducer<K,V,K,V>> cls =
(Class<? extends Reducer<K,V,K,V>>) job.getCombinerClass();
if (cls != null) {
return new OldCombinerRunner(cls, job, inputCounter, reporter);
}
// make a task context so we can get the classes
org.apache.hadoop.mapreduce.TaskAttemptContext taskContext =
new org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl(job, taskId,
reporter);
Class<? extends org.apache.hadoop.mapreduce.Reducer<K,V,K,V>> newcls =
(Class<? extends org.apache.hadoop.mapreduce.Reducer<K,V,K,V>>)
taskContext.getCombinerClass();
if (newcls != null) {
return new NewCombinerRunner<K,V>(newcls, job, taskId, taskContext,
inputCounter, reporter, committer);
}
return null;
}
}
@InterfaceAudience.Private
@InterfaceStability.Unstable
protected static | CombinerRunner |
java | playframework__playframework | web/play-java-forms/src/test/java/play/data/Letter.java | {
"start": 335,
"end": 1088
} | class ____ {
@Constraints.Required
@Constraints.MinLength(10)
private String address;
@Constraints.Required private FilePart<TemporaryFile> coverPage;
@Constraints.Required private List<FilePart<TemporaryFile>> letterPages;
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public FilePart<TemporaryFile> getCoverPage() {
return coverPage;
}
public void setCoverPage(FilePart<TemporaryFile> coverPage) {
this.coverPage = coverPage;
}
public List<FilePart<TemporaryFile>> getLetterPages() {
return letterPages;
}
public void setLetterPages(List<FilePart<TemporaryFile>> letterPages) {
this.letterPages = letterPages;
}
}
| Letter |
java | quarkusio__quarkus | extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/blocking/BlockingServerInterceptor.java | {
"start": 11939,
"end": 15549
} | class ____<ReqT> extends ServerCall.Listener<ReqT> {
private final InjectableContext.ContextState requestContextState;
// exclusive to event loop context
private ServerCall.Listener<ReqT> delegate;
private final Queue<Consumer<ServerCall.Listener<ReqT>>> incomingEvents = new ConcurrentLinkedQueue<>();
private volatile boolean isConsumingFromIncomingEvents = false;
private VirtualReplayListener(InjectableContext.ContextState requestContextState) {
this.requestContextState = requestContextState;
}
/**
* Must be called from within the event loop context
* If there are deferred events will start executing them in the shared worker context
*
* @param delegate the original
*/
void setDelegate(ServerCall.Listener<ReqT> delegate) {
this.delegate = delegate;
if (!this.isConsumingFromIncomingEvents) {
Consumer<ServerCall.Listener<ReqT>> consumer = incomingEvents.poll();
if (consumer != null) {
executeVirtualWithRequestContext(consumer);
}
}
}
private void scheduleOrEnqueue(Consumer<ServerCall.Listener<ReqT>> consumer) {
if (this.delegate != null && !this.isConsumingFromIncomingEvents) {
executeVirtualWithRequestContext(consumer);
} else {
incomingEvents.add(consumer);
}
}
private void executeVirtualWithRequestContext(Consumer<ServerCall.Listener<ReqT>> consumer) {
final Context grpcContext = Context.current();
Callable<Void> blockingHandler = new BlockingExecutionHandler<>(consumer, grpcContext, delegate,
requestContextState, getRequestContext(), this);
if (devMode) {
blockingHandler = new DevModeBlockingExecutionHandler(Thread.currentThread().getContextClassLoader(),
blockingHandler);
}
this.isConsumingFromIncomingEvents = true;
var finalBlockingHandler = blockingHandler;
virtualThreadExecutor.execute(() -> {
try {
finalBlockingHandler.call();
Consumer<ServerCall.Listener<ReqT>> next = incomingEvents.poll();
if (next != null) {
executeVirtualWithRequestContext(next);
} else {
this.isConsumingFromIncomingEvents = false;
}
} catch (Exception e) {
throw new RuntimeException(e);
}
});
}
@Override
public void onMessage(ReqT message) {
scheduleOrEnqueue(t -> t.onMessage(message));
}
@Override
public void onHalfClose() {
scheduleOrEnqueue(ServerCall.Listener::onHalfClose);
}
@Override
public void onCancel() {
scheduleOrEnqueue(ServerCall.Listener::onCancel);
}
@Override
public void onComplete() {
scheduleOrEnqueue(ServerCall.Listener::onComplete);
}
@Override
public void onReady() {
scheduleOrEnqueue(ServerCall.Listener::onReady);
}
}
// protected for tests
protected boolean isExecutable() {
return Arc.container() != null;
}
protected ManagedContext getRequestContext() {
return Arc.container().requestContext();
}
}
| VirtualReplayListener |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/language/ConstantResultTypeTest.java | {
"start": 972,
"end": 2056
} | class ____ extends ContextTestSupport {
@Test
public void testConstant() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
// headers will be wrapper types
getMockEndpoint("mock:result").message(0).header("foo").isInstanceOf(Integer.class);
getMockEndpoint("mock:result").message(0).header("bar").isInstanceOf(Boolean.class);
getMockEndpoint("mock:result").message(0).header("baz").isInstanceOf(String.class);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.setHeader("foo").constant("123", int.class)
.setHeader("bar").constant("true", boolean.class)
.setHeader("baz").constant("456")
.to("mock:result");
}
};
}
}
| ConstantResultTypeTest |
java | apache__camel | components/camel-git/src/main/java/org/apache/camel/component/git/producer/GitOperation.java | {
"start": 860,
"end": 1906
} | interface ____ {
String CLONE_OPERATION = "clone";
String CHECKOUT_OPERATION = "checkout";
String INIT_OPERATION = "init";
String ADD_OPERATION = "add";
String REMOVE_OPERATION = "remove";
String COMMIT_OPERATION = "commit";
String COMMIT_ALL_OPERATION = "commitAll";
String CREATE_BRANCH_OPERATION = "createBranch";
String DELETE_BRANCH_OPERATION = "deleteBranch";
String CREATE_TAG_OPERATION = "createTag";
String DELETE_TAG_OPERATION = "deleteTag";
String STATUS_OPERATION = "status";
String LOG_OPERATION = "log";
String PUSH_OPERATION = "push";
String PUSH_TAG_OPERATION = "pushTag";
String PULL_OPERATION = "pull";
String MERGE_OPERATION = "merge";
String SHOW_BRANCHES_OPERATION = "showBranches";
String SHOW_TAGS_OPERATION = "showTags";
String CHERRYPICK_OPERATION = "cherryPick";
String REMOTE_ADD_OPERATION = "remoteAdd";
String REMOTE_LIST_OPERATION = "remoteList";
String CLEAN_OPERATION = "clean";
String GC_OPERATION = "gc";
}
| GitOperation |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/async/TimeoutBlockingWaitStrategy.java | {
"start": 2257,
"end": 3964
} | class ____ implements WaitStrategy {
private final Object mutex = new Object();
private final long timeoutInNanos;
/**
* @param timeout how long to wait before waking up
* @param units the unit in which timeout is specified
*/
public TimeoutBlockingWaitStrategy(final long timeout, final TimeUnit units) {
timeoutInNanos = units.toNanos(timeout);
}
@Override
public long waitFor(
final long sequence,
final Sequence cursorSequence,
final Sequence dependentSequence,
final SequenceBarrier barrier)
throws AlertException, InterruptedException, TimeoutException {
long timeoutNanos = timeoutInNanos;
long availableSequence;
if (cursorSequence.get() < sequence) {
synchronized (mutex) {
while (cursorSequence.get() < sequence) {
barrier.checkAlert();
timeoutNanos = awaitNanos(mutex, timeoutNanos);
if (timeoutNanos <= 0) {
throw TimeoutException.INSTANCE;
}
}
}
}
while ((availableSequence = dependentSequence.get()) < sequence) {
barrier.checkAlert();
}
return availableSequence;
}
@Override
public void signalAllWhenBlocking() {
synchronized (mutex) {
mutex.notifyAll();
}
}
@Override
public String toString() {
return "TimeoutBlockingWaitStrategy{" + "mutex=" + mutex + ", timeoutInNanos=" + timeoutInNanos + '}';
}
// below code is from com.lmax.disruptor.util.Util | TimeoutBlockingWaitStrategy |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestProcedureCatalogFactory.java | {
"start": 3146,
"end": 5493
} | class ____ extends GenericInMemoryCatalog {
private static final Map<ObjectPath, Procedure> PROCEDURE_MAP = new HashMap<>();
static {
PROCEDURE_MAP.put(
ObjectPath.fromString("system.generate_n"), new GenerateSequenceProcedure());
PROCEDURE_MAP.put(ObjectPath.fromString("system.sum_n"), new SumProcedure());
PROCEDURE_MAP.put(ObjectPath.fromString("system.get_year"), new GetYearProcedure());
PROCEDURE_MAP.put(
ObjectPath.fromString("system.generate_user"), new GenerateUserProcedure());
PROCEDURE_MAP.put(
ObjectPath.fromString("system.named_args"), new NamedArgumentsProcedure());
PROCEDURE_MAP.put(
ObjectPath.fromString("system.named_args_overload"),
new NamedArgumentsProcedureWithOverload());
PROCEDURE_MAP.put(
ObjectPath.fromString("system.named_args_optional"),
new NamedArgumentsProcedureWithOptionalArguments());
PROCEDURE_MAP.put(
ObjectPath.fromString("system.get_env_conf"), new EnvironmentConfProcedure());
}
public CatalogWithBuiltInProcedure(String name) {
super(name);
}
@Override
public List<String> listProcedures(String dbName)
throws DatabaseNotExistException, CatalogException {
if (!databaseExists(dbName)) {
throw new DatabaseNotExistException(getName(), dbName);
}
return PROCEDURE_MAP.keySet().stream()
.filter(procedurePath -> procedurePath.getDatabaseName().equals(dbName))
.map(ObjectPath::getObjectName)
.collect(Collectors.toList());
}
@Override
public Procedure getProcedure(ObjectPath procedurePath)
throws ProcedureNotExistException, CatalogException {
if (PROCEDURE_MAP.containsKey(procedurePath)) {
return PROCEDURE_MAP.get(procedurePath);
} else {
throw new ProcedureNotExistException(getName(), procedurePath);
}
}
}
/** A procedure to a sequence from 0 to n for testing purpose. */
public static | CatalogWithBuiltInProcedure |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateParamsTests.java | {
"start": 1022,
"end": 3448
} | class ____ extends ESTestCase {
public void testFromJobUpdate() {
String jobId = "foo";
DetectionRule rule = new DetectionRule.Builder(
Collections.singletonList(new RuleCondition(RuleCondition.AppliesTo.ACTUAL, Operator.GT, 1.0))
).build();
List<DetectionRule> rules = Collections.singletonList(rule);
List<JobUpdate.DetectorUpdate> detectorUpdates = Collections.singletonList(new JobUpdate.DetectorUpdate(2, null, rules));
JobUpdate.Builder updateBuilder = new JobUpdate.Builder(jobId).setModelPlotConfig(new ModelPlotConfig())
.setPerPartitionCategorizationConfig(new PerPartitionCategorizationConfig())
.setDetectorUpdates(detectorUpdates);
UpdateParams params = UpdateParams.fromJobUpdate(updateBuilder.build());
assertFalse(params.isUpdateScheduledEvents());
assertEquals(params.getDetectorUpdates(), updateBuilder.build().getDetectorUpdates());
assertEquals(params.getModelPlotConfig(), updateBuilder.build().getModelPlotConfig());
assertEquals(params.getPerPartitionCategorizationConfig(), updateBuilder.build().getPerPartitionCategorizationConfig());
params = UpdateParams.fromJobUpdate(updateBuilder.setGroups(Collections.singletonList("bar")).build());
assertTrue(params.isUpdateScheduledEvents());
assertTrue(params.isJobUpdate());
}
public void testExtractReferencedFilters() {
JobUpdate.DetectorUpdate detectorUpdate1 = new JobUpdate.DetectorUpdate(
0,
"",
Arrays.asList(
new DetectionRule.Builder(RuleScope.builder().include("a", "filter_1")).build(),
new DetectionRule.Builder(RuleScope.builder().include("b", "filter_2")).build()
)
);
JobUpdate.DetectorUpdate detectorUpdate2 = new JobUpdate.DetectorUpdate(
0,
"",
Collections.singletonList(new DetectionRule.Builder(RuleScope.builder().include("c", "filter_3")).build())
);
UpdateParams updateParams = new UpdateParams.Builder("test_job").detectorUpdates(Arrays.asList(detectorUpdate1, detectorUpdate2))
.filter(MlFilter.builder("filter_4").build())
.build();
assertThat(updateParams.extractReferencedFilters(), containsInAnyOrder("filter_1", "filter_2", "filter_3", "filter_4"));
}
}
| UpdateParamsTests |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/graph/SubGraph.java | {
"start": 325,
"end": 471
} | interface ____<J> extends Graph<J>, Subgraph<J> {
@Override
default Class<J> getClassType() {
return getGraphedType().getJavaType();
}
}
| SubGraph |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/context/AbstractBeanDefinitionBeanConstructor.java | {
"start": 1111,
"end": 1747
} | class ____<T> extends AbstractBeanConstructor<T> {
/**
* Default constructor.
*
* @param beanDefinition The bean type
*/
protected AbstractBeanDefinitionBeanConstructor(BeanDefinition<T> beanDefinition) {
super(
Objects.requireNonNull(beanDefinition, "Bean definition cannot be null").getBeanType(),
new AnnotationMetadataHierarchy(
beanDefinition.getAnnotationMetadata(),
beanDefinition.getConstructor().getAnnotationMetadata()),
beanDefinition.getConstructor().getArguments()
);
}
}
| AbstractBeanDefinitionBeanConstructor |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/SCMUploaderCanUploadRequest.java | {
"start": 1209,
"end": 1648
} | class ____ {
/**
* Get the <code>key</code> of the resource that would be uploaded to the
* shared cache.
*
* @return <code>key</code>
*/
public abstract String getResourceKey();
/**
* Set the <code>key</code> of the resource that would be uploaded to the
* shared cache.
*
* @param key unique identifier for the resource
*/
public abstract void setResourceKey(String key);
}
| SCMUploaderCanUploadRequest |
java | quarkusio__quarkus | independent-projects/tools/devtools-common/src/main/java/io/quarkus/maven/utilities/PomTransformer.java | {
"start": 9350,
"end": 26819
} | interface ____ {
public static Transformation addModule(String module) {
return (Document document, TransformationContext context) -> {
try {
Node modules = (Node) context.getXPath().evaluate(anyNs("project", "modules"), document,
XPathConstants.NODE);
if (modules == null) {
final Node modulesIndent = context.indent(1);
modules = document.createElement("modules");
modules.appendChild(context.indent(1));
final Node build = (Node) context.getXPath().evaluate(anyNs("project", "build"), document,
XPathConstants.NODE);
if (build != null) {
Node ws = build.getPreviousSibling();
if (ws == null || ws.getNodeType() != Node.TEXT_NODE) {
ws = context.indent(1);
build.getParentNode().insertBefore(ws, build);
}
build.getParentNode().insertBefore(modulesIndent, ws);
build.getParentNode().insertBefore(modules, ws);
} else {
final Node project = (Node) context.getXPath().evaluate(anyNs("project"), document,
XPathConstants.NODE);
if (project == null) {
throw new IllegalStateException(
String.format("No <project> in file [%s]", context.getPomXmlPath()));
}
final NodeList projectChildren = project.getChildNodes();
final int len = projectChildren.getLength();
Node ws = null;
if (len == 0 || (ws = projectChildren.item(len - 1)).getNodeType() != Node.TEXT_NODE) {
ws = document.createTextNode("\n");
project.appendChild(ws);
}
project.insertBefore(modulesIndent, ws);
project.insertBefore(modules, ws);
}
}
final Node moduleNode = document.createElement("module");
moduleNode.appendChild(document.createTextNode(module));
final NodeList modulesChildren = modules.getChildNodes();
final int len = modulesChildren.getLength();
Node ws;
if (len == 0 || (ws = modulesChildren.item(len - 1)).getNodeType() != Node.TEXT_NODE) {
ws = context.indent(1);
modules.appendChild(ws);
}
modules.insertBefore(context.indent(2), ws);
modules.insertBefore(moduleNode, ws);
} catch (XPathExpressionException | DOMException e) {
throw new RuntimeException(e);
}
};
}
public static Transformation addProperty(String name, String value) {
return (Document document, TransformationContext context) -> {
try {
Node props = (Node) context.getXPath().evaluate(anyNs("project", "properties"), document,
XPathConstants.NODE);
if (props == null) {
final Node propsIndent = context.indent(1);
props = document.createElement("properties");
props.appendChild(context.indent(1));
final Node project = (Node) context.getXPath().evaluate(anyNs("project"), document,
XPathConstants.NODE);
if (project == null) {
throw new IllegalStateException(
String.format("No <project> in file [%s]", context.getPomXmlPath()));
}
/* ideally before modules */
Node refNode = (Node) context.getXPath().evaluate(anyNs("project", "modules"),
document, XPathConstants.NODE);
Node ws;
if (refNode != null) {
ws = refNode.getPreviousSibling();
if (ws == null || ws.getNodeType() != Node.TEXT_NODE) {
project.insertBefore(ws = context.indent(1), refNode);
}
} else {
ws = project.getLastChild();
if (ws == null || ws.getNodeType() != Node.TEXT_NODE) {
project.appendChild(ws = context.indent(0));
}
}
project.insertBefore(propsIndent, ws);
project.insertBefore(props, ws);
}
final Node propNode = document.createElement(name);
propNode.appendChild(document.createTextNode(value));
final NodeList modulesChildren = props.getChildNodes();
final int len = modulesChildren.getLength();
Node ws;
if (len == 0 || (ws = modulesChildren.item(len - 1)).getNodeType() != Node.TEXT_NODE) {
ws = context.indent(1);
props.appendChild(ws);
}
props.insertBefore(context.indent(2), ws);
props.insertBefore(propNode, ws);
} catch (XPathExpressionException | DOMException e) {
throw new RuntimeException(e);
}
};
}
public static Transformation addDependencyManagementIfNeeded() {
return (Document document, TransformationContext context) -> {
try {
Node dependencyManagementDeps = (Node) context.getXPath().evaluate(
anyNs("project", "dependencyManagement", "dependencies"), document, XPathConstants.NODE);
if (dependencyManagementDeps == null) {
Node dependencyManagement = (Node) context.getXPath()
.evaluate(anyNs("project", "dependencyManagement"), document, XPathConstants.NODE);
if (dependencyManagement == null) {
Node project = (Node) context.getXPath().evaluate(anyNs("project"), document,
XPathConstants.NODE);
if (project == null) {
throw new IllegalStateException(
String.format("//project not found in [%s]", context.getPomXmlPath()));
}
/* ideally before dependencies */
Node refNode = (Node) context.getXPath().evaluate(anyNs("project", "dependencies"),
document, XPathConstants.NODE);
if (refNode == null) {
/* or before build */
refNode = (Node) context.getXPath().evaluate(anyNs("project", "build"),
document, XPathConstants.NODE);
}
dependencyManagement = document.createElement("dependencyManagement");
Node ws;
if (refNode != null) {
ws = refNode.getPreviousSibling();
if (ws == null || ws.getNodeType() != Node.TEXT_NODE) {
project.insertBefore(ws = context.indent(1), refNode);
}
} else {
ws = project.getLastChild();
if (ws == null || ws.getNodeType() != Node.TEXT_NODE) {
project.appendChild(ws = context.indent(0));
}
}
project.insertBefore(dependencyManagement, ws);
project.insertBefore(context.indent(1), dependencyManagement);
}
dependencyManagementDeps = document.createElement("dependencies");
dependencyManagementDeps.appendChild(context.indent(2));
Node ws = dependencyManagement.getLastChild();
if (ws == null || ws.getNodeType() != Node.TEXT_NODE) {
dependencyManagement.appendChild(ws = context.indent(1));
}
dependencyManagement.insertBefore(dependencyManagementDeps, ws);
dependencyManagement.insertBefore(context.indent(2), dependencyManagementDeps);
}
} catch (XPathExpressionException | DOMException e) {
throw new RuntimeException(e);
}
};
}
public static Transformation addPluginManagementIfNeeded() {
return (Document document, TransformationContext context) -> {
try {
Node plugins = (Node) context.getXPath().evaluate(
anyNs("project", "build", "pluginManagement", "plugins"), document, XPathConstants.NODE);
if (plugins == null) {
Node build = (Node) context.getXPath()
.evaluate(anyNs("project", "build"), document, XPathConstants.NODE);
if (build == null) {
Node project = (Node) context.getXPath().evaluate(anyNs("project"), document,
XPathConstants.NODE);
if (project == null) {
throw new IllegalStateException(
String.format("//project not found in [%s]", context.getPomXmlPath()));
}
build = document.createElement("build");
Node ws = project.getLastChild();
if (ws == null || ws.getNodeType() != Node.TEXT_NODE) {
project.appendChild(ws = context.indent(0));
}
project.insertBefore(build, ws);
project.insertBefore(context.indent(1), build);
}
Node pluginManagement = (Node) context.getXPath()
.evaluate(anyNs("project", "build", "pluginManagement"), document, XPathConstants.NODE);
if (pluginManagement == null) {
pluginManagement = document.createElement("pluginManagement");
Node ws = build.getLastChild();
if (ws == null || ws.getNodeType() != Node.TEXT_NODE) {
build.appendChild(ws = context.indent(1));
}
build.insertBefore(pluginManagement, ws);
build.insertBefore(context.indent(2), pluginManagement);
}
plugins = document.createElement("plugins");
plugins.appendChild(context.indent(3));
Node ws = pluginManagement.getLastChild();
if (ws == null || ws.getNodeType() != Node.TEXT_NODE) {
pluginManagement.appendChild(ws = context.indent(2));
}
pluginManagement.insertBefore(plugins, ws);
pluginManagement.insertBefore(context.indent(3), plugins);
}
} catch (XPathExpressionException | DOMException e) {
throw new RuntimeException(e);
}
};
}
public static Transformation addManagedDependency(String groupId, String artifactId, String version) {
return addManagedDependency(new Gavtcs(groupId, artifactId, version, null, null, null));
}
    /**
     * Returns a {@link Transformation} that appends a {@code <dependency>} element
     * built from the given {@code gavtcs} to
     * {@code /project/dependencyManagement/dependencies}, creating the enclosing
     * elements first via {@link #addDependencyManagementIfNeeded()}.
     * The {@code <type>}, {@code <classifier>} and {@code <scope>} children are
     * written only when the corresponding {@code gavtcs} field is non-null.
     *
     * @param gavtcs the coordinates of the managed dependency to add
     */
    public static Transformation addManagedDependency(Gavtcs gavtcs) {
        return (Document document, TransformationContext context) -> {
            try {
                // Make sure project/dependencyManagement/dependencies exists before querying it
                addDependencyManagementIfNeeded().perform(document, context);
                Node dependencyManagementDeps = (Node) context.getXPath().evaluate(
                        anyNs("project", "dependencyManagement", "dependencies"), document, XPathConstants.NODE);
                final NodeList dependencyManagementDepsChildren = dependencyManagementDeps.getChildNodes();
                // ws: the whitespace-only text node before </dependencies> used as insertion anchor;
                // reuse the last child if it is a text node, otherwise create one
                Node ws = null;
                if (dependencyManagementDepsChildren.getLength() > 0) {
                    ws = dependencyManagementDepsChildren.item(dependencyManagementDepsChildren.getLength() - 1);
                }
                if (ws == null || ws.getNodeType() != Node.TEXT_NODE) {
                    ws = context.indent(3);
                    dependencyManagementDeps.appendChild(ws);
                }
                // Indentation for the new <dependency>, then the element itself, both before the anchor
                dependencyManagementDeps.insertBefore(context.indent(3), ws);
                final Node dep = document.createElement("dependency");
                dep.appendChild(context.indent(4));
                dep.appendChild(context.textElement("groupId", gavtcs.groupId));
                dep.appendChild(context.indent(4));
                dep.appendChild(context.textElement("artifactId", gavtcs.artifactId));
                dep.appendChild(context.indent(4));
                dep.appendChild(context.textElement("version", gavtcs.version));
                if (gavtcs.type != null) {
                    dep.appendChild(context.indent(4));
                    dep.appendChild(context.textElement("type", gavtcs.type));
                }
                if (gavtcs.classifier != null) {
                    dep.appendChild(context.indent(4));
                    dep.appendChild(context.textElement("classifier", gavtcs.classifier));
                }
                if (gavtcs.scope != null) {
                    dep.appendChild(context.indent(4));
                    dep.appendChild(context.textElement("scope", gavtcs.scope));
                }
                // Trailing whitespace inside <dependency> so that </dependency> ends up indented
                dep.appendChild(context.indent(3));
                dependencyManagementDeps.insertBefore(dep, ws);
            } catch (XPathExpressionException | DOMException e) {
                throw new RuntimeException(e);
            }
        };
    }
    /**
     * Returns a {@link Transformation} that appends a {@code <plugin>} element with
     * the given plugin's groupId, artifactId and version to
     * {@code /project/build/pluginManagement/plugins}, creating the enclosing
     * elements first via {@link #addPluginManagementIfNeeded()}.
     *
     * @param plugin the plugin whose coordinates should be written
     */
    public static Transformation addManagedPlugin(Plugin plugin) {
        return (Document document, TransformationContext context) -> {
            try {
                // Make sure project/build/pluginManagement/plugins exists before querying it
                addPluginManagementIfNeeded().perform(document, context);
                Node managedPlugins = (Node) context.getXPath().evaluate(
                        anyNs("project", "build", "pluginManagement", "plugins"), document, XPathConstants.NODE);
                final NodeList pluginsChildren = managedPlugins.getChildNodes();
                // ws: the whitespace-only text node before </plugins> used as insertion anchor;
                // reuse the last child if it is a text node, otherwise create one
                Node ws = null;
                if (pluginsChildren.getLength() > 0) {
                    ws = pluginsChildren.item(pluginsChildren.getLength() - 1);
                }
                if (ws == null || ws.getNodeType() != Node.TEXT_NODE) {
                    ws = context.indent(4);
                    managedPlugins.appendChild(ws);
                }
                managedPlugins.insertBefore(context.indent(4), ws);
                final Node dep = document.createElement("plugin");
                dep.appendChild(context.indent(5));
                dep.appendChild(context.textElement("groupId", plugin.getGroupId()));
                dep.appendChild(context.indent(5));
                dep.appendChild(context.textElement("artifactId", plugin.getArtifactId()));
                dep.appendChild(context.indent(5));
                // NOTE(review): version is written unconditionally — assumes plugin.getVersion()
                // is non-null; confirm callers always set it (unlike the optional fields in
                // addManagedDependency(Gavtcs), which are guarded by null checks)
                dep.appendChild(context.textElement("version", plugin.getVersion()));
                // Trailing whitespace inside <plugin> so that </plugin> ends up indented
                dep.appendChild(context.indent(4));
                managedPlugins.insertBefore(dep, ws);
            } catch (XPathExpressionException | DOMException e) {
                throw new RuntimeException(e);
            }
        };
    }
    /**
     * Performs this {@link Transformation} on the given {@code document}.
     * Implementations typically mutate the {@code document} in place; any
     * {@link XPathExpressionException} or {@link DOMException} raised while doing
     * so is generally rethrown wrapped in a {@link RuntimeException}.
     *
     * @param document the {@link Document} to transform
     * @param context  the current {@link TransformationContext}
     */
    void perform(Document document, TransformationContext context);
}
public static | Transformation |
java | quarkusio__quarkus | independent-projects/bootstrap/app-model/src/main/java/io/quarkus/bootstrap/workspace/LazySourceDir.java | {
"start": 550,
"end": 3469
} | class ____ implements SourceDir, Serializable {
private Path srcDir;
private PathFilter srcFilter;
private Path destDir;
private PathFilter destFilter;
private Path genSrcDir;
private Map<Object, Object> data;
/**
* For deserialization only
*/
public LazySourceDir() {
}
public LazySourceDir(Path srcDir, Path destinationDir) {
this(srcDir, destinationDir, null, Collections.emptyMap());
}
public LazySourceDir(Path srcDir, Path destinationDir, Path generatedSourcesDir) {
this(srcDir, destinationDir, generatedSourcesDir, Collections.emptyMap());
}
public LazySourceDir(Path srcDir, Path destinationDir, Path generatedSourcesDir, Map<Object, Object> data) {
this(srcDir, null, destinationDir, null, generatedSourcesDir, data);
}
public LazySourceDir(Path srcDir, PathFilter srcFilter, Path destDir, PathFilter destFilter, Path genSrcDir,
Map<Object, Object> data) {
this.srcDir = Objects.requireNonNull(srcDir, "srcDir is null");
this.srcFilter = srcFilter;
this.destDir = Objects.requireNonNull(destDir, "destDir is null");
this.destFilter = destFilter;
this.genSrcDir = genSrcDir;
this.data = data;
}
@Override
public Path getDir() {
return srcDir;
}
@Override
public PathTree getSourceTree() {
return Files.exists(srcDir) ? new DirectoryPathTree(srcDir, srcFilter) : EmptyPathTree.getInstance();
}
@Override
public Path getOutputDir() {
return destDir;
}
@Override
public Path getAptSourcesDir() {
return genSrcDir;
}
@Override
public PathTree getOutputTree() {
return Files.exists(destDir) ? new DirectoryPathTree(destDir, destFilter) : EmptyPathTree.getInstance();
}
public <T> T getValue(Object key, Class<T> type) {
final Object o = data.get(key);
return o == null ? null : type.cast(o);
}
@Serial
private void writeObject(java.io.ObjectOutputStream out) throws IOException {
out.writeUTF(srcDir.toAbsolutePath().toString());
out.writeObject(srcFilter);
out.writeUTF(destDir.toAbsolutePath().toString());
out.writeObject(destFilter);
out.writeUTF(genSrcDir == null ? "null" : genSrcDir.toAbsolutePath().toString());
out.writeObject(data);
}
@Serial
private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
srcDir = Path.of(in.readUTF());
srcFilter = (PathFilter) in.readObject();
destDir = Path.of(in.readUTF());
destFilter = (PathFilter) in.readObject();
final String genSrcStr = in.readUTF();
if (!"null".equals(genSrcStr)) {
genSrcDir = Path.of(genSrcStr);
}
data = (Map<Object, Object>) in.readObject();
}
}
| LazySourceDir |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/inject/annotation/EvaluatedConvertibleValuesMap.java | {
"start": 1348,
"end": 3211
} | class ____<V> implements ConvertibleValues<V> {
private final ExpressionEvaluationContext evaluationContext;
private final ConvertibleValues<V> delegateValues;
EvaluatedConvertibleValuesMap(ExpressionEvaluationContext evaluationContext,
ConvertibleValues<V> delegateValues) {
this.evaluationContext = evaluationContext;
this.delegateValues = delegateValues;
}
@Override
public Set<String> names() {
return delegateValues.names();
}
@Override
public <T> Optional<T> get(CharSequence name,
ArgumentConversionContext<T> conversionContext) {
V value = delegateValues.getValue(name);
if (value instanceof EvaluatedExpression expression) {
if (EvaluatedExpression.class.isAssignableFrom(conversionContext.getArgument().getClass())) {
return Optional.of((T) value);
}
Object evaluationResult = expression.evaluate(evaluationContext);
if (evaluationResult == null || conversionContext.getArgument().isAssignableFrom(evaluationResult.getClass())) {
return Optional.ofNullable((T) evaluationResult);
}
return ConversionService.SHARED.convert(evaluationResult, conversionContext);
} else {
return delegateValues.get(name, conversionContext);
}
}
@SuppressWarnings("unchecked")
@Override
public Collection<V> values() {
return delegateValues.values().stream().map(v -> {
if (v instanceof EvaluatedExpression expression) {
Object evaluationResult = expression.evaluate(evaluationContext);
return (V) evaluationResult;
}
return v;
}).collect(Collectors.toList());
}
}
| EvaluatedConvertibleValuesMap |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java | {
"start": 24905,
"end": 28301
} | class ____ extends DataBlock {
private int bytesWritten;
private final File bufferFile;
private final int limit;
private BufferedOutputStream out;
private final AtomicBoolean closed = new AtomicBoolean(false);
DiskBlock(File bufferFile,
int limit,
long index,
BlockUploadStatistics statistics)
throws FileNotFoundException {
super(index, statistics);
this.limit = limit;
this.bufferFile = bufferFile;
blockAllocated();
out = new BufferedOutputStream(new FileOutputStream(bufferFile));
}
@Override public int dataSize() {
return bytesWritten;
}
@Override
boolean hasCapacity(long bytes) {
return dataSize() + bytes <= limit;
}
@Override public int remainingCapacity() {
return limit - bytesWritten;
}
@Override
public int write(byte[] b, int offset, int len) throws IOException {
super.write(b, offset, len);
int written = Math.min(remainingCapacity(), len);
out.write(b, offset, written);
bytesWritten += written;
return written;
}
@Override
public BlockUploadData startUpload() throws IOException {
super.startUpload();
try {
out.flush();
} finally {
out.close();
out = null;
}
return new BlockUploadData(bufferFile);
}
/**
* The close operation will delete the destination file if it still
* exists.
*
* @throws IOException IO problems
*/
@SuppressWarnings("UnnecessaryDefault")
@Override
protected void innerClose() throws IOException {
final DestState state = getState();
LOG.debug("Closing {}", this);
switch (state) {
case Writing:
if (bufferFile.exists()) {
// file was not uploaded
LOG.debug("Block[{}]: Deleting buffer file as upload did not start",
getIndex());
closeBlock();
}
break;
case Upload:
LOG.debug("Block[{}]: Buffer file {} exists —close upload stream",
getIndex(), bufferFile);
break;
case Closed:
closeBlock();
break;
default:
// this state can never be reached, but checkstyle complains, so
// it is here.
}
}
/**
* Flush operation will flush to disk.
*
* @throws IOException IOE raised on FileOutputStream
*/
@Override public void flush() throws IOException {
super.flush();
out.flush();
}
@Override
public String toString() {
String sb = "FileBlock{"
+ "index=" + getIndex()
+ ", destFile=" + bufferFile +
", state=" + getState() +
", dataSize=" + dataSize() +
", limit=" + limit +
'}';
return sb;
}
/**
* Close the block.
* This will delete the block's buffer file if the block has
* not previously been closed.
*/
void closeBlock() {
LOG.debug("block[{}]: closeBlock()", getIndex());
if (!closed.getAndSet(true)) {
blockReleased();
if (!bufferFile.delete() && bufferFile.exists()) {
LOG.warn("delete({}) returned false",
bufferFile.getAbsoluteFile());
}
} else {
LOG.debug("block[{}]: skipping re-entrant closeBlock()", getIndex());
}
}
}
}
| DiskBlock |
java | FasterXML__jackson-core | src/main/java/tools/jackson/core/StreamWriteCapability.java | {
"start": 445,
"end": 1710
} | enum ____
implements JacksonFeature
{
/**
* Capability that indicates that the data format is able to express binary
* data natively, without using textual encoding like Base64.
*<p>
* Capability is currently enabled for all binary formats and none of textual
* formats.
*/
CAN_WRITE_BINARY_NATIVELY(false),
/**
* Capability that indicates that the data format is able to write
* "formatted numbers": that is, output of numbers is done as Strings
* and caller is allowed to pass in logical number values as Strings.
*<p>
* Capability is currently enabled for most textual formats and none of binary
* formats.
*/
CAN_WRITE_FORMATTED_NUMBERS(false)
;
/**
* Whether feature is enabled or disabled by default.
*/
private final boolean _defaultState;
private final int _mask;
private StreamWriteCapability(boolean defaultState) {
_defaultState = defaultState;
_mask = (1 << ordinal());
}
@Override
public boolean enabledByDefault() { return _defaultState; }
@Override
public boolean enabledIn(int flags) { return (flags & _mask) != 0; }
@Override
public int getMask() { return _mask; }
}
| StreamWriteCapability |
java | netty__netty | handler/src/test/java/io/netty/handler/ssl/JdkDelegatingPrivateKeyMethodTest.java | {
"start": 24877,
"end": 25446
} | class ____ extends ChannelInboundHandlerAdapter {
static final ChannelInboundHandler INSTANCE = new ServerHandler();
private ServerHandler() {
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
// Echo the message back and close
ctx.writeAndFlush(msg).addListener(future -> {
ctx.close();
});
}
}
private static boolean isSignatureDelegationSupported() {
return OpenSsl.isBoringSSL() || OpenSsl.isAWSLC();
}
}
| ServerHandler |
java | quarkusio__quarkus | test-framework/junit5-component/src/test/java/io/quarkus/test/component/beans/Bravo.java | {
"start": 141,
"end": 257
} | class ____ {
@Inject
Charlie charlie;
public String ping() {
return charlie.ping();
}
}
| Bravo |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/jackson/FactorGrantedAuthorityMixinTests.java | {
"start": 1050,
"end": 2109
} | class ____ extends AbstractMixinTests {
// @formatter:off
public static final String AUTHORITY_JSON = "{\"@class\": \"org.springframework.security.core.authority.FactorGrantedAuthority\", \"authority\": \"FACTOR_PASSWORD\", \"issuedAt\": 1759177143.043000000 }";
private Instant issuedAt = Instant.ofEpochMilli(1759177143043L);
// @formatter:on
@Test
void serializeSimpleGrantedAuthorityTest() throws JSONException {
GrantedAuthority authority = FactorGrantedAuthority.withAuthority("FACTOR_PASSWORD")
.issuedAt(this.issuedAt)
.build();
String serializeJson = this.mapper.writeValueAsString(authority);
JSONAssert.assertEquals(AUTHORITY_JSON, serializeJson, true);
}
@Test
void deserializeGrantedAuthorityTest() {
FactorGrantedAuthority authority = (FactorGrantedAuthority) this.mapper.readValue(AUTHORITY_JSON, Object.class);
assertThat(authority).isNotNull();
assertThat(authority.getAuthority()).isEqualTo("FACTOR_PASSWORD");
assertThat(authority.getIssuedAt()).isEqualTo(this.issuedAt);
}
}
| FactorGrantedAuthorityMixinTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/fetch/OneToOneOwnerByContainedEagerCyclesTest.java | {
"start": 3728,
"end": 4219
} | class ____ {
@Id
private Integer id;
@OneToOne
private Containing containing;
private String text;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Containing getContaining() {
return containing;
}
public void setContaining(Containing containing) {
this.containing = containing;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
}
}
| Contained |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java | {
"start": 7360,
"end": 8554
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory unit;
private final EvalOperator.ExpressionEvaluator.Factory startTimestamp;
private final EvalOperator.ExpressionEvaluator.Factory endTimestamp;
private final ZoneId zoneId;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory unit,
EvalOperator.ExpressionEvaluator.Factory startTimestamp,
EvalOperator.ExpressionEvaluator.Factory endTimestamp, ZoneId zoneId) {
this.source = source;
this.unit = unit;
this.startTimestamp = startTimestamp;
this.endTimestamp = endTimestamp;
this.zoneId = zoneId;
}
@Override
public DateDiffMillisEvaluator get(DriverContext context) {
return new DateDiffMillisEvaluator(source, unit.get(context), startTimestamp.get(context), endTimestamp.get(context), zoneId, context);
}
@Override
public String toString() {
return "DateDiffMillisEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + ", zoneId=" + zoneId + "]";
}
}
}
| Factory |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/util/OptionConverter.java | {
"start": 1339,
"end": 1408
} | class ____ convert property values to specific types.
*/
public final | to |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/transport/TripleTailHandler.java | {
"start": 1132,
"end": 1405
} | class ____ extends ChannelInboundHandlerAdapter {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
if (msg instanceof ReferenceCounted) {
ReferenceCountUtil.release(msg);
}
}
}
| TripleTailHandler |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/SalesforceEndpointConfig.java | {
"start": 1910,
"end": 9965
} | class ____ implements Cloneable {
// default API version
public static final String DEFAULT_VERSION = "56.0";
// general parameter
public static final String API_VERSION = "apiVersion";
// parameters for Rest API
public static final String FORMAT = "format";
public static final String RAW_PAYLOAD = "rawPayload";
public static final String SOBJECT_NAME = "sObjectName";
public static final String SOBJECT_ID = "sObjectId";
public static final String SOBJECT_IDS = "sObjectIds";
public static final String SOBJECT_FIELDS = "sObjectFields";
public static final String SOBJECT_EXT_ID_NAME = "sObjectIdName";
public static final String SOBJECT_EXT_ID_VALUE = "sObjectIdValue";
public static final String SOBJECT_BLOB_FIELD_NAME = "sObjectBlobFieldName";
public static final String SOBJECT_CLASS = "sObjectClass";
public static final String SOBJECT_QUERY = "sObjectQuery";
public static final String STREAM_QUERY_RESULT = "streamQueryResult";
public static final String SOBJECT_SEARCH = "sObjectSearch";
public static final String APEX_METHOD = "apexMethod";
public static final String APEX_URL = "apexUrl";
public static final String COMPOSITE_METHOD = "compositeMethod";
public static final String LIMIT = "limit";
public static final String ALL_OR_NONE = "allOrNone";
public static final String EVENT_NAME = "eventName";
public static final String EVENT_SCHEMA_ID = "eventSchemaId";
public static final String EVENT_SCHEMA_FORMAT = "eventSchemaFormat";
// prefix for parameters in headers
public static final String APEX_QUERY_PARAM_PREFIX = "apexQueryParam.";
// parameters for Bulk API
public static final String CONTENT_TYPE = "contentType";
public static final String JOB_ID = "jobId";
public static final String BATCH_ID = "batchId";
public static final String RESULT_ID = "resultId";
public static final String QUERY_LOCATOR = "queryLocator";
public static final String LOCATOR = "locator";
public static final String MAX_RECORDS = "maxRecords";
public static final String PK_CHUNKING = "pkChunking";
public static final String PK_CHUNKING_CHUNK_SIZE = "pkChunkingChunkSize";
public static final String PK_CHUNKING_PARENT = "pkChunkingParent";
public static final String PK_CHUNKING_START_ROW = "pkChunkingStartRow";
// parameters for Analytics API
public static final String REPORT_ID = "reportId";
public static final String INCLUDE_DETAILS = "includeDetails";
public static final String REPORT_METADATA = "reportMetadata";
public static final String INSTANCE_ID = "instanceId";
// parameters for Streaming API
public static final String DEFAULT_REPLAY_ID = "defaultReplayId";
public static final String FALL_BACK_REPLAY_ID = "fallBackReplayId";
public static final String INITIAL_REPLAY_ID_MAP = "initialReplayIdMap";
public static final long REPLAY_FROM_TIP = -1L;
// parameters for Pub/Sub API
public static final String REPLAY_PRESET = "replayPreset";
public static final String PUB_SUB_DESERIALIZE_TYPE = "pubSubDeserializeType";
public static final String PUB_SUB_POJO_CLASS = "pubSubPojoClass";
// parameters for Approval API
public static final String APPROVAL = "approval";
// parameters for the RAW operation
public static final String RAW_PATH = "rawPath";
public static final String RAW_METHOD = "rawMethod";
public static final String RAW_QUERY_PARAMETERS = "rawQueryParameters";
public static final String RAW_HTTP_HEADERS = "rawHttpHeaders";
// default maximum authentication retries on failed authentication or
// expired session
public static final int DEFAULT_MAX_AUTHENTICATION_RETRIES = 4;
// default increment and limit for Streaming connection restart attempts
public static final long DEFAULT_BACKOFF_INCREMENT = 1000L;
public static final long DEFAULT_MAX_BACKOFF = 30000L;
public static final String NOT_FOUND_BEHAVIOUR = "notFoundBehaviour";
public static final String FALL_BACK_TO_LATEST_REPLAY_ID = "fallbackToLatestReplayId";
// general properties
@UriParam(defaultValue = DEFAULT_VERSION)
private String apiVersion = DEFAULT_VERSION;
// Rest API properties
@UriParam
private PayloadFormat format = PayloadFormat.JSON;
@UriParam
private boolean rawPayload;
@UriParam(displayName = "SObject Name")
private String sObjectName;
@UriParam(displayName = "SObject Id")
private String sObjectId;
@UriParam(displayName = "SObject Fields")
private String sObjectFields;
@UriParam(displayName = "SObject Id Name")
private String sObjectIdName;
@UriParam(displayName = "SObject Id Value")
private String sObjectIdValue;
@UriParam(displayName = "SObject Blob Field Name")
private String sObjectBlobFieldName;
@UriParam(displayName = "SObject Class")
private String sObjectClass;
@UriParam(displayName = "SObject Query")
private String sObjectQuery;
@UriParam(displayName = "Stream query result", defaultValue = "false")
private Boolean streamQueryResult = false;
@UriParam(displayName = "SObject Search")
private String sObjectSearch;
@UriParam
private String apexMethod;
@UriParam(displayName = "Event Name", label = "producer")
private String eventName;
@UriParam(displayName = "Event Schema Format", label = "producer")
private EventSchemaFormatEnum eventSchemaFormat;
@UriParam(displayName = "Event Schema Id", label = "producer")
private String eventSchemaId;
@UriParam(label = "producer")
private String compositeMethod;
@UriParam(label = "producer", defaultValue = "false", description = "Composite API option to indicate" +
" to rollback all records if any are not successful.")
private boolean allOrNone;
@UriParam(label = "producer")
private String apexUrl;
@UriParam
private Map<String, Object> apexQueryParams;
// Bulk API properties
@UriParam
private ContentType contentType;
@UriParam
private String jobId;
@UriParam
private String batchId;
@UriParam
private String resultId;
@UriParam
private String queryLocator;
@UriParam
private String locator;
@UriParam(javaType = "java.lang.Integer")
private Integer maxRecords;
@UriParam
private Boolean pkChunking;
@UriParam
private Integer pkChunkingChunkSize;
@UriParam
private String pkChunkingParent;
@UriParam
private String pkChunkingStartRow;
// Streaming API properties
@UriParam
private boolean updateTopic;
@UriParam
private NotifyForFieldsEnum notifyForFields;
@UriParam
private NotifyForOperationsEnum notifyForOperations;
@UriParam
private Boolean notifyForOperationCreate;
@UriParam
private Boolean notifyForOperationUpdate;
@UriParam
private Boolean notifyForOperationDelete;
@UriParam
private Boolean notifyForOperationUndelete;
// Pub/Sub API properties
@UriParam(label = "consumer", defaultValue = "100",
description = "Max number of events to receive in a batch from the Pub/Sub API.")
private int pubSubBatchSize = 100;
@UriParam(label = "consumer", defaultValue = "AVRO",
description = "How to deserialize events consume from the Pub/Sub API. AVRO will try a " +
"SpecificRecord subclass if found, otherwise GenericRecord.",
enums = "AVRO,SPECIFIC_RECORD,GENERIC_RECORD,POJO,JSON")
private PubSubDeserializeType pubSubDeserializeType = PubSubDeserializeType.AVRO;
@UriParam(label = "consumer", description = "Replay preset for Pub/Sub API.", defaultValue = "LATEST",
enums = "LATEST,EARLIEST,CUSTOM")
private ReplayPreset replayPreset = ReplayPreset.LATEST;
@UriParam(label = "consumer", description = "Fully qualified | SalesforceEndpointConfig |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/observable/ObservableDematerializeTest.java | {
"start": 1150,
"end": 8695
} | class ____ extends RxJavaTest {
@Test
public void simpleSelector() {
Observable<Notification<Integer>> notifications = Observable.just(1, 2).materialize();
Observable<Integer> dematerialize = notifications.dematerialize(Functions.<Notification<Integer>>identity());
Observer<Integer> observer = TestHelper.mockObserver();
dematerialize.subscribe(observer);
verify(observer, times(1)).onNext(1);
verify(observer, times(1)).onNext(2);
verify(observer, times(1)).onComplete();
verify(observer, never()).onError(any(Throwable.class));
}
@Test
public void selectorCrash() {
Observable.just(1, 2)
.materialize()
.dematerialize(new Function<Notification<Integer>, Notification<Object>>() {
@Override
public Notification<Object> apply(Notification<Integer> v) throws Exception {
throw new TestException();
}
})
.test()
.assertFailure(TestException.class);
}
@Test
public void selectorNull() {
Observable.just(1, 2)
.materialize()
.dematerialize(new Function<Notification<Integer>, Notification<Object>>() {
@Override
public Notification<Object> apply(Notification<Integer> v) throws Exception {
return null;
}
})
.test()
.assertFailure(NullPointerException.class);
}
@Test
public void dematerialize1() {
Observable<Notification<Integer>> notifications = Observable.just(1, 2).materialize();
Observable<Integer> dematerialize = notifications.dematerialize(Functions.<Notification<Integer>>identity());
Observer<Integer> observer = TestHelper.mockObserver();
dematerialize.subscribe(observer);
verify(observer, times(1)).onNext(1);
verify(observer, times(1)).onNext(2);
verify(observer, times(1)).onComplete();
verify(observer, never()).onError(any(Throwable.class));
}
@Test
public void dematerialize2() {
Throwable exception = new Throwable("test");
Observable<Integer> o = Observable.error(exception);
Observable<Integer> dematerialize = o.materialize().dematerialize(Functions.<Notification<Integer>>identity());
Observer<Integer> observer = TestHelper.mockObserver();
dematerialize.subscribe(observer);
verify(observer, times(1)).onError(exception);
verify(observer, times(0)).onComplete();
verify(observer, times(0)).onNext(any(Integer.class));
}
@Test
public void dematerialize3() {
Exception exception = new Exception("test");
Observable<Integer> o = Observable.error(exception);
Observable<Integer> dematerialize = o.materialize().dematerialize(Functions.<Notification<Integer>>identity());
Observer<Integer> observer = TestHelper.mockObserver();
dematerialize.subscribe(observer);
verify(observer, times(1)).onError(exception);
verify(observer, times(0)).onComplete();
verify(observer, times(0)).onNext(any(Integer.class));
}
@Test
public void errorPassThru() {
Exception exception = new Exception("test");
Observable<Notification<Integer>> o = Observable.error(exception);
Observable<Integer> dematerialize = o.dematerialize(Functions.<Notification<Integer>>identity());
Observer<Integer> observer = TestHelper.mockObserver();
dematerialize.subscribe(observer);
verify(observer, times(1)).onError(exception);
verify(observer, times(0)).onComplete();
verify(observer, times(0)).onNext(any(Integer.class));
}
@Test
public void completePassThru() {
Observable<Notification<Integer>> o = Observable.empty();
Observable<Integer> dematerialize = o.dematerialize(Functions.<Notification<Integer>>identity());
Observer<Integer> observer = TestHelper.mockObserver();
TestObserverEx<Integer> to = new TestObserverEx<>(observer);
dematerialize.subscribe(to);
System.out.println(to.errors());
verify(observer, never()).onError(any(Throwable.class));
verify(observer, times(1)).onComplete();
verify(observer, times(0)).onNext(any(Integer.class));
}
@Test
public void honorsContractWhenCompleted() {
Observable<Integer> source = Observable.just(1);
Observable<Integer> result = source.materialize().dematerialize(Functions.<Notification<Integer>>identity());
Observer<Integer> o = TestHelper.mockObserver();
result.subscribe(o);
verify(o).onNext(1);
verify(o).onComplete();
verify(o, never()).onError(any(Throwable.class));
}
@Test
public void honorsContractWhenThrows() {
Observable<Integer> source = Observable.error(new TestException());
Observable<Integer> result = source.materialize().dematerialize(Functions.<Notification<Integer>>identity());
Observer<Integer> o = TestHelper.mockObserver();
result.subscribe(o);
verify(o, never()).onNext(any(Integer.class));
verify(o, never()).onComplete();
verify(o).onError(any(TestException.class));
}
@Test
public void dispose() {
TestHelper.checkDisposed(Observable.just(Notification.<Integer>createOnComplete()).dematerialize(Functions.<Notification<Integer>>identity()));
}
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeObservable(new Function<Observable<Notification<Object>>, ObservableSource<Object>>() {
@Override
public ObservableSource<Object> apply(Observable<Notification<Object>> o) throws Exception {
return o.dematerialize(Functions.<Notification<Object>>identity());
}
});
}
@Test
public void eventsAfterDematerializedTerminal() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
new Observable<Notification<Object>>() {
@Override
protected void subscribeActual(Observer<? super Notification<Object>> observer) {
observer.onSubscribe(Disposable.empty());
observer.onNext(Notification.createOnComplete());
observer.onNext(Notification.<Object>createOnNext(1));
observer.onNext(Notification.createOnError(new TestException("First")));
observer.onError(new TestException("Second"));
}
}
.dematerialize(Functions.<Notification<Object>>identity())
.test()
.assertResult();
TestHelper.assertUndeliverable(errors, 0, TestException.class, "First");
TestHelper.assertUndeliverable(errors, 1, TestException.class, "Second");
} finally {
RxJavaPlugins.reset();
}
}
@Test
@SuppressWarnings("unchecked")
public void nonNotificationInstanceAfterDispose() {
new Observable<Object>() {
@Override
protected void subscribeActual(Observer<? super Object> observer) {
observer.onSubscribe(Disposable.empty());
observer.onNext(Notification.createOnComplete());
observer.onNext(1);
}
}
.dematerialize(v -> (Notification<Object>)v)
.test()
.assertResult();
}
}
| ObservableDematerializeTest |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/aot/TestClassScannerTests.java | {
"start": 1780,
"end": 5893
} | class ____ extends AbstractAotTests {
@Test
void scanBasicTestClasses() {
assertThat(scan("org.springframework.test.context.aot.samples.basic"))
.containsExactlyInAnyOrder(
BasicSpringJupiterImportedConfigTests.class,
BasicSpringJupiterSharedConfigTests.class,
BasicSpringJupiterTests.class,
BasicSpringJupiterTests.NestedTests.class,
BasicSpringJupiterParameterizedClassTests.class,
AbstractSpringJupiterParameterizedClassTests.InheritedNestedTests.class,
AbstractSpringJupiterParameterizedClassTests.InheritedNestedTests.InheritedDoublyNestedTests.class,
BasicSpringJupiterParameterizedClassTests.NestedTests.class,
BasicSpringJupiterParameterizedClassTests.NestedTests.DoublyNestedTests.class,
BasicSpringTestNGTests.class,
BasicSpringVintageTests.class,
DisabledInAotProcessingTests.class,
DisabledInAotRuntimeClassLevelTests.class,
DisabledInAotRuntimeMethodLevelTests.class
);
}
@Test
void scanTestSuitesForJupiter() {
assertThat(scan("org.springframework.test.context.aot.samples.suites.jupiter"))
.containsExactlyInAnyOrder(
BasicSpringJupiterImportedConfigTests.class,
BasicSpringJupiterSharedConfigTests.class,
BasicSpringJupiterTests.class,
BasicSpringJupiterTests.NestedTests.class,
BasicSpringJupiterParameterizedClassTests.class,
AbstractSpringJupiterParameterizedClassTests.InheritedNestedTests.class,
AbstractSpringJupiterParameterizedClassTests.InheritedNestedTests.InheritedDoublyNestedTests.class,
BasicSpringJupiterParameterizedClassTests.NestedTests.class,
BasicSpringJupiterParameterizedClassTests.NestedTests.DoublyNestedTests.class,
DisabledInAotProcessingTests.class,
DisabledInAotRuntimeClassLevelTests.class,
DisabledInAotRuntimeMethodLevelTests.class
);
}
@Test
void scanTestSuitesForVintage() {
assertThat(scan("org.springframework.test.context.aot.samples.suites.vintage"))
.containsExactly(BasicSpringVintageTests.class);
}
@Test
void scanTestSuitesForTestNG() {
assertThat(scan("org.springframework.test.context.aot.samples.suites.testng"))
.containsExactly(BasicSpringTestNGTests.class);
}
@Test
void scanTestSuitesForAllTestEngines() {
assertThat(scan("org.springframework.test.context.aot.samples.suites.all"))
.containsExactlyInAnyOrder(
BasicSpringJupiterImportedConfigTests.class,
BasicSpringJupiterSharedConfigTests.class,
BasicSpringJupiterTests.class,
BasicSpringJupiterTests.NestedTests.class,
BasicSpringJupiterParameterizedClassTests.class,
AbstractSpringJupiterParameterizedClassTests.InheritedNestedTests.class,
AbstractSpringJupiterParameterizedClassTests.InheritedNestedTests.InheritedDoublyNestedTests.class,
BasicSpringJupiterParameterizedClassTests.NestedTests.class,
BasicSpringJupiterParameterizedClassTests.NestedTests.DoublyNestedTests.class,
BasicSpringVintageTests.class,
BasicSpringTestNGTests.class,
DisabledInAotProcessingTests.class,
DisabledInAotRuntimeClassLevelTests.class,
DisabledInAotRuntimeMethodLevelTests.class
);
}
@Test
void scanTestSuitesWithNestedSuites() {
assertThat(scan("org.springframework.test.context.aot.samples.suites.nested"))
.containsExactlyInAnyOrder(
BasicSpringJupiterImportedConfigTests.class,
BasicSpringJupiterSharedConfigTests.class,
BasicSpringJupiterTests.class,
BasicSpringJupiterTests.NestedTests.class,
BasicSpringJupiterParameterizedClassTests.class,
AbstractSpringJupiterParameterizedClassTests.InheritedNestedTests.class,
AbstractSpringJupiterParameterizedClassTests.InheritedNestedTests.InheritedDoublyNestedTests.class,
BasicSpringJupiterParameterizedClassTests.NestedTests.class,
BasicSpringJupiterParameterizedClassTests.NestedTests.DoublyNestedTests.class,
BasicSpringVintageTests.class,
DisabledInAotProcessingTests.class,
DisabledInAotRuntimeClassLevelTests.class,
DisabledInAotRuntimeMethodLevelTests.class
);
}
@Test
void scanEntireSpringTestModule() {
assertThat(scan()).hasSizeGreaterThan(400);
}
}
| TestClassScannerTests |
java | quarkusio__quarkus | extensions/smallrye-graphql/deployment/src/test/java/io/quarkus/smallrye/graphql/deployment/GraphQLTerminateContextTest.java | {
"start": 693,
"end": 1897
} | class ____ extends AbstractGraphQLTest {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((JavaArchive jar) -> jar
.addClasses(TestTerminateContextResource.class)
.addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml"));
@Test
public void testWhoAmI() {
runTestWith("user1");
runTestWith("user2");
runTestWith("user2");
}
public void runTestWith(String expectedUser) {
String fooRequest = getPayload("{\n" +
" whoami {\n" +
" name\n" +
" }\n" +
"}");
RestAssured.given().when()
.accept(MEDIATYPE_JSON)
.contentType(MEDIATYPE_JSON)
.body(fooRequest)
.with().header("X-Test", expectedUser)
.post("/graphql")
.then()
.assertThat()
.statusCode(200)
.and()
.log().body().and()
.body("data.whoami.name", Matchers.equalTo(expectedUser));
}
@GraphQLApi
public static | GraphQLTerminateContextTest |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/samples/client/standalone/FrameworkExtensionTests.java | {
"start": 4506,
"end": 4993
} | class ____ extends MockMvcConfigurerAdapter {
@Override
public void afterConfigurerAdded(ConfigurableMockMvcBuilder<?> builder) {
builder.alwaysExpect(status().isOk());
}
@Override
public RequestPostProcessor beforeMockMvcCreated(
ConfigurableMockMvcBuilder<?> builder, WebApplicationContext context) {
return request -> {
request.setUserPrincipal(mock());
return request;
};
}
}
@Controller
@RequestMapping("/")
private static | TestMockMvcConfigurer |
java | apache__camel | components/camel-xpath/src/main/java/org/apache/camel/language/xpath/ThreadSafeNodeList.java | {
"start": 1389,
"end": 3018
} | class ____ implements NodeList {
private final List<Node> list = new ArrayList<>();
public ThreadSafeNodeList(NodeList source) throws Exception {
init(source);
}
@Override
public Node item(int index) {
return list.get(index);
}
@Override
public int getLength() {
return list.size();
}
private void init(NodeList source) throws Exception {
for (int i = 0; i < source.getLength(); i++) {
Node node = source.item(i);
if (node != null) {
// import node must not occur concurrent on the same node (must be its owner)
// so we need to synchronize on it
synchronized (node.getOwnerDocument()) {
Document doc = new XMLConverterHelper().createDocument();
// import node must not occur concurrent on the same node (must be its owner)
// so we need to synchronize on it
synchronized (node.getOwnerDocument()) {
Node clone = doc.importNode(node, true);
if (clone instanceof Text) {
// basic text node then add as-is
list.add(clone);
} else {
// more complex node, then add as child (yes its a bit weird but this is working)
doc.appendChild(clone);
list.add(doc.getChildNodes().item(0));
}
}
}
}
}
}
}
| ThreadSafeNodeList |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/operators/CachedMatchTaskTest.java | {
"start": 2120,
"end": 18020
} | class ____ extends DriverTestBase<FlatJoinFunction<Record, Record, Record>> {
private static final long HASH_MEM = 6 * 1024 * 1024;
private static final long SORT_MEM = 3 * 1024 * 1024;
@SuppressWarnings("unchecked")
private final RecordComparator comparator1 =
new RecordComparator(
new int[] {0}, (Class<? extends Value>[]) new Class[] {IntValue.class});
@SuppressWarnings("unchecked")
private final RecordComparator comparator2 =
new RecordComparator(
new int[] {0}, (Class<? extends Value>[]) new Class[] {IntValue.class});
private final List<Record> outList = new ArrayList<Record>();
public CachedMatchTaskTest(ExecutionConfig config) {
super(config, HASH_MEM, 2, SORT_MEM);
}
@TestTemplate
void testHash1MatchTask() {
int keyCnt1 = 20;
int valCnt1 = 1;
int keyCnt2 = 10;
int valCnt2 = 2;
addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
setOutput(this.outList);
getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED);
getTaskConfig().setRelativeMemoryDriver(1.0f);
BuildFirstCachedJoinDriver<Record, Record, Record> testTask =
new BuildFirstCachedJoinDriver<Record, Record, Record>();
try {
testResettableDriver(testTask, MockMatchStub.class, 3);
} catch (Exception e) {
e.printStackTrace();
fail("Test caused an exception.");
}
final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
assertThat(this.outList).withFailMessage("Wrong result set size.").hasSize(expCnt);
this.outList.clear();
}
@TestTemplate
void testHash2MatchTask() {
int keyCnt1 = 20;
int valCnt1 = 1;
int keyCnt2 = 20;
int valCnt2 = 1;
addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
setOutput(this.outList);
getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED);
getTaskConfig().setRelativeMemoryDriver(1.0f);
BuildSecondCachedJoinDriver<Record, Record, Record> testTask =
new BuildSecondCachedJoinDriver<Record, Record, Record>();
try {
testResettableDriver(testTask, MockMatchStub.class, 3);
} catch (Exception e) {
e.printStackTrace();
fail("Test caused an exception.");
}
final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
assertThat(this.outList).withFailMessage("Wrong result set size.").hasSize(expCnt);
this.outList.clear();
}
@TestTemplate
void testHash3MatchTask() {
int keyCnt1 = 20;
int valCnt1 = 1;
int keyCnt2 = 20;
int valCnt2 = 20;
addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
setOutput(this.outList);
getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED);
getTaskConfig().setRelativeMemoryDriver(1.0f);
BuildFirstCachedJoinDriver<Record, Record, Record> testTask =
new BuildFirstCachedJoinDriver<Record, Record, Record>();
try {
testResettableDriver(testTask, MockMatchStub.class, 3);
} catch (Exception e) {
e.printStackTrace();
fail("Test caused an exception.");
}
final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
assertThat(this.outList).withFailMessage("Wrong result set size.").hasSize(expCnt);
this.outList.clear();
}
@TestTemplate
void testHash4MatchTask() {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 1;
addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
setOutput(this.outList);
getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED);
getTaskConfig().setRelativeMemoryDriver(1.0f);
BuildSecondCachedJoinDriver<Record, Record, Record> testTask =
new BuildSecondCachedJoinDriver<Record, Record, Record>();
try {
testResettableDriver(testTask, MockMatchStub.class, 3);
} catch (Exception e) {
e.printStackTrace();
fail("Test caused an exception.");
}
final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
assertThat(this.outList).withFailMessage("Wrong result set size.").hasSize(expCnt);
this.outList.clear();
}
@TestTemplate
void testHash5MatchTask() {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 20;
addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
setOutput(this.outList);
getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED);
getTaskConfig().setRelativeMemoryDriver(1.0f);
BuildFirstCachedJoinDriver<Record, Record, Record> testTask =
new BuildFirstCachedJoinDriver<Record, Record, Record>();
try {
testResettableDriver(testTask, MockMatchStub.class, 3);
} catch (Exception e) {
e.printStackTrace();
fail("Test caused an exception.");
}
final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2);
assertThat(this.outList).withFailMessage("Wrong result set size.").hasSize(expCnt);
this.outList.clear();
}
@TestTemplate
void testFailingHashFirstMatchTask() {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 20;
addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
setOutput(new NirvanaOutputList());
getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED);
getTaskConfig().setRelativeMemoryDriver(1.0f);
BuildFirstCachedJoinDriver<Record, Record, Record> testTask =
new BuildFirstCachedJoinDriver<Record, Record, Record>();
assertThatThrownBy(() -> testResettableDriver(testTask, MockFailingMatchStub.class, 3))
.isInstanceOf(ExpectedTestException.class);
}
@TestTemplate
void testFailingHashSecondMatchTask() {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 20;
addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false));
addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false));
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
setOutput(new NirvanaOutputList());
getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED);
getTaskConfig().setRelativeMemoryDriver(1.0f);
BuildSecondCachedJoinDriver<Record, Record, Record> testTask =
new BuildSecondCachedJoinDriver<Record, Record, Record>();
assertThatThrownBy(() -> testResettableDriver(testTask, MockFailingMatchStub.class, 3))
.isInstanceOf(ExpectedTestException.class);
}
@TestTemplate
void testCancelHashMatchTaskWhileBuildFirst() {
int keyCnt = 20;
int valCnt = 20;
addInput(new DelayingInfinitiveInputIterator(100));
addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
setOutput(new NirvanaOutputList());
getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED);
getTaskConfig().setRelativeMemoryDriver(1.0f);
final BuildFirstCachedJoinDriver<Record, Record, Record> testTask =
new BuildFirstCachedJoinDriver<Record, Record, Record>();
final AtomicBoolean success = new AtomicBoolean(false);
Thread taskRunner =
new Thread() {
@Override
public void run() {
try {
testDriver(testTask, MockFailingMatchStub.class);
success.set(true);
} catch (Exception ie) {
ie.printStackTrace();
}
}
};
taskRunner.start();
TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this);
tct.start();
try {
tct.join();
taskRunner.join();
} catch (InterruptedException ie) {
fail("Joining threads failed");
}
assertThat(success)
.withFailMessage("Test threw an exception even though it was properly canceled.")
.isTrue();
}
@TestTemplate
void testHashCancelMatchTaskWhileBuildSecond() {
int keyCnt = 20;
int valCnt = 20;
addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
addInput(new DelayingInfinitiveInputIterator(100));
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
setOutput(new NirvanaOutputList());
getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED);
getTaskConfig().setRelativeMemoryDriver(1.0f);
final BuildSecondCachedJoinDriver<Record, Record, Record> testTask =
new BuildSecondCachedJoinDriver<Record, Record, Record>();
final AtomicBoolean success = new AtomicBoolean(false);
Thread taskRunner =
new Thread() {
@Override
public void run() {
try {
testDriver(testTask, MockMatchStub.class);
success.set(true);
} catch (Exception ie) {
ie.printStackTrace();
}
}
};
taskRunner.start();
TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this);
tct.start();
try {
tct.join();
taskRunner.join();
} catch (InterruptedException ie) {
fail("Joining threads failed");
}
assertThat(success)
.withFailMessage("Test threw an exception even though it was properly canceled.")
.isTrue();
}
@TestTemplate
void testHashFirstCancelMatchTaskWhileMatching() {
int keyCnt = 20;
int valCnt = 20;
addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
setOutput(new NirvanaOutputList());
getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST);
getTaskConfig().setRelativeMemoryDriver(1.0f);
final BuildFirstCachedJoinDriver<Record, Record, Record> testTask =
new BuildFirstCachedJoinDriver<Record, Record, Record>();
final AtomicBoolean success = new AtomicBoolean(false);
Thread taskRunner =
new Thread() {
@Override
public void run() {
try {
testDriver(testTask, MockMatchStub.class);
success.set(true);
} catch (Exception ie) {
ie.printStackTrace();
}
}
};
taskRunner.start();
TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this);
tct.start();
try {
tct.join();
taskRunner.join();
} catch (InterruptedException ie) {
fail("Joining threads failed");
}
assertThat(success)
.withFailMessage("Test threw an exception even though it was properly canceled.")
.isTrue();
}
@TestTemplate
void testHashSecondCancelMatchTaskWhileMatching() {
int keyCnt = 20;
int valCnt = 20;
addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
addInput(new UniformRecordGenerator(keyCnt, valCnt, false));
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get());
setOutput(new NirvanaOutputList());
getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND);
getTaskConfig().setRelativeMemoryDriver(1.0f);
final BuildSecondCachedJoinDriver<Record, Record, Record> testTask =
new BuildSecondCachedJoinDriver<Record, Record, Record>();
final AtomicBoolean success = new AtomicBoolean(false);
Thread taskRunner =
new Thread() {
@Override
public void run() {
try {
testDriver(testTask, MockMatchStub.class);
success.set(true);
} catch (Exception ie) {
ie.printStackTrace();
}
}
};
taskRunner.start();
TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this);
tct.start();
try {
tct.join();
taskRunner.join();
} catch (InterruptedException ie) {
fail("Joining threads failed");
}
assertThat(success)
.withFailMessage("Test threw an exception even though it was properly canceled.")
.isTrue();
}
// =================================================================================================
public static final | CachedMatchTaskTest |
java | elastic__elasticsearch | x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java | {
"start": 959,
"end": 3235
} | class ____ extends EsqlSpecTestCase {
@ClassRule
public static ElasticsearchCluster cluster = Clusters.mixedVersionCluster();
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
static final Version bwcVersion = Version.fromString(
System.getProperty("tests.old_cluster_version") != null
? System.getProperty("tests.old_cluster_version").replace("-SNAPSHOT", "")
: null
);
private static TestFeatureService oldClusterTestFeatureService = null;
@Before
public void extractOldClusterFeatures() {
if (oldClusterTestFeatureService == null) {
oldClusterTestFeatureService = testFeatureService;
}
}
@AfterClass
public static void cleanUp() {
oldClusterTestFeatureService = null;
}
public MixedClusterEsqlSpecIT(
String fileName,
String groupName,
String testName,
Integer lineNumber,
CsvTestCase testCase,
String instructions
) {
super(fileName, groupName, testName, lineNumber, testCase, instructions);
}
@Override
protected void shouldSkipTest(String testName) throws IOException {
super.shouldSkipTest(testName);
assumeTrue("Test " + testName + " is skipped on " + bwcVersion, isEnabled(testName, instructions, bwcVersion));
}
@Override
protected boolean enableRoundingDoubleValuesOnAsserting() {
return true;
}
@Override
protected boolean supportsInferenceTestService() {
return false;
}
@Override
protected boolean supportsIndexModeLookup() {
return hasCapabilities(adminClient(), List.of(JOIN_LOOKUP_V12.capabilityName()));
}
@Override
protected boolean supportsSourceFieldMapping() {
return false;
}
@Override
protected boolean deduplicateExactWarnings() {
/*
* In ESQL's main tests we shouldn't have to deduplicate but in
* serverless, where we reuse this test case exactly with *slightly*
* different configuration, we must deduplicate. So we do it here.
* It's a bit of a loss of precision, but that's ok.
*/
return true;
}
}
| MixedClusterEsqlSpecIT |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/bytecode/enhancement/ClassSelector.java | {
"start": 234,
"end": 486
} | class ____ implements EnhancementSelector {
private final String className;
public ClassSelector(String className) {
this.className = className;
}
@Override
public boolean select(String name) {
return name.equals( className );
}
}
| ClassSelector |
java | spring-projects__spring-boot | integration-test/spring-boot-test-integration-tests/src/test/java/org/springframework/boot/web/server/test/SpringBootTestWebEnvironmentDefinedPortTests.java | {
"start": 1363,
"end": 1558
} | class ____ extends AbstractSpringBootTestWebServerWebEnvironmentTests {
@Configuration(proxyBeanMethods = false)
@EnableWebMvc
@RestController
static | SpringBootTestWebEnvironmentDefinedPortTests |
java | apache__camel | components/camel-as2/camel-as2-component/src/test/java/org/apache/camel/component/as2/AS2ServerSecUnsignedUnencryptedIT.java | {
"start": 1467,
"end": 2693
} | class ____ extends AS2ServerSecTestBase {
// verify message types that fail decryption when encrypted with an invalid cert
@ParameterizedTest
@EnumSource(value = AS2MessageStructure.class,
names = {
"ENCRYPTED", "SIGNED_ENCRYPTED", "ENCRYPTED_COMPRESSED", "ENCRYPTED_COMPRESSED_SIGNED",
"ENCRYPTED_SIGNED_COMPRESSED" })
public void cannotDecryptFailureTest(AS2MessageStructure messageStructure) throws Exception {
HttpCoreContext context = sendWithInvalidEncryption(messageStructure);
verifyOkResponse(context);
verifyMdnErrorDisposition(context, AS2DispositionModifier.ERROR_DECRYPTION_FAILED);
}
// verify message types that are successfully processed
@ParameterizedTest
@EnumSource(value = AS2MessageStructure.class,
names = { "PLAIN", "SIGNED", "PLAIN_COMPRESSED", "COMPRESSED_SIGNED", "SIGNED_COMPRESSED" })
public void successfullyProcessedTest(AS2MessageStructure messageStructure) throws Exception {
HttpCoreContext context = send(messageStructure);
verifyOkResponse(context);
verifyMdnSuccessDisposition(context);
}
}
| AS2ServerSecUnsignedUnencryptedIT |
java | apache__camel | components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/integration/FtpBrowsableEndpointIT.java | {
"start": 1561,
"end": 5456
} | class ____ extends FtpServerTestSupport {
private Path browseDir;
private String getFtpUrl() {
return "ftp://admin@localhost:{{ftp.server.port}}/browse?password=admin";
}
@BeforeEach
public void createDir() {
browseDir = service.getFtpRootDir().resolve("browse");
deleteDirectory(browseDir);
createDirectory(browseDir);
}
@Test
public void testBrowsableNoFiles() {
BrowsableEndpoint browse = context.getEndpoint(getFtpUrl(), BrowsableEndpoint.class);
assertNotNull(browse);
List<Exchange> list = browse.getExchanges();
assertNotNull(list);
assertEquals(0, list.size());
}
@Test
public void testBrowsableOneFile() {
template.sendBodyAndHeader(getFtpUrl(), "A", Exchange.FILE_NAME, "a.txt");
FtpEndpoint<?> endpoint = context.getEndpoint(getFtpUrl(), FtpEndpoint.class);
assertNotNull(endpoint);
MemoryIdempotentRepository repo = (MemoryIdempotentRepository) endpoint.getInProgressRepository();
assertEquals(0, repo.getCacheSize());
List<Exchange> list = endpoint.getExchanges();
assertNotNull(list);
assertEquals(1, list.size());
assertEquals("a.txt", list.get(0).getIn().getHeader(Exchange.FILE_NAME));
// the in progress repo should not leak
assertEquals(0, repo.getCacheSize());
// and the file is still there
assertFileExists(browseDir.resolve("a.txt"));
}
@Test
public void testBrowsableTwoFiles() {
template.sendBodyAndHeader(getFtpUrl(), "A", Exchange.FILE_NAME, "a.txt");
template.sendBodyAndHeader(getFtpUrl(), "B", Exchange.FILE_NAME, "b.txt");
FtpEndpoint<?> endpoint = context.getEndpoint(getFtpUrl() + "&sortBy=file:name", FtpEndpoint.class);
assertNotNull(endpoint);
MemoryIdempotentRepository repo = (MemoryIdempotentRepository) endpoint.getInProgressRepository();
assertEquals(0, repo.getCacheSize());
List<Exchange> list = endpoint.getExchanges();
assertNotNull(list);
assertEquals(2, list.size());
assertEquals("a.txt", list.get(0).getIn().getHeader(Exchange.FILE_NAME));
assertEquals("b.txt", list.get(1).getIn().getHeader(Exchange.FILE_NAME));
// the in progress repo should not leak
assertEquals(0, repo.getCacheSize());
// and the files is still there
assertFileExists(browseDir.resolve("a.txt"));
assertFileExists(browseDir.resolve("b.txt"));
}
@Test
public void testBrowsableThreeFilesRecursive() {
template.sendBodyAndHeader(getFtpUrl(), "A", Exchange.FILE_NAME, "a.txt");
template.sendBodyAndHeader(getFtpUrl(), "B", Exchange.FILE_NAME, "foo/b.txt");
template.sendBodyAndHeader(getFtpUrl(), "C", Exchange.FILE_NAME, "bar/c.txt");
FtpEndpoint<?> endpoint = context.getEndpoint(getFtpUrl() + "&recursive=true&sortBy=file:name", FtpEndpoint.class);
assertNotNull(endpoint);
MemoryIdempotentRepository repo = (MemoryIdempotentRepository) endpoint.getInProgressRepository();
assertEquals(0, repo.getCacheSize());
List<Exchange> list = endpoint.getExchanges();
assertNotNull(list);
assertEquals(3, list.size());
assertEquals("a.txt", list.get(0).getIn().getHeader(Exchange.FILE_NAME));
assertEquals("c.txt", list.get(1).getIn().getHeader(Exchange.FILE_NAME_ONLY));
assertEquals("b.txt", list.get(2).getIn().getHeader(Exchange.FILE_NAME_ONLY));
// the in progress repo should not leak
assertEquals(0, repo.getCacheSize());
// and the files is still there
assertFileExists(browseDir.resolve("a.txt"));
assertFileExists(browseDir.resolve("foo/b.txt"));
assertFileExists(browseDir.resolve("bar/c.txt"));
}
}
| FtpBrowsableEndpointIT |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/extraction/DataTypeExtractor.java | {
"start": 4217,
"end": 20317
} | class ____ {
private static final Set<Class<?>> INTERNAL_DATA_STRUCTURES = new HashSet<>();
static {
INTERNAL_DATA_STRUCTURES.add(RowData.class);
INTERNAL_DATA_STRUCTURES.add(StringData.class);
INTERNAL_DATA_STRUCTURES.add(TimestampData.class);
INTERNAL_DATA_STRUCTURES.add(DecimalData.class);
INTERNAL_DATA_STRUCTURES.add(ArrayData.class);
INTERNAL_DATA_STRUCTURES.add(MapData.class);
INTERNAL_DATA_STRUCTURES.add(RawValueData.class);
}
private final DataTypeFactory typeFactory;
private final String contextExplanation;
private DataTypeExtractor(DataTypeFactory typeFactory, String contextExplanation) {
this.typeFactory = typeFactory;
this.contextExplanation = contextExplanation;
}
// --------------------------------------------------------------------------------------------
// Methods that extract a data type from a JVM Type without any prior information
// --------------------------------------------------------------------------------------------
/** Extracts a data type from a type without considering surrounding classes or templates. */
public static DataType extractFromType(DataTypeFactory typeFactory, Type type) {
return extractDataTypeWithClassContext(
typeFactory, DataTypeTemplate.fromDefaults(), null, type, "");
}
/** Extracts a data type from a type without considering surrounding classes but templates. */
static DataType extractFromType(
DataTypeFactory typeFactory, DataTypeTemplate template, Type type) {
return extractDataTypeWithClassContext(typeFactory, template, null, type, "");
}
/**
* Extracts a data type from a type variable at {@code genericPos} of {@code baseClass} using
* the information of the most specific type {@code contextType}.
*/
public static DataType extractFromGeneric(
DataTypeFactory typeFactory, Class<?> baseClass, int genericPos, Type contextType) {
final TypeVariable<?> variable = baseClass.getTypeParameters()[genericPos];
return extractDataTypeWithClassContext(
typeFactory,
DataTypeTemplate.fromDefaults(),
contextType,
variable,
String.format(
" in generic class '%s' in %s",
baseClass.getName(), contextType.toString()));
}
/**
* Extracts a data type from a method parameter by considering surrounding classes and parameter
* annotation.
*/
public static DataType extractFromMethodParameter(
DataTypeFactory typeFactory, Class<?> baseClass, Method method, int paramPos) {
final Parameter parameter = method.getParameters()[paramPos];
final DataTypeHint hint = parameter.getAnnotation(DataTypeHint.class);
final ArgumentHint argumentHint = parameter.getAnnotation(ArgumentHint.class);
final StateHint stateHint = parameter.getAnnotation(StateHint.class);
final DataTypeTemplate template;
if (stateHint != null) {
template = DataTypeTemplate.fromAnnotation(typeFactory, stateHint.type());
} else if (argumentHint != null) {
template = DataTypeTemplate.fromAnnotation(typeFactory, argumentHint.type());
} else if (hint != null) {
template = DataTypeTemplate.fromAnnotation(typeFactory, hint);
} else {
template = DataTypeTemplate.fromDefaults();
}
return extractDataTypeWithClassContext(
typeFactory,
template,
baseClass,
parameter.getParameterizedType(),
String.format(
" in parameter %d of method '%s' in class '%s'",
paramPos, method.getName(), baseClass.getName()));
}
/**
* Extracts a data type from a method parameter by considering surrounding classes and parameter
* annotation. This version assumes that the parameter is a generic type, and uses the generic
* position type as the extracted data type. For example, if the parameter is a
* CompletableFuture<Long> and genericPos is 0, it will extract Long.
*/
public static DataType extractFromGenericMethodParameter(
DataTypeFactory typeFactory,
Class<?> baseClass,
Method method,
int paramPos,
int genericPos) {
Type parameterType = method.getGenericParameterTypes()[paramPos];
parameterType = resolveVariableWithClassContext(baseClass, parameterType);
if (!(parameterType instanceof ParameterizedType)) {
throw extractionError(
"The method '%s' needs generic parameters for the %d arg.",
method.getName(), paramPos);
}
final Type genericParameterType =
((ParameterizedType) parameterType).getActualTypeArguments()[genericPos];
final Parameter parameter = method.getParameters()[paramPos];
final DataTypeHint hint = parameter.getAnnotation(DataTypeHint.class);
final DataTypeTemplate template;
if (hint != null) {
template = DataTypeTemplate.fromAnnotation(typeFactory, hint);
} else {
template = DataTypeTemplate.fromDefaults();
}
return extractDataTypeWithClassContext(
typeFactory,
template,
baseClass,
genericParameterType,
String.format(
" in generic parameter %d of method '%s' in class '%s'",
paramPos, method.getName(), baseClass.getName()));
}
/**
* Extracts a data type from a method return type by considering surrounding classes and method
* annotation.
*/
public static DataType extractFromMethodReturnType(
DataTypeFactory typeFactory, Class<?> baseClass, Method method) {
return extractFromMethodReturnType(
typeFactory, baseClass, method, method.getGenericReturnType());
}
/**
* Extracts a data type from a method return type with specifying the method's type explicitly
* by considering surrounding classes and method annotation.
*/
public static DataType extractFromMethodReturnType(
DataTypeFactory typeFactory, Class<?> baseClass, Method method, Type methodReturnType) {
final DataTypeHint hint = method.getAnnotation(DataTypeHint.class);
final DataTypeTemplate template;
if (hint != null) {
template = DataTypeTemplate.fromAnnotation(typeFactory, hint);
} else {
template = DataTypeTemplate.fromDefaults();
}
return extractDataTypeWithClassContext(
typeFactory,
template,
baseClass,
methodReturnType,
String.format(
" in return type of method '%s' in class '%s'",
method.getName(), baseClass.getName()));
}
// --------------------------------------------------------------------------------------------
// Methods that extract a data type from a JVM Class with prior logical information
// --------------------------------------------------------------------------------------------
public static DataType extractFromStructuredClass(
DataTypeFactory typeFactory, Class<?> implementationClass) {
final DataType dataType =
extractDataTypeWithClassContext(
typeFactory,
DataTypeTemplate.fromDefaults(),
implementationClass.getEnclosingClass(),
implementationClass,
"");
if (!dataType.getLogicalType().is(LogicalTypeRoot.STRUCTURED_TYPE)) {
throw extractionError(
"Structured data type expected for class '%s' but was: %s",
implementationClass.getName(), dataType);
}
return dataType;
}
// --------------------------------------------------------------------------------------------
// Supporting methods
// --------------------------------------------------------------------------------------------
private static DataType extractDataTypeWithClassContext(
DataTypeFactory typeFactory,
DataTypeTemplate outerTemplate,
@Nullable Type contextType,
Type type,
String contextExplanation) {
final DataTypeExtractor extractor = new DataTypeExtractor(typeFactory, contextExplanation);
final List<Type> typeHierarchy;
if (contextType != null) {
typeHierarchy = collectTypeHierarchy(contextType);
} else {
typeHierarchy = Collections.emptyList();
}
return extractor.extractDataTypeOrRaw(outerTemplate, typeHierarchy, type);
}
private DataType extractDataTypeOrRaw(
DataTypeTemplate outerTemplate, List<Type> typeHierarchy, Type type) {
// best effort resolution of type variables, the resolved type can still be a variable
final Type resolvedType;
if (type instanceof TypeVariable) {
resolvedType = resolveVariable(typeHierarchy, (TypeVariable<?>) type);
} else {
resolvedType = type;
}
// merge outer template with template of type itself
DataTypeTemplate template = outerTemplate;
final Class<?> clazz = toClass(resolvedType);
if (clazz != null) {
final DataTypeHint hint = clazz.getAnnotation(DataTypeHint.class);
final ArgumentHint argumentHint = clazz.getAnnotation(ArgumentHint.class);
if (hint != null) {
template = outerTemplate.mergeWithInnerAnnotation(typeFactory, hint);
} else if (argumentHint != null) {
template = outerTemplate.mergeWithInnerAnnotation(typeFactory, argumentHint.type());
}
}
// main work
DataType dataType = extractDataTypeOrRawWithTemplate(template, typeHierarchy, resolvedType);
// handle data views
dataType = handleDataViewHints(dataType, clazz);
// final work
return closestBridging(dataType, clazz);
}
private DataType extractDataTypeOrRawWithTemplate(
DataTypeTemplate template, List<Type> typeHierarchy, Type type) {
// template defines a data type
if (template.dataType != null) {
return template.dataType;
}
try {
return extractDataTypeOrError(template, typeHierarchy, type);
} catch (Throwable t) {
// ignore the exception and just treat it as RAW type
final Class<?> clazz = toClass(type);
if (template.isAllowRawGlobally() || template.isAllowAnyPattern(clazz)) {
return createRawType(typeFactory, template.rawSerializer, clazz);
}
// forward the root cause otherwise
throw extractionError(
t,
"Could not extract a data type from '%s'%s. "
+ "Please pass the required data type manually or allow RAW types.",
type.toString(),
contextExplanation);
}
}
private DataType extractDataTypeOrError(
DataTypeTemplate template, List<Type> typeHierarchy, Type type) {
// still a type variable
if (type instanceof TypeVariable) {
throw extractionError(
"Unresolved type variable '%s'. A data type cannot be extracted from a type variable. "
+ "The original content might have been erased due to Java type erasure.",
type.toString());
}
// ARRAY
DataType resultDataType = extractArrayType(template, typeHierarchy, type);
if (resultDataType != null) {
return resultDataType;
}
// skip extraction for enforced patterns early but after arrays
resultDataType = extractEnforcedRawType(template, type);
if (resultDataType != null) {
return resultDataType;
}
// early and helpful exception for common mistakes
checkForCommonErrors(type);
// PREDEFINED or DESCRIPTOR
resultDataType = extractPredefinedOrDescriptorType(template, type);
if (resultDataType != null) {
return resultDataType;
}
// MAP
resultDataType = extractMapType(template, typeHierarchy, type);
if (resultDataType != null) {
return resultDataType;
}
// AVRO
resultDataType = extractAvroType(type);
if (resultDataType != null) {
return resultDataType;
}
// try interpret the type as a STRUCTURED type
try {
return extractStructuredType(template, typeHierarchy, type);
} catch (Throwable t) {
throw extractionError(
t,
"Could not extract a data type from '%s'. "
+ "Interpreting it as a structured type was also not successful.",
type.toString());
}
}
private @Nullable DataType extractArrayType(
DataTypeTemplate template, List<Type> typeHierarchy, Type type) {
// prefer BYTES over ARRAY<TINYINT> for byte[]
if (type == byte[].class) {
return DataTypes.BYTES();
}
// for T[]
else if (type instanceof GenericArrayType) {
final GenericArrayType genericArray = (GenericArrayType) type;
return DataTypes.ARRAY(
extractDataTypeOrRaw(
template, typeHierarchy, genericArray.getGenericComponentType()));
}
final Class<?> clazz = toClass(type);
if (clazz == null) {
return null;
}
// for my.custom.Pojo[][]
if (clazz.isArray()) {
return DataTypes.ARRAY(
extractDataTypeOrRaw(template, typeHierarchy, clazz.getComponentType()));
}
// for List<T>
// we only allow List here (not a subclass) because we cannot guarantee more specific
// data structures after conversion
if (clazz != List.class) {
return null;
}
if (!(type instanceof ParameterizedType)) {
throw extractionError(
"The class '%s' needs generic parameters for an array type.",
List.class.getName());
}
final ParameterizedType parameterizedType = (ParameterizedType) type;
final DataType element =
extractDataTypeOrRaw(
template, typeHierarchy, parameterizedType.getActualTypeArguments()[0]);
return DataTypes.ARRAY(element).bridgedTo(List.class);
}
private @Nullable DataType extractEnforcedRawType(DataTypeTemplate template, Type type) {
final Class<?> clazz = toClass(type);
if (template.isForceAnyPattern(clazz)) {
return createRawType(typeFactory, template.rawSerializer, clazz);
}
return null;
}
private void checkForCommonErrors(Type type) {
final Class<?> clazz = toClass(type);
if (clazz == null) {
return;
}
if (clazz == Row.class) {
throw extractionError(
"Cannot extract a data type from a pure '%s' class. "
+ "Please use annotations to define field names and field types.",
Row.class.getName());
} else if (clazz == Object.class) {
throw extractionError(
"Cannot extract a data type from a pure '%s' class. "
+ "Usually, this indicates that | DataTypeExtractor |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/cfg/EnvironmentSettings.java | {
"start": 1548,
"end": 1726
} | class ____.
*
* @see org.hibernate.boot.registry.classloading.internal.TcclLookupPrecedence
*/
String TC_CLASSLOADER = "hibernate.classLoader.tccl_lookup_precedence";
}
| lookup |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnnecessarilyFullyQualifiedTest.java | {
"start": 5446,
"end": 5587
} | class ____ {}
""")
.expectUnchanged()
.addInputLines(
"Test.java",
"""
| Annotation |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/statement/MySqlShowRelayLogEventsStatement.java | {
"start": 837,
"end": 1701
} | class ____ extends MySqlStatementImpl implements MySqlShowStatement {
private SQLExpr logName;
private SQLExpr from;
private SQLLimit limit;
public SQLExpr getLogName() {
return logName;
}
public void setLogName(SQLExpr logName) {
this.logName = logName;
}
public SQLExpr getFrom() {
return from;
}
public void setFrom(SQLExpr from) {
this.from = from;
}
public SQLLimit getLimit() {
return limit;
}
public void setLimit(SQLLimit limit) {
this.limit = limit;
}
public void accept0(MySqlASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, logName);
acceptChild(visitor, from);
acceptChild(visitor, limit);
}
visitor.endVisit(this);
}
}
| MySqlShowRelayLogEventsStatement |
java | google__guava | android/guava-tests/test/com/google/common/primitives/LongArrayAsListTest.java | {
"start": 3172,
"end": 3360
} | class ____ extends TestLongListGenerator {
@Override
protected List<Long> create(Long[] elements) {
return asList(elements);
}
}
public static final | LongsAsListGenerator |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/materializedtable/AlterMaterializedTableChangeOperation.java | {
"start": 1543,
"end": 4429
} | class ____ extends AlterMaterializedTableOperation {
private final List<MaterializedTableChange> tableChanges;
private final CatalogMaterializedTable catalogMaterializedTable;
public AlterMaterializedTableChangeOperation(
ObjectIdentifier tableIdentifier,
List<MaterializedTableChange> tableChanges,
CatalogMaterializedTable catalogMaterializedTable) {
super(tableIdentifier);
this.tableChanges = tableChanges;
this.catalogMaterializedTable = catalogMaterializedTable;
}
public List<MaterializedTableChange> getTableChanges() {
return tableChanges;
}
public CatalogMaterializedTable getCatalogMaterializedTable() {
return catalogMaterializedTable;
}
@Override
public TableResultInternal execute(Context ctx) {
ctx.getCatalogManager()
.alterTable(
getCatalogMaterializedTable(),
getTableChanges().stream()
.map(TableChange.class::cast)
.collect(Collectors.toList()),
getTableIdentifier(),
false);
return TableResultImpl.TABLE_RESULT_OK;
}
@Override
public String asSummaryString() {
String changes =
tableChanges.stream()
.map(AlterMaterializedTableChangeOperation::toString)
.collect(Collectors.joining(",\n"));
return String.format(
"ALTER MATERIALIZED TABLE %s\n%s", tableIdentifier.asSummaryString(), changes);
}
private static String toString(MaterializedTableChange tableChange) {
if (tableChange instanceof TableChange.ModifyRefreshStatus) {
TableChange.ModifyRefreshStatus refreshStatus =
(TableChange.ModifyRefreshStatus) tableChange;
return String.format(
" MODIFY REFRESH STATUS TO '%s'", refreshStatus.getRefreshStatus());
} else if (tableChange instanceof TableChange.ModifyRefreshHandler) {
TableChange.ModifyRefreshHandler refreshHandler =
(TableChange.ModifyRefreshHandler) tableChange;
return String.format(
" MODIFY REFRESH HANDLER DESCRIPTION TO '%s'",
refreshHandler.getRefreshHandlerDesc());
} else if (tableChange instanceof TableChange.ModifyDefinitionQuery) {
TableChange.ModifyDefinitionQuery definitionQuery =
(TableChange.ModifyDefinitionQuery) tableChange;
return String.format(
" MODIFY DEFINITION QUERY TO '%s'", definitionQuery.getDefinitionQuery());
} else {
return AlterTableChangeOperation.toString(tableChange);
}
}
}
| AlterMaterializedTableChangeOperation |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/event/EventUtils.java | {
"start": 1168,
"end": 1213
} | class ____ {
private static final | EventUtils |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/tasks/RawTaskStatus.java | {
"start": 1091,
"end": 2846
} | class ____ implements Task.Status {
public static final String NAME = "raw";
private final BytesReference status;
public RawTaskStatus(BytesReference status) {
this.status = requireNonNull(status, "status may not be null");
}
/**
* Read from a stream.
*/
public RawTaskStatus(StreamInput in) throws IOException {
status = in.readOptionalBytesReference();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalBytesReference(status);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
try (InputStream stream = status.streamInput()) {
return builder.rawValue(stream, XContentHelper.xContentType(status));
}
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public String toString() {
return Strings.toString(this);
}
/**
* Convert the from XContent to a Map for easy reading.
*/
public Map<String, Object> toMap() {
return convertToMap(status, false).v2();
}
// Implements equals and hashcode for testing
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != RawTaskStatus.class) {
return false;
}
RawTaskStatus other = (RawTaskStatus) obj;
// Totally not efficient, but ok for testing because it ignores order and spacing differences
return toMap().equals(other.toMap());
}
@Override
public int hashCode() {
// Totally not efficient, but ok for testing because consistent with equals
return toMap().hashCode();
}
}
| RawTaskStatus |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlSpMetadataAction.java | {
"start": 1192,
"end": 2476
} | class ____ extends SamlBaseRestHandler {
public RestSamlSpMetadataAction(Settings settings, XPackLicenseState licenseState) {
super(settings, licenseState);
}
@Override
public List<Route> routes() {
return List.of(new Route(GET, "/_security/saml/metadata/{realm}"));
}
@Override
public String getName() {
return "security_saml_metadata_action";
}
@Override
public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException {
final SamlSpMetadataRequest SamlSpMetadataRequest = new SamlSpMetadataRequest(request.param("realm"));
return channel -> client.execute(
SamlSpMetadataAction.INSTANCE,
SamlSpMetadataRequest,
new RestBuilderListener<SamlSpMetadataResponse>(channel) {
@Override
public RestResponse buildResponse(SamlSpMetadataResponse response, XContentBuilder builder) throws Exception {
builder.startObject();
builder.field("metadata", response.getXMLString());
builder.endObject();
return new RestResponse(RestStatus.OK, builder);
}
}
);
}
}
| RestSamlSpMetadataAction |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java | {
"start": 13737,
"end": 14019
} | class ____ extends DisruptedLinks {
public IsolateAllNodes(Set<String> nodes) {
super(nodes);
}
@Override
public boolean disrupt(String node1, String node2) {
return true;
}
}
/**
* Abstract | IsolateAllNodes |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/datasource/embedded/OutputStreamFactory.java | {
"start": 935,
"end": 1276
} | class ____ {
private OutputStreamFactory() {
}
/**
* Returns an {@link java.io.OutputStream} that ignores all data given to it.
*/
public static OutputStream getNoopOutputStream() {
return new OutputStream() {
@Override
public void write(int b) throws IOException {
// ignore the output
}
};
}
}
| OutputStreamFactory |
java | elastic__elasticsearch | x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/LegacyAdaptingPerFieldPostingsFormat.java | {
"start": 3563,
"end": 7839
} | class ____ extends FieldsProducer {
private final Map<String, FieldsProducer> fields = new TreeMap<>();
private final Map<String, FieldsProducer> formats = new HashMap<>();
private final String segment;
// clone for merge
FieldsReader(FieldsReader other) {
Map<FieldsProducer, FieldsProducer> oldToNew = new IdentityHashMap<>();
// First clone all formats
for (Map.Entry<String, FieldsProducer> ent : other.formats.entrySet()) {
FieldsProducer values = ent.getValue().getMergeInstance();
formats.put(ent.getKey(), values);
oldToNew.put(ent.getValue(), values);
}
// Then rebuild fields:
for (Map.Entry<String, FieldsProducer> ent : other.fields.entrySet()) {
FieldsProducer producer = oldToNew.get(ent.getValue());
assert producer != null;
fields.put(ent.getKey(), producer);
}
segment = other.segment;
}
FieldsReader(final SegmentReadState readState) throws IOException {
// Read _X.per and init each format:
boolean success = false;
try {
// Read field name -> format name
for (FieldInfo fi : readState.fieldInfos) {
if (fi.getIndexOptions() != IndexOptions.NONE) {
final String fieldName = fi.name;
final String formatName = fi.getAttribute(PER_FIELD_FORMAT_KEY);
if (formatName != null) {
// null formatName means the field is in fieldInfos, but has no postings!
final String suffix = fi.getAttribute(PER_FIELD_SUFFIX_KEY);
if (suffix == null) {
throw new IllegalStateException("missing attribute: " + PER_FIELD_SUFFIX_KEY + " for field: " + fieldName);
}
PostingsFormat format = getPostingsFormat(formatName);
String segmentSuffix = getSuffix(formatName, suffix);
if (formats.containsKey(segmentSuffix) == false) {
formats.put(segmentSuffix, format.fieldsProducer(new SegmentReadState(readState, segmentSuffix)));
}
fields.put(fieldName, formats.get(segmentSuffix));
}
}
}
success = true;
} finally {
if (success == false) {
IOUtils.closeWhileHandlingException(formats.values());
}
}
this.segment = readState.segmentInfo.name;
}
@Override
public Iterator<String> iterator() {
return Collections.unmodifiableSet(fields.keySet()).iterator();
}
@Override
public Terms terms(String field) throws IOException {
FieldsProducer fieldsProducer = fields.get(field);
return fieldsProducer == null ? null : fieldsProducer.terms(field);
}
@Override
public int size() {
return fields.size();
}
@Override
public void close() throws IOException {
IOUtils.close(formats.values());
}
@Override
public void checkIntegrity() throws IOException {
for (FieldsProducer producer : formats.values()) {
producer.checkIntegrity();
}
}
@Override
public FieldsProducer getMergeInstance() {
return new FieldsReader(this);
}
@Override
public String toString() {
return "PerFieldPostings(segment=" + segment + " formats=" + formats.size() + ")";
}
}
@Override
public FieldsConsumer fieldsConsumer(SegmentWriteState state) {
throw new IllegalStateException("This codec should only be used for reading, not writing");
}
@Override
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
return new FieldsReader(state);
}
}
| FieldsReader |
java | apache__flink | flink-connectors/flink-connector-files/src/test/java/org/apache/flink/connector/file/src/impl/FileRecordsTest.java | {
"start": 1294,
"end": 4564
} | class ____ {
@Test
void testEmptySplits() {
final String split = "empty";
final FileRecords<Object> records = FileRecords.finishedSplit(split);
assertThat(records.finishedSplits()).isEqualTo(Collections.singleton(split));
}
@Test
void testMoveToFirstSplit() {
final String splitId = "splitId";
final FileRecords<Object> records =
FileRecords.forRecords(splitId, new SingletonResultIterator<>());
final String firstSplitId = records.nextSplit();
assertThat(splitId).isEqualTo(firstSplitId);
}
@Test
void testMoveToSecondSplit() {
final FileRecords<Object> records =
FileRecords.forRecords("splitId", new SingletonResultIterator<>());
records.nextSplit();
final String secondSplitId = records.nextSplit();
assertThat(secondSplitId).isNull();
}
@Test
void testRecordsFromFirstSplit() {
final SingletonResultIterator<String> iter = new SingletonResultIterator<>();
iter.set("test", 18, 99);
final FileRecords<String> records = FileRecords.forRecords("splitId", iter);
records.nextSplit();
final RecordAndPosition<String> recAndPos = records.nextRecordFromSplit();
assertThat(recAndPos.getRecord()).isEqualTo("test");
assertThat(recAndPos.getOffset()).isEqualTo(18);
assertThat(recAndPos.getRecordSkipCount()).isEqualTo(99);
}
@Test
void testRecordsInitiallyIllegal() {
final FileRecords<Object> records =
FileRecords.forRecords("splitId", new SingletonResultIterator<>());
assertThatThrownBy(records::nextRecordFromSplit).isInstanceOf(IllegalStateException.class);
}
@Test
void testRecordsOnSecondSplitIllegal() {
final FileRecords<Object> records =
FileRecords.forRecords("splitId", new SingletonResultIterator<>());
records.nextSplit();
records.nextSplit();
assertThatThrownBy(records::nextRecordFromSplit).isInstanceOf(IllegalStateException.class);
}
@Test
void testRecycleExhaustedBatch() {
final AtomicBoolean recycled = new AtomicBoolean(false);
final SingletonResultIterator<Object> iter =
new SingletonResultIterator<>(() -> recycled.set(true));
iter.set(new Object(), 1L, 2L);
final FileRecords<Object> records = FileRecords.forRecords("test split", iter);
records.nextSplit();
records.nextRecordFromSplit();
// make sure we exhausted the iterator
assertThat(records.nextRecordFromSplit()).isNull();
assertThat(records.nextSplit()).isNull();
records.recycle();
assertThat(recycled.get()).isTrue();
}
@Test
void testRecycleNonExhaustedBatch() {
final AtomicBoolean recycled = new AtomicBoolean(false);
final SingletonResultIterator<Object> iter =
new SingletonResultIterator<>(() -> recycled.set(true));
iter.set(new Object(), 1L, 2L);
final FileRecords<Object> records = FileRecords.forRecords("test split", iter);
records.nextSplit();
records.recycle();
assertThat(recycled.get()).isTrue();
}
}
| FileRecordsTest |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/aot/AbstractAotContextLoader.java | {
"start": 1132,
"end": 1721
} | class ____ extends AbstractContextLoader implements AotContextLoader {
@Override
public final GenericApplicationContext loadContext(MergedContextConfiguration mergedConfig) {
return new StaticApplicationContext();
}
@Override
public final GenericApplicationContext loadContextForAotRuntime(MergedContextConfiguration mergedConfig,
ApplicationContextInitializer<ConfigurableApplicationContext> initializer) {
return loadContext(mergedConfig);
}
@Override
protected final String getResourceSuffix() {
throw new UnsupportedOperationException();
}
}
| AbstractAotContextLoader |
java | processing__processing4 | app/src/processing/app/contrib/ManagerFrame.java | {
"start": 1212,
"end": 6937
} | class ____ {
static final String ANY_CATEGORY = Language.text("contrib.all");
static final int AUTHOR_WIDTH = Toolkit.zoom(240);
static final int STATUS_WIDTH = Toolkit.zoom(66);
static final int VERSION_WIDTH = Toolkit.zoom(66);
static final String title = "Contribution Manager";
Base base;
JFrame frame;
ManagerTabs tabs;
ContributionTab librariesTab;
ContributionTab modesTab;
ContributionTab toolsTab;
ContributionTab examplesTab;
UpdateContributionTab updatesTab;
ContributionTab[] tabList;
public ManagerFrame(Base base) {
this.base = base;
librariesTab = new ContributionTab(this, ContributionType.LIBRARY);
modesTab = new ContributionTab(this, ContributionType.MODE);
toolsTab = new ContributionTab(this, ContributionType.TOOL);
examplesTab = new ContributionTab(this, ContributionType.EXAMPLES);
updatesTab = new UpdateContributionTab(this);
tabList = new ContributionTab[] {
librariesTab, modesTab, toolsTab, examplesTab, updatesTab
};
}
public void showFrame(ContributionType contributionType) {
ContributionTab showTab = getTab(contributionType);
if (frame == null) {
// Build the Contribution Manager UI on first use.
makeFrame();
// Update the list of contribs with what's installed locally.
ContributionListing.updateInstalled(base.getInstalledContribs());
// Set the list of categories on first use. If a new category is added
// from an already-installed contrib, or in the downloaded contribs list,
// it won't be included. Yech! But practically speaking… [fry 230114]
getTab(ContributionType.LIBRARY).updateCategoryChooser();
// TODO If it's the updates tab, need to reset the list. This is papering
// over a concurrency bug with adding/removing contribs during the
// initial load/startup, but probably always relevant. [fry 230115]
// if (showTab.contribType == null) {
for (ContributionTab tab : tabList) {
//tab.listPanel.model.fireTableDataChanged();
}
}
tabs.setPanel(showTab);
frame.setVisible(true);
// Avoid the search box taking focus and hiding the 'search' text
tabs.requestFocusInWindow();
}
private void makeFrame() {
frame = new JFrame(title);
frame.setMinimumSize(Toolkit.zoom(750, 500));
tabs = new ManagerTabs();
//rebuildTabLayouts(false, true);
// for (ContributionTab tab : tabList) {
// tab.rebuildLayout();
// }
tabs.addPanel(librariesTab, "Libraries");
tabs.addPanel(modesTab, "Modes");
tabs.addPanel(toolsTab, "Tools");
tabs.addPanel(examplesTab, "Examples");
tabs.addPanel(updatesTab, "Updates");
frame.setResizable(true);
frame.getContentPane().add(tabs);
updateTheme();
frame.validate();
frame.repaint();
Toolkit.setIcon(frame);
registerDisposeListeners();
frame.pack();
frame.setLocationRelativeTo(null);
}
protected void updateTheme() {
// don't update if the Frame doesn't actually exist yet
// https://github.com/processing/processing4/issues/476
if (frame != null) {
Color bgColor = Theme.getColor("manager.tab.background");
frame.getContentPane().setBackground(bgColor);
tabs.updateTheme();
for (ContributionTab tab : tabList) {
tab.updateTheme();
}
}
}
/**
* Close the window after an OK or Cancel.
*/
protected void disposeFrame() {
frame.dispose();
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
private void registerDisposeListeners() {
frame.addWindowListener(new WindowAdapter() {
public void windowClosing(WindowEvent e) {
disposeFrame();
}
});
// handle window closing commands for ctrl/cmd-W or hitting ESC.
Toolkit.registerWindowCloseKeys(frame.getRootPane(), actionEvent -> disposeFrame());
frame.getContentPane().addKeyListener(new KeyAdapter() {
public void keyPressed(KeyEvent e) {
//System.out.println(e);
KeyStroke wc = Toolkit.WINDOW_CLOSE_KEYSTROKE;
if ((e.getKeyCode() == KeyEvent.VK_ESCAPE)
|| (KeyStroke.getKeyStrokeForEvent(e).equals(wc))) {
disposeFrame();
}
}
});
}
/*
// TODO move this to ContributionTab (this is handled weirdly, period) [fry]
//void downloadAndUpdateContributionListing(Base base) {
void downloadAndUpdateContributionListing() {
//activeTab is required now but should be removed
//as there is only one instance of contribListing, and it should be present in this class
// final ContributionTab activeTab = getActiveTab();
ContributionTab activeTab = (ContributionTab) tabs.getPanel();
// activeTab.updateContributionListing();
ContributionListing.updateInstalled(base);
activeTab.updateCategoryChooser();
//rebuildTabLayouts(false, false);
//activeTab.rebuildLayout(false, false);
activeTab.rebuildLayout();
}
*/
/*
protected void rebuildTabLayouts(boolean error, boolean loading) {
for (ContributionTab tab : tabList) {
tab.rebuildLayout(error, loading);
}
}
*/
protected ContributionTab getTab(ContributionType contributionType) {
if (contributionType == ContributionType.LIBRARY) {
return librariesTab;
} else if (contributionType == ContributionType.MODE) {
return modesTab;
} else if (contributionType == ContributionType.TOOL) {
return toolsTab;
} else if (contributionType == ContributionType.EXAMPLES) {
return examplesTab;
}
return updatesTab;
}
// ContributionTab getActiveTab() {
// return (ContributionTab) tabs.getPanel();
// }
}
| ManagerFrame |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.