language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/enums/EnumSerializationTest.java | {
"start": 1237,
"end": 1420
} | enum ____ {
A2, B2, C2;
private AnnotatedTestEnum() { }
@Override public String toString() { return name().toLowerCase(); }
}
protected | AnnotatedTestEnum |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/support/SQLStateSQLExceptionTranslator.java | {
"start": 2470,
"end": 6270
} | class ____ extends AbstractFallbackSQLExceptionTranslator {
private static final Set<String> BAD_SQL_GRAMMAR_CODES = Set.of(
"07", // Dynamic SQL error
"21", // Cardinality violation
"2A", // Syntax error direct SQL
"37", // Syntax error dynamic SQL
"42", // General SQL syntax error
"65" // Oracle: unknown identifier
);
private static final Set<String> DATA_INTEGRITY_VIOLATION_CODES = Set.of(
"01", // Data truncation
"02", // No data found
"22", // Value out of range
"23", // Integrity constraint violation
"27", // Triggered data change violation
"44" // With check violation
);
private static final Set<String> PESSIMISTIC_LOCKING_FAILURE_CODES = Set.of(
"40", // Transaction rollback
"61" // Oracle: deadlock
);
private static final Set<String> DATA_ACCESS_RESOURCE_FAILURE_CODES = Set.of(
"08", // Connection exception
"53", // PostgreSQL: insufficient resources (for example, disk full)
"54", // PostgreSQL: program limit exceeded (for example, statement too complex)
"57", // DB2: out-of-memory exception / database not started
"58" // DB2: unexpected system error
);
private static final Set<String> TRANSIENT_DATA_ACCESS_RESOURCE_CODES = Set.of(
"JW", // Sybase: internal I/O error
"JZ", // Sybase: unexpected I/O error
"S1" // DB2: communication failure
);
private static final Set<Integer> DUPLICATE_KEY_ERROR_CODES = Set.of(
1, // Oracle
301, // SAP HANA
1062, // MySQL/MariaDB
2601, // MS SQL Server
2627, // MS SQL Server
-239, // Informix
-268 // Informix
);
@Override
protected @Nullable DataAccessException doTranslate(String task, @Nullable String sql, SQLException ex) {
SQLException sqlEx = ex;
String sqlState;
if (sqlEx instanceof BatchUpdateException) {
// Unwrap BatchUpdateException to expose contained exception
// with potentially more specific SQL state.
if (sqlEx.getNextException() != null) {
SQLException nestedSqlEx = sqlEx.getNextException();
if (nestedSqlEx.getSQLState() != null) {
sqlEx = nestedSqlEx;
}
}
sqlState = sqlEx.getSQLState();
}
else {
// Expose top-level exception but potentially use nested SQL state.
sqlState = getSqlState(sqlEx);
}
// The actual SQL state check...
if (sqlState != null && sqlState.length() >= 2) {
String classCode = sqlState.substring(0, 2);
if (logger.isDebugEnabled()) {
logger.debug("Extracted SQL state class '" + classCode + "' from value '" + sqlState + "'");
}
if (BAD_SQL_GRAMMAR_CODES.contains(classCode)) {
return new BadSqlGrammarException(task, (sql != null ? sql : ""), ex);
}
else if (DATA_INTEGRITY_VIOLATION_CODES.contains(classCode)) {
if (indicatesDuplicateKey(sqlState, sqlEx.getErrorCode())) {
return new DuplicateKeyException(buildMessage(task, sql, sqlEx), ex);
}
return new DataIntegrityViolationException(buildMessage(task, sql, sqlEx), ex);
}
else if (PESSIMISTIC_LOCKING_FAILURE_CODES.contains(classCode)) {
if (indicatesCannotAcquireLock(sqlState)) {
return new CannotAcquireLockException(buildMessage(task, sql, sqlEx), ex);
}
return new PessimisticLockingFailureException(buildMessage(task, sql, sqlEx), ex);
}
else if (DATA_ACCESS_RESOURCE_FAILURE_CODES.contains(classCode)) {
if (indicatesQueryTimeout(sqlState)) {
return new QueryTimeoutException(buildMessage(task, sql, sqlEx), ex);
}
return new DataAccessResourceFailureException(buildMessage(task, sql, sqlEx), ex);
}
else if (TRANSIENT_DATA_ACCESS_RESOURCE_CODES.contains(classCode)) {
return new TransientDataAccessResourceException(buildMessage(task, sql, sqlEx), ex);
}
}
// For MySQL: exception | SQLStateSQLExceptionTranslator |
java | dropwizard__dropwizard | dropwizard-jersey/src/test/java/io/dropwizard/jersey/DropwizardResourceConfigTest.java | {
"start": 13645,
"end": 13750
} | interface ____ {
@GET
String bar();
}
@Path("/")
public static | ResourceInterface |
java | spring-projects__spring-boot | module/spring-boot-webclient/src/test/java/org/springframework/boot/webclient/autoconfigure/service/ReactiveHttpServiceClientAutoConfigurationTests.java | {
"start": 10902,
"end": 11125
} | class ____ {
@Bean
WebClientCustomizer webClientCustomizer() {
return (builder) -> builder.defaultHeader("customized", "true");
}
}
@Configuration(proxyBeanMethods = false)
static | WebClientCustomizerConfiguration |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestObserverNode.java | {
"start": 24515,
"end": 24628
} | class ____ {
private long lastSeenStateId = -7;
private FileNotFoundException fnfe;
}
static | ClientState |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/RedisClientUnitTests.java | {
"start": 1032,
"end": 3282
} | class ____ {
@Mock
ClientResources clientResources;
@Mock(extraInterfaces = Closeable.class)
AsyncCloseable asyncCloseable;
@Test
void shutdownShouldDeferResourcesShutdown() throws Exception {
when(clientResources.eventExecutorGroup()).thenReturn(ImmediateEventExecutor.INSTANCE);
CompletableFuture<Void> completableFuture = new CompletableFuture<>();
when(asyncCloseable.closeAsync()).thenReturn(completableFuture);
RedisClient redisClient = RedisClient.create(clientResources, "redis://foo");
Field field = AbstractRedisClient.class.getDeclaredField("sharedResources");
field.setAccessible(true);
field.set(redisClient, false);
Set<AsyncCloseable> closeableResources = (Set) ReflectionTestUtils.getField(redisClient, "closeableResources");
closeableResources.add(asyncCloseable);
CompletableFuture<Void> future = redisClient.shutdownAsync();
verify(asyncCloseable).closeAsync();
verify(clientResources, never()).shutdown(anyLong(), anyLong(), any());
assertThat(future).isNotDone();
}
@Test
void shutdownShutsDownResourcesAfterChannels() throws Exception {
when(clientResources.eventExecutorGroup()).thenReturn(ImmediateEventExecutor.INSTANCE);
CompletableFuture<Void> completableFuture = new CompletableFuture<>();
when(asyncCloseable.closeAsync()).thenReturn(completableFuture);
RedisClient redisClient = RedisClient.create(clientResources, "redis://foo");
Field field = AbstractRedisClient.class.getDeclaredField("sharedResources");
field.setAccessible(true);
field.set(redisClient, false);
Set<AsyncCloseable> closeableResources = (Set) ReflectionTestUtils.getField(redisClient, "closeableResources");
closeableResources.add(asyncCloseable);
CompletableFuture<Void> future = redisClient.shutdownAsync();
verify(asyncCloseable).closeAsync();
verify(clientResources, never()).shutdown(anyLong(), anyLong(), any());
completableFuture.complete(null);
verify(clientResources).shutdown(anyLong(), anyLong(), any());
assertThat(future).isDone();
}
}
| RedisClientUnitTests |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java | {
"start": 1268,
"end": 1667
} | class ____ extends ActionType<StopDatafeedAction.Response> {
public static final StopDatafeedAction INSTANCE = new StopDatafeedAction();
public static final String NAME = "cluster:admin/xpack/ml/datafeed/stop";
public static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueMinutes(5);
private StopDatafeedAction() {
super(NAME);
}
public static | StopDatafeedAction |
java | spring-projects__spring-boot | module/spring-boot-activemq/src/test/java/org/springframework/boot/activemq/autoconfigure/ActiveMQAutoConfigurationTests.java | {
"start": 13930,
"end": 14349
} | class ____ {
@Bean
ActiveMQConnectionDetails activemqConnectionDetails() {
return new ActiveMQConnectionDetails() {
@Override
public String getBrokerUrl() {
return "tcp://localhost:12345";
}
@Override
public String getUser() {
return "springuser";
}
@Override
public String getPassword() {
return "spring";
}
};
}
}
}
| TestConnectionDetailsConfiguration |
java | apache__flink | flink-python/src/main/java/org/apache/flink/streaming/api/transformations/python/DelegateOperatorTransformation.java | {
"start": 2025,
"end": 3159
} | interface ____<OUT> {
SimpleOperatorFactory<OUT> getOperatorFactory();
static void configureOperator(
DelegateOperatorTransformation<?> transformation,
AbstractPythonFunctionOperator<?> operator) {
DelegateOperator<?> delegateOperator =
(DelegateOperator<?>) transformation.getOperatorFactory().getOperator();
operator.getConfiguration().addAll(delegateOperator.getConfiguration());
if (operator instanceof DataStreamPythonFunctionOperator) {
DataStreamPythonFunctionOperator<?> dataStreamOperator =
(DataStreamPythonFunctionOperator<?>) operator;
dataStreamOperator.addSideOutputTags(delegateOperator.getSideOutputTags());
if (delegateOperator.getNumPartitions() != null) {
dataStreamOperator.setNumPartitions(delegateOperator.getNumPartitions());
}
}
}
/**
* {@link DelegateOperator} holds configurations, e.g. {@link OutputTag}s, which will be applied
* to the actual python operator at translation stage.
*/
| DelegateOperatorTransformation |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/MDCRouteIdAwareTest.java | {
"start": 1232,
"end": 2069
} | class ____ extends ContextTestSupport {
@Test
public void testMDC() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Hello World");
template.sendBodyAndHeader(fileUri(), "Hello World", Exchange.FILE_NAME, "hello.txt");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// enable MDC
context.setUseMDCLogging(true);
context.getRegistry().bind("myFilter", new MyFilter());
from(fileUri("?filter=#myFilter")).routeId("myRoute")
.to("mock:result");
}
};
}
private | MDCRouteIdAwareTest |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/function/ServerRequest.java | {
"start": 2316,
"end": 2418
} | interface ____ {
/**
* Get the HTTP method.
* @return the HTTP method as an HttpMethod | ServerRequest |
java | elastic__elasticsearch | modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpHeaderValidator.java | {
"start": 6901,
"end": 6977
} | enum ____ {
PASSING,
VALIDATING,
DROPPING
}
}
| State |
java | apache__rocketmq | proxy/src/main/java/org/apache/rocketmq/proxy/common/ProxyExceptionCode.java | {
"start": 852,
"end": 1048
} | enum ____ {
INVALID_BROKER_NAME,
TRANSACTION_DATA_NOT_FOUND,
FORBIDDEN,
MESSAGE_PROPERTY_CONFLICT_WITH_TYPE,
INVALID_RECEIPT_HANDLE,
INTERNAL_SERVER_ERROR,
}
| ProxyExceptionCode |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/FluxOnBackpressureBufferTimeoutTest.java | {
"start": 1411,
"end": 12336
} | class ____ implements Consumer<Object> {
final List<Object> evicted = Collections.synchronizedList(new ArrayList<>());
@Override
public void accept(Object t) {
evicted.add(t);
}
@Test
public void empty() {
StepVerifier.create(Flux.empty().onBackpressureBuffer(Duration.ofMinutes(1), Integer.MAX_VALUE, v -> {}))
.verifyComplete();
}
@Test
public void error() {
StepVerifier.create(Flux.error(new IOException())
.onBackpressureBuffer(Duration.ofMinutes(1), Integer.MAX_VALUE, v -> {}))
.verifyError(IOException.class);
}
@Test
public void errorDelayed() {
StepVerifier.create(Flux.just(1)
.concatWith(Flux.error(new IOException()))
.onBackpressureBuffer(Duration.ofMinutes(1), Integer.MAX_VALUE, v -> {}),
0)
.expectSubscription()
.expectNoEvent(Duration.ofMillis(50))
.thenRequest(1)
.expectNext(1)
.verifyError(IOException.class);
}
@Test
public void normal1() {
StepVerifier.create(Flux.range(1, 5)
.onBackpressureBuffer(Duration.ofMinutes(1), Integer.MAX_VALUE, v -> {}))
.expectNext(1, 2, 3, 4, 5)
.verifyComplete();
}
@Test
public void normal1SingleStep() {
StepVerifier.create(Flux.range(1, 5)
.onBackpressureBuffer(Duration.ofMinutes(1), Integer.MAX_VALUE, v -> {})
.limitRate(1))
.expectNext(1, 2, 3, 4, 5)
.verifyComplete();
}
@Test
public void normal2() {
StepVerifier.create(Flux.range(1, 5)
.onBackpressureBuffer(Duration.ofMinutes(1), Integer.MAX_VALUE, v -> {}, Schedulers.single()))
.expectNext(1, 2, 3, 4, 5)
.verifyComplete();
}
@Test
public void normal2SingleStep() {
StepVerifier.create(Flux.range(1, 5)
.onBackpressureBuffer(Duration.ofMinutes(1), Integer.MAX_VALUE, v -> {}, Schedulers.single())
.limitRate(1))
.expectNext(1, 2, 3, 4, 5)
.verifyComplete();
}
@Test
public void normal3() {
StepVerifier.create(Flux.range(1, 5)
.onBackpressureBuffer(Duration.ofMinutes(1), 10, this, Schedulers.single()))
.expectNext(1, 2, 3, 4, 5)
.verifyComplete();
}
@Test
public void normal3SingleStep() {
StepVerifier.create(Flux.range(1, 5)
.onBackpressureBuffer(Duration.ofMinutes(1), 10, this, Schedulers.single())
.limitRate(1))
.expectNext(1, 2, 3, 4, 5)
.verifyComplete();
}
@Test
public void normal4() {
StepVerifier.create(Flux.range(1, 5)
.onBackpressureBuffer(Duration.ofMinutes(1), Integer.MAX_VALUE, this, Schedulers.single()))
.expectNext(1, 2, 3, 4, 5)
.verifyComplete();
}
@Test
public void normal4SingleStep() {
StepVerifier.create(Flux.range(1, 5)
.onBackpressureBuffer(Duration.ofMinutes(1), Integer.MAX_VALUE, this, Schedulers.single())
.limitRate(1))
.expectNext(1, 2, 3, 4, 5)
.verifyComplete();
}
@Test
public void bufferLimit() {
StepVerifier.create(Flux.range(1, 5)
.onBackpressureBuffer(Duration.ofMinutes(1), 1, this, Schedulers.single()),
0)
.expectSubscription()
.expectNoEvent(Duration.ofMillis(100))
.thenRequest(1)
.expectNext(5)
.verifyComplete();
assertThat(evicted).containsExactly(1, 2, 3, 4);
}
@Test
public void timeoutLimit() {
TestPublisher<Integer> tp = TestPublisher.create();
StepVerifier.withVirtualTime(() ->
tp.flux().onBackpressureBuffer(Duration.ofSeconds(1), 1, this, VirtualTimeScheduler.get()), 0)
.expectSubscription()
.then(() -> tp.next(1))
.expectNoEvent(Duration.ofMillis(500))
.then(() -> tp.next(2))
.expectNoEvent(Duration.ofMillis(500))
.then(() -> tp.next(3))
.expectNoEvent(Duration.ofMillis(500))
.then(() -> tp.next(4))
.expectNoEvent(Duration.ofMillis(500))
.then(() -> tp.next(5))
.expectNoEvent(Duration.ofMillis(500))
.then(tp::complete)
.thenRequest(1)
.expectNext(5)
.verifyComplete();
assertThat(evicted).containsExactly(1, 2, 3, 4);
}
@Test
public void take() {
StepVerifier.create(Flux.range(1, 5)
.onBackpressureBuffer(Duration.ofMinutes(1), Integer.MAX_VALUE, v -> {})
.take(2, false))
.expectNext(1, 2)
.verifyComplete();
}
@Test
public void cancelEvictAll() {
StepVerifier.create(Flux.range(1, 5)
.log()
.onBackpressureBuffer(Duration.ofMinutes(1), Integer.MAX_VALUE, this,
Schedulers.single()),
0)
.thenAwait(Duration.ofMillis(100)) //small hiccup to cancel after the prefetch
.thenCancel()
.verify();
assertThat(evicted).containsExactly(1, 2, 3, 4, 5);
}
@Test
public void timeoutEvictAll() {
StepVerifier.withVirtualTime(() -> Flux.range(1, 5)
.onBackpressureBuffer(Duration.ofSeconds(1), Integer.MAX_VALUE, this, VirtualTimeScheduler.get()),
0)
.expectSubscription()
.expectNoEvent(Duration.ofSeconds(1))
.thenAwait(Duration.ofMinutes(1))
.thenRequest(1)
.verifyComplete();
assertThat(evicted).containsExactly(1, 2, 3, 4, 5);
}
@Test
public void evictCancels() {
AtomicReference<Subscription> subscription = new AtomicReference<>();
TestPublisher<Integer> tp = TestPublisher.create();
StepVerifier.withVirtualTime(() -> tp.flux()
.doOnSubscribe(subscription::set)
.onBackpressureBuffer(Duration.ofSeconds(1), 10, i -> {
evicted.add(i);
subscription.get().cancel();
}, VirtualTimeScheduler.get()),
0)
.then(() -> tp.emit(1, 2, 3, 4, 5))
.thenAwait(Duration.ofMinutes(1))
.verifyComplete();
tp.assertCancelled();
assertThat(evicted).containsExactly(1, 2, 3, 4, 5);
}
@Test
public void evictThrows() {
TestPublisher<Integer> tp = TestPublisher.create();
StepVerifier.withVirtualTime(() -> tp.flux()
.onBackpressureBuffer(Duration.ofSeconds(1), 10, i -> {
throw new IllegalStateException(i.toString());
}),
0)
.then(() -> tp.emit(1, 2, 3, 4, 5))
.thenAwait(Duration.ofMinutes(1))
.thenRequest(1)
.expectComplete()
.verifyThenAssertThat()
.hasDroppedErrors(5)
.hasDroppedErrorsSatisfying(c -> {
Iterator<Throwable> it = c.iterator();
for (int i = 1; it.hasNext(); i++) {
assertThat(it.next())
.isInstanceOf(IllegalStateException.class)
.hasMessage("" + i);
}
});
}
@Test
public void cancelAndRequest() {
List<Integer> seen = Collections.synchronizedList(new ArrayList<>());
Flux.range(1, 5)
.onBackpressureBuffer(Duration.ofMinutes(1), Integer.MAX_VALUE, this, Schedulers.single())
.subscribe(new BaseSubscriber<Integer>() {
@Override
protected void hookOnSubscribe(Subscription subscription) {
request(1);
}
@Override
protected void hookOnNext(Integer value) {
seen.add(value);
cancel();
}
});
assertThat(seen).containsExactly(1);
}
@Test
public void dropBySizeAndTimeout() {
TestPublisher<Integer> tp = TestPublisher.create();
StepVerifier.withVirtualTime(() -> tp.flux()
// Note: using sub-millis durations after gh-1734
.onBackpressureBuffer(Duration.ofNanos(600),
5, this).log(),
0)
.expectSubscription()
.then(() -> tp.next(1,2, 3, 4, 5, 6, 7)) //evict 2 elements
.then(() -> assertThat(evicted).containsExactly(1, 2))
.thenAwait(Duration.ofNanos(500))
.then(() -> tp.emit(8, 9, 10))
.thenAwait(Duration.ofNanos(100)) // evict elements older than 8
.then(() -> assertThat(evicted).containsExactly(1, 2, 3, 4, 5, 6, 7))
.thenRequest(10)
.expectNext(8, 9, 10)
.verifyComplete();
}
@Test
public void gh1194() {
StepVerifier.withVirtualTime(() ->
Flux.just("1", "not requested", "not requested")
.onBackpressureBuffer(Duration.ofSeconds(1), 3, s -> {}), 1)
.expectNext("1")
.thenAwait(Duration.ofSeconds(1))
.verifyComplete();
}
@Test
public void scanOperator() {
Scannable test = (Scannable) Flux.never().onBackpressureBuffer(Duration.ofSeconds(1), 123, v -> {}, Schedulers.single());
assertThat(test.scan(Scannable.Attr.RUN_ON)).isSameAs(Schedulers.single());
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.ASYNC);
}
@Test
public void scanSubscriber() {
CoreSubscriber<String> actual = new LambdaSubscriber<>(null, null, null, null);
BackpressureBufferTimeoutSubscriber<String> test = new BackpressureBufferTimeoutSubscriber<>(actual, Duration.ofSeconds(1), Schedulers.immediate(), 123, v -> {});
Subscription s = Operators.emptySubscription();
test.onSubscribe(s);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(s);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
assertThat(test.scan(Scannable.Attr.REQUESTED_FROM_DOWNSTREAM)).isEqualTo(Long.MAX_VALUE);
assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
test.offer("foo");
test.offer("bar");
assertThat(test.scan(Scannable.Attr.BUFFERED)).isEqualTo(2);
test.error = new RuntimeException("boom");
assertThat(test.scan(Scannable.Attr.ERROR)).isSameAs(test.error);
assertThat(test.scan(Scannable.Attr.PREFETCH)).isEqualTo(Integer.MAX_VALUE);
assertThat(test.scan(Scannable.Attr.DELAY_ERROR)).isFalse();
assertThat(test.scan(Scannable.Attr.RUN_ON)).isSameAs(Schedulers.immediate());
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.ASYNC);
}
}
| FluxOnBackpressureBufferTimeoutTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfigurationOutputter.java | {
"start": 4500,
"end": 4768
} | class ____ extends PublishedConfigurationOutputter {
public JsonOutputter(PublishedConfiguration owner) {
super(owner);
}
@Override
public String asString() throws IOException {
return owner.asJson();
}
}
public static | JsonOutputter |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/SystemUtils.java | {
"start": 26179,
"end": 26720
} | class ____ loaded, the value will be out of sync with that System property.
* </p>
*
* @see SystemProperties#getOsArch()
* @since Java 1.1
*/
public static final String OS_ARCH = SystemProperties.getOsArch();
/**
* A constant for the System Property {@code os.name}. Operating system name.
*
* <p>
* Defaults to {@code null} if the runtime does not have security access to read this property or the property does not exist.
* </p>
* <p>
* This value is initialized when the | is |
java | quarkusio__quarkus | extensions/reactive-oracle-client/deployment/src/main/java/io/quarkus/reactive/oracle/client/deployment/OraclePoolBuildItem.java | {
"start": 351,
"end": 1058
} | class ____ extends MultiBuildItem {
private final String dataSourceName;
private final Function<SyntheticCreationalContext<OraclePool>, OraclePool> oraclePool;
public OraclePoolBuildItem(String dataSourceName, Function<SyntheticCreationalContext<OraclePool>, OraclePool> oraclePool) {
this.dataSourceName = dataSourceName;
this.oraclePool = oraclePool;
}
public String getDataSourceName() {
return dataSourceName;
}
public Function<SyntheticCreationalContext<OraclePool>, OraclePool> getOraclePool() {
return oraclePool;
}
public boolean isDefault() {
return DataSourceUtil.isDefault(dataSourceName);
}
}
| OraclePoolBuildItem |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/configuration/AutowiredConfigurationTests.java | {
"start": 15330,
"end": 15727
} | class ____ {
@Bean @Scope("prototype")
public TestBean testBean(@Value("#{systemProperties[myProp]}") Provider<String> name) {
return new TestBean(name.get());
}
@Bean @Scope("prototype")
public TestBean testBean2(@Value("#{systemProperties[myProp]}") Provider<String> name2) {
return new TestBean(name2.get());
}
}
@Configuration
static | ValueConfigWithProviderMethodArguments |
java | google__guava | android/guava/src/com/google/common/base/FinalizablePhantomReference.java | {
"start": 1260,
"end": 1685
} | class ____<T> extends PhantomReference<T>
implements FinalizableReference {
/**
* Constructs a new finalizable phantom reference.
*
* @param referent to phantom reference
* @param queue that should finalize the referent
*/
protected FinalizablePhantomReference(@Nullable T referent, FinalizableReferenceQueue queue) {
super(referent, queue.queue);
queue.cleanUp();
}
}
| FinalizablePhantomReference |
java | apache__flink | flink-formats/flink-parquet/src/main/java/org/apache/flink/formats/parquet/vector/reader/FloatColumnReader.java | {
"start": 1269,
"end": 3736
} | class ____ extends AbstractColumnReader<WritableFloatVector> {
public FloatColumnReader(ColumnDescriptor descriptor, PageReader pageReader)
throws IOException {
super(descriptor, pageReader);
checkTypeName(PrimitiveType.PrimitiveTypeName.FLOAT);
}
@Override
protected void readBatch(int rowId, int num, WritableFloatVector column) {
int left = num;
while (left > 0) {
if (runLenDecoder.currentCount == 0) {
runLenDecoder.readNextGroup();
}
int n = Math.min(left, runLenDecoder.currentCount);
switch (runLenDecoder.mode) {
case RLE:
if (runLenDecoder.currentValue == maxDefLevel) {
readFloats(n, column, rowId);
} else {
column.setNulls(rowId, n);
}
break;
case PACKED:
for (int i = 0; i < n; ++i) {
if (runLenDecoder.currentBuffer[runLenDecoder.currentBufferIdx++]
== maxDefLevel) {
column.setFloat(rowId + i, readFloat());
} else {
column.setNullAt(rowId + i);
}
}
break;
}
rowId += n;
left -= n;
runLenDecoder.currentCount -= n;
}
}
@Override
protected void readBatchFromDictionaryIds(
int rowId, int num, WritableFloatVector column, WritableIntVector dictionaryIds) {
for (int i = rowId; i < rowId + num; ++i) {
if (!column.isNullAt(i)) {
column.setFloat(i, dictionary.decodeToFloat(dictionaryIds.getInt(i)));
}
}
}
private float readFloat() {
return readDataBuffer(4).getFloat();
}
private void readFloats(int total, WritableFloatVector c, int rowId) {
int requiredBytes = total * 4;
ByteBuffer buffer = readDataBuffer(requiredBytes);
if (buffer.hasArray()) {
int offset = buffer.arrayOffset() + buffer.position();
c.setFloatsFromBinary(rowId, total, buffer.array(), offset);
} else {
for (int i = 0; i < total; i += 1) {
c.setFloat(rowId + i, buffer.getFloat());
}
}
}
}
| FloatColumnReader |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/projection/CollectionAwareProjectionFactory.java | {
"start": 1110,
"end": 1378
} | class ____ extends SpelAwareProxyProjectionFactory {
@Override
protected ProjectionInformation createProjectionInformation(Class<?> projectionType) {
return new CollectionAwareProjectionInformation(projectionType);
}
private static | CollectionAwareProjectionFactory |
java | quarkusio__quarkus | extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/StockMethodsAdder.java | {
"start": 2072,
"end": 3617
} | class ____ {
private static Set<MethodInfo> ALL_SPRING_DATA_REPOSITORY_METHODS = null;
private final IndexView index;
private final FieldDescriptor operationsField;
public StockMethodsAdder(IndexView index, TypeBundle typeBundle) {
this.index = index;
String operationsName = typeBundle.operations().dotName().toString();
operationsField = of(operationsName, "INSTANCE", operationsName);
}
public void add(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor,
String generatedClassName, ClassInfo repositoryToImplement, DotName entityDotName, String idTypeStr) {
Set<MethodInfo> methodsOfExtendedSpringDataRepositories = methodsOfExtendedSpringDataRepositories(
repositoryToImplement);
Set<MethodInfo> stockMethodsAddedToInterface = stockMethodsAddedToInterface(repositoryToImplement);
Set<MethodInfo> allMethodsToBeImplemented = new HashSet<>(methodsOfExtendedSpringDataRepositories);
allMethodsToBeImplemented.addAll(stockMethodsAddedToInterface);
Map<MethodDescriptor, Boolean> allMethodsToBeImplementedToResult = new HashMap<>();
for (MethodInfo methodInfo : allMethodsToBeImplemented) {
allMethodsToBeImplementedToResult.put(GenerationUtil.toMethodDescriptor(generatedClassName, methodInfo), false);
}
String entityTypeStr = entityDotName.toString();
// for all Spring Data repository methods we know how to implement, check if the generated | StockMethodsAdder |
java | hibernate__hibernate-orm | hibernate-c3p0/src/test/java/org/hibernate/test/c3p0/IrrelevantEntity.java | {
"start": 511,
"end": 908
} | class ____ {
private Integer id;
private String name;
@Id
@GeneratedValue( generator = "increment" )
@GenericGenerator( name = "increment", strategy = "increment" )
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
@NotBlank
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
| IrrelevantEntity |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatDoubleGroupingAggregatorFunction.java | {
"start": 1179,
"end": 15890
} | class ____ implements GroupingAggregatorFunction {
private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
new IntermediateStateDesc("top", ElementType.FLOAT),
new IntermediateStateDesc("output", ElementType.DOUBLE) );
private final TopFloatDoubleAggregator.GroupingState state;
private final List<Integer> channels;
private final DriverContext driverContext;
private final int limit;
private final boolean ascending;
public TopFloatDoubleGroupingAggregatorFunction(List<Integer> channels,
TopFloatDoubleAggregator.GroupingState state, DriverContext driverContext, int limit,
boolean ascending) {
this.channels = channels;
this.state = state;
this.driverContext = driverContext;
this.limit = limit;
this.ascending = ascending;
}
public static TopFloatDoubleGroupingAggregatorFunction create(List<Integer> channels,
DriverContext driverContext, int limit, boolean ascending) {
return new TopFloatDoubleGroupingAggregatorFunction(channels, TopFloatDoubleAggregator.initGrouping(driverContext.bigArrays(), limit, ascending), driverContext, limit, ascending);
}
public static List<IntermediateStateDesc> intermediateStateDesc() {
return INTERMEDIATE_STATE_DESC;
}
@Override
public int intermediateBlockCount() {
return INTERMEDIATE_STATE_DESC.size();
}
@Override
public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
Page page) {
FloatBlock vBlock = page.getBlock(channels.get(0));
DoubleBlock outputValueBlock = page.getBlock(channels.get(1));
FloatVector vVector = vBlock.asVector();
if (vVector == null) {
maybeEnableGroupIdTracking(seenGroupIds, vBlock, outputValueBlock);
return new GroupingAggregatorFunction.AddInput() {
@Override
public void add(int positionOffset, IntArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, vBlock, outputValueBlock);
}
@Override
public void add(int positionOffset, IntBigArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, vBlock, outputValueBlock);
}
@Override
public void add(int positionOffset, IntVector groupIds) {
addRawInput(positionOffset, groupIds, vBlock, outputValueBlock);
}
@Override
public void close() {
}
};
}
DoubleVector outputValueVector = outputValueBlock.asVector();
if (outputValueVector == null) {
maybeEnableGroupIdTracking(seenGroupIds, vBlock, outputValueBlock);
return new GroupingAggregatorFunction.AddInput() {
@Override
public void add(int positionOffset, IntArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, vBlock, outputValueBlock);
}
@Override
public void add(int positionOffset, IntBigArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, vBlock, outputValueBlock);
}
@Override
public void add(int positionOffset, IntVector groupIds) {
addRawInput(positionOffset, groupIds, vBlock, outputValueBlock);
}
@Override
public void close() {
}
};
}
return new GroupingAggregatorFunction.AddInput() {
@Override
public void add(int positionOffset, IntArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, vVector, outputValueVector);
}
@Override
public void add(int positionOffset, IntBigArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, vVector, outputValueVector);
}
@Override
public void add(int positionOffset, IntVector groupIds) {
addRawInput(positionOffset, groupIds, vVector, outputValueVector);
}
@Override
public void close() {
}
};
}
private void addRawInput(int positionOffset, IntArrayBlock groups, FloatBlock vBlock,
DoubleBlock outputValueBlock) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int valuesPosition = groupPosition + positionOffset;
if (vBlock.isNull(valuesPosition)) {
continue;
}
if (outputValueBlock.isNull(valuesPosition)) {
continue;
}
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int vStart = vBlock.getFirstValueIndex(valuesPosition);
int vEnd = vStart + vBlock.getValueCount(valuesPosition);
for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
float vValue = vBlock.getFloat(vOffset);
int outputValueStart = outputValueBlock.getFirstValueIndex(valuesPosition);
int outputValueEnd = outputValueStart + outputValueBlock.getValueCount(valuesPosition);
for (int outputValueOffset = outputValueStart; outputValueOffset < outputValueEnd; outputValueOffset++) {
double outputValueValue = outputValueBlock.getDouble(outputValueOffset);
TopFloatDoubleAggregator.combine(state, groupId, vValue, outputValueValue);
}
}
}
}
}
/**
 * Accumulates raw input for the dense case: both value columns are vectors (no nulls,
 * exactly one value per position) while the group ids still arrive as a possibly-null,
 * possibly multi-valued {@link IntArrayBlock}.
 *
 * @param positionOffset offset of this page's value positions relative to the group block
 * @param groups group ids; one possibly-null, possibly multi-valued entry per position
 * @param vVector the raw values being aggregated
 * @param outputValueVector the companion output values aggregated alongside {@code vVector}
 */
private void addRawInput(int positionOffset, IntArrayBlock groups, FloatVector vVector,
    DoubleVector outputValueVector) {
  int positionCount = groups.getPositionCount();
  for (int position = 0; position < positionCount; position++) {
    if (groups.isNull(position)) {
      continue; // no group at this position, nothing to accumulate
    }
    int valuesPosition = position + positionOffset;
    // Vector reads are plain lookups, so hoist them out of the per-group loop.
    float v = vVector.getFloat(valuesPosition);
    double outputValue = outputValueVector.getDouble(valuesPosition);
    int first = groups.getFirstValueIndex(position);
    int last = first + groups.getValueCount(position);
    for (int i = first; i < last; i++) {
      TopFloatDoubleAggregator.combine(state, groups.getInt(i), v, outputValue);
    }
  }
}
/**
 * Merges intermediate (partially aggregated) state produced elsewhere into this function's
 * state. The intermediate representation is two blocks read from {@code channels}:
 * channel 0 carries the "top" float values and channel 1 the matching double output values.
 * If either block is entirely null there is nothing to merge and the call returns early.
 *
 * @param positionOffset offset of this page's positions relative to the group block
 * @param groups group ids; one possibly-null, possibly multi-valued entry per position
 * @param page the page holding the intermediate blocks
 */
@Override
public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) {
  state.enableGroupIdTracking(new SeenGroupIds.Empty());
  assert channels.size() == intermediateBlockCount();
  Block topUncast = page.getBlock(channels.get(0));
  if (topUncast.areAllValuesNull()) {
    return; // nothing to merge
  }
  FloatBlock top = (FloatBlock) topUncast;
  Block outputUncast = page.getBlock(channels.get(1));
  if (outputUncast.areAllValuesNull()) {
    return; // nothing to merge
  }
  DoubleBlock output = (DoubleBlock) outputUncast;
  assert top.getPositionCount() == output.getPositionCount();
  for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
    if (groups.isNull(groupPosition)) {
      continue; // no group at this position
    }
    int groupStart = groups.getFirstValueIndex(groupPosition);
    int groupEnd = groupStart + groups.getValueCount(groupPosition);
    for (int g = groupStart; g < groupEnd; g++) {
      int groupId = groups.getInt(g);
      int valuesPosition = groupPosition + positionOffset;
      TopFloatDoubleAggregator.combineIntermediate(state, groupId, top, output, valuesPosition);
    }
  }
}
/**
 * Accumulates raw input when both value columns are {@code Block}s (may contain nulls and
 * multi-valued positions) and the group ids arrive as an {@link IntBigArrayBlock}.
 *
 * <p>Positions where the group, the value, or the output value is null are skipped entirely.
 * For multi-valued positions, every combination of (group id, v value, output value) at that
 * position is folded into the aggregation state via {@code TopFloatDoubleAggregator.combine}.
 *
 * @param positionOffset offset of this page's value positions relative to the group block
 * @param groups group ids; one possibly-null, possibly multi-valued entry per position
 * @param vBlock the raw values being aggregated
 * @param outputValueBlock the companion output values aggregated alongside {@code vBlock}
 */
private void addRawInput(int positionOffset, IntBigArrayBlock groups, FloatBlock vBlock,
    DoubleBlock outputValueBlock) {
  for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
    if (groups.isNull(groupPosition)) {
      continue; // no group at this position, nothing to accumulate
    }
    // Value columns are addressed relative to the page, hence the offset.
    int valuesPosition = groupPosition + positionOffset;
    if (vBlock.isNull(valuesPosition)) {
      continue;
    }
    if (outputValueBlock.isNull(valuesPosition)) {
      continue;
    }
    int groupStart = groups.getFirstValueIndex(groupPosition);
    int groupEnd = groupStart + groups.getValueCount(groupPosition);
    for (int g = groupStart; g < groupEnd; g++) {
      int groupId = groups.getInt(g);
      int vStart = vBlock.getFirstValueIndex(valuesPosition);
      int vEnd = vStart + vBlock.getValueCount(valuesPosition);
      for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
        float vValue = vBlock.getFloat(vOffset);
        int outputValueStart = outputValueBlock.getFirstValueIndex(valuesPosition);
        int outputValueEnd = outputValueStart + outputValueBlock.getValueCount(valuesPosition);
        // Cross-product: each v value is combined with each output value at this position.
        for (int outputValueOffset = outputValueStart; outputValueOffset < outputValueEnd; outputValueOffset++) {
          double outputValueValue = outputValueBlock.getDouble(outputValueOffset);
          TopFloatDoubleAggregator.combine(state, groupId, vValue, outputValueValue);
        }
      }
    }
  }
}
/**
 * Accumulates raw input for the dense case: both value columns are vectors (no nulls,
 * exactly one value per position) while the group ids still arrive as a possibly-null,
 * possibly multi-valued {@link IntBigArrayBlock}.
 *
 * @param positionOffset offset of this page's value positions relative to the group block
 * @param groups group ids; one possibly-null, possibly multi-valued entry per position
 * @param vVector the raw values being aggregated
 * @param outputValueVector the companion output values aggregated alongside {@code vVector}
 */
private void addRawInput(int positionOffset, IntBigArrayBlock groups, FloatVector vVector,
    DoubleVector outputValueVector) {
  int positionCount = groups.getPositionCount();
  for (int position = 0; position < positionCount; position++) {
    if (groups.isNull(position)) {
      continue; // no group at this position, nothing to accumulate
    }
    int valuesPosition = position + positionOffset;
    // Vector reads are plain lookups, so hoist them out of the per-group loop.
    float v = vVector.getFloat(valuesPosition);
    double outputValue = outputValueVector.getDouble(valuesPosition);
    int first = groups.getFirstValueIndex(position);
    int last = first + groups.getValueCount(position);
    for (int i = first; i < last; i++) {
      TopFloatDoubleAggregator.combine(state, groups.getInt(i), v, outputValue);
    }
  }
}
/**
 * Merges intermediate (partially aggregated) state produced elsewhere into this function's
 * state. The intermediate representation is two blocks read from {@code channels}:
 * channel 0 carries the "top" float values and channel 1 the matching double output values.
 * If either block is entirely null there is nothing to merge and the call returns early.
 *
 * @param positionOffset offset of this page's positions relative to the group block
 * @param groups group ids; one possibly-null, possibly multi-valued entry per position
 * @param page the page holding the intermediate blocks
 */
@Override
public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) {
  state.enableGroupIdTracking(new SeenGroupIds.Empty());
  assert channels.size() == intermediateBlockCount();
  Block topUncast = page.getBlock(channels.get(0));
  if (topUncast.areAllValuesNull()) {
    return; // nothing to merge
  }
  FloatBlock top = (FloatBlock) topUncast;
  Block outputUncast = page.getBlock(channels.get(1));
  if (outputUncast.areAllValuesNull()) {
    return; // nothing to merge
  }
  DoubleBlock output = (DoubleBlock) outputUncast;
  assert top.getPositionCount() == output.getPositionCount();
  for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
    if (groups.isNull(groupPosition)) {
      continue; // no group at this position
    }
    int groupStart = groups.getFirstValueIndex(groupPosition);
    int groupEnd = groupStart + groups.getValueCount(groupPosition);
    for (int g = groupStart; g < groupEnd; g++) {
      int groupId = groups.getInt(g);
      int valuesPosition = groupPosition + positionOffset;
      TopFloatDoubleAggregator.combineIntermediate(state, groupId, top, output, valuesPosition);
    }
  }
}
/**
 * Accumulates raw input when the group ids arrive as an {@link IntVector} (no nulls,
 * exactly one group per position) while the value columns are {@code Block}s that may
 * still contain nulls and multi-valued positions.
 *
 * <p>Positions where either value is null are skipped. For multi-valued positions, every
 * combination of (v value, output value) at that position is folded into the aggregation
 * state for the position's group via {@code TopFloatDoubleAggregator.combine}.
 *
 * @param positionOffset offset of this page's value positions relative to the group vector
 * @param groups group ids, exactly one per position
 * @param vBlock the raw values being aggregated
 * @param outputValueBlock the companion output values aggregated alongside {@code vBlock}
 */
private void addRawInput(int positionOffset, IntVector groups, FloatBlock vBlock,
    DoubleBlock outputValueBlock) {
  for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
    // Value columns are addressed relative to the page, hence the offset.
    int valuesPosition = groupPosition + positionOffset;
    if (vBlock.isNull(valuesPosition)) {
      continue;
    }
    if (outputValueBlock.isNull(valuesPosition)) {
      continue;
    }
    int groupId = groups.getInt(groupPosition);
    int vStart = vBlock.getFirstValueIndex(valuesPosition);
    int vEnd = vStart + vBlock.getValueCount(valuesPosition);
    for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
      float vValue = vBlock.getFloat(vOffset);
      int outputValueStart = outputValueBlock.getFirstValueIndex(valuesPosition);
      int outputValueEnd = outputValueStart + outputValueBlock.getValueCount(valuesPosition);
      // Cross-product: each v value is combined with each output value at this position.
      for (int outputValueOffset = outputValueStart; outputValueOffset < outputValueEnd; outputValueOffset++) {
        double outputValueValue = outputValueBlock.getDouble(outputValueOffset);
        TopFloatDoubleAggregator.combine(state, groupId, vValue, outputValueValue);
      }
    }
  }
}
/**
 * Accumulates raw input for the fully dense case: group ids and both value columns are
 * vectors, so every position carries exactly one group id, one value, and one output value.
 *
 * @param positionOffset offset of this page's value positions relative to the group vector
 * @param groups group ids, exactly one per position
 * @param vVector the raw values being aggregated
 * @param outputValueVector the companion output values aggregated alongside {@code vVector}
 */
private void addRawInput(int positionOffset, IntVector groups, FloatVector vVector,
    DoubleVector outputValueVector) {
  int positionCount = groups.getPositionCount();
  for (int position = 0; position < positionCount; position++) {
    int valuesPosition = position + positionOffset;
    TopFloatDoubleAggregator.combine(
        state,
        groups.getInt(position),
        vVector.getFloat(valuesPosition),
        outputValueVector.getDouble(valuesPosition));
  }
}
/**
 * Merges intermediate (partially aggregated) state produced elsewhere into this function's
 * state, for the dense case where group ids arrive as an {@link IntVector}. The intermediate
 * representation is two blocks read from {@code channels}: channel 0 carries the "top" float
 * values and channel 1 the matching double output values. If either block is entirely null
 * there is nothing to merge and the call returns early.
 *
 * @param positionOffset offset of this page's positions relative to the group vector
 * @param groups group ids, exactly one per position
 * @param page the page holding the intermediate blocks
 */
@Override
public void addIntermediateInput(int positionOffset, IntVector groups, Page page) {
  state.enableGroupIdTracking(new SeenGroupIds.Empty());
  assert channels.size() == intermediateBlockCount();
  Block topUncast = page.getBlock(channels.get(0));
  if (topUncast.areAllValuesNull()) {
    return; // nothing to merge
  }
  FloatBlock top = (FloatBlock) topUncast;
  Block outputUncast = page.getBlock(channels.get(1));
  if (outputUncast.areAllValuesNull()) {
    return; // nothing to merge
  }
  DoubleBlock output = (DoubleBlock) outputUncast;
  assert top.getPositionCount() == output.getPositionCount();
  for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
    int groupId = groups.getInt(groupPosition);
    int valuesPosition = groupPosition + positionOffset;
    TopFloatDoubleAggregator.combineIntermediate(state, groupId, top, output, valuesPosition);
  }
}
/**
 * Enables group-id tracking on the aggregation state if either input block may contain
 * nulls. When both blocks are null-free the state is left untouched.
 *
 * @param seenGroupIds the set of group ids observed so far
 * @param vBlock the raw value block to inspect for nulls
 * @param outputValueBlock the output value block to inspect for nulls
 */
private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, FloatBlock vBlock,
    DoubleBlock outputValueBlock) {
  if (vBlock.mayHaveNulls()) {
    state.enableGroupIdTracking(seenGroupIds);
  }
  if (outputValueBlock.mayHaveNulls()) {
    state.enableGroupIdTracking(seenGroupIds);
  }
}
/**
 * Unconditionally enables group-id tracking because the selected groups may include
 * ids this function has not observed in its input.
 *
 * @param seenGroupIds the set of group ids observed so far
 */
@Override
public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) {
  state.enableGroupIdTracking(seenGroupIds);
}
/**
 * Serializes the current aggregation state for the selected groups into intermediate
 * blocks, delegating entirely to the state's own intermediate representation.
 *
 * @param blocks output array; intermediate blocks are written starting at {@code offset}
 * @param offset first index in {@code blocks} to write to
 * @param selected the group ids to emit, in output order
 */
@Override
public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) {
  state.toIntermediate(blocks, offset, selected, driverContext);
}
/**
 * Produces the final aggregation result for the selected groups as a single block
 * written at {@code blocks[offset]}.
 *
 * @param blocks output array; the final block is written at index {@code offset}
 * @param offset index in {@code blocks} to write the result to
 * @param selected the group ids to emit, in output order
 * @param ctx evaluation context passed through to the aggregator
 */
@Override
public void evaluateFinal(Block[] blocks, int offset, IntVector selected,
    GroupingAggregatorEvaluationContext ctx) {
  blocks[offset] = TopFloatDoubleAggregator.evaluateFinal(state, selected, ctx);
}
/**
 * Returns a short description of this function in the form
 * {@code SimpleClassName[channels=...]}.
 */
@Override
public String toString() {
  return getClass().getSimpleName() + "[channels=" + channels + "]";
}
/**
 * Releases the resources held by the aggregation state.
 */
@Override
public void close() {
  state.close();
}
}
| TopFloatDoubleGroupingAggregatorFunction |
java | elastic__elasticsearch | x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionEncoderTests.java | {
"start": 633,
"end": 11439
} | class ____ extends ESTestCase {
public void testEncodingOrderingSemver() {
assertTrue(encodeVersion("1").compareTo(encodeVersion("1.0")) < 0);
assertTrue(encodeVersion("1.0").compareTo(encodeVersion("1.0.0.0.0.0.0.0.0.1")) < 0);
assertTrue(encodeVersion("1.0.0").compareTo(encodeVersion("1.0.0.0.0.0.0.0.0.1")) < 0);
assertTrue(encodeVersion("1.0.0").compareTo(encodeVersion("2.0.0")) < 0);
assertTrue(encodeVersion("2.0.0").compareTo(encodeVersion("11.0.0")) < 0);
assertTrue(encodeVersion("2.0.0").compareTo(encodeVersion("2.1.0")) < 0);
assertTrue(encodeVersion("2.1.0").compareTo(encodeVersion("2.1.1")) < 0);
assertTrue(encodeVersion("2.1.1").compareTo(encodeVersion("2.1.1.0")) < 0);
assertTrue(encodeVersion("2.0.0").compareTo(encodeVersion("11.0.0")) < 0);
assertTrue(encodeVersion("1.0.0").compareTo(encodeVersion("2.0")) < 0);
assertTrue(encodeVersion("1.0.0-a").compareTo(encodeVersion("1.0.0-b")) < 0);
assertTrue(encodeVersion("1.0.0-1.0.0").compareTo(encodeVersion("1.0.0-2.0")) < 0);
assertTrue(encodeVersion("1.0.0-alpha").compareTo(encodeVersion("1.0.0-alpha.1")) < 0);
assertTrue(encodeVersion("1.0.0-alpha.1").compareTo(encodeVersion("1.0.0-alpha.beta")) < 0);
assertTrue(encodeVersion("1.0.0-alpha.beta").compareTo(encodeVersion("1.0.0-beta")) < 0);
assertTrue(encodeVersion("1.0.0-beta").compareTo(encodeVersion("1.0.0-beta.2")) < 0);
assertTrue(encodeVersion("1.0.0-beta.2").compareTo(encodeVersion("1.0.0-beta.11")) < 0);
assertTrue(encodeVersion("1.0.0-beta11").compareTo(encodeVersion("1.0.0-beta2")) < 0); // correct according to Semver specs
assertTrue(encodeVersion("1.0.0-beta.11").compareTo(encodeVersion("1.0.0-rc.1")) < 0);
assertTrue(encodeVersion("1.0.0-rc.1").compareTo(encodeVersion("1.0.0")) < 0);
assertTrue(encodeVersion("1.0.0").compareTo(encodeVersion("2.0.0-pre127")) < 0);
assertTrue(encodeVersion("2.0.0-pre127").compareTo(encodeVersion("2.0.0-pre128")) < 0);
assertTrue(encodeVersion("2.0.0-pre128").compareTo(encodeVersion("2.0.0-pre128-somethingelse")) < 0);
assertTrue(encodeVersion("2.0.0-pre20201231z110026").compareTo(encodeVersion("2.0.0-pre227")) < 0);
// invalid versions sort after valid ones
assertTrue(encodeVersion("99999.99999.99999").compareTo(encodeVersion("1.invalid")) < 0);
assertTrue(encodeVersion("").compareTo(encodeVersion("a")) < 0);
}
private static BytesRef encodeVersion(String version) {
return VersionEncoder.encodeVersion(version).bytesRef;
}
public void testPreReleaseFlag() {
assertTrue(VersionEncoder.encodeVersion("1.2-alpha.beta").isPreRelease);
assertTrue(VersionEncoder.encodeVersion("1.2.3-someOtherPreRelease").isPreRelease);
assertTrue(VersionEncoder.encodeVersion("1.2.3-some-Other-Pre.123").isPreRelease);
assertTrue(VersionEncoder.encodeVersion("1.2.3-some-Other-Pre.123+withBuild").isPreRelease);
assertFalse(VersionEncoder.encodeVersion("1").isPreRelease);
assertFalse(VersionEncoder.encodeVersion("1.2").isPreRelease);
assertFalse(VersionEncoder.encodeVersion("1.2.3").isPreRelease);
assertFalse(VersionEncoder.encodeVersion("1.2.3+buildSufix").isPreRelease);
assertFalse(VersionEncoder.encodeVersion("1.2.3+buildSufix-withDash").isPreRelease);
}
public void testVersionPartExtraction() {
int numParts = randomIntBetween(1, 6);
String[] parts = new String[numParts];
for (int i = 0; i < numParts; i++) {
parts[i] = String.valueOf(randomIntBetween(1, 1000));
}
EncodedVersion encodedVersion = VersionEncoder.encodeVersion(String.join(".", parts));
assertEquals(parts[0], encodedVersion.major.toString());
if (numParts > 1) {
assertEquals(parts[1], encodedVersion.minor.toString());
} else {
assertNull(encodedVersion.minor);
}
if (numParts > 2) {
assertEquals(parts[2], encodedVersion.patch.toString());
} else {
assertNull(encodedVersion.patch);
}
}
public void testMaxDigitGroupLength() {
String versionString = "1.0." + "1".repeat(128);
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> decodeVersion(encodeVersion(versionString)));
assertEquals("Groups of digits cannot be longer than 127, but found: 128", ex.getMessage());
}
/**
* test that encoding and decoding leads back to the same version string
*/
public void testRandomRoundtrip() {
String versionString = randomVersionString();
assertEquals(versionString, decodeVersion(encodeVersion(versionString)).utf8ToString());
}
private String randomVersionString() {
StringBuilder sb = new StringBuilder();
sb.append(randomIntBetween(0, 1000));
int releaseNumerals = randomIntBetween(0, 4);
for (int i = 0; i < releaseNumerals; i++) {
sb.append(".");
sb.append(randomIntBetween(0, 10000));
}
// optional pre-release part
if (randomBoolean()) {
sb.append("-");
int preReleaseParts = randomIntBetween(1, 5);
for (int i = 0; i < preReleaseParts; i++) {
if (randomBoolean()) {
sb.append(randomIntBetween(0, 1000));
} else {
int alphanumParts = 3;
for (int j = 0; j < alphanumParts; j++) {
if (randomBoolean()) {
sb.append(randomAlphaOfLengthBetween(1, 2));
} else {
sb.append(randomIntBetween(1, 99));
}
if (rarely()) {
sb.append(randomFrom(Arrays.asList("-")));
}
}
}
sb.append(".");
}
sb.deleteCharAt(sb.length() - 1); // remove trailing dot
}
// optional build part
if (randomBoolean()) {
sb.append("+").append(randomAlphaOfLengthBetween(1, 15));
}
return sb.toString();
}
/**
* taken from https://regex101.com/r/vkijKf/1/ via https://semver.org/
*/
public void testSemVerValidation() {
String[] validSemverVersions = new String[] {
"0.0.4",
"1.2.3",
"10.20.30",
"1.1.2-prerelease+meta",
"1.1.2+meta",
"1.1.2+meta-valid",
"1.0.0-alpha",
"1.0.0-beta",
"1.0.0-alpha.beta",
"1.0.0-alpha.beta.1",
"1.0.0-alpha.1",
"1.0.0-alpha0.valid",
"1.0.0-alpha.0valid",
"1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay",
"1.0.0-rc.1+build.1",
"2.0.0-rc.1+build.123",
"1.2.3-beta",
"10.2.3-DEV-SNAPSHOT",
"1.2.3-SNAPSHOT-123",
"1.0.0",
"2.0.0",
"1.1.7",
"2.0.0+build.1848",
"2.0.1-alpha.1227",
"1.0.0-alpha+beta",
"1.2.3----RC-SNAPSHOT.12.9.1--.12+788",
"1.2.3----R-S.12.9.1--.12+meta",
"1.2.3----RC-SNAPSHOT.12.9.1--.12",
"1.0.0+0.build.1-rc.10000aaa-kk-0.1",
"999999999.999999999.999999999",
"1.0.0-0A.is.legal",
// the following are not strict semver but we allow them
"1.2-SNAPSHOT",
"1.2-RC-SNAPSHOT",
"1",
"1.2.3.4" };
for (String version : validSemverVersions) {
EncodedVersion encodedVersion = VersionEncoder.encodeVersion(version);
assertTrue("should be valid: " + version, encodedVersion.isLegal);
// since we're here, also check encoding / decoding rountrip
assertEquals(version, decodeVersion(encodeVersion(version)).utf8ToString());
checkPrefixAutomaton(version, encodedVersion, false);
}
String[] invalidSemverVersions = new String[] {
"",
"1.2.3-0123",
"1.2.3-0123.0123",
"1.1.2+.123",
"+invalid",
"-invalid",
"-invalid+invalid",
"-invalid.01",
"alpha",
"alpha.beta",
"alpha.beta.1",
"alpha.1",
"alpha+beta",
"alpha_beta",
"alpha.",
"alpha..",
"beta",
"1.0.0-alpha_beta",
"-alpha.",
"1.0.0-alpha..",
"1.0.0-alpha..1",
"1.0.0-alpha...1",
"1.0.0-alpha....1",
"1.0.0-alpha.....1",
"1.0.0-alpha......1",
"1.0.0-alpha.......1",
"01.1.1",
"1.01.1",
"1.1.01",
"1.2.3.DEV",
"1.2.31.2.3----RC-SNAPSHOT.12.09.1--..12+788",
"-1.0.3-gamma+b7718",
"+justmeta",
"9.8.7+meta+meta",
"9.8.7-whatever+meta+meta",
"999999999.999999999.999999999.----RC-SNAPSHOT.12.09.1--------------------------------..12",
"12.el2",
"12.el2-1.0-rc5",
"6.nüll.7" // make sure extended ascii-range (128-255) in invalid versions is decoded correctly
};
for (String version : invalidSemverVersions) {
EncodedVersion encodedVersion = VersionEncoder.encodeVersion(version);
assertFalse("should be invalid: " + version, encodedVersion.isLegal);
// since we're here, also check encoding / decoding rountrip
assertEquals(version, decodeVersion(encodeVersion(version)).utf8ToString());
checkPrefixAutomaton(version, encodedVersion, false);
}
}
/**
* check if an automaton created for a random prefix of the version accepts the version
*/
private void checkPrefixAutomaton(String version, EncodedVersion encodedVersion, boolean caseInsensitive) {
if (version.length() > 0) {
CompiledAutomaton prefixAutomaton = VersionEncoder.prefixAutomaton(
version.substring(0, randomIntBetween(1, Math.max(1, version.length() - 1))),
caseInsensitive
);
assertTrue(prefixAutomaton.runAutomaton.run(encodedVersion.bytesRef.bytes, 0, encodedVersion.bytesRef.bytes.length));
}
}
public void testPrefixAutomatonCaseinsensiti() {
checkPrefixAutomaton("SOME.ILLEGAL.VERSION", VersionEncoder.encodeVersion("some.illegal.version"), true);
}
}
| VersionEncoderTests |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumWriter.java | {
"start": 9652,
"end": 14787
} | enum ____. May be overridden for alternate enum
* representations.
*/
protected void writeEnum(Schema schema, Object datum, Encoder out) throws IOException {
if (!data.isEnum(datum)) {
AvroTypeException cause = new AvroTypeException(
"value " + SchemaUtil.describe(datum) + " is not a " + SchemaUtil.describe(schema));
throw new TracingAvroTypeException(cause);
}
out.writeEnum(schema.getEnumOrdinal(datum.toString()));
}
/**
* Called to write a array. May be overridden for alternate array
* representations.
*/
protected void writeArray(Schema schema, Object datum, Encoder out) throws IOException {
Schema element = schema.getElementType();
long size = getArraySize(datum);
long actualSize = 0;
out.writeArrayStart();
out.setItemCount(size);
for (Iterator<?> it = getArrayElements(datum); it.hasNext();) {
out.startItem();
try {
write(element, it.next(), out);
} catch (TracingNullPointException | TracingClassCastException | TracingAvroTypeException e) {
e.tracePath(new ArrayPositionPredicate(actualSize));
throw e;
}
actualSize++;
}
out.writeArrayEnd();
if (actualSize != size) {
throw new ConcurrentModificationException(
"Size of array written was " + size + ", but number of elements written was " + actualSize + ". ");
}
}
/**
* Called to find the index for a datum within a union. By default calls
* {@link GenericData#resolveUnion(Schema,Object)}.
*/
protected int resolveUnion(Schema union, Object datum) {
return data.resolveUnion(union, datum);
}
/**
* Called by the default implementation of {@link #writeArray} to get the size
* of an array. The default implementation is for {@link Collection}.
*/
protected long getArraySize(Object array) {
return ((Collection<?>) array).size();
}
/**
* Called by the default implementation of {@link #writeArray} to enumerate
* array elements. The default implementation is for {@link Collection}.
*/
protected Iterator<?> getArrayElements(Object array) {
return ((Collection<?>) array).iterator();
}
/**
* Called to write a map. May be overridden for alternate map representations.
*/
protected void writeMap(Schema schema, Object datum, Encoder out) throws IOException {
Schema value = schema.getValueType();
int size = getMapSize(datum);
int actualSize = 0;
out.writeMapStart();
out.setItemCount(size);
for (Map.Entry<Object, Object> entry : getMapEntries(datum)) {
out.startItem();
String key;
try {
key = entry.getKey().toString();
} catch (NullPointerException npe) {
TracingNullPointException tnpe = new TracingNullPointException(npe, Schema.create(Schema.Type.STRING), false);
tnpe.tracePath(new MapKeyPredicate(null));
throw tnpe;
}
writeString(key, out);
try {
write(value, entry.getValue(), out);
} catch (TracingNullPointException | TracingClassCastException | TracingAvroTypeException e) {
e.tracePath(new MapKeyPredicate(key));
throw e;
}
actualSize++;
}
out.writeMapEnd();
if (actualSize != size) {
throw new ConcurrentModificationException(
"Size of map written was " + size + ", but number of entries written was " + actualSize + ". ");
}
}
/**
* Called by the default implementation of {@link #writeMap} to get the size of
* a map. The default implementation is for {@link Map}.
*/
@SuppressWarnings("unchecked")
protected int getMapSize(Object map) {
return ((Map) map).size();
}
/**
* Called by the default implementation of {@link #writeMap} to enumerate map
* elements. The default implementation is for {@link Map}.
*/
@SuppressWarnings("unchecked")
protected Iterable<Map.Entry<Object, Object>> getMapEntries(Object map) {
return ((Map) map).entrySet();
}
/**
* Called to write a string. May be overridden for alternate string
* representations.
*/
protected void writeString(Schema schema, Object datum, Encoder out) throws IOException {
writeString(datum, out);
}
/**
* Called to write a string. May be overridden for alternate string
* representations.
*/
protected void writeString(Object datum, Encoder out) throws IOException {
out.writeString((CharSequence) datum);
}
/**
* Called to write a bytes. May be overridden for alternate bytes
* representations.
*/
protected void writeBytes(Object datum, Encoder out) throws IOException {
out.writeBytes((ByteBuffer) datum);
}
/**
* Called to write a fixed value. May be overridden for alternate fixed
* representations.
*/
protected void writeFixed(Schema schema, Object datum, Encoder out) throws IOException {
out.writeFixed(((GenericFixed) datum).bytes(), 0, schema.getFixedSize());
}
private void error(Schema schema, Object datum) {
throw new AvroTypeException("value " + SchemaUtil.describe(datum) + " is not a " + SchemaUtil.describe(schema));
}
}
| value |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/source/coordinator/SourceCoordinatorTest.java | {
"start": 35995,
"end": 37982
} | class ____<
T, EnumT extends SplitEnumerator<MockSourceSplit, Set<MockSourceSplit>>>
implements Source<T, MockSourceSplit, Set<MockSourceSplit>> {
final CompletableFuture<EnumT> createEnumeratorFuture = new CompletableFuture<>();
final CompletableFuture<EnumT> restoreEnumeratorFuture = new CompletableFuture<>();
private final Supplier<EnumT> enumeratorFactory;
public EnumeratorCreatingSource(Supplier<EnumT> enumeratorFactory) {
this.enumeratorFactory = enumeratorFactory;
}
@Override
public Boundedness getBoundedness() {
return Boundedness.CONTINUOUS_UNBOUNDED;
}
@Override
public SourceReader<T, MockSourceSplit> createReader(SourceReaderContext readerContext) {
throw new UnsupportedOperationException();
}
@Override
public SplitEnumerator<MockSourceSplit, Set<MockSourceSplit>> createEnumerator(
SplitEnumeratorContext<MockSourceSplit> enumContext) {
final EnumT enumerator = enumeratorFactory.get();
createEnumeratorFuture.complete(enumerator);
return enumerator;
}
@Override
public SplitEnumerator<MockSourceSplit, Set<MockSourceSplit>> restoreEnumerator(
SplitEnumeratorContext<MockSourceSplit> enumContext,
Set<MockSourceSplit> checkpoint) {
final EnumT enumerator = enumeratorFactory.get();
restoreEnumeratorFuture.complete(enumerator);
return enumerator;
}
@Override
public SimpleVersionedSerializer<MockSourceSplit> getSplitSerializer() {
return new MockSourceSplitSerializer();
}
@Override
public SimpleVersionedSerializer<Set<MockSourceSplit>> getEnumeratorCheckpointSerializer() {
return new MockSplitEnumeratorCheckpointSerializer();
}
}
}
| EnumeratorCreatingSource |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/test/java/io/quarkus/redis/datasource/NumericCommandsTest.java | {
"start": 432,
"end": 1690
} | class ____ extends DatasourceTestBase {
private RedisDataSource ds;
static String key = "key-sort";
private ValueCommands<String, Long> num;
@BeforeEach
void initialize() {
ds = new BlockingRedisDataSourceImpl(vertx, redis, api, Duration.ofSeconds(5));
num = ds.value(Long.class);
}
@AfterEach
public void clear() {
ds.flushall();
}
@Test
void getDataSource() {
assertThat(ds).isEqualTo(num.getDataSource());
}
@Test
void decr() {
assertThat(num.decr(key)).isEqualTo(-1);
assertThat(num.decr(key)).isEqualTo(-2);
}
@Test
void decrby() {
assertThat(num.decrby(key, 3)).isEqualTo(-3);
assertThat(num.decrby(key, 3)).isEqualTo(-6);
}
@Test
void incr() {
assertThat(num.incr(key)).isEqualTo(1);
assertThat(num.incr(key)).isEqualTo(2);
}
@Test
void incrby() {
assertThat(num.incrby(key, 3)).isEqualTo(3);
assertThat(num.incrby(key, 3)).isEqualTo(6);
}
@Test
void incrbyfloat() {
assertThat(num.incrbyfloat(key, 3.0)).isEqualTo(3.0, offset(0.1));
assertThat(num.incrbyfloat(key, 0.2)).isEqualTo(3.2, offset(0.1));
}
}
| NumericCommandsTest |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/HttpEndpointBuilderFactory.java | {
"start": 79534,
"end": 79841
} | class ____ extends AbstractEndpointBuilder implements HttpEndpointBuilder, AdvancedHttpEndpointBuilder {
public HttpEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new HttpEndpointBuilderImpl(path);
}
} | HttpEndpointBuilderImpl |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/Logger.java | {
"start": 179424,
"end": 181462
} | class ____
* method when location information needs to be logged.
* @param location The location of the caller.
* @param message The message format.
* @param throwable the {@code Throwable} to log, including its stack trace.
* @since 2.13.0
*/
default void logMessage(
final Level level,
final Marker marker,
final String fqcn,
final StackTraceElement location,
final Message message,
final Throwable throwable) {
// noop
}
/**
* Construct a trace log event.
* @return a LogBuilder.
* @since 2.13.0
*/
default LogBuilder atTrace() {
return LogBuilder.NOOP;
}
/**
* Construct a trace log event.
* @return a LogBuilder.
* @since 2.13.0
*/
default LogBuilder atDebug() {
return LogBuilder.NOOP;
}
/**
* Construct a trace log event.
* @return a LogBuilder.
* @since 2.13.0
*/
default LogBuilder atInfo() {
return LogBuilder.NOOP;
}
/**
* Construct a trace log event.
* @return a LogBuilder.
* @since 2.13.0
*/
default LogBuilder atWarn() {
return LogBuilder.NOOP;
}
/**
* Construct a trace log event.
* @return a LogBuilder.
* @since 2.13.0
*/
default LogBuilder atError() {
return LogBuilder.NOOP;
}
/**
* Construct a trace log event.
* @return a LogBuilder.
* @since 2.13.0
*/
default LogBuilder atFatal() {
return LogBuilder.NOOP;
}
/**
* Construct a log event that will always be logged.
* @return a LogBuilder.
* @since 2.13.0
*/
default LogBuilder always() {
return LogBuilder.NOOP;
}
/**
* Construct a log event.
* @param level Any level (ignoreed here).
* @return a LogBuilder.
* @since 2.13.0
*/
default LogBuilder atLevel(final Level level) {
return LogBuilder.NOOP;
}
}
| and |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/aggregate/window/buffers/RecordsWindowBuffer.java | {
"start": 5656,
"end": 7462
} | class ____ implements WindowBuffer.Factory {
private static final long serialVersionUID = 1L;
private final PagedTypeSerializer<RowData> keySer;
private final AbstractRowDataSerializer<RowData> inputSer;
private final RecordsCombiner.Factory factory;
public Factory(
PagedTypeSerializer<RowData> keySer,
AbstractRowDataSerializer<RowData> inputSer,
RecordsCombiner.Factory combinerFactory) {
this.keySer = keySer;
this.inputSer = inputSer;
this.factory = combinerFactory;
}
@Override
public WindowBuffer create(
Object operatorOwner,
MemoryManager memoryManager,
long memorySize,
RuntimeContext runtimeContext,
WindowTimerService<Long> timerService,
KeyedStateBackend<RowData> stateBackend,
WindowState<Long> windowState,
boolean isEventTime,
ZoneId shiftTimeZone)
throws Exception {
RecordsCombiner combiner =
factory.createRecordsCombiner(
runtimeContext, timerService, stateBackend, windowState, isEventTime);
boolean requiresCopy = !stateBackend.isSafeToReuseKVState();
return new RecordsWindowBuffer(
operatorOwner,
memoryManager,
memorySize,
combiner,
keySer,
inputSer,
requiresCopy,
shiftTimeZone);
}
}
/** Factory to create {@link RecordsWindowBuffer} with {@link RecordsCombiner.LocalFactory}. */
public static final | Factory |
java | netty__netty | codec-http3/src/main/java/io/netty/handler/codec/http3/QpackException.java | {
"start": 850,
"end": 1328
} | class ____ extends Exception {
private QpackException(String message, @Nullable Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
static QpackException newStatic(Class<?> clazz, String method, String message) {
return ThrowableUtil.unknownStackTrace(new QpackException(message, null, false, false), clazz, method);
}
}
| QpackException |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/xml/spi/XmlProcessingResult.java | {
"start": 786,
"end": 1649
} | class ____<M extends JaxbManagedType> {
private final JaxbEntityMappingsImpl jaxbRoot;
private final XmlDocumentContext xmlDocumentContext;
private final M managedType;
public OverrideTuple(JaxbEntityMappingsImpl jaxbRoot, XmlDocumentContext xmlDocumentContext, M managedType) {
this.jaxbRoot = jaxbRoot;
this.xmlDocumentContext = xmlDocumentContext;
this.managedType = managedType;
}
public JaxbEntityMappingsImpl getJaxbRoot() {
return jaxbRoot;
}
public XmlDocumentContext getXmlDocumentContext() {
return xmlDocumentContext;
}
public M getManagedType() {
return managedType;
}
}
void apply();
List<OverrideTuple<JaxbEntityImpl>> getEntityOverrides();
List<OverrideTuple<JaxbMappedSuperclassImpl>> getMappedSuperclassesOverrides();
List<OverrideTuple<JaxbEmbeddableImpl>> getEmbeddableOverrides();
}
| OverrideTuple |
java | spring-projects__spring-boot | core/spring-boot-test/src/main/java/org/springframework/boot/test/system/OutputCapture.java | {
"start": 7887,
"end": 8026
} | enum ____ {
OUT, ERR
}
/**
* Save, disable and restore AnsiOutput without it needing to be on the classpath.
*/
private static | Type |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java | {
"start": 1986,
"end": 5475
} | class ____ extends ESTestCase {
private final EsqlParser parser = new EsqlParser();
private final String esql = "from test | eval license() | LIMIT 10";
public void testLicense() {
for (License.OperationMode functionLicense : License.OperationMode.values()) {
final LicensedFeature functionLicenseFeature = random().nextBoolean()
? LicensedFeature.momentary("test", "license", functionLicense)
: LicensedFeature.persistent("test", "license", functionLicense);
for (License.OperationMode operationMode : License.OperationMode.values()) {
if (License.OperationMode.TRIAL != operationMode && License.OperationMode.compare(operationMode, functionLicense) < 0) {
// non-compliant license
final VerificationException ex = expectThrows(
VerificationException.class,
() -> analyze(operationMode, functionLicenseFeature)
);
assertThat(ex.getMessage(), containsString("current license is non-compliant for [license()]"));
assertThat(ex.getMessage(), containsString("current license is non-compliant for [LicensedLimit]"));
} else {
// compliant license
assertNotNull(analyze(operationMode, functionLicenseFeature));
}
}
}
}
private LogicalPlan analyze(License.OperationMode operationMode, LicensedFeature functionLicenseFeature) {
final EsqlFunctionRegistry.FunctionBuilder builder = (source, expression, cfg) -> new LicensedFunction(
source,
functionLicenseFeature
);
final FunctionDefinition def = EsqlFunctionRegistry.def(LicensedFunction.class, builder, "license");
final EsqlFunctionRegistry registry = new EsqlFunctionRegistry(def) {
@Override
public EsqlFunctionRegistry snapshotRegistry() {
return this;
}
};
var plan = parser.createStatement(esql);
plan = plan.transformDown(
Limit.class,
l -> Objects.equals(l.limit().fold(FoldContext.small()), 10)
? new LicensedLimit(l.source(), l.limit(), l.child(), functionLicenseFeature)
: l
);
return analyzer(registry, operationMode).analyze(plan);
}
private static Analyzer analyzer(EsqlFunctionRegistry registry, License.OperationMode operationMode) {
return new Analyzer(
testAnalyzerContext(
EsqlTestUtils.TEST_CFG,
registry,
analyzerDefaultMapping(),
defaultEnrichResolution(),
emptyInferenceResolution()
),
new Verifier(new Metrics(new EsqlFunctionRegistry()), getLicenseState(operationMode))
);
}
private static XPackLicenseState getLicenseState(License.OperationMode operationMode) {
final TestUtils.UpdatableLicenseState licenseState = new TestUtils.UpdatableLicenseState();
licenseState.update(new XPackLicenseStatus(operationMode, true, null));
return licenseState;
}
// It needs to be public because we run validation on it via reflection in org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests.
// This test prevents to add the license as constructor parameter too.
public static | CheckLicenseTests |
java | square__javapoet | src/test/java/com/squareup/javapoet/JavaFileTest.java | {
"start": 24587,
"end": 24671
} | class ____ {\n"
+ " com.squareup.tacos.A a;\n"
+ "\n"
+ " | Taco |
java | micronaut-projects__micronaut-core | http-client-jdk/src/main/java/io/micronaut/http/client/jdk/cookie/CookieDecoder.java | {
"start": 931,
"end": 1188
} | interface ____ extends Ordered {
/**
* Decode the cookies from the request.
*
* @param request the request
* @return the cookies or empty if none
*/
@NonNull
Optional<Cookies> decode(HttpRequest<?> request);
}
| CookieDecoder |
java | grpc__grpc-java | okhttp/third_party/okhttp/main/java/io/grpc/okhttp/internal/framed/Http2.java | {
"start": 26268,
"end": 30056
} | class ____ {
static String formatHeader(boolean inbound, int streamId, int length, byte type, byte flags) {
String formattedType = type < TYPES.length ? TYPES[type] : format("0x%02x", type);
String formattedFlags = formatFlags(type, flags);
return format(Locale.US, "%s 0x%08x %5d %-13s %s", inbound ? "<<" : ">>", streamId, length,
formattedType, formattedFlags);
}
/**
* Looks up valid string representing flags from the table. Invalid
* combinations are represented in binary.
*/
// Visible for testing.
static String formatFlags(byte type, byte flags) {
if (flags == 0) return "";
switch (type) { // Special case types that have 0 or 1 flag.
case TYPE_SETTINGS:
case TYPE_PING:
return flags == FLAG_ACK ? "ACK" : BINARY[flags];
case TYPE_PRIORITY:
case TYPE_RST_STREAM:
case TYPE_GOAWAY:
case TYPE_WINDOW_UPDATE:
return BINARY[flags];
}
String result = flags < FLAGS.length ? FLAGS[flags] : BINARY[flags];
// Special case types that have overlap flag values.
if (type == TYPE_PUSH_PROMISE && (flags & FLAG_END_PUSH_PROMISE) != 0) {
return result.replace("HEADERS", "PUSH_PROMISE"); // TODO: Avoid allocation.
} else if (type == TYPE_DATA && (flags & FLAG_COMPRESSED) != 0) {
return result.replace("PRIORITY", "COMPRESSED"); // TODO: Avoid allocation.
}
return result;
}
/** Lookup table for valid frame types. */
private static final String[] TYPES = new String[] {
"DATA",
"HEADERS",
"PRIORITY",
"RST_STREAM",
"SETTINGS",
"PUSH_PROMISE",
"PING",
"GOAWAY",
"WINDOW_UPDATE",
"CONTINUATION"
};
/**
* Lookup table for valid flags for DATA, HEADERS, CONTINUATION. Invalid
* combinations are represented in binary.
*/
private static final String[] FLAGS = new String[0x40]; // Highest bit flag is 0x20.
private static final String[] BINARY = new String[256];
static {
for (int i = 0; i < BINARY.length; i++) {
BINARY[i] = format("%8s", Integer.toBinaryString(i)).replace(' ', '0');
}
FLAGS[FLAG_NONE] = "";
FLAGS[FLAG_END_STREAM] = "END_STREAM";
int[] prefixFlags = new int[] {FLAG_END_STREAM};
FLAGS[FLAG_PADDED] = "PADDED";
for (int prefixFlag : prefixFlags) {
FLAGS[prefixFlag | FLAG_PADDED] = FLAGS[prefixFlag] + "|PADDED";
}
FLAGS[FLAG_END_HEADERS] = "END_HEADERS"; // Same as END_PUSH_PROMISE.
FLAGS[FLAG_PRIORITY] = "PRIORITY"; // Same as FLAG_COMPRESSED.
FLAGS[FLAG_END_HEADERS | FLAG_PRIORITY] = "END_HEADERS|PRIORITY"; // Only valid on HEADERS.
int[] frameFlags =
new int[] {FLAG_END_HEADERS, FLAG_PRIORITY, FLAG_END_HEADERS | FLAG_PRIORITY};
for (int frameFlag : frameFlags) {
for (int prefixFlag : prefixFlags) {
FLAGS[prefixFlag | frameFlag] = FLAGS[prefixFlag] + '|' + FLAGS[frameFlag];
FLAGS[prefixFlag | frameFlag | FLAG_PADDED] =
FLAGS[prefixFlag] + '|' + FLAGS[frameFlag] + "|PADDED";
}
}
for (int i = 0; i < FLAGS.length; i++) { // Fill in holes with binary representation.
if (FLAGS[i] == null) FLAGS[i] = BINARY[i];
}
}
}
private static int readMedium(BufferedSource source) throws IOException {
return (source.readByte() & 0xff) << 16
| (source.readByte() & 0xff) << 8
| (source.readByte() & 0xff);
}
private static void writeMedium(BufferedSink sink, int i) throws IOException {
sink.writeByte((i >>> 16) & 0xff);
sink.writeByte((i >>> 8) & 0xff);
sink.writeByte(i & 0xff);
}
}
| FrameLogger |
java | spring-projects__spring-boot | core/spring-boot-test/src/main/java/org/springframework/boot/test/context/filter/annotation/AnnotationCustomizableTypeExcludeFilter.java | {
"start": 1553,
"end": 4026
} | class ____ extends TypeExcludeFilter
implements BeanClassLoaderAware {
@SuppressWarnings("NullAway.Init")
private ClassLoader classLoader;
@Override
public void setBeanClassLoader(ClassLoader classLoader) {
this.classLoader = classLoader;
}
@Override
public boolean match(MetadataReader metadataReader, MetadataReaderFactory metadataReaderFactory)
throws IOException {
if (hasAnnotation()) {
return !(include(metadataReader, metadataReaderFactory) && !exclude(metadataReader, metadataReaderFactory));
}
return false;
}
protected boolean include(MetadataReader metadataReader, MetadataReaderFactory metadataReaderFactory)
throws IOException {
if (new FilterAnnotations(this.classLoader, getFilters(FilterType.INCLUDE)).anyMatches(metadataReader,
metadataReaderFactory)) {
return true;
}
return isUseDefaultFilters() && defaultInclude(metadataReader, metadataReaderFactory);
}
protected boolean defaultInclude(MetadataReader metadataReader, MetadataReaderFactory metadataReaderFactory)
throws IOException {
for (Class<?> include : getDefaultIncludes()) {
if (isTypeOrAnnotated(metadataReader, metadataReaderFactory, include)) {
return true;
}
}
for (Class<?> component : getComponentIncludes()) {
if (isTypeOrAnnotated(metadataReader, metadataReaderFactory, component)) {
return true;
}
}
return false;
}
protected boolean exclude(MetadataReader metadataReader, MetadataReaderFactory metadataReaderFactory)
throws IOException {
return new FilterAnnotations(this.classLoader, getFilters(FilterType.EXCLUDE)).anyMatches(metadataReader,
metadataReaderFactory);
}
@SuppressWarnings("unchecked")
protected final boolean isTypeOrAnnotated(MetadataReader metadataReader,
MetadataReaderFactory metadataReaderFactory, Class<?> type) throws IOException {
AnnotationTypeFilter annotationFilter = new AnnotationTypeFilter((Class<? extends Annotation>) type);
AssignableTypeFilter typeFilter = new AssignableTypeFilter(type);
return annotationFilter.match(metadataReader, metadataReaderFactory)
|| typeFilter.match(metadataReader, metadataReaderFactory);
}
protected abstract boolean hasAnnotation();
protected abstract Filter[] getFilters(FilterType type);
protected abstract boolean isUseDefaultFilters();
protected abstract Set<Class<?>> getDefaultIncludes();
protected abstract Set<Class<?>> getComponentIncludes();
protected | AnnotationCustomizableTypeExcludeFilter |
java | apache__dubbo | dubbo-config/dubbo-config-api/src/main/java/org/apache/dubbo/config/deploy/DefaultApplicationDeployer.java | {
"start": 6222,
"end": 25060
} | class ____ extends AbstractDeployer<ApplicationModel> implements ApplicationDeployer {
private static final ErrorTypeAwareLogger logger =
LoggerFactory.getErrorTypeAwareLogger(DefaultApplicationDeployer.class);
private final ApplicationModel applicationModel;
private final ConfigManager configManager;
private final Environment environment;
private final ReferenceCache referenceCache;
private final FrameworkExecutorRepository frameworkExecutorRepository;
private final ExecutorRepository executorRepository;
private final AtomicBoolean hasPreparedApplicationInstance = new AtomicBoolean(false);
private volatile boolean hasPreparedInternalModule = false;
private ScheduledFuture<?> asyncMetadataFuture;
private volatile CompletableFuture<Boolean> startFuture;
private final DubboShutdownHook dubboShutdownHook;
private volatile MetricsServiceExporter metricsServiceExporter;
private final Object stateLock = new Object();
private final Object startLock = new Object();
private final Object destroyLock = new Object();
private final Object internalModuleLock = new Object();
public DefaultApplicationDeployer(ApplicationModel applicationModel) {
super(applicationModel);
this.applicationModel = applicationModel;
configManager = applicationModel.getApplicationConfigManager();
environment = applicationModel.modelEnvironment();
referenceCache = new CompositeReferenceCache(applicationModel);
frameworkExecutorRepository =
applicationModel.getFrameworkModel().getBeanFactory().getBean(FrameworkExecutorRepository.class);
executorRepository = ExecutorRepository.getInstance(applicationModel);
dubboShutdownHook = new DubboShutdownHook(applicationModel);
// load spi listener
Set<ApplicationDeployListener> deployListeners = applicationModel
.getExtensionLoader(ApplicationDeployListener.class)
.getSupportedExtensionInstances();
for (ApplicationDeployListener listener : deployListeners) {
this.addDeployListener(listener);
}
}
public static ApplicationDeployer get(ScopeModel moduleOrApplicationModel) {
ApplicationModel applicationModel = ScopeModelUtil.getApplicationModel(moduleOrApplicationModel);
ApplicationDeployer applicationDeployer = applicationModel.getDeployer();
if (applicationDeployer == null) {
applicationDeployer = applicationModel.getBeanFactory().getOrRegisterBean(DefaultApplicationDeployer.class);
}
return applicationDeployer;
}
@Override
public ApplicationModel getApplicationModel() {
return applicationModel;
}
private <T> ExtensionLoader<T> getExtensionLoader(Class<T> type) {
return applicationModel.getExtensionLoader(type);
}
private void unRegisterShutdownHook() {
dubboShutdownHook.unregister();
}
/**
* Enable registration of instance for pure Consumer process by setting registerConsumer to 'true'
* by default is false.
*/
private boolean isRegisterConsumerInstance() {
Boolean registerConsumer = getApplicationOrElseThrow().getRegisterConsumer();
if (registerConsumer == null) {
return false;
}
return Boolean.TRUE.equals(registerConsumer);
}
@Override
public ReferenceCache getReferenceCache() {
return referenceCache;
}
/**
* Initialize
*/
@Override
public void initialize() {
if (initialized) {
return;
}
// Ensure that the initialization is completed when concurrent calls
synchronized (startLock) {
if (initialized) {
return;
}
onInitialize();
// register shutdown hook
registerShutdownHook();
startConfigCenter();
loadApplicationConfigs();
initModuleDeployers();
initMetricsReporter();
initMetricsService();
// @since 3.2.3
initObservationRegistry();
// @since 2.7.8
startMetadataCenter();
initialized = true;
if (logger.isInfoEnabled()) {
logger.info(getIdentifier() + " has been initialized!");
}
}
}
private void registerShutdownHook() {
dubboShutdownHook.register();
}
private void initModuleDeployers() {
// make sure created default module
applicationModel.getDefaultModule();
// deployer initialize
for (ModuleModel moduleModel : applicationModel.getModuleModels()) {
moduleModel.getDeployer().initialize();
}
}
private void loadApplicationConfigs() {
configManager.loadConfigs();
}
private void startConfigCenter() {
// load application config
configManager.loadConfigsOfTypeFromProps(ApplicationConfig.class);
// try set model name
if (StringUtils.isBlank(applicationModel.getModelName())) {
applicationModel.setModelName(applicationModel.tryGetApplicationName());
}
// load config centers
configManager.loadConfigsOfTypeFromProps(ConfigCenterConfig.class);
useRegistryAsConfigCenterIfNecessary();
// check Config Center
Collection<ConfigCenterConfig> configCenters = configManager.getConfigCenters();
if (CollectionUtils.isEmpty(configCenters)) {
ConfigCenterConfig configCenterConfig = new ConfigCenterConfig();
configCenterConfig.setScopeModel(applicationModel);
configCenterConfig.refresh();
ConfigValidationUtils.validateConfigCenterConfig(configCenterConfig);
if (configCenterConfig.isValid()) {
configManager.addConfigCenter(configCenterConfig);
configCenters = configManager.getConfigCenters();
}
} else {
for (ConfigCenterConfig configCenterConfig : configCenters) {
configCenterConfig.refresh();
ConfigValidationUtils.validateConfigCenterConfig(configCenterConfig);
}
}
if (CollectionUtils.isNotEmpty(configCenters)) {
CompositeDynamicConfiguration compositeDynamicConfiguration = new CompositeDynamicConfiguration();
for (ConfigCenterConfig configCenter : configCenters) {
// Pass config from ConfigCenterBean to environment
environment.updateExternalConfigMap(configCenter.getExternalConfiguration());
environment.updateAppExternalConfigMap(configCenter.getAppExternalConfiguration());
// Fetch config from remote config center
compositeDynamicConfiguration.addConfiguration(prepareEnvironment(configCenter));
}
environment.setDynamicConfiguration(compositeDynamicConfiguration);
}
}
private void startMetadataCenter() {
useRegistryAsMetadataCenterIfNecessary();
ApplicationConfig applicationConfig = getApplicationOrElseThrow();
String metadataType = applicationConfig.getMetadataType();
// FIXME, multiple metadata config support.
Collection<MetadataReportConfig> metadataReportConfigs = configManager.getMetadataConfigs();
if (CollectionUtils.isEmpty(metadataReportConfigs)) {
if (REMOTE_METADATA_STORAGE_TYPE.equals(metadataType)) {
throw new IllegalStateException(
"No MetadataConfig found, Metadata Center address is required when 'metadata=remote' is enabled.");
}
return;
}
MetadataReportInstance metadataReportInstance =
applicationModel.getBeanFactory().getBean(MetadataReportInstance.class);
List<MetadataReportConfig> validMetadataReportConfigs = new ArrayList<>(metadataReportConfigs.size());
for (MetadataReportConfig metadataReportConfig : metadataReportConfigs) {
if (ConfigValidationUtils.isValidMetadataConfig(metadataReportConfig)) {
ConfigValidationUtils.validateMetadataConfig(metadataReportConfig);
validMetadataReportConfigs.add(metadataReportConfig);
}
}
metadataReportInstance.init(validMetadataReportConfigs);
if (!metadataReportInstance.isInitialized()) {
throw new IllegalStateException(String.format(
"%s MetadataConfigs found, but none of them is valid.", metadataReportConfigs.size()));
}
}
/**
* For compatibility purpose, use registry as the default config center when
* there's no config center specified explicitly and
* useAsConfigCenter of registryConfig is null or true
*/
private void useRegistryAsConfigCenterIfNecessary() {
// we use the loading status of DynamicConfiguration to decide whether ConfigCenter has been initiated.
if (environment.getDynamicConfiguration().isPresent()) {
return;
}
if (CollectionUtils.isNotEmpty(configManager.getConfigCenters())) {
return;
}
// load registry
configManager.loadConfigsOfTypeFromProps(RegistryConfig.class);
List<RegistryConfig> defaultRegistries = configManager.getDefaultRegistries();
if (!defaultRegistries.isEmpty()) {
defaultRegistries.stream()
.filter(this::isUsedRegistryAsConfigCenter)
.map(this::registryAsConfigCenter)
.forEach(configCenter -> {
if (configManager.getConfigCenter(configCenter.getId()).isPresent()) {
return;
}
configManager.addConfigCenter(configCenter);
logger.info("use registry as config-center: " + configCenter);
});
}
}
private void initMetricsService() {
this.metricsServiceExporter =
getExtensionLoader(MetricsServiceExporter.class).getDefaultExtension();
metricsServiceExporter.init();
}
private void initMetricsReporter() {
if (!MetricsSupportUtil.isSupportMetrics()) {
return;
}
DefaultMetricsCollector collector = applicationModel.getBeanFactory().getBean(DefaultMetricsCollector.class);
Optional<MetricsConfig> configOptional = configManager.getMetrics();
// If no specific metrics type is configured and there is no Prometheus dependency in the dependencies.
MetricsConfig metricsConfig = configOptional.orElse(new MetricsConfig(applicationModel));
if (PROTOCOL_PROMETHEUS.equals(metricsConfig.getProtocol()) && !MetricsSupportUtil.isSupportPrometheus()) {
return;
}
if (StringUtils.isBlank(metricsConfig.getProtocol())) {
metricsConfig.setProtocol(
MetricsSupportUtil.isSupportPrometheus() ? PROTOCOL_PROMETHEUS : PROTOCOL_DEFAULT);
}
collector.setCollectEnabled(true);
collector.collectApplication();
collector.setThreadpoolCollectEnabled(
Optional.ofNullable(metricsConfig.getEnableThreadpool()).orElse(true));
collector.setMetricsInitEnabled(
Optional.ofNullable(metricsConfig.getEnableMetricsInit()).orElse(true));
MetricsReporterFactory metricsReporterFactory =
getExtensionLoader(MetricsReporterFactory.class).getAdaptiveExtension();
MetricsReporter metricsReporter = null;
try {
metricsReporter = metricsReporterFactory.createMetricsReporter(metricsConfig.toUrl());
} catch (IllegalStateException e) {
if (e.getMessage().startsWith("No such extension org.apache.dubbo.metrics.report.MetricsReporterFactory")) {
logger.warn(COMMON_METRICS_COLLECTOR_EXCEPTION, "", "", e.getMessage());
return;
} else {
throw e;
}
}
metricsReporter.init();
applicationModel.getBeanFactory().registerBean(metricsReporter);
// If the protocol is not the default protocol, the default protocol is also initialized.
if (!PROTOCOL_DEFAULT.equals(metricsConfig.getProtocol())) {
DefaultMetricsReporterFactory defaultMetricsReporterFactory =
new DefaultMetricsReporterFactory(applicationModel);
MetricsReporter defaultMetricsReporter =
defaultMetricsReporterFactory.createMetricsReporter(metricsConfig.toUrl());
defaultMetricsReporter.init();
applicationModel.getBeanFactory().registerBean(defaultMetricsReporter);
}
}
/**
* init ObservationRegistry(Micrometer)
*/
private void initObservationRegistry() {
if (!ObservationSupportUtil.isSupportObservation()) {
if (logger.isDebugEnabled()) {
logger.debug(
"Not found micrometer-observation or plz check the version of micrometer-observation version if already introduced, need > 1.10.0");
}
return;
}
if (!ObservationSupportUtil.isSupportTracing()) {
if (logger.isDebugEnabled()) {
logger.debug("Not found micrometer-tracing dependency, skip init ObservationRegistry.");
}
return;
}
Optional<TracingConfig> configOptional = configManager.getTracing();
if (!configOptional.isPresent() || !configOptional.get().getEnabled()) {
return;
}
DubboObservationRegistry dubboObservationRegistry =
new DubboObservationRegistry(applicationModel, configOptional.get());
dubboObservationRegistry.initObservationRegistry();
}
private boolean isUsedRegistryAsConfigCenter(RegistryConfig registryConfig) {
return isUsedRegistryAsCenter(
registryConfig, registryConfig::getUseAsConfigCenter, "config", DynamicConfigurationFactory.class);
}
private ConfigCenterConfig registryAsConfigCenter(RegistryConfig registryConfig) {
String protocol = registryConfig.getProtocol();
Integer port = registryConfig.getPort();
URL url = URL.valueOf(registryConfig.getAddress(), registryConfig.getScopeModel());
String id = "config-center-" + protocol + "-" + url.getHost() + "-" + port;
ConfigCenterConfig cc = new ConfigCenterConfig();
cc.setId(id);
cc.setScopeModel(applicationModel);
if (cc.getParameters() == null) {
cc.setParameters(new HashMap<>());
}
if (CollectionUtils.isNotEmptyMap(registryConfig.getParameters())) {
cc.getParameters().putAll(registryConfig.getParameters()); // copy the parameters
}
cc.getParameters().put(CLIENT_KEY, registryConfig.getClient());
cc.setProtocol(protocol);
cc.setPort(port);
if (StringUtils.isNotEmpty(registryConfig.getGroup())) {
cc.setGroup(registryConfig.getGroup());
}
cc.setAddress(getRegistryCompatibleAddress(registryConfig));
cc.setNamespace(registryConfig.getGroup());
cc.setUsername(registryConfig.getUsername());
cc.setPassword(registryConfig.getPassword());
if (registryConfig.getTimeout() != null) {
cc.setTimeout(registryConfig.getTimeout().longValue());
}
cc.setHighestPriority(false);
return cc;
}
private void useRegistryAsMetadataCenterIfNecessary() {
Collection<MetadataReportConfig> originMetadataConfigs = configManager.getMetadataConfigs();
if (originMetadataConfigs.stream().anyMatch(m -> Objects.nonNull(m.getAddress()))) {
return;
}
Collection<MetadataReportConfig> metadataConfigsToOverride = originMetadataConfigs.stream()
.filter(m -> Objects.isNull(m.getAddress()))
.collect(Collectors.toList());
if (metadataConfigsToOverride.size() > 1) {
return;
}
MetadataReportConfig metadataConfigToOverride =
metadataConfigsToOverride.stream().findFirst().orElse(null);
List<RegistryConfig> defaultRegistries = configManager.getDefaultRegistries();
if (!defaultRegistries.isEmpty()) {
defaultRegistries.stream()
.filter(this::isUsedRegistryAsMetadataCenter)
.map(registryConfig -> registryAsMetadataCenter(registryConfig, metadataConfigToOverride))
.forEach(metadataReportConfig ->
overrideMetadataReportConfig(metadataConfigToOverride, metadataReportConfig));
}
}
private void overrideMetadataReportConfig(
MetadataReportConfig metadataConfigToOverride, MetadataReportConfig metadataReportConfig) {
if (metadataReportConfig.getId() == null) {
Collection<MetadataReportConfig> metadataReportConfigs = configManager.getMetadataConfigs();
if (CollectionUtils.isNotEmpty(metadataReportConfigs)) {
for (MetadataReportConfig existedConfig : metadataReportConfigs) {
if (existedConfig.getId() == null
&& existedConfig.getAddress().equals(metadataReportConfig.getAddress())) {
return;
}
}
}
configManager.removeConfig(metadataConfigToOverride);
configManager.addMetadataReport(metadataReportConfig);
} else {
Optional<MetadataReportConfig> configOptional =
configManager.getConfig(MetadataReportConfig.class, metadataReportConfig.getId());
if (configOptional.isPresent()) {
return;
}
configManager.removeConfig(metadataConfigToOverride);
configManager.addMetadataReport(metadataReportConfig);
}
logger.info("use registry as metadata-center: " + metadataReportConfig);
}
private boolean isUsedRegistryAsMetadataCenter(RegistryConfig registryConfig) {
return isUsedRegistryAsCenter(
registryConfig, registryConfig::getUseAsMetadataCenter, "metadata", MetadataReportFactory.class);
}
/**
* Is used the specified registry as a center infrastructure
*
* @param registryConfig the {@link RegistryConfig}
* @param usedRegistryAsCenter the configured value on
* @param centerType the type name of center
* @param extensionClass an extension | DefaultApplicationDeployer |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/persistent/decider/EnableAssignmentDecider.java | {
"start": 2983,
"end": 3735
} | enum ____ {
NONE,
ALL;
public static Allocation fromString(final String strValue) {
if (strValue == null) {
return null;
} else {
String value = strValue.toUpperCase(Locale.ROOT);
try {
return valueOf(value);
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException(
"Illegal value [" + value + "] for [" + CLUSTER_TASKS_ALLOCATION_ENABLE_SETTING.getKey() + "]"
);
}
}
}
@Override
public String toString() {
return name().toLowerCase(Locale.ROOT);
}
}
}
| Allocation |
java | redisson__redisson | redisson/src/main/java/org/redisson/mapreduce/MapReduceExecutor.java | {
"start": 2838,
"end": 5517
} | class ____ it should be static");
}
}
@Override
public Map<KOut, VOut> execute() {
return commandExecutor.get(executeAsync());
}
@Override
public RFuture<Map<KOut, VOut>> executeAsync() {
AtomicReference<RFuture<BatchResult<?>>> batchRef = new AtomicReference<>();
RFuture<Void> mapperFuture = executeMapperAsync(resultMapName, null);
CompletableFuture<Map<KOut, VOut>> f = mapperFuture.thenCompose(res -> {
RBatch batch = redisson.createBatch();
RMapAsync<KOut, VOut> resultMap = batch.getMap(resultMapName, objectCodec);
RFuture<Map<KOut, VOut>> future = resultMap.readAllMapAsync();
resultMap.deleteAsync();
RFuture<BatchResult<?>> batchFuture = batch.executeAsync();
batchRef.set(batchFuture);
return future;
}).toCompletableFuture();
f.whenComplete((r, e) -> {
if (f.isCancelled()) {
if (batchRef.get() != null) {
batchRef.get().cancel(true);
}
mapperFuture.cancel(true);
}
});
if (timeout > 0) {
commandExecutor.getServiceManager().newTimeout(task -> {
f.completeExceptionally(new MapReduceTimeoutException());
}, timeout, TimeUnit.MILLISECONDS);
}
return new CompletableFutureWrapper<>(f);
}
@Override
public void execute(String resultMapName) {
commandExecutor.get(executeAsync(resultMapName));
}
@Override
public RFuture<Void> executeAsync(String resultMapName) {
return executeMapperAsync(resultMapName, null);
}
private <R> RFuture<R> executeMapperAsync(String resultMapName, RCollator<KOut, VOut, R> collator) {
if (mapper == null) {
throw new NullPointerException("Mapper is not defined");
}
if (reducer == null) {
throw new NullPointerException("Reducer is not defined");
}
Callable<Object> task = createTask(resultMapName, (RCollator<KOut, VOut, Object>) collator);
return (RFuture<R>) executorService.submit(task);
}
protected abstract Callable<Object> createTask(String resultMapName, RCollator<KOut, VOut, Object> collator);
@Override
public <R> R execute(RCollator<KOut, VOut, R> collator) {
return commandExecutor.get(executeAsync(collator));
}
@Override
public <R> RFuture<R> executeAsync(RCollator<KOut, VOut, R> collator) {
check(collator);
return executeMapperAsync(resultMapName, collator);
}
}
| and |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/filter/CurrentObject3160Test.java | {
"start": 771,
"end": 1241
} | class ____ {
public Collection<String> set;
public Strategy strategy;
public String id;
public Item3160(Collection<String> set, String id) {
this.set = set;
this.strategy = new Foo(42);
this.id = id;
}
}
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type")
@JsonSubTypes({ @JsonSubTypes.Type(name = "Foo", value = Foo.class) })
| Item3160 |
java | redisson__redisson | redisson/src/main/java/org/redisson/connection/ConnectionListener.java | {
"start": 792,
"end": 1804
} | interface ____ {
/*
* Implement onConnect(InetSocketAddress, NodeType) method instead.
* It can be empty.
*/
@Deprecated
void onConnect(InetSocketAddress addr);
/**
* This method is triggered when Redisson
* connected to Redis server.
*
* @param addr Redis server network address
* @param nodeType type of Redis server
*/
default void onConnect(InetSocketAddress addr, NodeType nodeType) {
onConnect(addr);
}
/*
* Implement onDisconnect(InetSocketAddress, NodeType) method instead.
* It can be empty.
*/
@Deprecated
void onDisconnect(InetSocketAddress addr);
/**
* This method is triggered when Redisson
* discovers that Redis server in disconnected state.
*
* @param addr Redis server network address
* @param nodeType type of Redis server
*/
default void onDisconnect(InetSocketAddress addr, NodeType nodeType) {
onDisconnect(addr);
}
}
| ConnectionListener |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/DFSClientCache.java | {
"start": 2338,
"end": 3179
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(DFSClientCache.class);
/**
* Cache that maps User id to the corresponding DFSClient.
*/
private final LoadingCache<DfsClientKey, DFSClient> clientCache;
final static int DEFAULT_DFS_CLIENT_CACHE_SIZE = 256;
/**
* Cache that maps <DFSClient, inode path, nnid> to the corresponding
* FSDataInputStream.
*/
private final LoadingCache<DFSInputStreamCacheKey,
FSDataInputStream> inputstreamCache;
/**
* Time to live for a DFSClient (in seconds).
*/
final static int DEFAULT_DFS_INPUTSTREAM_CACHE_SIZE = 1024;
final static int DEFAULT_DFS_INPUTSTREAM_CACHE_TTL = 10 * 60;
private final NfsConfiguration config;
private final HashMap<Integer, URI> namenodeUriMap;
private static final | DFSClientCache |
java | grpc__grpc-java | xds/src/generated/thirdparty/grpc/io/envoyproxy/envoy/service/load_stats/v3/LoadReportingServiceGrpc.java | {
"start": 17772,
"end": 18946
} | class ____
extends LoadReportingServiceBaseDescriptorSupplier
implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
private final java.lang.String methodName;
LoadReportingServiceMethodDescriptorSupplier(java.lang.String methodName) {
this.methodName = methodName;
}
@java.lang.Override
public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
return getServiceDescriptor().findMethodByName(methodName);
}
}
private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
public static io.grpc.ServiceDescriptor getServiceDescriptor() {
io.grpc.ServiceDescriptor result = serviceDescriptor;
if (result == null) {
synchronized (LoadReportingServiceGrpc.class) {
result = serviceDescriptor;
if (result == null) {
serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
.setSchemaDescriptor(new LoadReportingServiceFileDescriptorSupplier())
.addMethod(getStreamLoadStatsMethod())
.build();
}
}
}
return result;
}
}
| LoadReportingServiceMethodDescriptorSupplier |
java | google__guice | core/test/com/google/inject/ProvisionListenerTest.java | {
"start": 19448,
"end": 19638
} | class ____ implements ProvisionListener {
int count = 0;
@Override
public <T> void onProvision(ProvisionInvocation<T> provision) {
count++;
}
}
private static | Counter |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/onexception/OnExceptionRetryUntilTest.java | {
"start": 3212,
"end": 3998
} | class ____ {
// using bean binding we can bind the information from the exchange to
// the types we have in our method signature
public boolean retry(
@Header(Exchange.REDELIVERY_COUNTER) Integer counter, @Body String body,
@ExchangeException Exception causedBy) {
// NOTE: counter is the redelivery attempt, will start from 1
invoked++;
assertEquals("Hello World", body);
boolean b = causedBy instanceof MyFunctionalException;
assertTrue(b);
// we can of course do what ever we want to determine the result but
// this is a unit test so we end after 3 attempts
return counter < 3;
}
}
// END SNIPPET: e2
}
| MyRetryBean |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/core/MessagePostProcessor.java | {
"start": 1048,
"end": 1328
} | interface ____ {
/**
* Process the given message.
* @param message the message to process
* @return a post-processed variant of the message, or simply the incoming
* message; never {@code null}
*/
Message<?> postProcessMessage(Message<?> message);
}
| MessagePostProcessor |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/mutation/internal/SqmMultiTableMutationStrategyProviderInitiator.java | {
"start": 482,
"end": 1150
} | class ____ implements StandardServiceInitiator<SqmMultiTableMutationStrategyProvider> {
/**
* Singleton access
*/
public static final SqmMultiTableMutationStrategyProviderInitiator INSTANCE = new SqmMultiTableMutationStrategyProviderInitiator();
@Override
public SqmMultiTableMutationStrategyProvider initiateService(
Map<String, Object> configurationValues,
ServiceRegistryImplementor registry) {
return new SqmMultiTableMutationStrategyProviderStandard();
}
@Override
public Class<SqmMultiTableMutationStrategyProvider> getServiceInitiated() {
return SqmMultiTableMutationStrategyProvider.class;
}
}
| SqmMultiTableMutationStrategyProviderInitiator |
java | elastic__elasticsearch | modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/DotPrefixValidator.java | {
"start": 1468,
"end": 2219
} | class ____ implements the {@link #apply(Task, String, ActionRequest, ActionListener, ActionFilterChain)}
* method which checks for indices in the request that begin with a dot, emitting a deprecation
* warning if they do. If the request is performed by a non-external user (operator, internal product, etc.)
* as defined by {@link #isInternalRequest()} then the deprecation is emitted. Otherwise, it is skipped.
*
* The indices for consideration are returned by the abstract {@link #getIndicesFromRequest(Object)}
* method, which subclasses must implement.
*
* Some built-in index names and patterns are also elided from the check, as defined in
* {@link #IGNORED_INDEX_NAMES} and {@link #IGNORED_INDEX_PATTERNS_SETTING}.
*/
public abstract | then |
java | quarkusio__quarkus | extensions/funqy/funqy-amazon-lambda/runtime/src/main/java/io/quarkus/funqy/lambda/model/cloudevents/CloudEventV1.java | {
"start": 571,
"end": 5151
} | class ____ implements CloudEvent {
//private static final Pattern JSON_TYPE_PATTERN = Pattern.compile("^(application|text)/([a-zA-Z]+\\+)?json;?.*$");
private final CloudEventDataV1 data;
private final SpecVersion specVersion;
private final String id;
private final String type;
private final URI source;
private final String dataContentType;
private final URI dataSchema;
private final String subject;
private final OffsetDateTime time;
private final Map<String, Object> extensions;
public CloudEventV1(
@JsonProperty("specversion") String specVersion,
@JsonProperty("id") String id,
@JsonProperty("type") String type,
@JsonProperty("source") URI source,
@JsonProperty("datacontenttype") String dataContentType,
@JsonProperty("dataschema") URI dataSchema,
@JsonProperty("subject") String subject,
@JsonProperty("time") OffsetDateTime time,
@JsonProperty("data") JsonNode data,
@JsonProperty("data_base64") JsonNode dataBase64) {
this.specVersion = SpecVersion.parse(specVersion);
this.id = id;
this.type = type;
this.source = source;
this.dataContentType = dataContentType;
this.dataSchema = dataSchema;
this.subject = subject;
this.time = time;
this.extensions = new HashMap<>();
this.data = deserializeData(data, dataBase64, dataContentType);
}
@JsonAnySetter
public void add(String property, String value) {
switch (property) {
case "specversion":
case "id":
case "source":
case "type":
case "datacontenttype":
case "dataschema":
case "data":
case "data_base64":
case "subject":
case "time":
// Those names are reserved
return;
}
extensions.put(property, value);
}
private CloudEventDataV1 deserializeData(final JsonNode data, final JsonNode dataBase64,
final String dataContentType) {
if (dataBase64 != null) {
try {
return new CloudEventDataV1(dataBase64.binaryValue());
} catch (IOException e) {
throw new RuntimeException(e);
}
} else if (data == null) {
return null;
}
if (data.isTextual()) {
return new CloudEventDataV1(data.asText());
} else {
// This should work for every other type. Even for application/json, because we need to serialize
// the data anyway for the interface.
return new CloudEventDataV1(data.toString());
}
}
@Override
public CloudEventData getData() {
return this.data;
}
@Override
public SpecVersion getSpecVersion() {
return this.specVersion;
}
@Override
public String getId() {
return this.id;
}
@Override
public String getType() {
return this.type;
}
@Override
public URI getSource() {
return this.source;
}
@Override
public String getDataContentType() {
return this.dataContentType;
}
@Override
public URI getDataSchema() {
return this.dataSchema;
}
@Override
public String getSubject() {
return this.subject;
}
@Override
public OffsetDateTime getTime() {
return this.time;
}
@Override
public Object getAttribute(final String attributeName) throws IllegalArgumentException {
return switch (attributeName) {
case "specversion" -> getSpecVersion();
case "id" -> getId();
case "source" -> getSource();
case "type" -> getType();
case "datacontenttype" -> getDataContentType();
case "dataschema" -> getDataSchema();
case "subject" -> getSubject();
case "time" -> getTime();
default -> throw new IllegalArgumentException(
"The specified attribute name \"" + attributeName + "\" is not specified in version v1.");
};
}
@Override
public Object getExtension(final String s) {
if (s == null) {
throw new IllegalArgumentException("Extension name cannot be null");
}
return this.extensions.get(s);
}
@Override
public Set<String> getExtensionNames() {
return this.extensions.keySet();
}
}
| CloudEventV1 |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/PipelineTest.java | {
"start": 1537,
"end": 4720
} | class ____ implements Processor {
@Override
public void process(Exchange exchange) {
exchange.getMessage().copyFrom(exchange.getIn());
Integer counter = exchange.getIn().getHeader("copy-counter", Integer.class);
if (counter == null) {
counter = 0;
}
exchange.getMessage().setHeader("copy-counter", counter + 1);
}
}
protected MockEndpoint resultEndpoint;
@Test
public void testSendMessageThroughAPipeline() throws Exception {
resultEndpoint.expectedBodiesReceived(4);
Exchange results = template.request("direct:a", new Processor() {
public void process(Exchange exchange) {
// now lets fire in a message
Message in = exchange.getIn();
in.setBody(1);
in.setHeader("foo", "bar");
}
});
resultEndpoint.assertIsSatisfied();
assertEquals(4, results.getMessage().getBody(), "Result body");
}
@Test
public void testResultsReturned() {
Exchange exchange = template.request("direct:b", new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setBody("Hello World");
}
});
assertEquals("Hello World", exchange.getMessage().getBody());
assertEquals(3, exchange.getMessage().getHeader("copy-counter"));
}
@Test
public void testOnlyProperties() {
Exchange exchange = template.request("direct:b", new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setHeader("header", "headerValue");
}
});
assertEquals("headerValue", exchange.getMessage().getHeader("header"));
assertEquals(3, exchange.getMessage().getHeader("copy-counter"));
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
resultEndpoint = getMockEndpoint("mock:result");
}
@Override
protected RouteBuilder createRouteBuilder() {
final Processor processor = new Processor() {
public void process(Exchange exchange) {
Integer number = exchange.getIn().getBody(Integer.class);
if (number == null) {
number = 0;
}
number = number + 1;
exchange.getMessage().setBody(number);
}
};
return new RouteBuilder() {
public void configure() {
// START SNIPPET: example
from("direct:a").pipeline("direct:x", "direct:y", "direct:z", "mock:result");
// END SNIPPET: example
from("direct:x").process(processor);
from("direct:y").process(processor);
from("direct:z").process(processor);
// Create a route that uses the InToOut processor 3 times. the
// copy-counter header should be == 3
from("direct:b").process(new InToOut()).process(new InToOut()).process(new InToOut());
}
};
}
}
| InToOut |
java | bumptech__glide | library/test/src/test/java/com/bumptech/glide/load/resource/bitmap/BitmapTransformationTest.java | {
"start": 6500,
"end": 6923
} | class ____ extends BitmapTransformation {
int givenWidth;
int givenHeight;
@Override
protected Bitmap transform(
@NonNull BitmapPool pool, @NonNull Bitmap toTransform, int outWidth, int outHeight) {
givenWidth = outWidth;
givenHeight = outHeight;
return null;
}
@Override
public void updateDiskCacheKey(@NonNull MessageDigest messageDigest) {}
}
}
| SizeTrackingTransform |
java | apache__camel | components/camel-timer/src/main/java/org/apache/camel/component/timer/TimerConsumer.java | {
"start": 1590,
"end": 11836
} | class ____ extends DefaultConsumer implements StartupListener, Suspendable {
private static final Logger LOG = LoggerFactory.getLogger(TimerConsumer.class);
private final TimerEndpoint endpoint;
private volatile TimerTask task;
private volatile boolean configured;
private ExecutorService executorService;
private final AtomicLong counter = new AtomicLong();
private volatile boolean polling;
public TimerConsumer(TimerEndpoint endpoint, Processor processor) {
super(endpoint, processor);
this.endpoint = endpoint;
}
@Override
public TimerEndpoint getEndpoint() {
return (TimerEndpoint) super.getEndpoint();
}
/**
* Total number of polls run
*/
@ManagedAttribute(description = "Total number of polls run")
public long getCounter() {
return counter.get();
}
/**
* Whether polling is currently in progress
*/
@ManagedAttribute(description = "Whether polling is currently in progress")
public boolean isPolling() {
return polling;
}
@ManagedAttribute(description = "Timer Name")
public String getTimerName() {
return getEndpoint().getTimerName();
}
@ManagedAttribute(description = "Timer FixedRate")
public boolean isFixedRate() {
return getEndpoint().isFixedRate();
}
@ManagedAttribute(description = "Timer Delay")
public long getDelay() {
return getEndpoint().getDelay();
}
@ManagedAttribute(description = "Timer Period")
public long getPeriod() {
return getEndpoint().getPeriod();
}
@ManagedAttribute(description = "Repeat Count")
public long getRepeatCount() {
return getEndpoint().getRepeatCount();
}
@ManagedAttribute(description = "The consumer logs a start/complete log line when it polls. This option allows you to configure the logging level for that.")
public String getRunLoggingLevel() {
return getEndpoint().getRunLoggingLevel().name();
}
@Override
public void doInit() throws Exception {
if (endpoint.getDelay() >= 0) {
task = new TimerTask() {
@Override
public void run() {
if (!isTaskRunAllowed()) {
// do not run timer task as it was not allowed
LOG.debug("Run not allowed for timer: {}", endpoint);
return;
}
// log starting
LoggingLevel runLoggingLevel = getEndpoint().getRunLoggingLevel();
if (LoggingLevel.ERROR == runLoggingLevel) {
LOG.error("Timer task started on: {}", getEndpoint());
} else if (LoggingLevel.WARN == runLoggingLevel) {
LOG.warn("Timer task started on: {}", getEndpoint());
} else if (LoggingLevel.INFO == runLoggingLevel) {
LOG.info("Timer task started on: {}", getEndpoint());
} else if (LoggingLevel.DEBUG == runLoggingLevel) {
LOG.debug("Timer task started on: {}", getEndpoint());
} else {
LOG.trace("Timer task started on: {}", getEndpoint());
}
try {
polling = true;
doRun();
} catch (Exception e) {
LOG.warn(
"Error processing exchange. This exception will be ignored, to let the timer be able to trigger again.",
e);
} finally {
polling = false;
}
// log completed
if (LoggingLevel.ERROR == runLoggingLevel) {
LOG.error("Timer task completed on: {}", getEndpoint());
} else if (LoggingLevel.WARN == runLoggingLevel) {
LOG.warn("Timer task completed on: {}", getEndpoint());
} else if (LoggingLevel.INFO == runLoggingLevel) {
LOG.info("Timer task completed on: {}", getEndpoint());
} else if (LoggingLevel.DEBUG == runLoggingLevel) {
LOG.debug("Timer task completed on: {}", getEndpoint());
} else {
LOG.trace("Timer task completed on: {}", getEndpoint());
}
}
protected void doRun() {
long count = counter.incrementAndGet();
boolean fire = endpoint.getRepeatCount() <= 0 || count <= endpoint.getRepeatCount();
if (fire) {
sendTimerExchange(count);
} else {
// no need to fire anymore as we exceeded repeat
// count
LOG.debug("Cancelling {} timer as repeat count limit reached after {} counts.",
endpoint.getTimerName(), endpoint.getRepeatCount());
cancel();
}
}
};
}
}
@Override
protected void doStart() throws Exception {
super.doStart();
if (endpoint.getDelay() >= 0) {
// only configure task if CamelContext already started, otherwise
// the StartupListener is configuring the task later
if (task != null && !configured && endpoint.getCamelContext().getStatus().isStarted()) {
Timer timer = endpoint.getTimer(this);
configureTask(task, timer);
}
} else {
// if the delay is negative then we use an ExecutorService and fire messages as soon as possible
executorService = endpoint.getCamelContext().getExecutorServiceManager().newSingleThreadExecutor(this,
endpoint.getEndpointUri());
executorService.execute(() -> {
polling = true;
try {
long count = counter.incrementAndGet();
while ((endpoint.getRepeatCount() <= 0 || count <= endpoint.getRepeatCount()) && isRunAllowed()) {
sendTimerExchange(count);
count = counter.incrementAndGet();
}
} finally {
polling = false;
}
});
}
}
@Override
protected void doStop() throws Exception {
if (task != null) {
task.cancel();
}
task = null;
configured = false;
// remove timer
endpoint.removeTimer(this);
// if executorService is instantiated then we shutdown it
if (executorService != null) {
endpoint.getCamelContext().getExecutorServiceManager().shutdown(executorService);
executorService = null;
}
super.doStop();
}
@Override
public void onCamelContextStarted(CamelContext context, boolean alreadyStarted) throws Exception {
if (task != null && !configured) {
Timer timer = endpoint.getTimer(this);
configureTask(task, timer);
}
}
/**
* Whether the timer task is allow to run or not
*/
protected boolean isTaskRunAllowed() {
// only run if we are started
return isStarted();
}
protected void configureTask(TimerTask task, Timer timer) {
if (endpoint.isFixedRate()) {
if (endpoint.getTime() != null) {
timer.scheduleAtFixedRate(task, endpoint.getTime(), endpoint.getPeriod());
} else {
timer.scheduleAtFixedRate(task, endpoint.getDelay(), endpoint.getPeriod());
}
} else {
if (endpoint.getTime() != null) {
if (endpoint.getPeriod() > 0) {
timer.schedule(task, endpoint.getTime(), endpoint.getPeriod());
} else {
timer.schedule(task, endpoint.getTime());
}
} else {
if (endpoint.getPeriod() > 0) {
timer.schedule(task, endpoint.getDelay(), endpoint.getPeriod());
} else {
timer.schedule(task, endpoint.getDelay());
}
}
}
configured = true;
}
protected void sendTimerExchange(long counter) {
final Exchange exchange = createExchange(false);
if (endpoint.isIncludeMetadata()) {
exchange.setProperty(Exchange.TIMER_COUNTER, counter);
exchange.setProperty(Exchange.TIMER_NAME, endpoint.getTimerName());
if (endpoint.getTime() != null) {
exchange.setProperty(Exchange.TIMER_TIME, endpoint.getTime());
}
exchange.setProperty(Exchange.TIMER_PERIOD, endpoint.getPeriod());
Date now = new Date();
exchange.setProperty(TimerConstants.HEADER_FIRED_TIME, now);
exchange.getIn().setHeader(TimerConstants.HEADER_FIRED_TIME, now);
exchange.getIn().setHeader(TimerConstants.HEADER_MESSAGE_TIMESTAMP, now.getTime());
}
if (LOG.isTraceEnabled()) {
LOG.trace("Timer {} is firing #{} count", endpoint.getTimerName(), counter);
}
if (!endpoint.isSynchronous()) {
// use default consumer callback
AsyncCallback cb = defaultConsumerCallback(exchange, false);
getAsyncProcessor().process(exchange, cb);
} else {
try {
getProcessor().process(exchange);
} catch (Exception e) {
exchange.setException(e);
}
// handle any thrown exception
try {
if (exchange.getException() != null) {
getExceptionHandler().handleException("Error processing exchange", exchange, exchange.getException());
}
} finally {
releaseExchange(exchange, false);
}
}
}
}
| TimerConsumer |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/processor/internals/ProcessorNodeTest.java | {
"start": 12488,
"end": 12980
} | class ____ implements Processor<Object, Object, Object, Object> {
@Override
public void init(final ProcessorContext<Object, Object> context) {
throw new RuntimeException();
}
@Override
public void process(final Record<Object, Object> record) {
throw new RuntimeException();
}
@Override
public void close() {
throw new RuntimeException();
}
}
private static | ExceptionalProcessor |
java | elastic__elasticsearch | libs/plugin-scanner/src/main/java/org/elasticsearch/plugin/scanner/AnnotatedHierarchyVisitor.java | {
"start": 877,
"end": 1059
} | class ____ extends ClassVisitor {
private String currentClassName;
private final String targetAnnotationDescriptor;
// a function taking the current | AnnotatedHierarchyVisitor |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/generated/CurrentTimestampAnnotationTests.java | {
"start": 2741,
"end": 3369
} | class ____ {
@Id
public Integer id;
public String name;
//tag::mapping-generated-CurrentTimestamp-ex1[]
@CurrentTimestamp(event = INSERT)
public Instant createdAt;
@CurrentTimestamp(event = {INSERT, UPDATE})
public Instant lastUpdatedAt;
//end::mapping-generated-CurrentTimestamp-ex1[]
public AuditedEntity() {
}
public AuditedEntity(Integer id, String name) {
this.id = id;
this.name = name;
}
}
private static void waitALittle() {
try {
Thread.sleep( 10 );
}
catch (InterruptedException e) {
throw new HibernateError( "Unexpected wakeup from test sleep" );
}
}
}
| AuditedEntity |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/SecurityNamespaceHandlerTests.java | {
"start": 2113,
"end": 7794
} | class ____ {
// @formatter:off
private static final String XML_AUTHENTICATION_MANAGER = "<authentication-manager>"
+ " <authentication-provider>"
+ " <user-service id='us'>"
+ " <user name='bob' password='bobspassword' authorities='ROLE_A' />"
+ " </user-service>"
+ " </authentication-provider>"
+ "</authentication-manager>";
// @formatter:on
private static final String XML_HTTP_BLOCK = "<http auto-config='true'/>";
private static final String FILTER_CHAIN_PROXY_CLASSNAME = "org.springframework.security.web.FilterChainProxy";
@Mock(answer = Answers.CALLS_REAL_METHODS)
private MockedStatic<ClassUtils> classUtils;
@Test
public void constructionWhenVersionsMatchThenLogsNothing() {
Appender<ILoggingEvent> appender = mock(Appender.class);
Logger logger = (Logger) LoggerFactory.getLogger(SecurityNamespaceHandler.class);
logger.addAppender(appender);
assertThat(new SecurityNamespaceHandler()).isNotNull();
verify(appender, never()).doAppend(any(ILoggingEvent.class));
}
@Test
public void constructorWhenDetectsMismatchingVersionsThenLogsError() {
Appender<ILoggingEvent> appender = mock(Appender.class);
Logger logger = (Logger) LoggerFactory.getLogger(SecurityNamespaceHandler.class);
logger.addAppender(appender);
try (MockedStatic<SpringSecurityCoreVersion> core = Mockito.mockStatic(SpringSecurityCoreVersion.class)) {
core.when(SpringSecurityCoreVersion::getVersion).thenReturn("mismatching");
assertThat(new SecurityNamespaceHandler()).isNotNull();
ArgumentCaptor<ILoggingEvent> captor = ArgumentCaptor.forClass(ILoggingEvent.class);
verify(appender).doAppend(captor.capture());
assertThat(captor.getValue().getLevel()).isEqualTo(Level.ERROR);
}
}
@Test
public void beanIdsConstantsAreNotEmpty() {
assertThat(BeanIds.AUTHENTICATION_MANAGER).isNotEmpty();
assertThat(BeanIds.SPRING_SECURITY_FILTER_CHAIN).isNotEmpty();
}
@Test
public void elementsConstantsAreNotEmpty() {
assertThat(Elements.HTTP).isNotEmpty();
assertThat(Elements.AUTHENTICATION_MANAGER).isNotEmpty();
}
@Test
public void pre32SchemaAreNotSupported() {
assertThatExceptionOfType(BeanDefinitionParsingException.class)
.isThrownBy(() -> new InMemoryXmlApplicationContext(
"<user-service id='us'><user name='bob' password='bobspassword' authorities='ROLE_A' /></user-service>",
"3.0.3", null))
.withMessageContaining(
"You cannot use any XSD older than spring-security-7.0.xsd. Either change to spring-security.xsd or spring-security-7.0.xsd");
}
// SEC-1868
@Test
public void initDoesNotLogErrorWhenFilterChainProxyFailsToLoad() throws Exception {
String className = "jakarta.servlet.Filter";
Log logger = mock(Log.class);
SecurityNamespaceHandler handler = new SecurityNamespaceHandler();
ReflectionTestUtils.setField(handler, "logger", logger);
expectClassUtilsForNameThrowsNoClassDefFoundError(className);
handler.init();
verifyNoMoreInteractions(logger);
}
@Test
public void filterNoClassDefFoundError() throws Exception {
String className = "jakarta.servlet.Filter";
expectClassUtilsForNameThrowsNoClassDefFoundError(className);
assertThatExceptionOfType(BeanDefinitionParsingException.class)
.isThrownBy(() -> new InMemoryXmlApplicationContext(XML_AUTHENTICATION_MANAGER + XML_HTTP_BLOCK))
.havingRootCause()
.isInstanceOf(NoClassDefFoundError.class)
.withMessage(className);
}
@Test
public void filterNoClassDefFoundErrorNoHttpBlock() throws Exception {
String className = "jakarta.servlet.Filter";
expectClassUtilsForNameThrowsNoClassDefFoundError(className);
new InMemoryXmlApplicationContext(XML_AUTHENTICATION_MANAGER);
// should load just fine since no http block
}
@Test
public void filterChainProxyClassNotFoundException() throws Exception {
String className = FILTER_CHAIN_PROXY_CLASSNAME;
expectClassUtilsForNameThrowsClassNotFoundException(className);
assertThatExceptionOfType(BeanDefinitionParsingException.class)
.isThrownBy(() -> new InMemoryXmlApplicationContext(XML_AUTHENTICATION_MANAGER + XML_HTTP_BLOCK))
.havingRootCause()
.isInstanceOf(ClassNotFoundException.class)
.withMessage(className);
}
@Test
public void filterChainProxyClassNotFoundExceptionNoHttpBlock() throws Exception {
String className = FILTER_CHAIN_PROXY_CLASSNAME;
expectClassUtilsForNameThrowsClassNotFoundException(className);
new InMemoryXmlApplicationContext(XML_AUTHENTICATION_MANAGER);
// should load just fine since no http block
}
@Test
public void websocketNotFoundExceptionNoMessageBlock() throws Exception {
String className = FILTER_CHAIN_PROXY_CLASSNAME;
expectClassUtilsForNameThrowsClassNotFoundException(className);
new InMemoryXmlApplicationContext(XML_AUTHENTICATION_MANAGER);
// should load just fine since no websocket block
}
@Test
public void configureWhenOldVersionThenErrorMessageContainsCorrectVersion() {
assertThatExceptionOfType(BeanDefinitionParsingException.class)
.isThrownBy(() -> new InMemoryXmlApplicationContext(XML_AUTHENTICATION_MANAGER, "3.0", null))
.withMessageContaining(SpringSecurityVersions.getCurrentXsdVersionFromSpringSchemas());
}
private void expectClassUtilsForNameThrowsNoClassDefFoundError(String className) {
this.classUtils.when(() -> ClassUtils.forName(eq(FILTER_CHAIN_PROXY_CLASSNAME), any()))
.thenThrow(new NoClassDefFoundError(className));
}
private void expectClassUtilsForNameThrowsClassNotFoundException(String className) {
this.classUtils.when(() -> ClassUtils.forName(eq(FILTER_CHAIN_PROXY_CLASSNAME), any()))
.thenThrow(new ClassNotFoundException(className));
}
}
| SecurityNamespaceHandlerTests |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/functions/RuntimeContext.java | {
"start": 4062,
"end": 4156
} | class ____ release.
*
* <p>The release hook is executed just before the user code | loader |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ConditionalFunction.java | {
"start": 1043,
"end": 1097
} | class ____ conditional predicates.
*/
public abstract | for |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-jackson/deployment/src/test/java/io/quarkus/resteasy/reactive/jackson/deployment/test/JsonViewDeserializeSerializeTest.java | {
"start": 1688,
"end": 2008
} | class ____ {
@JsonView(Views.Private.class)
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public RestResponse<User> create(@JsonView(Views.Public.class) User user) {
return RestResponse.status(CREATED, user);
}
}
}
| Resource |
java | spring-projects__spring-boot | module/spring-boot-amqp/src/test/java/org/springframework/boot/amqp/autoconfigure/RabbitAutoConfigurationTests.java | {
"start": 54918,
"end": 55265
} | class ____ {
@Bean
RabbitMessagingTemplate messagingTemplate(RabbitTemplate rabbitTemplate) {
RabbitMessagingTemplate messagingTemplate = new RabbitMessagingTemplate(rabbitTemplate);
messagingTemplate.setDefaultDestination("fooBar");
return messagingTemplate;
}
}
@Configuration(proxyBeanMethods = false)
static | TestConfiguration4 |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableOnBackpressureReduceWith.java | {
"start": 985,
"end": 1689
} | class ____<T, R> extends AbstractFlowableWithUpstream<T, R> {
final BiFunction<R, ? super T, R> reducer;
final Supplier<R> supplier;
public FlowableOnBackpressureReduceWith(@NonNull Flowable<T> source,
@NonNull Supplier<R> supplier,
@NonNull BiFunction<R, ? super T, R> reducer) {
super(source);
this.reducer = reducer;
this.supplier = supplier;
}
@Override
protected void subscribeActual(@NonNull Subscriber<? super R> s) {
source.subscribe(new BackpressureReduceWithSubscriber<>(s, supplier, reducer));
}
static final | FlowableOnBackpressureReduceWith |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/StreamCacheException.java | {
"start": 837,
"end": 1020
} | class ____ extends TypeConversionException {
public StreamCacheException(Object value, Throwable cause) {
super(value, StreamCache.class, cause);
}
}
| StreamCacheException |
java | google__dagger | javatests/dagger/internal/codegen/AssistedFactoryErrorsTest.java | {
"start": 25888,
"end": 26661
} | class ____ {",
" @Inject",
" @AssistedInject",
" Foo(@Assisted int i) {}",
"}");
CompilerTests.daggerCompiler(foo)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining(
"Constructors cannot be annotated with both @Inject and @AssistedInject");
});
}
@Test
public void testInjectWithAssistedAnnotations() {
Source foo =
CompilerTests.javaSource(
"test.Foo",
"package test;",
"",
"import dagger.assisted.Assisted;",
"import javax.inject.Inject;",
"",
" | Foo |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/executiongraph/ArchivedExecutionVertexWithSpeculativeExecutionTest.java | {
"start": 1762,
"end": 9247
} | class ____ {
@RegisterExtension
private static final TestExecutorExtension<ScheduledExecutorService> EXECUTOR_RESOURCE =
TestingUtils.defaultExecutorExtension();
private TestingInternalFailuresListener internalFailuresListener;
@BeforeEach
void setUp() {
internalFailuresListener = new TestingInternalFailuresListener();
}
@Test
void testCreateSpeculativeExecution() throws Exception {
final SpeculativeExecutionVertex ev = createSpeculativeExecutionVertex();
ev.createNewSpeculativeExecution(System.currentTimeMillis());
ArchivedExecutionVertex aev = ev.archive();
ArchivedExecutionGraphTestUtils.compareExecutionVertex(ev, aev);
}
@Test
void testResetExecutionVertex() throws Exception {
final SpeculativeExecutionVertex ev = createSpeculativeExecutionVertex();
final Execution e1 = ev.getCurrentExecutionAttempt();
final Execution e2 = ev.createNewSpeculativeExecution(System.currentTimeMillis());
e1.transitionState(ExecutionState.RUNNING);
e1.markFinished();
e2.cancel();
ev.resetForNewExecution();
ArchivedExecutionVertex aev = ev.archive();
ArchivedExecutionGraphTestUtils.compareExecutionVertex(ev, aev);
}
@Test
void testCancel() throws Exception {
final SpeculativeExecutionVertex ev = createSpeculativeExecutionVertex();
ev.createNewSpeculativeExecution(System.currentTimeMillis());
ev.cancel();
ArchivedExecutionVertex aev = ev.archive();
ArchivedExecutionGraphTestUtils.compareExecutionVertex(ev, aev);
}
@Test
void testSuspend() throws Exception {
final SpeculativeExecutionVertex ev = createSpeculativeExecutionVertex();
ev.createNewSpeculativeExecution(System.currentTimeMillis());
ev.suspend();
ArchivedExecutionVertex aev = ev.archive();
ArchivedExecutionGraphTestUtils.compareExecutionVertex(ev, aev);
}
@Test
void testFail() throws Exception {
final SpeculativeExecutionVertex ev = createSpeculativeExecutionVertex();
ev.createNewSpeculativeExecution(System.currentTimeMillis());
ev.fail(new Exception("Forced test failure."));
ArchivedExecutionVertex aev = ev.archive();
ArchivedExecutionGraphTestUtils.compareExecutionVertex(ev, aev);
}
@Test
void testMarkFailed() throws Exception {
final SpeculativeExecutionVertex ev = createSpeculativeExecutionVertex();
ev.createNewSpeculativeExecution(System.currentTimeMillis());
ev.markFailed(new Exception("Forced test failure."));
ArchivedExecutionVertex aev = ev.archive();
ArchivedExecutionGraphTestUtils.compareExecutionVertex(ev, aev);
}
@Test
void testVertexTerminationAndJobTermination() throws Exception {
final JobVertex jobVertex = ExecutionGraphTestUtils.createNoOpVertex(1);
final JobGraph jobGraph = JobGraphTestUtils.batchJobGraph(jobVertex);
final ExecutionGraph eg = createExecutionGraph(jobGraph);
eg.transitionToRunning();
ExecutionJobVertex jv = eg.getJobVertex(jobVertex.getID());
assertThat(jv).isNotNull();
final SpeculativeExecutionVertex ev = (SpeculativeExecutionVertex) jv.getTaskVertices()[0];
final Execution e1 = ev.getCurrentExecutionAttempt();
final Execution e2 = ev.createNewSpeculativeExecution(System.currentTimeMillis());
e1.transitionState(ExecutionState.RUNNING);
e1.markFinished();
e2.cancel();
ArchivedExecutionVertex aev = ev.archive();
ArchivedExecutionGraphTestUtils.compareExecutionVertex(ev, aev);
}
@Test
void testArchiveFailedExecutions() throws Exception {
final SpeculativeExecutionVertex ev = createSpeculativeExecutionVertex();
final Execution e1 = ev.getCurrentExecutionAttempt();
e1.transitionState(ExecutionState.RUNNING);
final Execution e2 = ev.createNewSpeculativeExecution(0);
e2.transitionState(ExecutionState.FAILED);
ev.archiveFailedExecution(e2.getAttemptId());
final Execution e3 = ev.createNewSpeculativeExecution(0);
e3.transitionState(ExecutionState.RUNNING);
e1.transitionState(ExecutionState.FAILED);
ev.archiveFailedExecution(e1.getAttemptId());
ArchivedExecutionVertex aev = ev.archive();
ArchivedExecutionGraphTestUtils.compareExecutionVertex(ev, aev);
}
@Test
void testArchiveTheOnlyCurrentExecution() throws Exception {
final SpeculativeExecutionVertex ev = createSpeculativeExecutionVertex();
final Execution e1 = ev.getCurrentExecutionAttempt();
e1.transitionState(ExecutionState.FAILED);
ev.archiveFailedExecution(e1.getAttemptId());
ArchivedExecutionVertex aev = ev.archive();
ArchivedExecutionGraphTestUtils.compareExecutionVertex(ev, aev);
}
@Test
void testGetExecutionState() throws Exception {
final SpeculativeExecutionVertex ev = createSpeculativeExecutionVertex();
final Execution e1 = ev.getCurrentExecutionAttempt();
e1.transitionState(ExecutionState.CANCELED);
// the latter added state is more likely to reach FINISH state
final List<ExecutionState> statesSortedByPriority = new ArrayList<>();
statesSortedByPriority.add(ExecutionState.FAILED);
statesSortedByPriority.add(ExecutionState.CANCELING);
statesSortedByPriority.add(ExecutionState.CREATED);
statesSortedByPriority.add(ExecutionState.SCHEDULED);
statesSortedByPriority.add(ExecutionState.DEPLOYING);
statesSortedByPriority.add(ExecutionState.INITIALIZING);
statesSortedByPriority.add(ExecutionState.RUNNING);
statesSortedByPriority.add(ExecutionState.FINISHED);
for (ExecutionState state : statesSortedByPriority) {
final Execution execution = ev.createNewSpeculativeExecution(0);
execution.transitionState(state);
// Check the AchievedExecutionVertex in each state.
ArchivedExecutionVertex aev = ev.archive();
ArchivedExecutionGraphTestUtils.compareExecutionVertex(ev, aev);
}
}
private SpeculativeExecutionVertex createSpeculativeExecutionVertex() throws Exception {
final JobVertex jobVertex = ExecutionGraphTestUtils.createNoOpVertex(1);
final JobGraph jobGraph = JobGraphTestUtils.batchJobGraph(jobVertex);
final ExecutionGraph executionGraph = createExecutionGraph(jobGraph);
ExecutionJobVertex jv = executionGraph.getJobVertex(jobVertex.getID());
assertThat(jv).isNotNull();
return (SpeculativeExecutionVertex) jv.getTaskVertices()[0];
}
private ExecutionGraph createExecutionGraph(final JobGraph jobGraph) throws Exception {
final ExecutionGraph executionGraph =
TestingDefaultExecutionGraphBuilder.newBuilder()
.setJobGraph(jobGraph)
.setExecutionJobVertexFactory(new SpeculativeExecutionJobVertex.Factory())
.build(EXECUTOR_RESOURCE.getExecutor());
executionGraph.setInternalTaskFailuresListener(internalFailuresListener);
executionGraph.start(ComponentMainThreadExecutorServiceAdapter.forMainThread());
return executionGraph;
}
}
| ArchivedExecutionVertexWithSpeculativeExecutionTest |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/DelegatingCatalogExtension.java | {
"start": 1674,
"end": 6317
} | class ____ implements CatalogExtension {
protected CatalogPlugin delegate;
@Override
public final void setDelegateCatalog(CatalogPlugin delegate) {
this.delegate = delegate;
}
@Override
public String name() {
return delegate.name();
}
@Override
public void initialize(String name, CaseInsensitiveStringMap options) {}
@Override
public Set<TableCatalogCapability> capabilities() {
return asTableCatalog().capabilities();
}
@Override
public String[] defaultNamespace() {
return delegate.defaultNamespace();
}
@Override
public Identifier[] listTables(String[] namespace) throws NoSuchNamespaceException {
return asTableCatalog().listTables(namespace);
}
@Override
public Table loadTable(Identifier ident) throws NoSuchTableException {
return asTableCatalog().loadTable(ident);
}
@Override
public Table loadTable(Identifier ident, long timestamp) throws NoSuchTableException {
return asTableCatalog().loadTable(ident, timestamp);
}
@Override
public Table loadTable(Identifier ident, String version) throws NoSuchTableException {
return asTableCatalog().loadTable(ident, version);
}
@Override
public void invalidateTable(Identifier ident) {
asTableCatalog().invalidateTable(ident);
}
@Override
public boolean tableExists(Identifier ident) {
return asTableCatalog().tableExists(ident);
}
@Override
public Table createTable(
Identifier ident,
StructType schema,
Transform[] partitions,
Map<String, String> properties) throws TableAlreadyExistsException, NoSuchNamespaceException {
return asTableCatalog().createTable(ident, schema, partitions, properties);
}
@Override
public Table createTable(
Identifier ident,
Column[] columns,
Transform[] partitions,
Map<String, String> properties) throws TableAlreadyExistsException, NoSuchNamespaceException {
return asTableCatalog().createTable(ident, columns, partitions, properties);
}
@Override
public Table alterTable(
Identifier ident,
TableChange... changes) throws NoSuchTableException {
return asTableCatalog().alterTable(ident, changes);
}
@Override
public boolean dropTable(Identifier ident) {
return asTableCatalog().dropTable(ident);
}
@Override
public boolean purgeTable(Identifier ident) {
return asTableCatalog().purgeTable(ident);
}
@Override
public void renameTable(
Identifier oldIdent,
Identifier newIdent) throws NoSuchTableException, TableAlreadyExistsException {
asTableCatalog().renameTable(oldIdent, newIdent);
}
@Override
public String[][] listNamespaces() throws NoSuchNamespaceException {
return asNamespaceCatalog().listNamespaces();
}
@Override
public String[][] listNamespaces(String[] namespace) throws NoSuchNamespaceException {
return asNamespaceCatalog().listNamespaces(namespace);
}
@Override
public boolean namespaceExists(String[] namespace) {
return asNamespaceCatalog().namespaceExists(namespace);
}
@Override
public Map<String, String> loadNamespaceMetadata(
String[] namespace) throws NoSuchNamespaceException {
return asNamespaceCatalog().loadNamespaceMetadata(namespace);
}
@Override
public void createNamespace(
String[] namespace,
Map<String, String> metadata) throws NamespaceAlreadyExistsException {
asNamespaceCatalog().createNamespace(namespace, metadata);
}
@Override
public void alterNamespace(
String[] namespace,
NamespaceChange... changes) throws NoSuchNamespaceException {
asNamespaceCatalog().alterNamespace(namespace, changes);
}
@Override
public boolean dropNamespace(
String[] namespace,
boolean cascade) throws NoSuchNamespaceException, NonEmptyNamespaceException {
return asNamespaceCatalog().dropNamespace(namespace, cascade);
}
@Override
public UnboundFunction loadFunction(Identifier ident) throws NoSuchFunctionException {
return asFunctionCatalog().loadFunction(ident);
}
@Override
public Identifier[] listFunctions(String[] namespace) throws NoSuchNamespaceException {
return asFunctionCatalog().listFunctions(namespace);
}
@Override
public boolean functionExists(Identifier ident) {
return asFunctionCatalog().functionExists(ident);
}
private TableCatalog asTableCatalog() {
return (TableCatalog) delegate;
}
private SupportsNamespaces asNamespaceCatalog() {
return (SupportsNamespaces) delegate;
}
private FunctionCatalog asFunctionCatalog() {
return (FunctionCatalog) delegate;
}
}
| DelegatingCatalogExtension |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/LogGlobalLogNameTest.java | {
"start": 920,
"end": 1227
} | class ____ extends LogProcessorTest {
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
context.getGlobalOptions().put(Exchange.LOG_EIP_NAME, "com.foo.myapp");
return context;
}
}
| LogGlobalLogNameTest |
java | netty__netty | microbench/src/main/java/io/netty/microbench/channel/DefaultChannelPipelineBenchmark.java | {
"start": 2134,
"end": 14064
} | class ____ extends ChannelOutboundHandlerAdapter {
@Override
public final boolean isSharable() {
return true;
}
}
private static final ChannelHandler INBOUND_CONSUMING_HANDLER = new SharableInboundHandlerAdapter() {
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
// NOOP
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
// NOOP
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
// NOOP
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
// NOOP
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
// NOOP
}
};
private static final ChannelHandler OUTBOUND_CONSUMING_HANDLER = new SharableOutboundHandlerAdapter() {
@Override
public void read(ChannelHandlerContext ctx) throws Exception {
// NOOP
}
@Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
// NOOP
}
@Override
public void flush(ChannelHandlerContext ctx) throws Exception {
// NOOP
}
};
private static final ChannelHandler[] HANDLERS = {
new SharableInboundHandlerAdapter() {
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
ctx.fireChannelActive();
}
},
new SharableInboundHandlerAdapter() {
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
ctx.fireChannelInactive();
}
},
new SharableInboundHandlerAdapter() {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
ctx.fireChannelRead(msg);
}
},
new SharableInboundHandlerAdapter() {
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
ctx.fireUserEventTriggered(evt);
}
},
new SharableInboundHandlerAdapter() {
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
ctx.fireChannelReadComplete();
}
},
new SharableInboundHandlerAdapter() {
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
ctx.fireChannelActive();
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
ctx.fireChannelInactive();
}
},
new SharableInboundHandlerAdapter() {
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
ctx.fireChannelActive();
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
ctx.fireChannelRead(msg);
}
},
new SharableInboundHandlerAdapter() {
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
ctx.fireChannelActive();
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
ctx.fireUserEventTriggered(evt);
}
},
new SharableInboundHandlerAdapter() {
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
ctx.fireChannelActive();
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
ctx.fireChannelReadComplete();
}
},
new SharableInboundHandlerAdapter() {
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
ctx.fireChannelInactive();
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
ctx.fireChannelRead(msg);
}
},
new SharableInboundHandlerAdapter() {
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
ctx.fireChannelInactive();
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
ctx.fireUserEventTriggered(evt);
}
},
new SharableInboundHandlerAdapter() {
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
ctx.fireChannelInactive();
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
ctx.fireChannelReadComplete();
}
},
new SharableInboundHandlerAdapter() {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
ctx.fireChannelRead(msg);
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
ctx.fireUserEventTriggered(evt);
}
},
new SharableInboundHandlerAdapter() {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
ctx.fireChannelRead(msg);
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
ctx.fireChannelReadComplete();
}
},
new SharableInboundHandlerAdapter() {
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
ctx.fireUserEventTriggered(evt);
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
ctx.fireChannelReadComplete();
}
},
new SharableOutboundHandlerAdapter() {
@Override
public void read(ChannelHandlerContext ctx) throws Exception {
ctx.read();
}
},
new SharableOutboundHandlerAdapter() {
@Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
ctx.write(msg, promise);
}
},
new SharableOutboundHandlerAdapter() {
@Override
public void flush(ChannelHandlerContext ctx) throws Exception {
ctx.flush();
}
},
new SharableOutboundHandlerAdapter() {
@Override
public void read(ChannelHandlerContext ctx) throws Exception {
ctx.read();
}
@Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
ctx.write(msg, promise);
}
},
new SharableOutboundHandlerAdapter() {
@Override
public void read(ChannelHandlerContext ctx) throws Exception {
ctx.read();
}
@Override
public void flush(ChannelHandlerContext ctx) throws Exception {
ctx.flush();
}
},
new SharableOutboundHandlerAdapter() {
@Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
ctx.write(msg, promise);
}
@Override
public void flush(ChannelHandlerContext ctx) throws Exception {
ctx.flush();
}
},
};
private static final int CALL_TYPE_ARRAY_SIZE = 1024;
private static final int CALL_TYPE_ARRAY_MASK = CALL_TYPE_ARRAY_SIZE - 1;
@Param({ "1024" })
private int pipelineArrayLength;
private int pipelineArrayMask;
@Param({ "16" })
public int extraHandlers;
private ChannelPipeline[] pipelines;
private ChannelPromise[] promises;
private int pipelineCounter;
private int[] callTypes;
private int callTypeCounter;
@Setup(Level.Iteration)
public void setup() {
SplittableRandom rng = new SplittableRandom();
pipelineArrayMask = pipelineArrayLength - 1;
pipelines = new ChannelPipeline[pipelineArrayLength];
promises = new ChannelPromise[pipelineArrayLength];
for (int i = 0; i < pipelineArrayLength; i++) {
EmbeddedChannel channel = new EmbeddedChannel();
channel.config().setAutoRead(false);
ChannelPipeline pipeline = channel.pipeline();
pipeline.addLast(OUTBOUND_CONSUMING_HANDLER);
for (int j = 0; j < extraHandlers; j++) {
pipeline.addLast(HANDLERS[rng.nextInt(0, HANDLERS.length)]);
}
pipeline.addLast(INBOUND_CONSUMING_HANDLER);
pipelines[i] = pipeline;
promises[i] = pipeline.newPromise();
}
}
@TearDown
public void tearDown() {
for (ChannelPipeline pipeline : pipelines) {
pipeline.channel().close();
}
}
@CompilerControl(CompilerControl.Mode.DONT_INLINE)
@Benchmark
public void propagateEvent(Blackhole hole) {
ChannelPipeline pipeline = pipelines[pipelineCounter++ & pipelineArrayMask];
hole.consume(pipeline.fireChannelReadComplete());
}
@OperationsPerInvocation(12)
@CompilerControl(CompilerControl.Mode.DONT_INLINE)
@Benchmark()
public void propagateVariety(Blackhole hole) {
int index = pipelineCounter++ & pipelineArrayMask;
ChannelPipeline pipeline = pipelines[index];
hole.consume(pipeline.fireChannelActive()); // 1
hole.consume(pipeline.fireChannelRead(MESSAGE)); // 2
hole.consume(pipeline.fireChannelRead(MESSAGE)); // 3
hole.consume(pipeline.write(MESSAGE, promises[index])); // 4
hole.consume(pipeline.fireChannelRead(MESSAGE)); // 5
hole.consume(pipeline.fireChannelRead(MESSAGE)); // 6
hole.consume(pipeline.write(MESSAGE, promises[index])); // 7
hole.consume(pipeline.fireChannelReadComplete()); // 8
hole.consume(pipeline.fireUserEventTriggered(MESSAGE)); // 9
hole.consume(pipeline.fireChannelWritabilityChanged()); // 10
hole.consume(pipeline.flush()); // 11
hole.consume(pipeline.fireChannelInactive()); // 12
}
}
| SharableOutboundHandlerAdapter |
java | elastic__elasticsearch | x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java | {
"start": 1128,
"end": 2582
} | class ____ extends Plugin implements ActionPlugin {
@Override
public Collection<?> createComponents(PluginServices services) {
NodeSeenService nodeSeenService = new NodeSeenService(services.clusterService());
return Collections.singletonList(nodeSeenService);
}
@Override
public List<ActionHandler> getActions() {
ActionHandler putShutdown = new ActionHandler(PutShutdownNodeAction.INSTANCE, TransportPutShutdownNodeAction.class);
ActionHandler deleteShutdown = new ActionHandler(DeleteShutdownNodeAction.INSTANCE, TransportDeleteShutdownNodeAction.class);
ActionHandler getStatus = new ActionHandler(GetShutdownStatusAction.INSTANCE, TransportGetShutdownStatusAction.class);
return Arrays.asList(putShutdown, deleteShutdown, getStatus);
}
@Override
public List<RestHandler> getRestHandlers(
Settings settings,
NamedWriteableRegistry namedWriteableRegistry,
RestController restController,
ClusterSettings clusterSettings,
IndexScopedSettings indexScopedSettings,
SettingsFilter settingsFilter,
IndexNameExpressionResolver indexNameExpressionResolver,
Supplier<DiscoveryNodes> nodesInCluster,
Predicate<NodeFeature> clusterSupportsFeature
) {
return Arrays.asList(new RestPutShutdownNodeAction(), new RestDeleteShutdownNodeAction(), new RestGetShutdownStatusAction());
}
}
| ShutdownPlugin |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/ClassPathScanningCandidateComponentProvider.java | {
"start": 20440,
"end": 21051
} | class ____ as a candidate component
*/
protected boolean isCandidateComponent(MetadataReader metadataReader) throws IOException {
for (TypeFilter filter : this.excludeFilters) {
if (filter.match(metadataReader, getMetadataReaderFactory())) {
return false;
}
}
for (TypeFilter filter : this.includeFilters) {
if (filter.match(metadataReader, getMetadataReaderFactory())) {
registerCandidateTypeForIncludeFilter(metadataReader.getClassMetadata().getClassName(), filter);
return isConditionMatch(metadataReader);
}
}
return false;
}
/**
* Determine whether the given | qualifies |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/impl/event/SimpleEventNotifierEventsTest.java | {
"start": 1592,
"end": 8877
} | class ____ {
private final List<CamelEvent> events = new ArrayList<>();
private CamelContext context;
private ProducerTemplate template;
@BeforeEach
public void setUp() throws Exception {
context = createCamelContext();
context.addRoutes(createRouteBuilder());
template = context.createProducerTemplate();
context.start();
}
@AfterEach
public void tearDown() {
if (context != null) {
context.stop();
}
}
protected CamelContext createCamelContext() {
DefaultCamelContext context = new DefaultCamelContext();
context.getManagementStrategy().addEventNotifier(new SimpleEventNotifierSupport() {
public void notify(CamelEvent event) {
events.add(event);
}
});
return context;
}
@Test
public void testExchangeDone() throws Exception {
// optimized as this does not require exchange events
assertFalse(context.getCamelContextExtension().isEventNotificationApplicable());
MockEndpoint mock = context.getEndpoint("mock:result", MockEndpoint.class);
mock.expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
mock.assertIsSatisfied();
assertEquals(12, events.size());
assertIsInstanceOf(CamelEvent.CamelContextInitializingEvent.class, events.get(0));
assertIsInstanceOf(CamelEvent.CamelContextInitializedEvent.class, events.get(1));
assertIsInstanceOf(CamelContextStartingEvent.class, events.get(2));
assertIsInstanceOf(CamelContextRoutesStartingEvent.class, events.get(3));
assertIsInstanceOf(RouteAddedEvent.class, events.get(4));
assertIsInstanceOf(RouteAddedEvent.class, events.get(5));
assertIsInstanceOf(RouteStartingEvent.class, events.get(6));
assertIsInstanceOf(RouteStartedEvent.class, events.get(7));
assertIsInstanceOf(RouteStartingEvent.class, events.get(8));
assertIsInstanceOf(RouteStartedEvent.class, events.get(9));
assertIsInstanceOf(CamelContextRoutesStartedEvent.class, events.get(10));
assertIsInstanceOf(CamelContextStartedEvent.class, events.get(11));
context.stop();
assertEquals(22, events.size());
assertIsInstanceOf(CamelContextStoppingEvent.class, events.get(12));
assertIsInstanceOf(CamelContextRoutesStoppingEvent.class, events.get(13));
assertIsInstanceOf(RouteStoppingEvent.class, events.get(14));
assertIsInstanceOf(RouteStoppedEvent.class, events.get(15));
assertIsInstanceOf(RouteRemovedEvent.class, events.get(16));
assertIsInstanceOf(RouteStoppingEvent.class, events.get(17));
assertIsInstanceOf(RouteStoppedEvent.class, events.get(18));
assertIsInstanceOf(RouteRemovedEvent.class, events.get(19));
assertIsInstanceOf(CamelContextRoutesStoppedEvent.class, events.get(20));
assertIsInstanceOf(CamelContextStoppedEvent.class, events.get(21));
}
@Test
public void testExchangeFailed() {
// optimized as this does not require exchange events
assertFalse(context.getCamelContextExtension().isEventNotificationApplicable());
Exception e = assertThrows(Exception.class,
() -> template.sendBody("direct:fail", "Hello World"),
"Should have thrown an exception");
assertIsInstanceOf(IllegalArgumentException.class, e.getCause());
assertEquals(12, events.size());
assertIsInstanceOf(CamelEvent.CamelContextInitializingEvent.class, events.get(0));
assertIsInstanceOf(CamelEvent.CamelContextInitializedEvent.class, events.get(1));
assertIsInstanceOf(CamelContextStartingEvent.class, events.get(2));
assertIsInstanceOf(CamelContextRoutesStartingEvent.class, events.get(3));
assertIsInstanceOf(RouteAddedEvent.class, events.get(4));
assertIsInstanceOf(RouteAddedEvent.class, events.get(5));
assertIsInstanceOf(RouteStartingEvent.class, events.get(6));
assertIsInstanceOf(RouteStartedEvent.class, events.get(7));
assertIsInstanceOf(RouteStartingEvent.class, events.get(8));
assertIsInstanceOf(RouteStartedEvent.class, events.get(9));
assertIsInstanceOf(CamelContextRoutesStartedEvent.class, events.get(10));
assertIsInstanceOf(CamelContextStartedEvent.class, events.get(11));
context.stop();
assertEquals(22, events.size());
assertIsInstanceOf(CamelContextStoppingEvent.class, events.get(12));
assertIsInstanceOf(CamelContextRoutesStoppingEvent.class, events.get(13));
assertIsInstanceOf(RouteStoppingEvent.class, events.get(14));
assertIsInstanceOf(RouteStoppedEvent.class, events.get(15));
assertIsInstanceOf(RouteRemovedEvent.class, events.get(16));
assertIsInstanceOf(RouteStoppingEvent.class, events.get(17));
assertIsInstanceOf(RouteStoppedEvent.class, events.get(18));
assertIsInstanceOf(RouteRemovedEvent.class, events.get(19));
assertIsInstanceOf(CamelContextRoutesStoppedEvent.class, events.get(20));
assertIsInstanceOf(CamelContextStoppedEvent.class, events.get(21));
}
@Test
public void testSuspendResume() {
// optimized as this does not require exchange events
assertFalse(context.getCamelContextExtension().isEventNotificationApplicable());
assertEquals(12, events.size());
assertIsInstanceOf(CamelEvent.CamelContextInitializingEvent.class, events.get(0));
assertIsInstanceOf(CamelEvent.CamelContextInitializedEvent.class, events.get(1));
assertIsInstanceOf(CamelContextStartingEvent.class, events.get(2));
assertIsInstanceOf(CamelContextRoutesStartingEvent.class, events.get(3));
assertIsInstanceOf(RouteAddedEvent.class, events.get(4));
assertIsInstanceOf(RouteAddedEvent.class, events.get(5));
assertIsInstanceOf(RouteStartingEvent.class, events.get(6));
assertIsInstanceOf(RouteStartedEvent.class, events.get(7));
assertIsInstanceOf(RouteStartingEvent.class, events.get(8));
assertIsInstanceOf(RouteStartedEvent.class, events.get(9));
assertIsInstanceOf(CamelContextRoutesStartedEvent.class, events.get(10));
assertIsInstanceOf(CamelContextStartedEvent.class, events.get(11));
context.suspend();
assertEquals(14, events.size());
assertIsInstanceOf(CamelContextSuspendingEvent.class, events.get(12));
// notice direct component is not suspended (as they are internal)
assertIsInstanceOf(CamelContextSuspendedEvent.class, events.get(13));
context.resume();
assertEquals(16, events.size());
assertIsInstanceOf(CamelContextResumingEvent.class, events.get(14));
assertIsInstanceOf(CamelContextResumedEvent.class, events.get(15));
}
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("log:foo").to("mock:result");
from("direct:fail").throwException(new IllegalArgumentException("Damn"));
}
};
}
}
| SimpleEventNotifierEventsTest |
java | qos-ch__slf4j | slf4j-api/src/main/java/org/slf4j/spi/NOPLoggingEventBuilder.java | {
"start": 455,
"end": 2297
} | class ____ implements LoggingEventBuilder {
static final NOPLoggingEventBuilder SINGLETON = new NOPLoggingEventBuilder();
private NOPLoggingEventBuilder() {
}
/**
* <p>Returns the singleton instance of this class.
* Used by {@link org.slf4j.Logger#makeLoggingEventBuilder(Level) makeLoggingEventBuilder(Level)}.</p>
*
* @return the singleton instance of this class
*/
public static LoggingEventBuilder singleton() {
return SINGLETON;
}
@Override
public LoggingEventBuilder addMarker(Marker marker) {
return singleton();
}
@Override
public LoggingEventBuilder addArgument(Object p) {
return singleton();
}
@Override
public LoggingEventBuilder addArgument(Supplier<?> objectSupplier) {
return singleton();
}
@Override
public LoggingEventBuilder addKeyValue(String key, Object value) {
return singleton();
}
@Override
public LoggingEventBuilder addKeyValue(String key, Supplier<Object> value) {
return singleton();
}
@Override
public LoggingEventBuilder setCause(Throwable cause) {
return singleton();
}
@Override
public void log() {
}
@Override
public LoggingEventBuilder setMessage(String message) {
return this;
}
@Override
public LoggingEventBuilder setMessage(Supplier<String> messageSupplier) {
return this;
}
@Override
public void log(String message) {
}
@Override
public void log(Supplier<String> messageSupplier) {
}
@Override
public void log(String message, Object arg) {
}
@Override
public void log(String message, Object arg0, Object arg1) {
}
@Override
public void log(String message, Object... args) {
}
}
| NOPLoggingEventBuilder |
java | google__guava | android/guava/src/com/google/common/util/concurrent/ForwardingExecutorService.java | {
"start": 1620,
"end": 1951
} | class ____ <i>not</i> forward calls to {@code
* default} methods. Instead, it inherits their default implementations. When those implementations
* invoke methods, they invoke methods on the {@code ForwardingExecutorService}.
*
* @author Kurt Alfred Kluever
* @since 10.0
*/
@J2ktIncompatible
@GwtIncompatible
public abstract | does |
java | quarkusio__quarkus | test-framework/junit5/src/main/java/io/quarkus/test/junit/callback/QuarkusTestAfterAllCallback.java | {
"start": 302,
"end": 392
} | interface ____ {
void afterAll(QuarkusTestContext context);
}
| QuarkusTestAfterAllCallback |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/deviceframework/DeviceResourceHandlerImpl.java | {
"start": 12900,
"end": 13099
} | enum ____ {
BLOCK("b"),
CHAR("c");
private final String name;
DeviceType(String n) {
this.name = n;
}
public String getName() {
return name;
}
}
}
| DeviceType |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/search/Document.java | {
"start": 282,
"end": 1602
} | class ____ {
private final String id;
private final double score;
private final Map<String, Property> properties;
private final Response payload;
public Document(String id, double score, Response payload, Map<String, Property> properties) {
this.id = id;
this.score = score == 0 ? 1.0 : score;
this.payload = payload;
this.properties = properties == null ? Collections.emptyMap() : Collections.unmodifiableMap(properties);
}
/**
* @return the document key
*/
public String key() {
return id;
}
/**
* @return the score, 0.0 if not requested
*/
public double score() {
return score;
}
/**
* @return the list of properties of the document
*/
public Map<String, Property> properties() {
return properties;
}
/**
* Gets a specific property from the document
*
* @param name the name, must not be {@code null}
* @return the property, {@code null} if not found
*/
public Property property(String name) {
return properties.get(name);
}
/**
* @return the payload
*/
public Response payload() {
return payload;
}
/**
* Represents a document property / attribute
*/
public static | Document |
java | quarkusio__quarkus | extensions/spring-data-rest/deployment/src/test/java/io/quarkus/spring/data/rest/CrudAndPagedResourceTest.java | {
"start": 959,
"end": 20804
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(AbstractEntity.class, Record.class, CrudAndPagedRecordsRepository.class)
.addAsResource("application.properties")
.addAsResource("import.sql"));
@Test
void shouldGet() {
given().accept("application/json")
.when().get("/crud-and-paged-records/1")
.then().statusCode(200)
.and().body("id", is(equalTo(1)))
.and().body("name", is(equalTo("first")));
}
@Test
void shouldNotGetNonExistent() {
given().accept("application/json")
.when().get("/crud-and-paged-records/1000")
.then().statusCode(404);
}
@Test
void shouldGetHal() {
given().accept("application/hal+json")
.when().get("/crud-and-paged-records/1")
.then().statusCode(200)
.and().body("id", is(equalTo(1)))
.and().body("name", is(equalTo("first")))
.and().body("_links.add.href", endsWith("/crud-and-paged-records"))
.and().body("_links.list.href", endsWith("/crud-and-paged-records"))
.and().body("_links.self.href", endsWith("/crud-and-paged-records/1"))
.and().body("_links.update.href", endsWith("/crud-and-paged-records/1"))
.and().body("_links.remove.href", endsWith("/crud-and-paged-records/1"));
}
@Test
void shouldNotGetNonExistentHal() {
given().accept("application/hal+json")
.when().get("/crud-and-paged-records/1000")
.then().statusCode(404);
}
@Test
void shouldList() {
Response response = given().accept("application/json")
.when().get("/crud-and-paged-records")
.thenReturn();
assertThat(response.statusCode()).isEqualTo(200);
assertThat(response.body().jsonPath().getList("id")).contains(1, 2);
assertThat(response.body().jsonPath().getList("name")).contains("first", "second");
Map<String, String> expectedLinks = new HashMap<>(2);
expectedLinks.put("first", "/crud-and-paged-records?page=0&size=20");
expectedLinks.put("last", "/crud-and-paged-records?page=0&size=20");
assertLinks(response.headers(), expectedLinks);
}
@Test
void shouldListHal() {
given().accept("application/hal+json")
.when().get("/crud-and-paged-records")
.then().statusCode(200).log().all()
.and().body("_embedded.crud-and-paged-records.id", hasItems(1, 2))
.and().body("_embedded.crud-and-paged-records.name", hasItems("first", "second"))
.and()
.body("_embedded.crud-and-paged-records._links.add.href",
hasItems(endsWith("/crud-and-paged-records"), endsWith("/crud-and-paged-records")))
.and()
.body("_embedded.crud-and-paged-records._links.list.href",
hasItems(endsWith("/crud-and-paged-records"), endsWith("/crud-and-paged-records")))
.and()
.body("_embedded.crud-and-paged-records._links.self.href",
hasItems(endsWith("/crud-and-paged-records/1"), endsWith("/crud-and-paged-records/2")))
.and()
.body("_embedded.crud-and-paged-records._links.update.href",
hasItems(endsWith("/crud-and-paged-records/1"), endsWith("/crud-and-paged-records/2")))
.and()
.body("_embedded.crud-and-paged-records._links.remove.href",
hasItems(endsWith("/crud-and-paged-records/1"), endsWith("/crud-and-paged-records/2")))
.and().body("_links.add.href", endsWith("/crud-and-paged-records"))
.and().body("_links.list.href", endsWith("/crud-and-paged-records"))
.and().body("_links.first.href", endsWith("/crud-and-paged-records?page=0&size=20"))
.and().body("_links.last.href", endsWith("/crud-and-paged-records?page=0&size=20"));
}
@Test
void shouldListFirstPage() {
Response initResponse = given().accept("application/json")
.when().get("/crud-and-paged-records")
.thenReturn();
List<Integer> ids = initResponse.body().jsonPath().getList("id");
List<String> names = initResponse.body().jsonPath().getList("name");
int lastPage = ids.size() - 1;
Response response = given().accept("application/json")
.and().queryParam("page", 0)
.and().queryParam("size", 1)
.when().get("/crud-and-paged-records")
.thenReturn();
assertThat(response.statusCode()).isEqualTo(200);
assertThat(response.body().jsonPath().getList("id")).containsOnly(ids.get(0));
assertThat(response.body().jsonPath().getList("name")).containsOnly(names.get(0));
Map<String, String> expectedLinks = new HashMap<>(3);
expectedLinks.put("first", "/crud-and-paged-records?page=0&size=1");
expectedLinks.put("last", "/crud-and-paged-records?page=" + lastPage + "&size=1");
expectedLinks.put("next", "/crud-and-paged-records?page=1&size=1");
assertLinks(response.headers(), expectedLinks);
}
@Test
void shouldListFirstPageHal() {
Response initResponse = given().accept("application/json")
.when().get("/crud-and-paged-records")
.thenReturn();
List<Integer> ids = initResponse.body().jsonPath().getList("id");
List<String> names = initResponse.body().jsonPath().getList("name");
int lastPage = ids.size() - 1;
given().accept("application/hal+json")
.and().queryParam("page", 0)
.and().queryParam("size", 1)
.when().get("/crud-and-paged-records")
.then().statusCode(200)
.and().body("_embedded.crud-and-paged-records.id", contains(ids.get(0)))
.and().body("_embedded.crud-and-paged-records.name", contains(names.get(0)))
.and()
.body("_embedded.crud-and-paged-records._links.add.href",
hasItems(endsWith("/crud-and-paged-records"), endsWith("/crud-and-paged-records")))
.and()
.body("_embedded.crud-and-paged-records._links.list.href",
hasItems(endsWith("/crud-and-paged-records"), endsWith("/crud-and-paged-records")))
.and()
.body("_embedded.crud-and-paged-records._links.self.href",
contains(endsWith("/crud-and-paged-records/" + ids.get(0))))
.and()
.body("_embedded.crud-and-paged-records._links.update.href",
contains(endsWith("/crud-and-paged-records/" + ids.get(0))))
.and()
.body("_embedded.crud-and-paged-records._links.remove.href",
contains(endsWith("/crud-and-paged-records/" + ids.get(0))))
.and().body("_links.add.href", endsWith("/crud-and-paged-records"))
.and().body("_links.list.href", endsWith("/crud-and-paged-records"))
.and().body("_links.first.href", endsWith("/crud-and-paged-records?page=0&size=1"))
.and().body("_links.last.href", endsWith("/crud-and-paged-records?page=" + lastPage + "&size=1"))
.and().body("_links.next.href", endsWith("/crud-and-paged-records?page=1&size=1"));
}
@Test
void shouldListLastPage() {
Response initResponse = given().accept("application/json")
.when().get("/crud-and-paged-records")
.thenReturn();
List<Integer> ids = initResponse.body().jsonPath().getList("id");
List<String> names = initResponse.body().jsonPath().getList("name");
int lastPage = ids.size() - 1;
Response response = given().accept("application/json")
.and().queryParam("page", lastPage)
.and().queryParam("size", 1)
.when().get("/crud-and-paged-records")
.thenReturn();
assertThat(response.statusCode()).isEqualTo(200);
assertThat(response.body().jsonPath().getList("id")).containsOnly(ids.get(lastPage));
assertThat(response.body().jsonPath().getList("name")).containsOnly(names.get(lastPage));
Map<String, String> expectedLinks = new HashMap<>(3);
expectedLinks.put("first", "/crud-and-paged-records?page=0&size=1");
expectedLinks.put("last", "/crud-and-paged-records?page=" + lastPage + "&size=1");
expectedLinks.put("previous", "/crud-and-paged-records?page=" + (lastPage - 1) + "&size=1");
assertLinks(response.headers(), expectedLinks);
}
@Test
void shouldListLastPageHal() {
Response initResponse = given().accept("application/json")
.when().get("/crud-and-paged-records")
.thenReturn();
List<Integer> ids = initResponse.body().jsonPath().getList("id");
List<String> names = initResponse.body().jsonPath().getList("name");
int lastPage = ids.size() - 1;
given().accept("application/hal+json")
.and().queryParam("page", lastPage)
.and().queryParam("size", 1)
.when().get("/crud-and-paged-records")
.then().statusCode(200)
.and().body("_embedded.crud-and-paged-records.id", contains(ids.get(lastPage)))
.and().body("_embedded.crud-and-paged-records.name", contains(names.get(lastPage)))
.and()
.body("_embedded.crud-and-paged-records._links.add.href",
hasItems(endsWith("/crud-and-paged-records"), endsWith("/crud-and-paged-records")))
.and()
.body("_embedded.crud-and-paged-records._links.list.href",
hasItems(endsWith("/crud-and-paged-records"), endsWith("/crud-and-paged-records")))
.and()
.body("_embedded.crud-and-paged-records._links.self.href",
contains(endsWith("/crud-and-paged-records/" + ids.get(lastPage))))
.and()
.body("_embedded.crud-and-paged-records._links.update.href",
contains(endsWith("/crud-and-paged-records/" + ids.get(lastPage))))
.and()
.body("_embedded.crud-and-paged-records._links.remove.href",
contains(endsWith("/crud-and-paged-records/" + ids.get(lastPage))))
.and().body("_links.add.href", endsWith("/crud-and-paged-records"))
.and().body("_links.list.href", endsWith("/crud-and-paged-records"))
.and().body("_links.first.href", endsWith("/crud-and-paged-records?page=0&size=1"))
.and().body("_links.last.href", endsWith("/crud-and-paged-records?page=" + lastPage + "&size=1"))
.and().body("_links.previous.href", endsWith("/crud-and-paged-records?page=" + (lastPage - 1) + "&size=1"));
}
@Test
void shouldNotGetNonExistentPage() {
given().accept("application/json")
.and().queryParam("page", 100)
.when().get("/crud-and-paged-records")
.then().statusCode(200)
.and().body("id", is(empty()));
}
@Test
void shouldNotGetNegativePageOrSize() {
given().accept("application/json")
.and().queryParam("page", -1)
.and().queryParam("size", -1)
.when().get("/crud-and-paged-records")
.then().statusCode(200)
// Invalid page and size parameters are replaced with defaults
.and().body("id", hasItems(1, 2));
}
@Test
void shouldListAscending() {
Response response = given().accept("application/json")
.when().get("/crud-and-paged-records?sort=name,id")
.thenReturn();
List<String> actualNames = response.body().jsonPath().getList("name");
List<String> expectedNames = new LinkedList<>(actualNames);
expectedNames.sort(Comparator.naturalOrder());
assertThat(actualNames).isEqualTo(expectedNames);
}
@Test
void shouldListDescending() {
Response response = given().accept("application/json")
.when().get("/crud-and-paged-records?sort=-name,id")
.thenReturn();
List<String> actualNames = response.body().jsonPath().getList("name");
List<String> expectedNames = new LinkedList<>(actualNames);
expectedNames.sort(Comparator.reverseOrder());
assertThat(actualNames).isEqualTo(expectedNames);
}
@Test
void shouldCreate() {
Response response = given().accept("application/json")
.and().contentType("application/json")
.and().body("{\"name\": \"test-create\"}")
.when().post("/crud-and-paged-records")
.thenReturn();
assertThat(response.statusCode()).isEqualTo(201);
String location = response.header("Location");
int id = Integer.parseInt(location.substring(response.header("Location").lastIndexOf("/") + 1));
JsonPath body = response.body().jsonPath();
assertThat(body.getInt("id")).isEqualTo(id);
assertThat(body.getString("name")).isEqualTo("test-create");
given().accept("application/json")
.when().get(location)
.then().statusCode(200)
.and().body("id", is(equalTo(id)))
.and().body("name", is(equalTo("test-create")));
}
@Test
void shouldCreateHal() {
Response response = given().accept("application/hal+json")
.and().contentType("application/json")
.and().body("{\"name\": \"test-create-hal\"}")
.when().post("/crud-and-paged-records")
.thenReturn();
assertThat(response.statusCode()).isEqualTo(201);
String location = response.header("Location");
int id = Integer.parseInt(location.substring(response.header("Location").lastIndexOf("/") + 1));
JsonPath body = response.body().jsonPath();
assertThat(body.getInt("id")).isEqualTo(id);
assertThat(body.getString("name")).isEqualTo("test-create-hal");
assertThat(body.getString("_links.add.href")).endsWith("/crud-and-paged-records");
assertThat(body.getString("_links.list.href")).endsWith("/crud-and-paged-records");
assertThat(body.getString("_links.self.href")).endsWith("/crud-and-paged-records/" + id);
assertThat(body.getString("_links.update.href")).endsWith("/crud-and-paged-records/" + id);
assertThat(body.getString("_links.remove.href")).endsWith("/crud-and-paged-records/" + id);
given().accept("application/json")
.when().get(location)
.then().statusCode(200)
.and().body("id", is(equalTo(id)))
.and().body("name", is(equalTo("test-create-hal")));
}
@Test
void shouldCreateAndUpdate() {
Response createResponse = given().accept("application/json")
.and().contentType("application/json")
.and().body("{\"name\": \"test-update-create\"}")
.when().post("/crud-and-paged-records/")
.thenReturn();
assertThat(createResponse.statusCode()).isEqualTo(201);
String location = createResponse.header("Location");
int id = Integer.parseInt(location.substring(createResponse.header("Location").lastIndexOf("/") + 1));
JsonPath body = createResponse.body().jsonPath();
assertThat(body.getInt("id")).isEqualTo(id);
assertThat(body.getString("name")).isEqualTo("test-update-create");
given().accept("application/json")
.and().contentType("application/json")
.and().body("{\"id\": \"" + id + "\", \"name\": \"test-update\"}")
.when().put(location)
.then()
.statusCode(204);
given().accept("application/json")
.when().get(location)
.then().statusCode(200)
.and().body("id", is(equalTo(id)))
.and().body("name", is(equalTo("test-update")));
}
@Test
void shouldCreateAndUpdateHal() {
Response createResponse = given().accept("application/hal+json")
.and().contentType("application/json")
.and().body("{\"name\": \"test-update-create-hal\"}")
.when().post("/crud-and-paged-records/")
.thenReturn();
assertThat(createResponse.statusCode()).isEqualTo(201);
String location = createResponse.header("Location");
int id = Integer.parseInt(location.substring(createResponse.header("Location").lastIndexOf("/") + 1));
JsonPath body = createResponse.body().jsonPath();
assertThat(body.getInt("id")).isEqualTo(id);
assertThat(body.getString("name")).isEqualTo("test-update-create-hal");
assertThat(body.getString("_links.add.href")).endsWith("/crud-and-paged-records");
assertThat(body.getString("_links.list.href")).endsWith("/crud-and-paged-records");
assertThat(body.getString("_links.self.href")).endsWith("/crud-and-paged-records/" + id);
assertThat(body.getString("_links.update.href")).endsWith("/crud-and-paged-records/" + id);
assertThat(body.getString("_links.remove.href")).endsWith("/crud-and-paged-records/" + id);
given().accept("application/json")
.and().contentType("application/json")
.and().body("{\"id\": \"" + id + "\", \"name\": \"test-update-hal\"}")
.when().put(location)
.then()
.statusCode(204);
given().accept("application/json")
.when().get(location)
.then().statusCode(200)
.and().body("id", is(equalTo(id)))
.and().body("name", is(equalTo("test-update-hal")));
}
@Test
void shouldCreateAndDelete() {
Response createResponse = given().accept("application/json")
.and().contentType("application/json")
.and().body("{\"name\": \"test-delete\"}")
.when().post("/crud-and-paged-records")
.thenReturn();
assertThat(createResponse.statusCode()).isEqualTo(201);
String location = createResponse.header("Location");
when().delete(location)
.then().statusCode(204);
when().get(location)
.then().statusCode(404);
}
@Test
void shouldNotDeleteNonExistent() {
when().delete("/crud-and-paged-records/1000")
.then().statusCode(404);
}
private void assertLinks(Headers headers, Map<String, String> expectedLinks) {
List<Link> links = new LinkedList<>();
for (Header header : headers.getList("Link")) {
links.add(Link.valueOf(header.getValue()));
}
assertThat(links).hasSize(expectedLinks.size());
for (Map.Entry<String, String> expectedLink : expectedLinks.entrySet()) {
assertThat(links).anySatisfy(link -> {
assertThat(link.getUri().toString()).endsWith(expectedLink.getValue());
assertThat(link.getRel()).isEqualTo(expectedLink.getKey());
});
}
}
}
| CrudAndPagedResourceTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcBase.java | {
"start": 10631,
"end": 11090
} | class ____ extends
SecretManager<TestTokenIdentifier> {
@Override
public byte[] createPassword(TestTokenIdentifier id) {
return id.getBytes();
}
@Override
public byte[] retrievePassword(TestTokenIdentifier id)
throws InvalidToken {
return id.getBytes();
}
@Override
public TestTokenIdentifier createIdentifier() {
return new TestTokenIdentifier();
}
}
public static | TestTokenSecretManager |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-api/src/main/java/org/apache/dubbo/rpc/support/MockInvoker.java | {
"start": 8637,
"end": 8761
} | class ____ instance "
+ mockService
+ ", please check if there's mock | or |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SpringBatchEndpointBuilderFactory.java | {
"start": 10600,
"end": 10942
} | class ____ extends AbstractEndpointBuilder implements SpringBatchEndpointBuilder, AdvancedSpringBatchEndpointBuilder {
public SpringBatchEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new SpringBatchEndpointBuilderImpl(path);
}
} | SpringBatchEndpointBuilderImpl |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/wall/WallMultiLineCommentTest.java | {
"start": 742,
"end": 1079
} | class ____ extends TestCase {
private String sql = "select f1 from t where a=1 /* and b=1 */";
public void testOracle() throws Exception {
assertFalse(WallUtils.isValidateOracle(sql));
}
public void testMySql() throws Exception {
assertFalse(WallUtils.isValidateMySql(sql));
}
}
| WallMultiLineCommentTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_endsWith_Test.java | {
"start": 961,
"end": 1298
} | class ____ extends LongArrayAssertBaseTest {
@Override
protected LongArrayAssert invoke_api_method() {
return assertions.endsWith(6L, 8L);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertEndsWith(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L));
}
}
| LongArrayAssert_endsWith_Test |
java | apache__camel | core/camel-core-languages/src/main/java/org/apache/camel/language/simple/SimpleLanguage.java | {
"start": 9025,
"end": 9649
} | class ____ implements Expression {
private final String text;
public SimpleExpression(String text) {
this.text = text;
}
@Override
public <T> T evaluate(Exchange exchange, Class<T> type) {
String r = ScriptHelper.resolveOptionalExternalScript(getCamelContext(), exchange, text);
Expression exp = SimpleLanguage.this.createExpression(r);
exp.init(getCamelContext());
return exp.evaluate(exchange, type);
}
@Override
public String toString() {
return text;
}
}
}
| SimpleExpression |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-api/src/main/java/org/apache/dubbo/rpc/stub/StubMethodHandler.java | {
"start": 1006,
"end": 1328
} | interface ____<T, R> {
/**
* Invoke method
*
* @param arguments may contain {@link org.apache.dubbo.common.stream.StreamObserver} or just
* single request instance.
* @return an Async or Sync future
*/
CompletableFuture<?> invoke(Object[] arguments);
}
| StubMethodHandler |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java | {
"start": 102445,
"end": 102971
} | interface ____ {
void accept(Set<String> xs, String x);
}
void test(ImmutableBiConsumer c) {
test(Set::add);
}
}
""")
.doTest();
}
@Test
public void methodReference_toConstructor() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.errorprone.annotations.Immutable;
import java.util.ArrayList;
abstract | ImmutableBiConsumer |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/appender/db/ColumnMapping.java | {
"start": 2268,
"end": 10247
} | class ____ implements org.apache.logging.log4j.core.util.Builder<ColumnMapping> {
@PluginConfiguration
private Configuration configuration;
@PluginElement("Layout")
private StringLayout layout;
@PluginBuilderAttribute
private String literal;
@PluginBuilderAttribute
@Required(message = "No column name provided")
private String name;
@PluginBuilderAttribute
private String parameter;
@PluginBuilderAttribute
private String pattern;
@PluginBuilderAttribute
private String source;
@PluginBuilderAttribute
@Deprecated
private Class<?> type;
@PluginBuilderAttribute
@Required(message = "No conversion type provided")
private Class<?> columnType = String.class;
@Override
public ColumnMapping build() {
if (pattern != null) {
layout = PatternLayout.newBuilder()
.setPattern(pattern)
.setConfiguration(configuration)
.setAlwaysWriteExceptions(false)
.build();
}
final Class<?> columnType = type != null ? type : this.columnType;
if (!(layout == null
|| literal == null
|| Date.class.isAssignableFrom(columnType)
|| ReadOnlyStringMap.class.isAssignableFrom(columnType)
|| ThreadContextMap.class.isAssignableFrom(columnType)
|| ThreadContextStack.class.isAssignableFrom(columnType))) {
LOGGER.error(
"No 'layout' or 'literal' value specified and type ({}) is not compatible with "
+ "ThreadContextMap, ThreadContextStack, or java.util.Date for the mapping",
columnType,
this);
return null;
}
if (literal != null && parameter != null) {
LOGGER.error("Only one of 'literal' or 'parameter' can be set on the column mapping {}", this);
return null;
}
return new ColumnMapping(name, source, layout, literal, parameter, columnType);
}
public Builder setConfiguration(final Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Layout of value to write to database (before type conversion). Not applicable if {@link #setType(Class)} is
* a {@link ReadOnlyStringMap}, {@link ThreadContextMap}, or {@link ThreadContextStack}.
*
* @return this.
*/
public Builder setLayout(final StringLayout layout) {
this.layout = layout;
return this;
}
/**
* Literal value to use for populating a column. This is generally useful for functions, stored procedures,
* etc. No escaping will be done on this value.
*
* @return this.
*/
public Builder setLiteral(final String literal) {
this.literal = literal;
return this;
}
/**
* Column name.
*
* @return this.
*/
public Builder setName(final String name) {
this.name = name;
return this;
}
/**
* Parameter value to use for populating a column, MUST contain a single parameter marker '?'. This is generally useful for functions, stored procedures,
* etc. No escaping will be done on this value.
*
* @return this.
*/
public Builder setParameter(final String parameter) {
this.parameter = parameter;
return this;
}
/**
* Pattern to use as a {@link PatternLayout}. Convenient shorthand for {@link #setLayout(StringLayout)} with a
* PatternLayout.
*
* @return this.
*/
public Builder setPattern(final String pattern) {
this.pattern = pattern;
return this;
}
/**
* Source name. Useful when combined with a {@link org.apache.logging.log4j.message.MapMessage} depending on the
* appender.
*
* @return this.
*/
public Builder setSource(final String source) {
this.source = source;
return this;
}
/**
* Class to convert value to before storing in database. If the type is compatible with {@link ThreadContextMap} or
* {@link ReadOnlyStringMap}, then the MDC will be used. If the type is compatible with {@link ThreadContextStack},
* then the NDC will be used. If the type is compatible with {@link Date}, then the event timestamp will be used.
*
* @return this.
*/
public Builder setColumnType(final Class<?> columnType) {
this.columnType = columnType;
return this;
}
/**
* @see Builder#setColumnType(Class)
*/
@Deprecated
public Builder setType(final Class<?> type) {
this.type = type;
return this;
}
@Override
public String toString() {
return "Builder [name=" + name + ", source=" + source + ", literal=" + literal + ", parameter=" + parameter
+ ", pattern=" + pattern + ", columnType=" + columnType + ", layout=" + layout + "]";
}
}
private static final Logger LOGGER = StatusLogger.getLogger();
@PluginBuilderFactory
public static Builder newBuilder() {
return new Builder();
}
public static String toKey(final String name) {
return toRootUpperCase(name);
}
private final StringLayout layout;
private final String literalValue;
private final String name;
private final String nameKey;
private final String parameter;
private final String source;
private final Class<?> type;
private ColumnMapping(
final String name,
final String source,
final StringLayout layout,
final String literalValue,
final String parameter,
final Class<?> type) {
this.name = Objects.requireNonNull(name);
this.nameKey = toKey(name);
this.source = source;
this.layout = layout;
this.literalValue = literalValue;
this.parameter = parameter;
this.type = type;
}
public StringLayout getLayout() {
return layout;
}
public String getLiteralValue() {
return literalValue;
}
public String getName() {
return name;
}
public String getNameKey() {
return nameKey;
}
public String getParameter() {
return parameter;
}
public String getSource() {
return source;
}
public Class<?> getType() {
return type;
}
@Override
public String toString() {
return "ColumnMapping [name=" + name + ", source=" + source + ", literalValue=" + literalValue + ", parameter="
+ parameter + ", type=" + type + ", layout=" + layout + "]";
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ColumnMapping that = (ColumnMapping) o;
return Objects.equals(layout, that.layout)
&& Objects.equals(literalValue, that.literalValue)
&& name.equals(that.name)
&& Objects.equals(parameter, that.parameter)
&& Objects.equals(source, that.source)
&& Objects.equals(type, that.type);
}
@Override
public int hashCode() {
return Objects.hash(layout, literalValue, name, parameter, source, type);
}
}
| Builder |
java | google__dagger | javatests/dagger/hilt/android/processor/internal/androidentrypoint/AndroidEntryPointProcessorTest.java | {
"start": 8804,
"end": 9635
} | class ____ extends Hilt_MyActivity { }");
HiltCompilerTests.hiltCompiler(testActivity).compile(subject -> {
subject.compilationDidFail();
subject.hasErrorContaining(
"Activities annotated with @AndroidEntryPoint must be a subclass of "
+ "androidx.activity.ComponentActivity. (e.g. FragmentActivity, AppCompatActivity, "
+ "etc.)"); });
}
@Test
public void checkBaseActivityWithTypeParameters() {
Source testActivity =
HiltCompilerTests.javaSource(
"test.BaseActivity",
"package test;",
"",
"import androidx.activity.ComponentActivity;",
"import dagger.hilt.android.AndroidEntryPoint;",
"",
"@AndroidEntryPoint(ComponentActivity.class)",
"public | MyActivity |
java | quarkusio__quarkus | extensions/vertx/deployment/src/test/java/io/quarkus/vertx/EventBusCodecTest.java | {
"start": 821,
"end": 3495
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.setArchiveProducer(() -> ShrinkWrap
.create(JavaArchive.class).addClasses(MyBean.class, MyNonLocalBean.class,
MyPetCodec.class, Person.class, Pet.class,
Event.class, SubclassEvent.class));
@Inject
MyBean bean;
/**
* Bean setting the consumption to be non-local.
* So, the user must configure the codec explicitly.
*/
@Inject
MyNonLocalBean nonLocalBean;
@Inject
Vertx vertx;
@Test
public void testWithGenericCodec() {
Greeting hello = vertx.eventBus().<Greeting> request("person", new Person("bob", "morane"))
.onItem().transform(Message::body)
.await().indefinitely();
assertThat(hello.getMessage()).isEqualTo("Hello bob morane");
}
@Test
public void testWithUserCodec() {
Greeting hello = vertx.eventBus().<Greeting> request("pet", new Pet("neo", "rabbit"))
.onItem().transform(Message::body)
.await().indefinitely();
assertThat(hello.getMessage()).isEqualTo("Hello NEO");
}
@Test
public void testWithUserCodecNonLocal() {
String hello = vertx.eventBus().<String> request("nl-pet", new Pet("neo", "rabbit"))
.onItem().transform(Message::body)
.await().indefinitely();
assertEquals("Non Local Hello NEO", hello);
}
@Test
public void testWithSubclass() {
Greeting hello = vertx.eventBus().<Greeting> request("subevent", new Event("my-event"))
.onItem().transform(Message::body)
.await().indefinitely();
assertThat(hello.getMessage()).isEqualTo("Hello my-event");
hello = vertx.eventBus().<Greeting> request("subevent", new SubclassEvent("my-subclass-event"))
.onItem().transform(Message::body)
.await().indefinitely();
assertThat(hello.getMessage()).isEqualTo("Hello my-subclass-event");
}
@Test
public void testWithInterfaceCodecTarget() {
Supplier<String> supplier = vertx.eventBus()
.<Supplier<String>> request("hello-supplier", new Function<String, String>() {
@Override
public String apply(String value) {
return value.toLowerCase();
}
})
.onItem().transform(Message::body)
.await().indefinitely();
assertEquals("foo", supplier.get());
}
static | EventBusCodecTest |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/windowing/sessionwindows/SessionEventGeneratorImpl.java | {
"start": 1177,
"end": 1313
} | class ____<K, E> implements EventGenerator<K, E> {
/** Event timing w.r.t the global watermark. */
public | SessionEventGeneratorImpl |
java | hibernate__hibernate-orm | hibernate-spatial/src/main/java/org/hibernate/spatial/dialect/postgis/AbstractPostGISJdbcType.java | {
"start": 1331,
"end": 4594
} | class ____ implements JdbcType {
private final Wkb.Dialect wkbDialect;
AbstractPostGISJdbcType(Wkb.Dialect dialect) {
wkbDialect = dialect;
}
@Override
public <T> JdbcLiteralFormatter<T> getJdbcLiteralFormatter(JavaType<T> javaType) {
return new PGGeometryLiteralFormatter<>( getConstructorFunction(), javaType );
}
public abstract int getDefaultSqlTypeCode();
protected abstract String getConstructorFunction();
protected abstract String getPGTypeName();
public Geometry<?> toGeometry(Object object) {
if ( object == null ) {
return null;
}
ByteBuffer buffer;
if ( object instanceof PGobject ) {
String pgValue = ( (PGobject) object ).getValue();
if (pgValue == null) {
return null;
}
if ( pgValue.startsWith( "00" ) || pgValue.startsWith( "01" ) ) {
//we have a WKB because this pgValue starts with the bit-order byte
buffer = ByteBuffer.from( pgValue );
final WkbDecoder decoder = Wkb.newDecoder( wkbDialect );
return decoder.decode( buffer );
}
else {
return parseWkt( pgValue );
}
}
throw new IllegalStateException( "Received object of type " + object.getClass().getCanonicalName() );
}
private static Geometry<?> parseWkt(String pgValue) {
final WktDecoder decoder = Wkt.newDecoder( Wkt.Dialect.POSTGIS_EWKT_1 );
return decoder.decode( pgValue );
}
@Override
public int getJdbcTypeCode() {
return Types.OTHER;
}
@Override
public <X> ValueBinder<X> getBinder(final JavaType<X> javaType) {
return new BasicBinder<X>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
final PGobject obj = toPGobject( value, options );
st.setObject( index, obj );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
final PGobject obj = toPGobject( value, options );
st.setObject( name, obj );
}
private PGobject toPGobject(X value, WrapperOptions options) throws SQLException {
final WkbEncoder encoder = Wkb.newEncoder( wkbDialect );
final Geometry<?> geometry = getJavaType().unwrap( value, Geometry.class, options );
final String hexString = encoder.encode( geometry, ByteOrder.NDR ).toString();
final PGobject obj = new PGobject();
obj.setType( getPGTypeName() );
obj.setValue( hexString );
return obj;
}
};
}
@Override
public <X> ValueExtractor<X> getExtractor(final JavaType<X> javaType) {
return new BasicExtractor<X>( javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return getJavaType().wrap( toGeometry( rs.getObject( paramIndex ) ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return getJavaType().wrap( toGeometry( statement.getObject( index ) ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options)
throws SQLException {
return getJavaType().wrap( toGeometry( statement.getObject( name ) ), options );
}
};
}
static | AbstractPostGISJdbcType |
java | google__guava | guava/src/com/google/common/collect/JdkBackedImmutableBiMap.java | {
"start": 1264,
"end": 3620
} | class ____<K, V> extends ImmutableBiMap<K, V> {
static <K, V> ImmutableBiMap<K, V> create(int n, @Nullable Entry<K, V>[] entryArray) {
Map<K, V> forwardDelegate = Maps.newHashMapWithExpectedSize(n);
Map<V, K> backwardDelegate = Maps.newHashMapWithExpectedSize(n);
for (int i = 0; i < n; i++) {
// requireNonNull is safe because the first `n` elements have been filled in.
Entry<K, V> e = RegularImmutableMap.makeImmutable(requireNonNull(entryArray[i]));
entryArray[i] = e;
V oldValue = forwardDelegate.putIfAbsent(e.getKey(), e.getValue());
if (oldValue != null) {
throw conflictException("key", e.getKey() + "=" + oldValue, entryArray[i]);
}
K oldKey = backwardDelegate.putIfAbsent(e.getValue(), e.getKey());
if (oldKey != null) {
throw conflictException("value", oldKey + "=" + e.getValue(), entryArray[i]);
}
}
ImmutableList<Entry<K, V>> entryList = asImmutableList(entryArray, n);
return new JdkBackedImmutableBiMap<>(
entryList, forwardDelegate, backwardDelegate, /* inverse= */ null);
}
private final transient ImmutableList<Entry<K, V>> entries;
private final Map<K, V> forwardDelegate;
private final Map<V, K> backwardDelegate;
private final @Nullable JdkBackedImmutableBiMap<V, K> inverse;
private JdkBackedImmutableBiMap(
ImmutableList<Entry<K, V>> entries,
Map<K, V> forwardDelegate,
Map<V, K> backwardDelegate,
@Nullable JdkBackedImmutableBiMap<V, K> inverse) {
this.entries = entries;
this.forwardDelegate = forwardDelegate;
this.backwardDelegate = backwardDelegate;
this.inverse = inverse;
}
@Override
public int size() {
return entries.size();
}
@Override
public ImmutableBiMap<V, K> inverse() {
return inverse != null ? inverse : lazyInverse();
}
@LazyInit @RetainedWith private transient @Nullable JdkBackedImmutableBiMap<V, K> lazyInverse;
private ImmutableBiMap<V, K> lazyInverse() {
JdkBackedImmutableBiMap<V, K> result = lazyInverse;
return result == null
? lazyInverse =
new JdkBackedImmutableBiMap<>(
new InverseEntries<>(entries),
backwardDelegate,
forwardDelegate,
/* inverse= */ this)
: result;
}
private static final | JdkBackedImmutableBiMap |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/configuration/injection/MockInjection.java | {
"start": 572,
"end": 652
} | class ____ configure the way the injection of mocks will happen.
*/
public final | to |
java | apache__flink | flink-core/src/main/java/org/apache/flink/util/Utils.java | {
"start": 7023,
"end": 12484
} | class ____<T> extends RichOutputFormat<T> {
private static final long serialVersionUID = 1L;
private final String id;
private long counter;
private long checksum;
public ChecksumHashCodeHelper(String id) {
this.id = id;
this.counter = 0L;
this.checksum = 0L;
}
@Override
public void configure(Configuration parameters) {}
@Override
public void open(InitializationContext context) {}
@Override
public void writeRecord(T record) throws IOException {
counter++;
// convert 32-bit integer to non-negative long
checksum += record.hashCode() & 0xffffffffL;
}
@Override
public void close() throws IOException {
ChecksumHashCode update = new ChecksumHashCode(counter, checksum);
getRuntimeContext().addAccumulator(id, update);
}
}
// --------------------------------------------------------------------------------------------
/**
* Debugging utility to understand the hierarchy of serializers created by the Java API. Tested
* in GroupReduceITCase.testGroupByGenericType()
*/
public static <T> String getSerializerTree(TypeInformation<T> ti) {
return getSerializerTree(ti, 0);
}
private static <T> String getSerializerTree(TypeInformation<T> ti, int indent) {
String ret = "";
if (ti instanceof CompositeType) {
ret += StringUtils.repeat(' ', indent) + ti.getClass().getSimpleName() + "\n";
CompositeType<T> cti = (CompositeType<T>) ti;
String[] fieldNames = cti.getFieldNames();
for (int i = 0; i < cti.getArity(); i++) {
TypeInformation<?> fieldType = cti.getTypeAt(i);
ret +=
StringUtils.repeat(' ', indent + 2)
+ fieldNames[i]
+ ":"
+ getSerializerTree(fieldType, indent);
}
} else {
if (ti instanceof GenericTypeInfo) {
ret +=
StringUtils.repeat(' ', indent)
+ "GenericTypeInfo ("
+ ti.getTypeClass().getSimpleName()
+ ")\n";
ret += getGenericTypeTree(ti.getTypeClass(), indent + 4);
} else {
ret += StringUtils.repeat(' ', indent) + ti.toString() + "\n";
}
}
return ret;
}
private static String getGenericTypeTree(Class<?> type, int indent) {
String ret = "";
for (Field field : type.getDeclaredFields()) {
if (Modifier.isStatic(field.getModifiers())
|| Modifier.isTransient(field.getModifiers())) {
continue;
}
ret +=
StringUtils.repeat(' ', indent)
+ field.getName()
+ ":"
+ field.getType().getName()
+ (field.getType().isEnum() ? " (is enum)" : "")
+ "\n";
if (!field.getType().isPrimitive()) {
ret += getGenericTypeTree(field.getType(), indent + 4);
}
}
return ret;
}
// --------------------------------------------------------------------------------------------
/**
* Resolves the given factories. The thread local factory has preference over the static
* factory. If none is set, the method returns {@link Optional#empty()}.
*
* @param threadLocalFactory containing the thread local factory
* @param staticFactory containing the global factory
* @param <T> type of factory
* @return Optional containing the resolved factory if it exists, otherwise it's empty
*/
public static <T> Optional<T> resolveFactory(
ThreadLocal<T> threadLocalFactory, @Nullable T staticFactory) {
final T localFactory = threadLocalFactory.get();
final T factory = localFactory == null ? staticFactory : localFactory;
return Optional.ofNullable(factory);
}
/**
* Get the key from the given args. Keys have to start with '-' or '--'. For example, --key1
* value1 -key2 value2.
*
* @param args all given args.
* @param index the index of args to be parsed.
* @return the key of the given arg.
*/
public static String getKeyFromArgs(String[] args, int index) {
String key;
if (args[index].startsWith("--")) {
key = args[index].substring(2);
} else if (args[index].startsWith("-")) {
key = args[index].substring(1);
} else {
throw new IllegalArgumentException(
String.format(
"Error parsing arguments '%s' on '%s'. Please prefix keys with -- or -.",
Arrays.toString(args), args[index]));
}
if (key.isEmpty()) {
throw new IllegalArgumentException(
"The input " + Arrays.toString(args) + " contains an empty argument");
}
return key;
}
/** Private constructor to prevent instantiation. */
private Utils() {
throw new RuntimeException();
}
}
| ChecksumHashCodeHelper |
java | apache__camel | components/camel-stax/src/test/java/org/apache/camel/language/xtokenizer/SplitGroupMultiXmlTokenTest.java | {
"start": 1250,
"end": 3561
} | class ____ extends CamelTestSupport {
@TempDir
Path testDirectory;
@Test
public void testTokenXMLPairGroup() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:split");
mock.expectedMessageCount(3);
mock.message(0).body()
.isEqualTo(
"<group><order id=\"1\" xmlns=\"http:acme.com\">Camel in Action</order><order id=\"2\" xmlns=\"http:acme.com\">ActiveMQ in Action</order></group>");
mock.message(1).body()
.isEqualTo(
"<group><order id=\"3\" xmlns=\"http:acme.com\">Spring in Action</order><order id=\"4\" xmlns=\"http:acme.com\">Scala in Action</order></group>");
mock.message(2).body().isEqualTo("<group><order id=\"5\" xmlns=\"http:acme.com\">Groovy in Action</order></group>");
String body = createBody();
template.sendBodyAndHeader(TestSupport.fileUri(testDirectory), body, Exchange.FILE_NAME, "orders.xml");
MockEndpoint.assertIsSatisfied(context);
}
protected String createBody() {
StringBuilder sb = new StringBuilder("<?xml version=\"1.0\"?>\n");
sb.append("<orders xmlns=\"http:acme.com\">\n");
sb.append(" <order id=\"1\">Camel in Action</order>\n");
sb.append(" <order id=\"2\">ActiveMQ in Action</order>\n");
sb.append(" <order id=\"3\">Spring in Action</order>\n");
sb.append(" <order id=\"4\">Scala in Action</order>\n");
sb.append(" <order id=\"5\">Groovy in Action</order>\n");
sb.append("</orders>");
return sb.toString();
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
Namespaces ns = new Namespaces("", "http:acme.com");
@Override
public void configure() throws Exception {
// START SNIPPET: e1
from(TestSupport.fileUri(testDirectory, "?initialDelay=0&delay=10"))
// split the order child tags, and inherit namespaces from
// the orders root tag
.split().xtokenize("//order", 'i', ns, 2).to("log:split").to("mock:split");
// END SNIPPET: e1
}
};
}
}
| SplitGroupMultiXmlTokenTest |
java | apache__maven | impl/maven-impl/src/main/java/org/apache/maven/api/services/model/PluginManagementInjector.java | {
"start": 1090,
"end": 1720
} | interface ____ {
/**
* Merges default values from the plugin management section of the given model into itself.
*
* @param model The model into which to merge the values specified by its plugin management section, must not be
* <code>null</code>.
* @param request The model building request that holds further settings, must not be {@code null}.
* @param problems The container used to collect problems that were encountered, must not be {@code null}.
*/
Model injectManagement(Model model, ModelBuilderRequest request, ModelProblemCollector problems);
}
| PluginManagementInjector |
java | apache__camel | components/camel-disruptor/src/main/java/org/apache/camel/component/disruptor/AbstractLifecycleAwareExchangeEventHandler.java | {
"start": 1133,
"end": 2533
} | class ____ implements LifecycleAwareExchangeEventHandler {
private volatile boolean started;
private volatile CountDownLatch startedLatch = new CountDownLatch(1);
private volatile CountDownLatch stoppedLatch = new CountDownLatch(1);
@Override
public abstract void onEvent(ExchangeEvent event, long sequence, boolean endOfBatch)
throws Exception;
@Override
public void awaitStarted() throws InterruptedException {
if (!started) {
startedLatch.await();
}
}
@Override
public boolean awaitStarted(final long timeout, final TimeUnit unit) throws InterruptedException {
return started || startedLatch.await(timeout, unit);
}
@Override
public void awaitStopped() throws InterruptedException {
if (started) {
stoppedLatch.await();
}
}
@Override
public boolean awaitStopped(final long timeout, final TimeUnit unit) throws InterruptedException {
return !started || stoppedLatch.await(timeout, unit);
}
@Override
public void onStart() {
stoppedLatch = new CountDownLatch(1);
startedLatch.countDown();
started = true;
}
@Override
public void onShutdown() {
startedLatch = new CountDownLatch(1);
stoppedLatch.countDown();
started = false;
}
}
| AbstractLifecycleAwareExchangeEventHandler |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/StatementSwitchToExpressionSwitchTest.java | {
"start": 110620,
"end": 111678
} | class ____ {
int x;
public Test(int foo) {
x = -1;
}
public int foo(Suit suit) {
switch (suit) {
case HEART:
x = 2;
x = 3;
break;
case DIAMOND:
this.x = (((x + 1) * (x * x)) << 1);
break;
case SPADE:
throw new RuntimeException();
default:
throw new NullPointerException();
}
return x;
}
}
""")
.setArgs(
"-XepOpt:StatementSwitchToExpressionSwitch:EnableAssignmentSwitchConversion",
"-XepOpt:StatementSwitchToExpressionSwitch:EnableDirectConversion=false")
.doTest();
}
@Test
public void switchByEnum_assignmentSwitchToSingleArray_error() {
refactoringHelper
.addInputLines(
"Test.java",
"""
| Test |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/bindings/conflicting/ConflictingStereotypeBindingOnBeanTest.java | {
"start": 1841,
"end": 2003
} | interface ____ {
}
@BarBinding
@Stereotype
@Target({ ElementType.TYPE, ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
@ | Stereotype1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.