language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__camel
|
components/camel-tahu/src/main/java/org/apache/camel/component/tahu/TahuConfiguration.java
|
{
"start": 1432,
"end": 7911
}
|
class ____ implements Cloneable {

    // One server definition: Name:(ClientId:)(tcp|ssl)://host(:port), with an
    // optional trailing comma so a comma-separated list matches repeatedly.
    private static final Pattern SERVER_DEF_PATTERN = Pattern
            .compile("([^:]+):(?:(?!tcp|ssl)([^:]+):)?((?:tcp|ssl):(?://)?[\\p{Alnum}.-]+(?::\\d+)?),?");

    @UriParam(label = "common")
    @Metadata(applicableFor = { TahuConstants.EDGE_NODE_SCHEME, TahuConstants.HOST_APP_SCHEME }, required = true)
    private String servers;

    @UriParam(label = "common")
    @Metadata(applicableFor = { TahuConstants.EDGE_NODE_SCHEME, TahuConstants.HOST_APP_SCHEME }, required = true)
    private String clientId;

    @UriParam(label = "common", defaultValue = "false")
    @Metadata(applicableFor = { TahuConstants.EDGE_NODE_SCHEME, TahuConstants.HOST_APP_SCHEME })
    private boolean checkClientIdLength = false;

    @UriParam(label = "security", secret = true)
    @Metadata(applicableFor = { TahuConstants.EDGE_NODE_SCHEME, TahuConstants.HOST_APP_SCHEME })
    private String username;

    @UriParam(label = "security", secret = true)
    @Metadata(applicableFor = { TahuConstants.EDGE_NODE_SCHEME, TahuConstants.HOST_APP_SCHEME })
    private String password;

    @UriParam(label = "common", defaultValue = "5000")
    @Metadata(applicableFor = { TahuConstants.EDGE_NODE_SCHEME, TahuConstants.HOST_APP_SCHEME })
    private long rebirthDebounceDelay = 5000L;

    @UriParam(label = "common", defaultValue = "30")
    @Metadata(applicableFor = { TahuConstants.EDGE_NODE_SCHEME, TahuConstants.HOST_APP_SCHEME })
    private int keepAliveTimeout = 30;

    @UriParam(label = "security")
    @Metadata(applicableFor = { TahuConstants.EDGE_NODE_SCHEME, TahuConstants.HOST_APP_SCHEME })
    private SSLContextParameters sslContextParameters;

    public String getServers() {
        return servers;
    }

    /**
     * MQTT server definitions, given with the following syntax in a comma-separated list:
     * MqttServerName:(MqttClientId:)(tcp/ssl)://hostname(:port),...
     *
     * @param servers The comma-separated list of server definitions
     */
    public void setServers(String servers) {
        this.servers = servers;
    }

    /**
     * Parses the configured {@code servers} string into one definition per regex match.
     *
     * @return an immutable list of parsed definitions; empty when {@code servers} is unset
     * @throws RuntimeCamelException when the string contains no parsable server definition
     */
    public List<MqttServerDefinition> getServerDefinitionList() {
        if (ObjectHelper.isEmpty(servers)) {
            return List.of();
        }
        // Single matcher pass: results() streams every match in input order.
        List<MqttServerDefinition> serverDefinitionList = SERVER_DEF_PATTERN.matcher(servers).results()
                .map(matchResult -> {
                    // MatchResult does not support named groups
                    String serverName = matchResult.group(1);
                    String clientId = matchResult.group(2);
                    String serverUrl = matchResult.group(3);
                    return parseFromUrlString(serverName, clientId, serverUrl);
                }).toList();
        if (serverDefinitionList.isEmpty()) {
            throw new RuntimeCamelException("Server definition list has invalid syntax: " + servers);
        }
        return serverDefinitionList;
    }

    /**
     * Builds a single {@link MqttServerDefinition} from the regex capture groups, preferring the
     * per-server client ID, then the endpoint-wide {@code clientId}, then a generated one.
     */
    private MqttServerDefinition parseFromUrlString(
            String serverName, String clientId, String serverUrl) {
        try {
            MqttServerName mqttServerName = new MqttServerName(ObjectHelper.notNullOrEmpty(serverName, "serverName"));
            // orElseGet: only generate a random Camel-prefixed ID when no ID was configured.
            clientId = Stream.of(clientId, this.clientId).filter(ObjectHelper::isNotEmpty).findFirst()
                    .orElseGet(() -> MqttClientId.generate("Camel"));
            MqttClientId mqttClientId = new MqttClientId(clientId, checkClientIdLength);
            return new MqttServerDefinition(
                    mqttServerName, mqttClientId, new MqttServerUrl(ObjectHelper.notNullOrEmpty(serverUrl, "serverUrl")),
                    username, password, keepAliveTimeout, null);
        } catch (Exception e) {
            throw new RuntimeCamelException(e);
        }
    }

    public String getClientId() {
        return clientId;
    }

    /**
     * MQTT client ID to use for all server definitions, rather than specifying the same one for each. Note that if
     * neither the 'clientId' parameter nor an 'MqttClientId' are defined for an MQTT Server, a random MQTT Client ID
     * will be generated automatically, prefaced with 'Camel'
     *
     * @param clientId The MQTT Client ID to use for all server connections
     */
    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    public boolean isCheckClientIdLength() {
        return checkClientIdLength;
    }

    /**
     * MQTT client ID length check enabled
     *
     * @param checkClientIdLength whether client ID length should be validated
     */
    public void setCheckClientIdLength(boolean checkClientIdLength) {
        this.checkClientIdLength = checkClientIdLength;
    }

    public String getUsername() {
        return username;
    }

    /**
     * Username for MQTT server authentication
     *
     * @param username the authentication user name
     */
    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    /**
     * Password for MQTT server authentication
     *
     * @param password the authentication password
     */
    public void setPassword(String password) {
        this.password = password;
    }

    public long getRebirthDebounceDelay() {
        return rebirthDebounceDelay;
    }

    /**
     * Delay before recurring node rebirth messages will be sent
     *
     * @param rebirthDebounceDelay delay in milliseconds
     */
    public void setRebirthDebounceDelay(long rebirthDebounceDelay) {
        this.rebirthDebounceDelay = rebirthDebounceDelay;
    }

    public int getKeepAliveTimeout() {
        return keepAliveTimeout;
    }

    /**
     * MQTT connection keep alive timeout, in seconds
     *
     * @param keepAliveTimeout keep-alive in seconds
     */
    public void setKeepAliveTimeout(int keepAliveTimeout) {
        this.keepAliveTimeout = keepAliveTimeout;
    }

    public SSLContextParameters getSslContextParameters() {
        return sslContextParameters;
    }

    /**
     * SSL configuration for MQTT server connections
     *
     * @param sslContextParameters the SSL context configuration
     */
    public void setSslContextParameters(SSLContextParameters sslContextParameters) {
        this.sslContextParameters = sslContextParameters;
    }

    /**
     * Shallow copy via {@link Object#clone()}; fields are Strings/primitives plus a shared
     * {@link SSLContextParameters} reference.
     */
    public TahuConfiguration copy() {
        try {
            return (TahuConfiguration) clone();
        } catch (CloneNotSupportedException e) {
            throw new RuntimeCamelException(e);
        }
    }
}
|
TahuConfiguration
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverScheduler.java
|
{
"start": 1061,
"end": 2832
}
|
class ____ {

    // At most one pending task of each kind; AtomicReference.getAndSet(null)
    // guarantees each task is claimed (and therefore run) exactly once, no
    // matter which thread wins the race with runPendingTasks.
    private final AtomicReference<Runnable> delayedTask = new AtomicReference<>();
    private final AtomicReference<AbstractRunnable> scheduledTask = new AtomicReference<>();

    // Once set, newly added/scheduled tasks are run immediately instead of deferred.
    private final AtomicBoolean completing = new AtomicBoolean();

    /**
     * Stores {@code task} to be run later; if completion has already started,
     * claims it back and runs it inline so it is not lost.
     */
    void addOrRunDelayedTask(Runnable task) {
        delayedTask.set(task);
        if (completing.get()) {
            // Claim atomically: either we run it here or runPendingTasks does — never both.
            final Runnable toRun = delayedTask.getAndSet(null);
            if (toRun != null) {
                assert task == toRun;
                toRun.run();
            }
        }
    }

    /**
     * Publishes {@code task} and submits a claiming wrapper to {@code executor}
     * (or runs it directly once completing). Only the wrapper invocation that
     * still finds {@code task} in the slot actually runs/fails it.
     */
    void scheduleOrRunTask(Executor executor, AbstractRunnable task) {
        final AbstractRunnable existing = scheduledTask.getAndSet(task);
        assert existing == null : existing;
        final Executor executorToUse = completing.get() ? EsExecutors.DIRECT_EXECUTOR_SERVICE : executor;
        executorToUse.execute(new AbstractRunnable() {
            @Override
            public void onFailure(Exception e) {
                // Expected only when the executor rejects the wrapper (e.g. shutdown).
                assert e instanceof EsRejectedExecutionException : new AssertionError(e);
                // Clear the slot only if it still holds our task; propagate the
                // failure only when we were the ones to claim it.
                if (scheduledTask.getAndUpdate(t -> t == task ? null : t) == task) {
                    task.onFailure(e);
                }
            }

            @Override
            protected void doRun() {
                // Claim-then-run: if runPendingTasks already took it, do nothing.
                AbstractRunnable toRun = scheduledTask.getAndSet(null);
                if (toRun == task) {
                    task.run();
                }
            }
        });
    }

    /**
     * Switches to completing mode and drains both slots, running whatever task
     * has not yet been claimed by its own wrapper.
     */
    void runPendingTasks() {
        completing.set(true);
        for (var taskHolder : List.of(scheduledTask, delayedTask)) {
            final Runnable task = taskHolder.getAndSet(null);
            if (task != null) {
                task.run();
            }
        }
    }
}
|
DriverScheduler
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webservices/src/main/java/org/springframework/boot/webservices/autoconfigure/WebServicesProperties.java
|
{
"start": 1070,
"end": 1634
}
|
class ____ {
/**
* Path that serves as the base URI for the services.
*/
private String path = "/services";
private final Servlet servlet = new Servlet();
public String getPath() {
return this.path;
}
public void setPath(String path) {
Assert.notNull(path, "'path' must not be null");
Assert.isTrue(path.length() > 1, "'path' must have length greater than 1");
Assert.isTrue(path.startsWith("/"), "'path' must start with '/'");
this.path = path;
}
public Servlet getServlet() {
return this.servlet;
}
public static
|
WebServicesProperties
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/engine/support/hierarchical/ParallelExecutionIntegrationTests.java
|
{
"start": 22946,
"end": 23570
}
|
class ____ {

    // Shared mutable state deliberately accessed by all three tests; reset in
    // initialize() so each container execution starts clean.
    static AtomicInteger sharedResource;
    static CountDownLatch countDownLatch;

    @BeforeAll
    static void initialize() {
        sharedResource = new AtomicInteger();
        // One latch count per test method below, so all three must rendezvous.
        countDownLatch = new CountDownLatch(3);
    }

    // incrementBlockAndCheck is defined elsewhere in the enclosing test file;
    // presumably it increments the counter, awaits the latch, then asserts —
    // confirm against the outer class.
    @Test
    void firstTest() throws Exception {
        incrementBlockAndCheck(sharedResource, countDownLatch);
    }

    @Test
    void secondTest() throws Exception {
        incrementBlockAndCheck(sharedResource, countDownLatch);
    }

    @Test
    void thirdTest() throws Exception {
        incrementBlockAndCheck(sharedResource, countDownLatch);
    }
}
@ExtendWith(ThreadReporter.class)
static
|
FailingWithoutLockTestCase
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/streaming/runtime/operators/windowing/AsyncTriggerConverter.java
|
{
"start": 2796,
"end": 4442
}
|
interface ____ {
/**
 * Convert to an {@code AsyncTrigger}. The default implementation is only a wrapper of the
 * trigger, whose behaviours are all sync.
 *
 * <p>TODO: Return {@code AsyncTrigger} if {@code AsyncTrigger} becomes @PublicEvolving.
 *
 * @return The {@code AsyncTrigger} for async state processing.
 */
@Nonnull
default Object convertToAsync() {
    // NOTE(review): the qualified-this expression assumes this interface is the
    // enclosing AsyncTriggerConverter type and that implementors are Triggers.
    return UserDefinedAsyncTrigger.of((Trigger<?, ?>) AsyncTriggerConverter.this);
}
@SuppressWarnings("unchecked")
// Maps each known built-in trigger to its async counterpart; anything
// unrecognized is wrapped as a user-defined (sync-behaving) async trigger.
static <T, W extends Window> AsyncTrigger<T, W> convertToAsync(Trigger<T, W> trigger) {
    if (trigger instanceof CountTrigger) {
        return (AsyncTrigger<T, W>)
                AsyncCountTrigger.of(((CountTrigger<?>) trigger).getMaxCount());
    } else if (trigger instanceof EventTimeTrigger) {
        return (AsyncTrigger<T, W>) AsyncEventTimeTrigger.create();
    } else if (trigger instanceof ProcessingTimeTrigger) {
        return (AsyncTrigger<T, W>) AsyncProcessingTimeTrigger.create();
    } else if (trigger instanceof PurgingTrigger) {
        // Recurse so the nested trigger is converted with the same rules.
        return (AsyncTrigger<T, W>)
                AsyncPurgingTrigger.of(
                        convertToAsync(((PurgingTrigger<?, ?>) trigger).getNestedTrigger()));
    } else if (trigger instanceof AsyncTriggerConverter) {
        // The trigger knows how to convert itself.
        return (AsyncTrigger<T, W>) ((AsyncTriggerConverter) trigger).convertToAsync();
    } else {
        return UserDefinedAsyncTrigger.of(trigger);
    }
}
/** Convert non-support user-defined trigger to {@code AsyncTrigger}. */
|
AsyncTriggerConverter
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java
|
{
"start": 7599,
"end": 7660
}
|
class ____ for logging the application summary.
*/
static
|
is
|
java
|
apache__maven
|
impl/maven-logging/src/main/java/org/apache/maven/slf4j/MavenBaseLogger.java
|
{
"start": 6295,
"end": 15175
}
|
class ____ extends LegacyAbstractLogger {

    // Levels reuse LocationAwareLogger's integer constants so enabled-ness can
    // be decided with a single numeric comparison (see isLevelEnabled).
    protected static final int LOG_LEVEL_TRACE = LocationAwareLogger.TRACE_INT;
    protected static final int LOG_LEVEL_DEBUG = LocationAwareLogger.DEBUG_INT;
    protected static final int LOG_LEVEL_INFO = LocationAwareLogger.INFO_INT;
    protected static final int LOG_LEVEL_WARN = LocationAwareLogger.WARN_INT;
    protected static final int LOG_LEVEL_ERROR = LocationAwareLogger.ERROR_INT;

    static final char SP = ' ';
    static final String TID_PREFIX = "tid=";

    // The OFF level can only be used in configuration files to disable logging.
    // It has
    // no printing method associated with it in o.s.Logger interface.
    protected static final int LOG_LEVEL_OFF = LOG_LEVEL_ERROR + 10;

    static final SimpleLoggerConfiguration CONFIG_PARAMS = new SimpleLoggerConfiguration();

    static boolean initialized = false;

    // NOTE(review): check-then-act on a plain static boolean — not thread-safe;
    // presumably first call happens single-threaded during bootstrap. Confirm.
    static void lazyInit() {
        if (initialized) {
            return;
        }
        initialized = true;
        init();
    }

    // external software might be invoking this method directly. Do not rename
    // or change its semantics.
    static void init() {
        CONFIG_PARAMS.init();
    }

    /** The current log level */
    protected int currentLogLevel = LOG_LEVEL_INFO;

    /** The short name of this simple log instance */
    private transient String shortLogName = null;

    /**
     * Legacy SLF4J prefix maintained for backwards compatibility
     */
    public static final String LEGACY_PREFIX = "org.slf4j.simpleLogger.";

    /**
     * Protected access allows only {@link MavenLoggerFactory} and also derived classes to instantiate
     * MavenLoggerFactory instances.
     */
    protected MavenBaseLogger(String name) {
        this.name = name;
        // Most-specific configured level wins; otherwise fall back to the default.
        String levelString = recursivelyComputeLevelString();
        if (levelString != null) {
            this.currentLogLevel = SimpleLoggerConfiguration.stringToLevel(levelString);
        } else {
            this.currentLogLevel = CONFIG_PARAMS.defaultLogLevel;
        }
    }

    /**
     * Walks the dotted logger name from most to least specific (e.g. "a.b.c",
     * then "a.b", then "a") and returns the first configured level string,
     * or null when no ancestor has one.
     */
    String recursivelyComputeLevelString() {
        String tempName = name;
        String levelString = null;
        int indexOfLastDot = tempName.length();
        while ((levelString == null) && (indexOfLastDot > -1)) {
            tempName = tempName.substring(0, indexOfLastDot);
            levelString = CONFIG_PARAMS.getStringProperty(Constants.MAVEN_LOGGER_LOG_PREFIX + tempName, null);
            indexOfLastDot = tempName.lastIndexOf(".");
        }
        return levelString;
    }

    /**
     * To avoid intermingling of log messages and associated stack traces, the two
     * operations are done in a synchronized block.
     *
     * @param buf The StringBuilder containing the log message to be written
     * @param t The Throwable object whose stack trace should be written, may be null
     */
    protected void write(StringBuilder buf, Throwable t) {
        PrintStream targetStream = CONFIG_PARAMS.outputChoice.getTargetPrintStream();
        synchronized (CONFIG_PARAMS) {
            targetStream.println(buf.toString());
            writeThrowable(t, targetStream);
        }
    }

    /** Prints the throwable's stack trace to the target stream; no-op when null. */
    protected void writeThrowable(Throwable t, PrintStream targetStream) {
        if (t != null) {
            t.printStackTrace(targetStream);
        }
    }

    /** Returns the logger name's last dotted segment (e.g. "a.b.C" -> "C"). */
    protected String computeShortName() {
        return name.substring(name.lastIndexOf(".") + 1);
    }

    /**
     * Is the given log level currently enabled?
     *
     * @param logLevel is this level enabled?
     * @return whether the logger is enabled for the given level
     */
    protected boolean isLevelEnabled(int logLevel) {
        // log level are numerically ordered so can use simple numeric
        // comparison
        return (logLevel >= currentLogLevel);
    }

    /** Are {@code trace} messages currently enabled? */
    @Override
    public boolean isTraceEnabled() {
        return isLevelEnabled(LOG_LEVEL_TRACE);
    }

    /** Are {@code debug} messages currently enabled? */
    @Override
    public boolean isDebugEnabled() {
        return isLevelEnabled(LOG_LEVEL_DEBUG);
    }

    /** Are {@code info} messages currently enabled? */
    @Override
    public boolean isInfoEnabled() {
        return isLevelEnabled(LOG_LEVEL_INFO);
    }

    /** Are {@code warn} messages currently enabled? */
    @Override
    public boolean isWarnEnabled() {
        return isLevelEnabled(LOG_LEVEL_WARN);
    }

    /** Are {@code error} messages currently enabled? */
    @Override
    public boolean isErrorEnabled() {
        return isLevelEnabled(LOG_LEVEL_ERROR);
    }

    /**
     * SimpleLogger's implementation of
     * {@link org.slf4j.helpers.AbstractLogger#handleNormalizedLoggingCall(Level, Marker, String, Object[], Throwable) AbstractLogger#handleNormalizedLoggingCall}
     * }
     *
     * @param level the SLF4J level for this event
     * @param marker The marker to be used for this event, may be null.
     * @param messagePattern The message pattern which will be parsed and formatted
     * @param arguments the array of arguments to be formatted, may be null
     * @param throwable The exception whose stack trace should be logged, may be null
     */
    @Override
    protected void handleNormalizedLoggingCall(
            Level level, Marker marker, String messagePattern, Object[] arguments, Throwable throwable) {
        // Adapt the single-marker SLF4J callback to the list-based inner method.
        List<Marker> markers = null;
        if (marker != null) {
            markers = new ArrayList<>();
            markers.add(marker);
        }
        innerHandleNormalizedLoggingCall(level, markers, messagePattern, arguments, throwable);
    }

    /**
     * Formats one log line — optional timestamp, thread name/id, level, logger
     * name, markers, then the formatted message — and writes it with the
     * throwable (if any) via {@link #write(StringBuilder, Throwable)}.
     */
    private void innerHandleNormalizedLoggingCall(
            Level level, List<Marker> markers, String messagePattern, Object[] arguments, Throwable t) {
        StringBuilder buf = new StringBuilder(32);
        // Append date-time if so configured
        if (CONFIG_PARAMS.showDateTime) {
            DateTimeFormatter formatter = CONFIG_PARAMS.dateFormatter;
            if (formatter != null) {
                ZonedDateTime zonedDateTime = MonotonicClock.now().atZone(ZoneId.systemDefault());
                String dateText = formatter.format(zonedDateTime);
                buf.append(dateText);
                buf.append(SP);
            } else {
                // No formatter configured: emit elapsed milliseconds instead.
                buf.append(MonotonicClock.elapsed().toMillis());
                buf.append(SP);
            }
        }
        // Append current thread name if so configured
        if (CONFIG_PARAMS.showThreadName) {
            buf.append('[');
            buf.append(Thread.currentThread().getName());
            buf.append("] ");
        }
        if (CONFIG_PARAMS.showThreadId) {
            buf.append(TID_PREFIX);
            buf.append(Thread.currentThread().getId());
            buf.append(SP);
        }
        if (CONFIG_PARAMS.levelInBrackets) {
            buf.append('[');
        }
        // Append a readable representation of the log level
        String levelStr = renderLevel(level.toInt());
        buf.append(levelStr);
        if (CONFIG_PARAMS.levelInBrackets) {
            buf.append(']');
        }
        buf.append(SP);
        // Append the name of the log instance if so configured
        if (CONFIG_PARAMS.showShortLogName) {
            if (shortLogName == null) {
                shortLogName = computeShortName();
            }
            buf.append(shortLogName).append(" - ");
        } else if (CONFIG_PARAMS.showLogName) {
            buf.append(name).append(" - ");
        }
        if (markers != null) {
            buf.append(SP);
            for (Marker marker : markers) {
                buf.append(marker.getName()).append(SP);
            }
        }
        String formattedMessage = MessageFormatter.basicArrayFormat(messagePattern, arguments);
        // Append the message
        buf.append(formattedMessage);
        write(buf, t);
    }

    /** Maps an SLF4J integer level to its textual form; unknown values are a bug. */
    protected String renderLevel(int levelInt) {
        return switch (levelInt) {
            case LOG_LEVEL_TRACE -> "TRACE";
            case LOG_LEVEL_DEBUG -> ("DEBUG");
            case LOG_LEVEL_INFO -> "INFO";
            case LOG_LEVEL_WARN -> "WARN";
            case LOG_LEVEL_ERROR -> "ERROR";
            default -> throw new IllegalStateException("Unrecognized level [" + levelInt + "]");
        };
    }

    /** Logs a pre-built event, skipping it entirely when its level is disabled. */
    public void log(LoggingEvent event) {
        int levelInt = event.getLevel().toInt();
        if (!isLevelEnabled(levelInt)) {
            return;
        }
        // Normalize to (message, args, throwable) before formatting.
        NormalizedParameters np = NormalizedParameters.normalize(event);
        innerHandleNormalizedLoggingCall(
                event.getLevel(), event.getMarkers(), np.getMessage(), np.getArguments(), event.getThrowable());
    }

    @Override
    protected String getFullyQualifiedCallerName() {
        // No caller-location support in this logger.
        return null;
    }
}
|
MavenBaseLogger
|
java
|
square__javapoet
|
src/test/java/com/squareup/javapoet/JavaFileTest.java
|
{
"start": 14929,
"end": 14995
}
|
class ____ {\n"
+ " }\n"
+ "\n"
+ "
|
Twin
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/stat/internal/EntityStatisticsImpl.java
|
{
"start": 377,
"end": 2773
}
|
class ____ extends AbstractCacheableDataStatistics implements EntityStatistics, Serializable {

    private final String rootEntityName;

    // LongAdder over AtomicLong: these counters are increment-heavy and read
    // rarely, which is exactly LongAdder's sweet spot under contention.
    private final LongAdder loadCount = new LongAdder();
    private final LongAdder updateCount = new LongAdder();
    private final LongAdder upsertCount = new LongAdder();
    private final LongAdder insertCount = new LongAdder();
    private final LongAdder deleteCount = new LongAdder();
    private final LongAdder fetchCount = new LongAdder();
    private final LongAdder optimisticFailureCount = new LongAdder();

    /**
     * Creates statistics for the given root entity, wiring the cache-region
     * supplier from the descriptor's cache access strategy (absent when the
     * entity is not cached).
     */
    EntityStatisticsImpl(EntityPersister rootEntityDescriptor) {
        super( () -> {
            final var cache = rootEntityDescriptor.getCacheAccessStrategy();
            return cache != null ? cache.getRegion() : null;
        } );
        rootEntityName = rootEntityDescriptor.getRootEntityName();
    }

    public long getDeleteCount() {
        return deleteCount.sum();
    }

    public long getInsertCount() {
        return insertCount.sum();
    }

    public long getLoadCount() {
        return loadCount.sum();
    }

    public long getUpdateCount() {
        return updateCount.sum();
    }

    public long getUpsertCount() {
        return upsertCount.sum();
    }

    public long getFetchCount() {
        return fetchCount.sum();
    }

    public long getOptimisticFailureCount() {
        return optimisticFailureCount.sum();
    }

    void incrementLoadCount() {
        loadCount.increment();
    }

    void incrementFetchCount() {
        fetchCount.increment();
    }

    void incrementUpdateCount() {
        updateCount.increment();
    }

    void incrementUpsertCount() {
        upsertCount.increment();
    }

    void incrementInsertCount() {
        insertCount.increment();
    }

    void incrementDeleteCount() {
        deleteCount.increment();
    }

    void incrementOptimisticFailureCount() {
        optimisticFailureCount.increment();
    }

    @Override
    public String toString() {
        final var text = new StringBuilder()
                .append( "EntityStatistics" )
                .append( "[rootEntityName=" ).append( rootEntityName )
                .append( ",loadCount=" ).append( this.loadCount )
                .append( ",updateCount=" ).append( this.updateCount )
                .append( ",upsertCount=" ).append( this.upsertCount )
                .append( ",insertCount=" ).append( this.insertCount )
                .append( ",deleteCount=" ).append( this.deleteCount )
                .append( ",fetchCount=" ).append( this.fetchCount )
                .append( ",optimisticLockFailureCount=" ).append( this.optimisticFailureCount );
        appendCacheStats( text );
        return text.append( ']' ).toString();
    }
}
|
EntityStatisticsImpl
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/pool/DruidDataSourceTest_exceptionSorter_extend.java
|
{
"start": 605,
"end": 901
}
|
// Test-only subclass of the MySQL driver; exists so the exception-sorter test
// can exercise driver-subclass detection.
class ____ extends com.mysql.jdbc.Driver {
    /**
     * Construct a new driver and register it with DriverManager
     *
     * @throws SQLException if a database error occurs.
     */
    public SubDriver() throws SQLException {
        // Registration happens in the superclass constructor.
    }
}
public static
|
SubDriver
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/builder/SpringApplicationBuilderTests.java
|
{
"start": 2325,
"end": 16060
}
|
class ____ {
private @Nullable ConfigurableApplicationContext context;
@AfterEach
void close() {
close(this.context);
SpringApplicationShutdownHookInstance.reset();
}
private void close(@Nullable ApplicationContext context) {
if (context != null) {
if (context instanceof ConfigurableApplicationContext configurableContext) {
configurableContext.close();
}
close(context.getParent());
}
}
@Test
@WithResource(name = "application.properties", content = """
b=file
c=file
""")
@WithResource(name = "application-foo.properties", content = "b=profile-specific-file")
void profileAndProperties() {
SpringApplicationBuilder application = new SpringApplicationBuilder().sources(ExampleConfig.class)
.contextFactory(ApplicationContextFactory.ofContextClass(StaticApplicationContext.class))
.profiles("foo")
.properties("a=default");
this.context = application.run();
assertThat(this.context).isInstanceOf(StaticApplicationContext.class);
assertThat(this.context.getEnvironment().getProperty("a")).isEqualTo("default");
assertThat(this.context.getEnvironment().getProperty("b")).isEqualTo("profile-specific-file");
assertThat(this.context.getEnvironment().getProperty("c")).isEqualTo("file");
assertThat(this.context.getEnvironment().acceptsProfiles(Profiles.of("foo"))).isTrue();
}
@Test
void propertiesAsMap() {
SpringApplicationBuilder application = new SpringApplicationBuilder().sources(ExampleConfig.class)
.contextFactory(ApplicationContextFactory.ofContextClass(StaticApplicationContext.class))
.properties(Collections.singletonMap("bar", "foo"));
this.context = application.run();
assertThat(this.context.getEnvironment().getProperty("bar")).isEqualTo("foo");
}
@Test
void propertiesAsProperties() {
Properties properties = StringUtils.splitArrayElementsIntoProperties(new String[] { "bar=foo" }, "=");
assertThat(properties).isNotNull();
SpringApplicationBuilder application = new SpringApplicationBuilder().sources(ExampleConfig.class)
.contextFactory(ApplicationContextFactory.ofContextClass(StaticApplicationContext.class))
.properties(properties);
this.context = application.run();
assertThat(this.context.getEnvironment().getProperty("bar")).isEqualTo("foo");
}
@Test
void propertiesWithRepeatSeparator() {
SpringApplicationBuilder application = new SpringApplicationBuilder().sources(ExampleConfig.class)
.contextFactory(ApplicationContextFactory.ofContextClass(StaticApplicationContext.class))
.properties("one=c:\\logging.file.name", "two=a:b", "three:c:\\logging.file.name", "four:a:b");
this.context = application.run();
ConfigurableEnvironment environment = this.context.getEnvironment();
assertThat(environment.getProperty("one")).isEqualTo("c:\\logging.file.name");
assertThat(environment.getProperty("two")).isEqualTo("a:b");
assertThat(environment.getProperty("three")).isEqualTo("c:\\logging.file.name");
assertThat(environment.getProperty("four")).isEqualTo("a:b");
}
@Test
void specificApplicationContextFactory() {
SpringApplicationBuilder application = new SpringApplicationBuilder().sources(ExampleConfig.class)
.contextFactory(ApplicationContextFactory.ofContextClass(StaticApplicationContext.class));
this.context = application.run();
assertThat(this.context).isInstanceOf(StaticApplicationContext.class);
}
@Test
void parentContextCreationThatIsRunDirectly() {
SpringApplicationBuilder application = new SpringApplicationBuilder(ChildConfig.class)
.contextFactory(ApplicationContextFactory.ofContextClass(SpyApplicationContext.class));
application.parent(ExampleConfig.class);
this.context = application.run("foo.bar=baz");
then(((SpyApplicationContext) this.context).getApplicationContext()).should()
.setParent(any(ApplicationContext.class));
assertThat(SpringApplicationShutdownHookInstance.get()).didNotRegisterApplicationContext(this.context);
ApplicationContext parent = this.context.getParent();
assertThat(parent).isNotNull();
assertThat(parent.getBean(ApplicationArguments.class).getNonOptionArgs()).contains("foo.bar=baz");
assertThat(this.context.getBean(ApplicationArguments.class).getNonOptionArgs()).contains("foo.bar=baz");
}
@Test
void parentContextCreationThatIsBuiltThenRun() {
SpringApplicationBuilder application = new SpringApplicationBuilder(ChildConfig.class)
.contextFactory(ApplicationContextFactory.ofContextClass(SpyApplicationContext.class));
application.parent(ExampleConfig.class);
this.context = application.build("a=alpha").run("b=bravo");
then(((SpyApplicationContext) this.context).getApplicationContext()).should()
.setParent(any(ApplicationContext.class));
assertThat(SpringApplicationShutdownHookInstance.get()).didNotRegisterApplicationContext(this.context);
ApplicationContext parent = this.context.getParent();
assertThat(parent).isNotNull();
assertThat(parent.getBean(ApplicationArguments.class).getNonOptionArgs()).contains("a=alpha");
assertThat(this.context.getBean(ApplicationArguments.class).getNonOptionArgs()).contains("b=bravo");
}
@Test
void parentContextCreationWithChildShutdown() {
SpringApplicationBuilder application = new SpringApplicationBuilder(ChildConfig.class)
.contextFactory(ApplicationContextFactory.ofContextClass(SpyApplicationContext.class))
.registerShutdownHook(true);
application.parent(ExampleConfig.class);
this.context = application.run();
then(((SpyApplicationContext) this.context).getApplicationContext()).should()
.setParent(any(ApplicationContext.class));
assertThat(SpringApplicationShutdownHookInstance.get()).registeredApplicationContext(this.context);
}
@Test
void contextWithClassLoader() {
SpringApplicationBuilder application = new SpringApplicationBuilder(ExampleConfig.class)
.contextFactory(ApplicationContextFactory.ofContextClass(SpyApplicationContext.class));
ClassLoader classLoader = new URLClassLoader(new URL[0], getClass().getClassLoader());
application.resourceLoader(new DefaultResourceLoader(classLoader));
this.context = application.run();
assertThat(this.context.getClassLoader()).isEqualTo(classLoader);
}
@Test
void parentContextWithClassLoader() {
SpringApplicationBuilder application = new SpringApplicationBuilder(ChildConfig.class)
.contextFactory(ApplicationContextFactory.ofContextClass(SpyApplicationContext.class));
ClassLoader classLoader = new URLClassLoader(new URL[0], getClass().getClassLoader());
application.resourceLoader(new DefaultResourceLoader(classLoader));
application.parent(ExampleConfig.class);
this.context = application.run();
ResourceLoader resourceLoader = ((SpyApplicationContext) this.context).getResourceLoader();
assertThat(resourceLoader).isNotNull();
assertThat(resourceLoader.getClassLoader()).isEqualTo(classLoader);
}
@Test
void parentFirstCreation() {
SpringApplicationBuilder application = new SpringApplicationBuilder(ExampleConfig.class)
.child(ChildConfig.class);
application.contextFactory(ApplicationContextFactory.ofContextClass(SpyApplicationContext.class));
this.context = application.run();
then(((SpyApplicationContext) this.context).getApplicationContext()).should()
.setParent(any(ApplicationContext.class));
assertThat(SpringApplicationShutdownHookInstance.get()).didNotRegisterApplicationContext(this.context);
}
@Test
@WithResource(name = "application-node.properties", content = "bar=spam")
void parentFirstCreationWithProfileAndDefaultArgs() {
SpringApplicationBuilder application = new SpringApplicationBuilder(ExampleConfig.class).profiles("node")
.properties("transport=redis")
.child(ChildConfig.class)
.web(WebApplicationType.NONE);
this.context = application.run();
assertThat(this.context.getEnvironment().acceptsProfiles(Profiles.of("node"))).isTrue();
assertThat(this.context.getEnvironment().getProperty("transport")).isEqualTo("redis");
ApplicationContext parent = this.context.getParent();
assertThat(parent).isNotNull();
assertThat(parent.getEnvironment().acceptsProfiles(Profiles.of("node"))).isTrue();
assertThat(parent.getEnvironment().getProperty("transport")).isEqualTo("redis");
// only defined in node profile
assertThat(this.context.getEnvironment().getProperty("bar")).isEqualTo("spam");
}
@Test
void parentFirstWithDifferentProfile() {
SpringApplicationBuilder application = new SpringApplicationBuilder(ExampleConfig.class).profiles("node")
.properties("transport=redis")
.child(ChildConfig.class)
.profiles("admin")
.web(WebApplicationType.NONE);
this.context = application.run();
assertThat(this.context.getEnvironment().acceptsProfiles(Profiles.of("node", "admin"))).isTrue();
ApplicationContext parent = this.context.getParent();
assertThat(parent).isNotNull();
assertThat(parent.getEnvironment().acceptsProfiles(Profiles.of("admin"))).isFalse();
}
@Test
void parentWithDifferentProfile() {
SpringApplicationBuilder shared = new SpringApplicationBuilder(ExampleConfig.class).profiles("node")
.properties("transport=redis");
SpringApplicationBuilder application = shared.child(ChildConfig.class)
.profiles("admin")
.web(WebApplicationType.NONE);
shared.profiles("parent");
this.context = application.run();
assertThat(this.context.getEnvironment().acceptsProfiles(Profiles.of("node", "admin"))).isTrue();
ApplicationContext parent = this.context.getParent();
assertThat(parent).isNotNull();
assertThat(parent.getEnvironment().acceptsProfiles(Profiles.of("node", "parent"))).isTrue();
assertThat(parent.getEnvironment().acceptsProfiles(Profiles.of("admin"))).isFalse();
}
@Test
void parentFirstWithDifferentProfileAndExplicitEnvironment() {
SpringApplicationBuilder application = new SpringApplicationBuilder(ExampleConfig.class)
.environment(new StandardEnvironment())
.profiles("node")
.properties("transport=redis")
.child(ChildConfig.class)
.profiles("admin")
.web(WebApplicationType.NONE);
this.context = application.run();
assertThat(this.context.getEnvironment().acceptsProfiles(Profiles.of("node", "admin"))).isTrue();
// Now they share an Environment explicitly so there's no way to keep the profiles
// separate
ApplicationContext parent = this.context.getParent();
assertThat(parent).isNotNull();
assertThat(parent.getEnvironment().acceptsProfiles(Profiles.of("admin"))).isTrue();
}
@Test
void parentContextIdentical() {
SpringApplicationBuilder application = new SpringApplicationBuilder(ExampleConfig.class);
application.parent(ExampleConfig.class);
application.contextFactory(ApplicationContextFactory.ofContextClass(SpyApplicationContext.class));
this.context = application.run();
then(((SpyApplicationContext) this.context).getApplicationContext()).should()
.setParent(any(ApplicationContext.class));
}
@Test
void initializersCreatedOnce() {
SpringApplicationBuilder application = new SpringApplicationBuilder(ExampleConfig.class)
.web(WebApplicationType.NONE);
this.context = application.run();
assertThat(application.application().getInitializers()).hasSize(3);
}
@Test
void initializersCreatedOnceForChild() {
SpringApplicationBuilder application = new SpringApplicationBuilder(ExampleConfig.class)
.child(ChildConfig.class)
.web(WebApplicationType.NONE);
this.context = application.run();
assertThat(application.application().getInitializers()).hasSize(4);
}
@Test
void initializersIncludeDefaults() {
SpringApplicationBuilder application = new SpringApplicationBuilder(ExampleConfig.class)
.web(WebApplicationType.NONE)
.initializers((ConfigurableApplicationContext applicationContext) -> {
});
this.context = application.run();
assertThat(application.application().getInitializers()).hasSize(4);
}
@Test
void sourcesWithBoundSources() {
SpringApplicationBuilder application = new SpringApplicationBuilder().web(WebApplicationType.NONE)
.sources(ExampleConfig.class)
.properties("spring.main.sources=" + ChildConfig.class.getName());
this.context = application.run();
this.context.getBean(ExampleConfig.class);
this.context.getBean(ChildConfig.class);
}
@Test
void addBootstrapRegistryInitializer() {
SpringApplicationBuilder application = new SpringApplicationBuilder(ExampleConfig.class)
.web(WebApplicationType.NONE)
.addBootstrapRegistryInitializer((context) -> context.addCloseListener(
(event) -> event.getApplicationContext().getBeanFactory().registerSingleton("test", "spring")));
this.context = application.run();
assertThat(this.context.getBean("test")).isEqualTo("spring");
}
@Test
void setEnvironmentPrefix() {
SpringApplicationBuilder builder = new SpringApplicationBuilder(ExampleConfig.class).environmentPrefix("test");
assertThat(builder.application().getEnvironmentPrefix()).isEqualTo("test");
}
@Test
void customApplicationWithResourceLoader() {
ResourceLoader resourceLoader = mock(ResourceLoader.class);
given(resourceLoader.getClassLoader()).willReturn(getClass().getClassLoader());
SpringApplicationBuilder applicationBuilder = new SpringApplicationBuilder(resourceLoader,
ExampleConfig.class) {
@Override
protected SpringApplication createSpringApplication(@Nullable ResourceLoader resourceLoader,
Class<?>... sources) {
return new CustomSpringApplication(resourceLoader, sources);
}
};
SpringApplication application = applicationBuilder.build();
assertThat(application).asInstanceOf(InstanceOfAssertFactories.type(CustomSpringApplication.class))
.satisfies((customApp) -> assertThat(customApp.resourceLoader).isEqualTo(resourceLoader));
}
@Configuration(proxyBeanMethods = false)
static
|
SpringApplicationBuilderTests
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/metainfo/StateMetaInfoWriter.java
|
{
"start": 1065,
"end": 1477
}
|
interface ____ {
/**
* Writes the given snapshot to the output view.
*
* @param snapshot the snapshot to write.
* @param outputView the output to write into.
* @throws IOException on write problems.
*/
void writeStateMetaInfoSnapshot(
@Nonnull StateMetaInfoSnapshot snapshot, @Nonnull DataOutputView outputView)
throws IOException;
}
|
StateMetaInfoWriter
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/CannotParseTimestampException.java
|
{
"start": 327,
"end": 499
}
|
class ____ extends Exception {
public CannotParseTimestampException(String message, Throwable cause) {
super(message, cause);
}
}
|
CannotParseTimestampException
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/version/OffsetDateTimeVersionTest.java
|
{
"start": 870,
"end": 1386
}
|
class ____ {
@AfterEach
void dropTestData(SessionFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
public void testInstantUsageAsVersion(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (session) -> {
session.persist( new TheEntity( 1 ) );
} );
factoryScope.inTransaction( (session) -> {
var e = session.find( TheEntity.class, 1 );
assertNotNull( e.getTs() );
} );
}
@Entity(name = "TheEntity")
@Table(name="the_entity")
public static
|
OffsetDateTimeVersionTest
|
java
|
google__dagger
|
hilt-core/main/java/dagger/hilt/DefineComponent.java
|
{
"start": 1303,
"end": 1412
}
|
interface ____ {}
* </code></pre>
*/
@Retention(CLASS)
@Target(TYPE)
@GeneratesRootInput
public @
|
ChildComponent
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java
|
{
"start": 11951,
"end": 43622
}
|
class
____<AStatement> statements = new ArrayList<>();
for (StatementContext statement : ctx.statement()) {
statements.add((AStatement) visit(statement));
}
// generate the execute method from the collected statements and parameters
SFunction execute = new SFunction(
nextIdentifier(),
location(ctx),
"<internal>",
"execute",
List.of(),
List.of(),
new SBlock(nextIdentifier(), location(ctx), statements),
false,
false,
false,
false
);
functions.add(execute);
return new SClass(nextIdentifier(), location(ctx), functions);
}
@Override
public ANode visitFunction(FunctionContext ctx) {
String rtnType = ctx.decltype().getText();
String name = ctx.ID().getText();
List<String> paramTypes = ctx.parameters().decltype().stream().map(DecltypeContext::getText).toList();
List<String> paramNames = ctx.parameters().ID().stream().map(TerminalNode::getText).toList();
List<AStatement> statements = new ArrayList<>();
for (StatementContext statement : ctx.block().statement()) {
statements.add((AStatement) visit(statement));
}
if (ctx.block().dstatement() != null) {
statements.add((AStatement) visit(ctx.block().dstatement()));
}
return new SFunction(
nextIdentifier(),
location(ctx),
rtnType,
name,
paramTypes,
paramNames,
new SBlock(nextIdentifier(), location(ctx), statements),
false,
false,
false,
false
);
}
@Override
public ANode visitParameters(ParametersContext ctx) {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
@Override
public ANode visitStatement(StatementContext ctx) {
if (ctx.rstatement() != null) {
return visit(ctx.rstatement());
} else if (ctx.dstatement() != null) {
return visit(ctx.dstatement());
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
}
@Override
public ANode visitIf(IfContext ctx) {
AExpression expression = (AExpression) visit(ctx.expression());
SBlock ifblock = (SBlock) visit(ctx.trailer(0));
if (ctx.trailer().size() > 1) {
SBlock elseblock = (SBlock) visit(ctx.trailer(1));
return new SIfElse(nextIdentifier(), location(ctx), expression, ifblock, elseblock);
} else {
return new SIf(nextIdentifier(), location(ctx), expression, ifblock);
}
}
@Override
public ANode visitWhile(WhileContext ctx) {
AExpression expression = (AExpression) visit(ctx.expression());
if (ctx.trailer() != null) {
SBlock block = (SBlock) visit(ctx.trailer());
return new SWhile(nextIdentifier(), location(ctx), expression, block);
} else if (ctx.empty() != null) {
return new SWhile(nextIdentifier(), location(ctx), expression, null);
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
}
@Override
public ANode visitDo(DoContext ctx) {
AExpression expression = (AExpression) visit(ctx.expression());
SBlock block = (SBlock) visit(ctx.block());
return new SDo(nextIdentifier(), location(ctx), expression, block);
}
@Override
public ANode visitFor(ForContext ctx) {
ANode initializer = ctx.initializer() == null ? null : visit(ctx.initializer());
AExpression expression = ctx.expression() == null ? null : (AExpression) visit(ctx.expression());
AExpression afterthought = ctx.afterthought() == null ? null : (AExpression) visit(ctx.afterthought());
if (ctx.trailer() != null) {
SBlock block = (SBlock) visit(ctx.trailer());
return new SFor(nextIdentifier(), location(ctx), initializer, expression, afterthought, block);
} else if (ctx.empty() != null) {
return new SFor(nextIdentifier(), location(ctx), initializer, expression, afterthought, null);
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
}
@Override
public ANode visitEach(EachContext ctx) {
String type = ctx.decltype().getText();
String name = ctx.ID().getText();
AExpression expression = (AExpression) visit(ctx.expression());
SBlock block = (SBlock) visit(ctx.trailer());
return new SEach(nextIdentifier(), location(ctx), type, name, expression, block);
}
@Override
public ANode visitIneach(IneachContext ctx) {
String name = ctx.ID().getText();
AExpression expression = (AExpression) visit(ctx.expression());
SBlock block = (SBlock) visit(ctx.trailer());
return new SEach(nextIdentifier(), location(ctx), "def", name, expression, block);
}
@Override
public ANode visitDecl(DeclContext ctx) {
return visit(ctx.declaration());
}
@Override
public ANode visitContinue(ContinueContext ctx) {
return new SContinue(nextIdentifier(), location(ctx));
}
@Override
public ANode visitBreak(BreakContext ctx) {
return new SBreak(nextIdentifier(), location(ctx));
}
@Override
public ANode visitReturn(ReturnContext ctx) {
AExpression expression = null;
if (ctx.expression() != null) {
expression = (AExpression) visit(ctx.expression());
}
return new SReturn(nextIdentifier(), location(ctx), expression);
}
@Override
public ANode visitTry(TryContext ctx) {
SBlock block = (SBlock) visit(ctx.block());
List<SCatch> catches = new ArrayList<>();
for (TrapContext trap : ctx.trap()) {
catches.add((SCatch) visit(trap));
}
return new STry(nextIdentifier(), location(ctx), block, catches);
}
@Override
public ANode visitThrow(ThrowContext ctx) {
AExpression expression = (AExpression) visit(ctx.expression());
return new SThrow(nextIdentifier(), location(ctx), expression);
}
@Override
public ANode visitExpr(ExprContext ctx) {
AExpression expression = (AExpression) visit(ctx.expression());
return new SExpression(nextIdentifier(), location(ctx), expression);
}
@Override
public ANode visitTrailer(TrailerContext ctx) {
if (ctx.block() != null) {
return visit(ctx.block());
} else if (ctx.statement() != null) {
List<AStatement> statements = new ArrayList<>();
statements.add((AStatement) visit(ctx.statement()));
return new SBlock(nextIdentifier(), location(ctx), statements);
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
}
@Override
public ANode visitBlock(BlockContext ctx) {
if (ctx.statement().isEmpty() && ctx.dstatement() == null) {
return null;
} else {
List<AStatement> statements = new ArrayList<>();
for (StatementContext statement : ctx.statement()) {
statements.add((AStatement) visit(statement));
}
if (ctx.dstatement() != null) {
statements.add((AStatement) visit(ctx.dstatement()));
}
return new SBlock(nextIdentifier(), location(ctx), statements);
}
}
@Override
public ANode visitEmpty(EmptyContext ctx) {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
@Override
public ANode visitInitializer(InitializerContext ctx) {
if (ctx.declaration() != null) {
return visit(ctx.declaration());
} else if (ctx.expression() != null) {
return visit(ctx.expression());
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
}
@Override
public ANode visitAfterthought(AfterthoughtContext ctx) {
return visit(ctx.expression());
}
@Override
public ANode visitDeclaration(DeclarationContext ctx) {
String type = ctx.decltype().getText();
List<SDeclaration> declarations = new ArrayList<>();
for (DeclvarContext declvar : ctx.declvar()) {
String name = declvar.ID().getText();
AExpression expression = declvar.expression() == null ? null : (AExpression) visit(declvar.expression());
declarations.add(new SDeclaration(nextIdentifier(), location(declvar), type, name, expression));
}
return new SDeclBlock(nextIdentifier(), location(ctx), declarations);
}
@Override
public ANode visitDecltype(DecltypeContext ctx) {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
@Override
public ANode visitType(TypeContext ctx) {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
@Override
public ANode visitDeclvar(DeclvarContext ctx) {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
@Override
public ANode visitTrap(TrapContext ctx) {
String type = ctx.type().getText();
String name = ctx.ID().getText();
SBlock block = (SBlock) visit(ctx.block());
return new SCatch(nextIdentifier(), location(ctx), Exception.class, type, name, block);
}
@Override
public ANode visitSingle(SingleContext ctx) {
return visit(ctx.unary());
}
@Override
public ANode visitBinary(BinaryContext ctx) {
AExpression left = (AExpression) visit(ctx.noncondexpression(0));
AExpression right = (AExpression) visit(ctx.noncondexpression(1));
final Operation operation;
if (ctx.MUL() != null) {
operation = Operation.MUL;
} else if (ctx.DIV() != null) {
operation = Operation.DIV;
} else if (ctx.REM() != null) {
operation = Operation.REM;
} else if (ctx.ADD() != null) {
operation = Operation.ADD;
} else if (ctx.SUB() != null) {
operation = Operation.SUB;
} else if (ctx.FIND() != null) {
operation = Operation.FIND;
} else if (ctx.MATCH() != null) {
operation = Operation.MATCH;
} else if (ctx.LSH() != null) {
operation = Operation.LSH;
} else if (ctx.RSH() != null) {
operation = Operation.RSH;
} else if (ctx.USH() != null) {
operation = Operation.USH;
} else if (ctx.BWAND() != null) {
operation = Operation.BWAND;
} else if (ctx.XOR() != null) {
operation = Operation.XOR;
} else if (ctx.BWOR() != null) {
operation = Operation.BWOR;
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
return new EBinary(nextIdentifier(), location(ctx), left, right, operation);
}
@Override
public ANode visitComp(CompContext ctx) {
AExpression left = (AExpression) visit(ctx.noncondexpression(0));
AExpression right = (AExpression) visit(ctx.noncondexpression(1));
final Operation operation;
if (ctx.LT() != null) {
operation = Operation.LT;
} else if (ctx.LTE() != null) {
operation = Operation.LTE;
} else if (ctx.GT() != null) {
operation = Operation.GT;
} else if (ctx.GTE() != null) {
operation = Operation.GTE;
} else if (ctx.EQ() != null) {
operation = Operation.EQ;
} else if (ctx.EQR() != null) {
operation = Operation.EQR;
} else if (ctx.NE() != null) {
operation = Operation.NE;
} else if (ctx.NER() != null) {
operation = Operation.NER;
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
return new EComp(nextIdentifier(), location(ctx), left, right, operation);
}
@Override
public ANode visitInstanceof(InstanceofContext ctx) {
AExpression expr = (AExpression) visit(ctx.noncondexpression());
String type = ctx.decltype().getText();
return new EInstanceof(nextIdentifier(), location(ctx), expr, type);
}
@Override
public ANode visitBool(BoolContext ctx) {
AExpression left = (AExpression) visit(ctx.noncondexpression(0));
AExpression right = (AExpression) visit(ctx.noncondexpression(1));
final Operation operation;
if (ctx.BOOLAND() != null) {
operation = Operation.AND;
} else if (ctx.BOOLOR() != null) {
operation = Operation.OR;
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
return new EBooleanComp(nextIdentifier(), location(ctx), left, right, operation);
}
@Override
public ANode visitElvis(ElvisContext ctx) {
AExpression left = (AExpression) visit(ctx.noncondexpression(0));
AExpression right = (AExpression) visit(ctx.noncondexpression(1));
return new EElvis(nextIdentifier(), location(ctx), left, right);
}
@Override
public ANode visitNonconditional(NonconditionalContext ctx) {
return visit(ctx.noncondexpression());
}
@Override
public ANode visitConditional(ConditionalContext ctx) {
AExpression condition = (AExpression) visit(ctx.noncondexpression());
AExpression left = (AExpression) visit(ctx.expression(0));
AExpression right = (AExpression) visit(ctx.expression(1));
return new EConditional(nextIdentifier(), location(ctx), condition, left, right);
}
@Override
public ANode visitAssignment(AssignmentContext ctx) {
AExpression lhs = (AExpression) visit(ctx.noncondexpression());
AExpression rhs = (AExpression) visit(ctx.expression());
final Operation operation;
if (ctx.ASSIGN() != null) {
operation = null;
} else if (ctx.AMUL() != null) {
operation = Operation.MUL;
} else if (ctx.ADIV() != null) {
operation = Operation.DIV;
} else if (ctx.AREM() != null) {
operation = Operation.REM;
} else if (ctx.AADD() != null) {
operation = Operation.ADD;
} else if (ctx.ASUB() != null) {
operation = Operation.SUB;
} else if (ctx.ALSH() != null) {
operation = Operation.LSH;
} else if (ctx.ARSH() != null) {
operation = Operation.RSH;
} else if (ctx.AUSH() != null) {
operation = Operation.USH;
} else if (ctx.AAND() != null) {
operation = Operation.BWAND;
} else if (ctx.AXOR() != null) {
operation = Operation.XOR;
} else if (ctx.AOR() != null) {
operation = Operation.BWOR;
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
return new EAssignment(nextIdentifier(), location(ctx), lhs, rhs, false, operation);
}
@Override
public ANode visitPre(PreContext ctx) {
AExpression expression = (AExpression) visit(ctx.chain());
final Operation operation;
if (ctx.INCR() != null) {
operation = Operation.ADD;
} else if (ctx.DECR() != null) {
operation = Operation.SUB;
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
return new EAssignment(
nextIdentifier(),
location(ctx),
expression,
new ENumeric(nextIdentifier(), location(ctx), "1", 10),
false,
operation
);
}
@Override
public ANode visitAddsub(AddsubContext ctx) {
AExpression expression = (AExpression) visit(ctx.unary());
final Operation operation;
if (ctx.ADD() != null) {
operation = Operation.ADD;
} else if (ctx.SUB() != null) {
operation = Operation.SUB;
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
return new EUnary(nextIdentifier(), location(ctx), expression, operation);
}
@Override
public ANode visitNotaddsub(NotaddsubContext ctx) {
return visit(ctx.unarynotaddsub());
}
@Override
public ANode visitRead(ReadContext ctx) {
return visit(ctx.chain());
}
@Override
public ANode visitPost(PostContext ctx) {
AExpression expression = (AExpression) visit(ctx.chain());
final Operation operation;
if (ctx.INCR() != null) {
operation = Operation.ADD;
} else if (ctx.DECR() != null) {
operation = Operation.SUB;
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
return new EAssignment(
nextIdentifier(),
location(ctx),
expression,
new ENumeric(nextIdentifier(), location(ctx), "1", 10),
true,
operation
);
}
@Override
public ANode visitNot(NotContext ctx) {
AExpression expression = (AExpression) visit(ctx.unary());
final Operation operation;
if (ctx.BOOLNOT() != null) {
operation = Operation.NOT;
} else if (ctx.BWNOT() != null) {
operation = Operation.BWNOT;
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
return new EUnary(nextIdentifier(), location(ctx), expression, operation);
}
@Override
public ANode visitCast(CastContext ctx) {
return visit(ctx.castexpression());
}
@Override
public ANode visitPrimordefcast(PainlessParser.PrimordefcastContext ctx) {
String type = ctx.primordefcasttype().getText();
AExpression child = (AExpression) visit(ctx.unary());
return new EExplicit(nextIdentifier(), location(ctx), type, child);
}
@Override
public ANode visitRefcast(PainlessParser.RefcastContext ctx) {
String type = ctx.refcasttype().getText();
AExpression child = (AExpression) visit(ctx.unarynotaddsub());
return new EExplicit(nextIdentifier(), location(ctx), type, child);
}
@Override
public ANode visitPrimordefcasttype(PainlessParser.PrimordefcasttypeContext ctx) {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
@Override
public ANode visitRefcasttype(PainlessParser.RefcasttypeContext ctx) {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
@Override
public ANode visitDynamic(DynamicContext ctx) {
AExpression primary = (AExpression) visit(ctx.primary());
return buildPostfixChain(primary, null, ctx.postfix());
}
@Override
public ANode visitNewarray(NewarrayContext ctx) {
return visit(ctx.arrayinitializer());
}
@Override
public ANode visitPrecedence(PrecedenceContext ctx) {
return visit(ctx.expression());
}
@Override
public ANode visitNumeric(NumericContext ctx) {
if (ctx.DECIMAL() != null) {
return new EDecimal(nextIdentifier(), location(ctx), ctx.DECIMAL().getText());
} else if (ctx.HEX() != null) {
return new ENumeric(nextIdentifier(), location(ctx), ctx.HEX().getText().substring(2), 16);
} else if (ctx.INTEGER() != null) {
return new ENumeric(nextIdentifier(), location(ctx), ctx.INTEGER().getText(), 10);
} else if (ctx.OCTAL() != null) {
return new ENumeric(nextIdentifier(), location(ctx), ctx.OCTAL().getText().substring(1), 8);
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
}
@Override
public ANode visitTrue(TrueContext ctx) {
return new EBooleanConstant(nextIdentifier(), location(ctx), true);
}
@Override
public ANode visitFalse(FalseContext ctx) {
return new EBooleanConstant(nextIdentifier(), location(ctx), false);
}
@Override
public ANode visitNull(NullContext ctx) {
return new ENull(nextIdentifier(), location(ctx));
}
@Override
public ANode visitString(StringContext ctx) {
StringBuilder string = new StringBuilder(ctx.STRING().getText());
// Strip the leading and trailing quotes and replace the escape sequences with their literal equivalents
int src = 1;
int dest = 0;
int end = string.length() - 1;
assert string.charAt(0) == '"' || string.charAt(0) == '\'' : "expected string to start with a quote but was [" + string + "]";
assert string.charAt(end) == '"' || string.charAt(end) == '\'' : "expected string to end with a quote was [" + string + "]";
while (src < end) {
char current = string.charAt(src);
if (current == '\\') {
src++;
current = string.charAt(src);
}
string.setCharAt(dest, current);
src++;
dest++;
}
string.setLength(dest);
return new EString(nextIdentifier(), location(ctx), string.toString());
}
@Override
public ANode visitRegex(RegexContext ctx) {
String text = ctx.REGEX().getText();
int lastSlash = text.lastIndexOf('/');
String pattern = text.substring(1, lastSlash);
String flags = text.substring(lastSlash + 1);
return new ERegex(nextIdentifier(), location(ctx), pattern, flags);
}
@Override
public ANode visitListinit(ListinitContext ctx) {
return visit(ctx.listinitializer());
}
@Override
public ANode visitMapinit(MapinitContext ctx) {
return visit(ctx.mapinitializer());
}
@Override
public ANode visitVariable(VariableContext ctx) {
String name = ctx.ID().getText();
return new ESymbol(nextIdentifier(), location(ctx), name);
}
@Override
public ANode visitCalllocal(CalllocalContext ctx) {
String name = ctx.ID() == null ? ctx.DOLLAR().getText() : ctx.ID().getText();
List<AExpression> arguments = collectArguments(ctx.arguments());
return new ECallLocal(nextIdentifier(), location(ctx), name, arguments);
}
@Override
public ANode visitNewobject(NewobjectContext ctx) {
String type = ctx.type().getText();
List<AExpression> arguments = collectArguments(ctx.arguments());
return new ENewObj(nextIdentifier(), location(ctx), type, arguments);
}
private AExpression buildPostfixChain(AExpression primary, PostdotContext postdot, List<PostfixContext> postfixes) {
AExpression prefix = primary;
if (postdot != null) {
prefix = visitPostdot(postdot, prefix);
}
for (PostfixContext postfix : postfixes) {
prefix = visitPostfix(postfix, prefix);
}
return prefix;
}
@Override
public ANode visitPostfix(PostfixContext ctx) {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
public AExpression visitPostfix(PostfixContext ctx, AExpression prefix) {
if (ctx.callinvoke() != null) {
return visitCallinvoke(ctx.callinvoke(), prefix);
} else if (ctx.fieldaccess() != null) {
return visitFieldaccess(ctx.fieldaccess(), prefix);
} else if (ctx.braceaccess() != null) {
return visitBraceaccess(ctx.braceaccess(), prefix);
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
}
@Override
public ANode visitPostdot(PostdotContext ctx) {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
public AExpression visitPostdot(PostdotContext ctx, AExpression prefix) {
if (ctx.callinvoke() != null) {
return visitCallinvoke(ctx.callinvoke(), prefix);
} else if (ctx.fieldaccess() != null) {
return visitFieldaccess(ctx.fieldaccess(), prefix);
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
}
@Override
public ANode visitCallinvoke(CallinvokeContext ctx) {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
public AExpression visitCallinvoke(CallinvokeContext ctx, AExpression prefix) {
String name = ctx.DOTID().getText();
List<AExpression> arguments = collectArguments(ctx.arguments());
return new ECall(nextIdentifier(), location(ctx), prefix, name, arguments, ctx.NSDOT() != null);
}
@Override
public ANode visitFieldaccess(FieldaccessContext ctx) {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
public AExpression visitFieldaccess(FieldaccessContext ctx, AExpression prefix) {
final String value;
if (ctx.DOTID() != null) {
value = ctx.DOTID().getText();
} else if (ctx.DOTINTEGER() != null) {
value = ctx.DOTINTEGER().getText();
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
return new EDot(nextIdentifier(), location(ctx), prefix, value, ctx.NSDOT() != null);
}
@Override
public ANode visitBraceaccess(BraceaccessContext ctx) {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
public AExpression visitBraceaccess(BraceaccessContext ctx, AExpression prefix) {
AExpression expression = (AExpression) visit(ctx.expression());
return new EBrace(nextIdentifier(), location(ctx), prefix, expression);
}
@Override
public ANode visitNewstandardarray(NewstandardarrayContext ctx) {
StringBuilder type = new StringBuilder(ctx.type().getText());
List<AExpression> expressions = new ArrayList<>();
for (ExpressionContext expression : ctx.expression()) {
type.append("[]");
expressions.add((AExpression) visit(expression));
}
return buildPostfixChain(
new ENewArray(nextIdentifier(), location(ctx), type.toString(), expressions, false),
ctx.postdot(),
ctx.postfix()
);
}
@Override
public ANode visitNewinitializedarray(NewinitializedarrayContext ctx) {
String type = ctx.type().getText() + "[]";
List<AExpression> expressions = new ArrayList<>();
for (ExpressionContext expression : ctx.expression()) {
expressions.add((AExpression) visit(expression));
}
return buildPostfixChain(new ENewArray(nextIdentifier(), location(ctx), type, expressions, true), null, ctx.postfix());
}
@Override
public ANode visitListinitializer(ListinitializerContext ctx) {
List<AExpression> values = new ArrayList<>();
for (ExpressionContext expression : ctx.expression()) {
values.add((AExpression) visit(expression));
}
return new EListInit(nextIdentifier(), location(ctx), values);
}
@Override
public ANode visitMapinitializer(MapinitializerContext ctx) {
List<AExpression> keys = new ArrayList<>();
List<AExpression> values = new ArrayList<>();
for (MaptokenContext maptoken : ctx.maptoken()) {
keys.add((AExpression) visit(maptoken.expression(0)));
values.add((AExpression) visit(maptoken.expression(1)));
}
return new EMapInit(nextIdentifier(), location(ctx), keys, values);
}
@Override
public ANode visitMaptoken(MaptokenContext ctx) {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
@Override
public ANode visitArguments(ArgumentsContext ctx) {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
private List<AExpression> collectArguments(ArgumentsContext ctx) {
List<AExpression> arguments = new ArrayList<>(ctx.argument().size());
for (ArgumentContext argument : ctx.argument()) {
arguments.add((AExpression) visit(argument));
}
return arguments;
}
@Override
public ANode visitArgument(ArgumentContext ctx) {
if (ctx.expression() != null) {
return visit(ctx.expression());
} else if (ctx.lambda() != null) {
return visit(ctx.lambda());
} else if (ctx.funcref() != null) {
return visit(ctx.funcref());
} else {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
}
@Override
public ANode visitLambda(LambdaContext ctx) {
List<String> paramTypes = new ArrayList<>(ctx.lamtype().size());
List<String> paramNames = new ArrayList<>(ctx.lamtype().size());
SBlock block;
for (LamtypeContext lamtype : ctx.lamtype()) {
if (lamtype.decltype() == null) {
paramTypes.add(null);
} else {
paramTypes.add(lamtype.decltype().getText());
}
paramNames.add(lamtype.ID().getText());
}
if (ctx.expression() != null) {
// single expression
AExpression expression = (AExpression) visit(ctx.expression());
block = new SBlock(
nextIdentifier(),
location(ctx),
Collections.singletonList(new SReturn(nextIdentifier(), location(ctx), expression))
);
} else {
block = (SBlock) visit(ctx.block());
}
return new ELambda(nextIdentifier(), location(ctx), paramTypes, paramNames, block);
}
@Override
public ANode visitLamtype(LamtypeContext ctx) {
throw location(ctx).createError(new IllegalStateException("illegal tree structure"));
}
@Override
public ANode visitClassfuncref(ClassfuncrefContext ctx) {
return new EFunctionRef(nextIdentifier(), location(ctx), ctx.decltype().getText(), ctx.ID().getText());
}
@Override
public ANode visitConstructorfuncref(ConstructorfuncrefContext ctx) {
return ctx.decltype().LBRACE().isEmpty()
? new EFunctionRef(nextIdentifier(), location(ctx), ctx.decltype().getText(), ctx.NEW().getText())
: new ENewArrayFunctionRef(nextIdentifier(), location(ctx), ctx.decltype().getText());
}
@Override
public ANode visitLocalfuncref(LocalfuncrefContext ctx) {
return new EFunctionRef(nextIdentifier(), location(ctx), ctx.THIS().getText(), ctx.ID().getText());
}
}
|
List
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java
|
{
"start": 5761,
"end": 7498
}
|
class ____ implements Runnable {
@Override
public void run() {
StopWatch sw = new StopWatch();
Map<String, GcTimes> gcTimesBeforeSleep = getGcTimes();
LOG.info("Starting JVM pause monitor");
while (shouldRun) {
sw.reset().start();
try {
Thread.sleep(SLEEP_INTERVAL_MS);
} catch (InterruptedException ie) {
return;
}
long extraSleepTime = sw.now(TimeUnit.MILLISECONDS) - SLEEP_INTERVAL_MS;
Map<String, GcTimes> gcTimesAfterSleep = getGcTimes();
if (extraSleepTime > warnThresholdMs) {
++numGcWarnThresholdExceeded;
LOG.warn(formatMessage(
extraSleepTime, gcTimesAfterSleep, gcTimesBeforeSleep));
} else if (extraSleepTime > infoThresholdMs) {
++numGcInfoThresholdExceeded;
LOG.info(formatMessage(
extraSleepTime, gcTimesAfterSleep, gcTimesBeforeSleep));
}
totalGcExtraSleepTime += extraSleepTime;
gcTimesBeforeSleep = gcTimesAfterSleep;
}
}
}
/**
* Simple 'main' to facilitate manual testing of the pause monitor.
*
* This main function just leaks memory into a list. Running this class
* with a 1GB heap will very quickly go into "GC hell" and result in
* log messages about the GC pauses.
*
* @param args args.
* @throws Exception Exception.
*/
@SuppressWarnings("resource")
public static void main(String []args) throws Exception {
JvmPauseMonitor monitor = new JvmPauseMonitor();
monitor.init(new Configuration());
monitor.start();
List<String> list = Lists.newArrayList();
int i = 0;
while (true) {
list.add(String.valueOf(i++));
}
}
}
|
Monitor
|
java
|
google__auto
|
common/src/main/java/com/google/auto/common/SimpleAnnotationMirror.java
|
{
"start": 1820,
"end": 5414
}
|
class ____ implements AnnotationMirror {
private final TypeElement annotationType;
private final ImmutableMap<String, ? extends AnnotationValue> namedValues;
private final ImmutableMap<ExecutableElement, ? extends AnnotationValue> elementValues;
private SimpleAnnotationMirror(
TypeElement annotationType, Map<String, ? extends AnnotationValue> namedValues) {
checkArgument(
annotationType.getKind().equals(ElementKind.ANNOTATION_TYPE),
"annotationType must be an annotation: %s",
annotationType);
Map<String, AnnotationValue> values = new LinkedHashMap<>();
Map<String, AnnotationValue> unusedValues = new LinkedHashMap<>(namedValues);
List<String> missingMembers = new ArrayList<>();
for (ExecutableElement method : methodsIn(annotationType.getEnclosedElements())) {
String memberName = method.getSimpleName().toString();
if (unusedValues.containsKey(memberName)) {
values.put(memberName, unusedValues.remove(memberName));
} else if (method.getDefaultValue() != null) {
values.put(memberName, method.getDefaultValue());
} else {
missingMembers.add(memberName);
}
}
checkArgument(
unusedValues.isEmpty(),
"namedValues has entries for members that are not in %s: %s",
annotationType,
unusedValues);
checkArgument(
missingMembers.isEmpty(), "namedValues is missing entries for: %s", missingMembers);
this.annotationType = annotationType;
this.namedValues = ImmutableMap.copyOf(namedValues);
this.elementValues =
methodsIn(annotationType.getEnclosedElements()).stream()
.collect(
toImmutableMap(
e -> e,
// requireNonNull is safe because we inserted into `values` for all methods.
e -> requireNonNull(values.get(e.getSimpleName().toString()))));
}
/**
* An object representing an {@linkplain ElementKind#ANNOTATION_TYPE annotation} instance. If
* {@code annotationType} has any annotation members, they must have default values.
*/
public static AnnotationMirror of(TypeElement annotationType) {
return of(annotationType, ImmutableMap.of());
}
/**
* An object representing an {@linkplain ElementKind#ANNOTATION_TYPE annotation} instance. If
* {@code annotationType} has any annotation members, they must either be present in {@code
* namedValues} or have default values.
*/
public static AnnotationMirror of(
TypeElement annotationType, Map<String, ? extends AnnotationValue> namedValues) {
return new SimpleAnnotationMirror(annotationType, namedValues);
}
@Override
public DeclaredType getAnnotationType() {
return MoreTypes.asDeclared(annotationType.asType());
}
@Override
public Map<ExecutableElement, ? extends AnnotationValue> getElementValues() {
return elementValues;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder("@").append(annotationType.getQualifiedName());
if (!namedValues.isEmpty()) {
builder
.append('(')
.append(Joiner.on(", ").withKeyValueSeparator(" = ").join(namedValues))
.append(')');
}
return builder.toString();
}
@Override
public boolean equals(@Nullable Object other) {
return other instanceof AnnotationMirror
&& AnnotationMirrors.equivalence().equivalent(this, (AnnotationMirror) other);
}
@Override
public int hashCode() {
return AnnotationMirrors.equivalence().hash(this);
}
}
|
SimpleAnnotationMirror
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/network/Selector.java
|
{
"start": 3611,
"end": 3648
}
|
class ____ not thread safe!
*/
public
|
is
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java
|
{
"start": 973,
"end": 1465
}
|
class ____ extends TransportSingleItemBulkWriteAction<DeleteRequest, DeleteResponse> {
public static final String NAME = "indices:data/write/delete";
public static final ActionType<DeleteResponse> TYPE = new ActionType<>(NAME);
@Inject
public TransportDeleteAction(TransportService transportService, ActionFilters actionFilters, TransportBulkAction bulkAction) {
super(NAME, transportService, actionFilters, DeleteRequest::new, bulkAction);
}
}
|
TransportDeleteAction
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ComponentCreatorTest.java
|
{
"start": 11601,
"end": 12609
}
|
interface ____ {",
" SimpleComponent create(@BindsInstance Object object);",
" }")
.addLines("}")
.buildSource();
CompilerTests.daggerCompiler(componentFile)
.withProcessingOptions(compilerOptions)
.compile(
subject -> {
subject.hasErrorCount(0);
subject.hasWarningCount(0);
subject.generatedSource(goldenFileRule.goldenSource("test/DaggerSimpleComponent"));
});
}
@Test
public void testCreatorWithPrimitiveBindsInstance() throws Exception {
assume().that(compilerType).isEqualTo(JAVAC);
Source componentFile =
javaFileBuilder("test.SimpleComponent")
.addLines(
"package test;",
"",
"import dagger.BindsInstance;",
"import dagger.Component;",
"import javax.inject.Provider;",
"",
"@Component",
"
|
Factory
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webmvc/src/main/java/org/springframework/cloud/gateway/server/mvc/invoke/OperationInvoker.java
|
{
"start": 842,
"end": 1218
}
|
interface ____ {
/**
* Invoke the underlying operation using the given {@code context}.
* @param context the context to use to invoke the operation
* @return the result of the operation, may be {@code null}
* @throws MissingParametersException if parameters are missing
*/
<T> T invoke(InvocationContext context) throws MissingParametersException;
}
|
OperationInvoker
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_1200/Issue1235_noasm.java
|
{
"start": 921,
"end": 1183
}
|
interface ____ {
public static final String TYPE_SECTION = "section";
public static final String TYPE_FLOORV1 = "floorV1";
public static final String TYPE_FLOORV2 = "floorV2";
}
@JSONType(typeName = "floorV2")
private static
|
Area
|
java
|
elastic__elasticsearch
|
x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/action/TransportPutAutoscalingPolicyActionTests.java
|
{
"start": 1705,
"end": 11854
}
|
class ____ extends AutoscalingTestCase {
private static final PolicyValidator NO_VALIDATION = policy -> {};
public void testWriteBlock() {
ThreadPool threadPool = mock(ThreadPool.class);
TransportService transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(threadPool);
final TransportPutAutoscalingPolicyAction action = new TransportPutAutoscalingPolicyAction(
transportService,
mock(ClusterService.class),
threadPool,
mock(ActionFilters.class),
NO_VALIDATION,
new AutoscalingLicenseChecker(() -> true)
);
final ClusterBlocks blocks = ClusterBlocks.builder()
.addGlobalBlock(
randomFrom(
Metadata.CLUSTER_READ_ONLY_BLOCK,
Metadata.CLUSTER_READ_ONLY_ALLOW_DELETE_BLOCK,
NoMasterBlockService.NO_MASTER_BLOCK_WRITES
)
)
.build();
final ClusterState state = ClusterState.builder(new ClusterName(randomAlphaOfLength(8))).blocks(blocks).build();
final ClusterBlockException e = action.checkBlock(randomPutAutoscalingPolicyRequest(), state);
assertThat(e, not(nullValue()));
}
public void testNoWriteBlock() {
ThreadPool threadPool = mock(ThreadPool.class);
TransportService transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(threadPool);
final TransportPutAutoscalingPolicyAction action = new TransportPutAutoscalingPolicyAction(
transportService,
mock(ClusterService.class),
threadPool,
mock(ActionFilters.class),
NO_VALIDATION,
new AutoscalingLicenseChecker(() -> true)
);
final ClusterBlocks blocks = ClusterBlocks.builder().build();
final ClusterState state = ClusterState.builder(new ClusterName(randomAlphaOfLength(8))).blocks(blocks).build();
final ClusterBlockException e = action.checkBlock(randomPutAutoscalingPolicyRequest(), state);
assertThat(e, nullValue());
}
public void testAddPolicy() {
final ClusterState currentState;
{
final ClusterState.Builder builder = ClusterState.builder(new ClusterName(randomAlphaOfLength(8)));
if (randomBoolean()) {
builder.metadata(Metadata.builder().putCustom(AutoscalingMetadata.NAME, randomAutoscalingMetadata()));
}
currentState = builder.build();
}
// put an entirely new policy
final PutAutoscalingPolicyAction.Request request = randomPutAutoscalingPolicyRequest();
final Logger mockLogger = mock(Logger.class);
final ClusterState state = TransportPutAutoscalingPolicyAction.putAutoscalingPolicy(
currentState,
request,
NO_VALIDATION,
mockLogger
);
// ensure the new policy is in the updated cluster state
final AutoscalingMetadata metadata = state.metadata().custom(AutoscalingMetadata.NAME);
assertNotNull(metadata);
assertThat(metadata.policies(), hasKey(request.name()));
assertThat(metadata.policies().get(request.name()).policy().roles(), equalTo(request.roles()));
if (request.deciders() != null) {
assertThat(metadata.policies().get(request.name()).policy().deciders(), equalTo(request.deciders()));
} else {
assertThat(metadata.policies().get(request.name()).policy().deciders(), equalTo(Map.of()));
}
verify(mockLogger).info("adding autoscaling policy [{}]", request.name());
verifyNoMoreInteractions(mockLogger);
// ensure that existing policies were preserved
final AutoscalingMetadata currentMetadata = currentState.metadata().custom(AutoscalingMetadata.NAME);
if (currentMetadata != null) {
for (final Map.Entry<String, AutoscalingPolicyMetadata> entry : currentMetadata.policies().entrySet()) {
assertThat(metadata.policies(), hasKey(entry.getKey()));
assertThat(metadata.policies().get(entry.getKey()).policy(), equalTo(entry.getValue().policy()));
}
}
}
public void testAddPolicyWithNoRoles() {
PutAutoscalingPolicyAction.Request request = new PutAutoscalingPolicyAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
randomAlphaOfLength(8),
null,
randomAutoscalingDeciders()
);
final Logger mockLogger = mock(Logger.class);
IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
() -> TransportPutAutoscalingPolicyAction.putAutoscalingPolicy(ClusterState.EMPTY_STATE, request, NO_VALIDATION, mockLogger)
);
assertThat(
exception.getMessage(),
equalTo("new policy " + request.name() + " with no roles defined, must provide empty list for " + "no roles")
);
}
public void testUpdatePolicy() {
final ClusterState currentState;
{
final ClusterState.Builder builder = ClusterState.builder(new ClusterName(randomAlphaOfLength(8)));
builder.metadata(
Metadata.builder().putCustom(AutoscalingMetadata.NAME, randomAutoscalingMetadataOfPolicyCount(randomIntBetween(1, 8)))
);
currentState = builder.build();
}
final AutoscalingMetadata currentMetadata = currentState.metadata().custom(AutoscalingMetadata.NAME);
final String name = randomFrom(currentMetadata.policies().keySet());
// add to the existing deciders, to ensure the policy has changed
final PutAutoscalingPolicyAction.Request request = new PutAutoscalingPolicyAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
name,
randomBoolean() ? randomRoles() : null,
mutateAutoscalingDeciders(currentMetadata.policies().get(name).policy().deciders())
);
final AutoscalingPolicy expectedPolicy = new AutoscalingPolicy(
name,
request.roles() != null ? request.roles() : currentMetadata.policies().get(name).policy().roles(),
request.deciders()
);
final Logger mockLogger = mock(Logger.class);
final ClusterState state = TransportPutAutoscalingPolicyAction.putAutoscalingPolicy(
currentState,
request,
NO_VALIDATION,
mockLogger
);
// ensure the updated policy is in the updated cluster state
final AutoscalingMetadata metadata = state.metadata().custom(AutoscalingMetadata.NAME);
assertNotNull(metadata);
assertThat(metadata.policies(), hasKey(request.name()));
assertThat(metadata.policies().get(request.name()).policy(), equalTo(expectedPolicy));
verify(mockLogger).info("updating autoscaling policy [{}]", request.name());
verifyNoMoreInteractions(mockLogger);
// ensure that existing policies were otherwise preserved
for (final Map.Entry<String, AutoscalingPolicyMetadata> entry : currentMetadata.policies().entrySet()) {
if (entry.getKey().equals(name)) {
continue;
}
assertThat(metadata.policies(), hasKey(entry.getKey()));
assertThat(metadata.policies().get(entry.getKey()).policy(), equalTo(entry.getValue().policy()));
}
}
public void testNoOpUpdatePolicy() {
final ClusterState currentState;
{
final ClusterState.Builder builder = ClusterState.builder(new ClusterName(randomAlphaOfLength(8)));
builder.metadata(
Metadata.builder().putCustom(AutoscalingMetadata.NAME, randomAutoscalingMetadataOfPolicyCount(randomIntBetween(1, 8)))
);
currentState = builder.build();
}
// randomly put an existing policy
final AutoscalingMetadata currentMetadata = currentState.metadata().custom(AutoscalingMetadata.NAME);
final AutoscalingPolicy policy = randomFrom(currentMetadata.policies().values()).policy();
final PutAutoscalingPolicyAction.Request request = new PutAutoscalingPolicyAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
policy.name(),
randomBoolean() ? policy.roles() : null,
randomBoolean() ? policy.deciders() : null
);
final Logger mockLogger = mock(Logger.class);
final ClusterState state = TransportPutAutoscalingPolicyAction.putAutoscalingPolicy(
currentState,
request,
NO_VALIDATION,
mockLogger
);
assertThat(state, sameInstance(currentState));
verify(mockLogger).info("skipping updating autoscaling policy [{}] due to no change in policy", policy.name());
verifyNoMoreInteractions(mockLogger);
}
public void testPolicyValidator() {
final PutAutoscalingPolicyAction.Request request = new PutAutoscalingPolicyAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
randomAlphaOfLength(8),
randomRoles(),
Collections.emptySortedMap()
);
final Logger mockLogger = mock(Logger.class);
expectThrows(
IllegalArgumentException.class,
() -> TransportPutAutoscalingPolicyAction.putAutoscalingPolicy(ClusterState.EMPTY_STATE, request, p -> {
throw new IllegalArgumentException();
}, mockLogger)
);
verifyNoMoreInteractions(mockLogger);
}
static PutAutoscalingPolicyAction.Request randomPutAutoscalingPolicyRequest() {
return new PutAutoscalingPolicyAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
randomAlphaOfLength(8),
randomRoles(),
randomBoolean() ? randomAutoscalingDeciders() : null
);
}
}
|
TransportPutAutoscalingPolicyActionTests
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/Record.java
|
{
"start": 1189,
"end": 1401
}
|
class ____ is extended by generated classes.
*
* @deprecated Replaced by <a href="http://hadoop.apache.org/avro/">Avro</a>.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract
|
that
|
java
|
apache__dubbo
|
dubbo-registry/dubbo-registry-api/src/test/java/org/apache/dubbo/registry/client/metadata/StandardMetadataServiceURLBuilderTest.java
|
{
"start": 1527,
"end": 3867
}
|
class ____ {
@BeforeAll
public static void setUp() {
ApplicationConfig applicationConfig = new ApplicationConfig("demo");
applicationConfig.setMetadataServicePort(7001);
ApplicationModel.defaultModel().getApplicationConfigManager().setApplication(applicationConfig);
}
@AfterAll
public static void clearUp() {
ApplicationModel.reset();
}
@Test
void testBuild() {
ExtensionLoader<MetadataServiceURLBuilder> loader =
ApplicationModel.defaultModel().getExtensionLoader(MetadataServiceURLBuilder.class);
MetadataServiceURLBuilder builder = loader.getExtension(StandardMetadataServiceURLBuilder.NAME);
// test generateUrlWithoutMetadata
List<URL> urls =
builder.build(new DefaultServiceInstance("test", "127.0.0.1", 8080, ApplicationModel.defaultModel()));
assertEquals(1, urls.size());
URL url = urls.get(0);
assertEquals("dubbo", url.getProtocol());
assertEquals("127.0.0.1", url.getHost());
assertEquals(7001, url.getPort());
assertEquals(MetadataService.class.getName(), url.getServiceInterface());
assertEquals("test", url.getGroup());
assertEquals("consumer", url.getSide());
assertEquals("1.0.0", url.getVersion());
// assertEquals(url.getParameters().get("getAndListenInstanceMetadata.1.callback"), "true");
assertEquals("false", url.getParameters().get("reconnect"));
assertEquals("5000", url.getParameters().get("timeout"));
assertEquals(ApplicationModel.defaultModel(), url.getApplicationModel());
// test generateWithMetadata
urls = builder.build(serviceInstance);
assertEquals(1, urls.size());
url = urls.get(0);
assertEquals("rest", url.getProtocol());
assertEquals("127.0.0.1", url.getHost());
assertEquals(20880, url.getPort());
assertEquals(MetadataService.class.getName(), url.getServiceInterface());
assertEquals("test", url.getGroup());
assertEquals("consumer", url.getSide());
assertEquals("1.0.0", url.getVersion());
assertEquals("dubbo-provider-demo", url.getApplication());
assertEquals("5000", url.getParameters().get("timeout"));
}
}
|
StandardMetadataServiceURLBuilderTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/embeddable/initializer/EmbeddableInitializerTests.java
|
{
"start": 625,
"end": 1850
}
|
class ____ {
@Test
public void testGet(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final Order order = session.get( Order.class, new OrderId( 1, 1 ) );
assertThat( order ).isNotNull();
assertThat( order.orderNumber ).isNotNull();
assertThat( order.customer ).isNotNull();
assertThat( order.customer.id ).isNotNull();
assertThat( order.customer.name ).isNotNull();
});
}
@Test
public void testQuery(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
session.createQuery( "from Order o where o.orderNumber = 123" ).list();
} );
}
@BeforeEach
public void createTestData(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final Customer acme = new Customer( 1, "acme" );
final Customer spacely = new Customer( 2, "spacely" );
session.persist( acme );
session.persist( spacely );
final Order acmeOrder1 = new Order( acme, 1, 123F );
final Order acmeOrder2 = new Order( acme, 2, 123F );
session.persist( acmeOrder1 );
session.persist( acmeOrder2 );
} );
}
@AfterEach
public void dropTestData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
}
|
EmbeddableInitializerTests
|
java
|
spring-projects__spring-boot
|
module/spring-boot-graphql/src/test/java/org/springframework/boot/graphql/autoconfigure/rsocket/RSocketGraphQlClientAutoConfigurationTests.java
|
{
"start": 1382,
"end": 2655
}
|
class ____ {
private static final RSocketGraphQlClient.Builder<?> builderInstance = RSocketGraphQlClient.builder();
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(RSocketStrategiesAutoConfiguration.class,
RSocketRequesterAutoConfiguration.class, RSocketGraphQlClientAutoConfiguration.class));
@Test
void shouldCreateBuilder() {
this.contextRunner.run((context) -> assertThat(context).hasSingleBean(RSocketGraphQlClient.Builder.class));
}
@Test
void shouldGetPrototypeScopedBean() {
this.contextRunner.run((context) -> {
RSocketGraphQlClient.Builder<?> first = context.getBean(RSocketGraphQlClient.Builder.class);
RSocketGraphQlClient.Builder<?> second = context.getBean(RSocketGraphQlClient.Builder.class);
assertThat(first).isNotEqualTo(second);
});
}
@Test
void shouldNotCreateBuilderIfAlreadyPresent() {
this.contextRunner.withUserConfiguration(CustomRSocketGraphQlClientBuilder.class).run((context) -> {
RSocketGraphQlClient.Builder<?> builder = context.getBean(RSocketGraphQlClient.Builder.class);
assertThat(builder).isEqualTo(builderInstance);
});
}
@Configuration(proxyBeanMethods = false)
static
|
RSocketGraphQlClientAutoConfigurationTests
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/QueryRenderer.java
|
{
"start": 13926,
"end": 14779
}
|
class ____ extends QueryRenderer {
public static final QueryRenderer INSTANCE = new EmptyQueryRenderer();
@Override
String render() {
return "";
}
@Override
QueryRenderer append(QueryTokenStream tokens) {
if (tokens.isEmpty()) {
return this;
}
if (tokens instanceof QueryRenderer qr) {
return qr;
}
return QueryRenderer.from(tokens);
}
@Override
public List<QueryToken> toList() {
return Collections.emptyList();
}
@Override
public Stream<QueryToken> stream() {
return Stream.empty();
}
@Override
public Iterator<QueryToken> iterator() {
return Collections.emptyIterator();
}
@Override
public boolean isEmpty() {
return true;
}
@Override
public int size() {
return 0;
}
@Override
public boolean isExpression() {
return false;
}
}
}
|
EmptyQueryRenderer
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSource.java
|
{
"start": 1065,
"end": 1560
}
|
interface ____ {
/**
* Return a statistics instance.
* <p>
* It is not a requirement that the same instance is returned every time.
* {@link IOStatisticsSource}.
* <p>
* If the object implementing this is Closeable, this method
* may return null if invoked on a closed object, even if
* it returns a valid instance when called earlier.
* @return an IOStatistics instance or null
*/
default IOStatistics getIOStatistics() {
return null;
}
}
|
IOStatisticsSource
|
java
|
apache__flink
|
flink-python/src/main/java/org/apache/beam/runners/fnexecution/state/GrpcStateService.java
|
{
"start": 4668,
"end": 7146
}
|
class ____ implements StreamObserver<StateRequest> {
private final StreamObserver<StateResponse> outboundObserver;
Inbound(StreamObserver<StateResponse> outboundObserver) {
this.outboundObserver = outboundObserver;
}
@Override
public void onNext(StateRequest request) {
StateRequestHandler handler =
requestHandlers.getOrDefault(request.getInstructionId(), this::handlerNotFound);
try {
CompletionStage<StateResponse.Builder> result = handler.handle(request);
result.whenComplete(
(StateResponse.Builder responseBuilder, Throwable t) ->
// note that this is threadsafe if and only if outboundObserver is
// threadsafe.
outboundObserver.onNext(
t == null
? responseBuilder.setId(request.getId()).build()
: createErrorResponse(request.getId(), t)));
} catch (Exception e) {
outboundObserver.onNext(createErrorResponse(request.getId(), e));
}
}
@Override
public void onError(Throwable t) {
if (!t.getMessage().contains("cancelled before receiving half close")) {
// ignore the exception "cancelled before receiving half close" as we don't care
// about it.
outboundObserver.onError(t);
}
}
@Override
public void onCompleted() {
outboundObserver.onCompleted();
}
private CompletionStage<StateResponse.Builder> handlerNotFound(StateRequest request) {
CompletableFuture<StateResponse.Builder> result = new CompletableFuture<>();
result.complete(
StateResponse.newBuilder()
.setError(
String.format(
"Unknown process bundle instruction id '%s'",
request.getInstructionId())));
return result;
}
private StateResponse createErrorResponse(String id, Throwable t) {
return StateResponse.newBuilder().setId(id).setError(getStackTraceAsString(t)).build();
}
}
}
|
Inbound
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/streaming/api/functions/co/KeyedBroadcastProcessFunction.java
|
{
"start": 2842,
"end": 7357
}
|
class ____<KS, IN1, IN2, OUT>
extends BaseBroadcastProcessFunction {
private static final long serialVersionUID = -2584726797564976453L;
/**
* This method is called for each element in the (non-broadcast) {@link
* org.apache.flink.streaming.api.datastream.KeyedStream keyed stream}.
*
* <p>It can output zero or more elements using the {@link Collector} parameter, query the
* current processing/event time, and also query and update the local keyed state. In addition,
* it can get a {@link TimerService} for registering timers and querying the time. Finally, it
* has <b>read-only</b> access to the broadcast state. The context is only valid during the
* invocation of this method, do not store it.
*
* @param value The stream element.
* @param ctx A {@link ReadOnlyContext} that allows querying the timestamp of the element,
* querying the current processing/event time and iterating the broadcast state with
* <b>read-only</b> access. The context is only valid during the invocation of this method,
* do not store it.
* @param out The collector to emit resulting elements to
* @throws Exception The function may throw exceptions which cause the streaming program to fail
* and go into recovery.
*/
public abstract void processElement(
final IN1 value, final ReadOnlyContext ctx, final Collector<OUT> out) throws Exception;
/**
* This method is called for each element in the {@link
* org.apache.flink.streaming.api.datastream.BroadcastStream broadcast stream}.
*
* <p>It can output zero or more elements using the {@link Collector} parameter, query the
* current processing/event time, and also query and update the internal {@link
* org.apache.flink.api.common.state.BroadcastState broadcast state}. In addition, it can
* register a {@link KeyedStateFunction function} to be applied to all keyed states on the local
* partition. These can be done through the provided {@link Context}. The context is only valid
* during the invocation of this method, do not store it.
*
* @param value The stream element.
* @param ctx A {@link Context} that allows querying the timestamp of the element, querying the
* current processing/event time and updating the broadcast state. In addition, it allows
* the registration of a {@link KeyedStateFunction function} to be applied to all keyed
* state with a given {@link StateDescriptor} on the local partition. The context is only
* valid during the invocation of this method, do not store it.
* @param out The collector to emit resulting elements to
* @throws Exception The function may throw exceptions which cause the streaming program to fail
* and go into recovery.
*/
public abstract void processBroadcastElement(
final IN2 value, final Context ctx, final Collector<OUT> out) throws Exception;
/**
* Called when a timer set using {@link TimerService} fires.
*
* @param timestamp The timestamp of the firing timer.
* @param ctx An {@link OnTimerContext} that allows querying the timestamp of the firing timer,
* querying the current processing/event time, iterating the broadcast state with
* <b>read-only</b> access, querying the {@link TimeDomain} of the firing timer and getting
* a {@link TimerService} for registering timers and querying the time. The context is only
* valid during the invocation of this method, do not store it.
* @param out The collector for returning result values.
* @throws Exception This method may throw exceptions. Throwing an exception will cause the
* operation to fail and may trigger recovery.
*/
public void onTimer(final long timestamp, final OnTimerContext ctx, final Collector<OUT> out)
throws Exception {
// the default implementation does nothing.
}
/**
* A {@link BaseBroadcastProcessFunction.Context context} available to the broadcast side of a
* {@link org.apache.flink.streaming.api.datastream.BroadcastConnectedStream}.
*
* <p>Apart from the basic functionality of a {@link BaseBroadcastProcessFunction.Context
* context}, this also allows to apply a {@link KeyedStateFunction} to the (local) states of all
* active keys in the your backend.
*/
public abstract
|
KeyedBroadcastProcessFunction
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/web/servlet/ServletListenerRegistrationBeanTests.java
|
{
"start": 1292,
"end": 2370
}
|
class ____ {
@Mock
@SuppressWarnings("NullAway.Init")
private ServletContextListener listener;
@Mock
@SuppressWarnings("NullAway.Init")
private ServletContext servletContext;
@Test
void startupWithDefaults() throws Exception {
ServletListenerRegistrationBean<ServletContextListener> bean = new ServletListenerRegistrationBean<>(
this.listener);
bean.onStartup(this.servletContext);
then(this.servletContext).should().addListener(this.listener);
}
@Test
void disable() throws Exception {
ServletListenerRegistrationBean<ServletContextListener> bean = new ServletListenerRegistrationBean<>(
this.listener);
bean.setEnabled(false);
bean.onStartup(this.servletContext);
then(this.servletContext).should(never()).addListener(any(ServletContextListener.class));
}
@Test
void cannotRegisterUnsupportedType() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new ServletListenerRegistrationBean<>(new EventListener() {
}))
.withMessageContaining("'listener' is not of a supported type");
}
}
|
ServletListenerRegistrationBeanTests
|
java
|
quarkusio__quarkus
|
extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/AllowForwardedHeadersOverrideXForwardedHeadersTest.java
|
{
"start": 349,
"end": 1488
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(ForwardedHandlerInitializer.class)
.addAsResource(new StringAsset("quarkus.http.proxy.proxy-address-forwarding=true\n" +
"quarkus.http.proxy.allow-forwarded=true\n" +
"quarkus.http.proxy.allow-x-forwarded=true\n" +
"quarkus.http.proxy.strict-forwarded-control=false\n"),
"application.properties"));
@Test
public void testXForwardedProtoOverridesForwardedProto() {
assertThat(RestAssured.get("/path").asString()).startsWith("http|");
RestAssured.given()
.header("Forwarded", "proto=https;for=backend2:5555;host=somehost2")
.header("X-Forwarded-Proto", "http")
.get("/path")
.then()
.body(Matchers.equalTo("https|somehost2|backend2:5555|/path|https://somehost2/path"));
}
}
|
AllowForwardedHeadersOverrideXForwardedHeadersTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/ImportAwareTests.java
|
{
"start": 10964,
"end": 11173
}
|
class ____ {
private final AnnotationMetadata importMetadata;
public MetadataHolder(AnnotationMetadata importMetadata) {
this.importMetadata = importMetadata;
}
}
private static final
|
MetadataHolder
|
java
|
apache__spark
|
common/network-common/src/main/java/org/apache/spark/network/shuffledb/RocksDBIterator.java
|
{
"start": 1062,
"end": 2426
}
|
class ____ implements DBIterator {
private final RocksIterator it;
private boolean checkedNext;
private boolean closed;
private Map.Entry<byte[], byte[]> next;
public RocksDBIterator(RocksIterator it) {
this.it = it;
}
@Override
public boolean hasNext() {
if (!checkedNext && !closed) {
next = loadNext();
checkedNext = true;
}
if (!closed && next == null) {
try {
close();
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
}
return next != null;
}
@Override
public Map.Entry<byte[], byte[]> next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
checkedNext = false;
Map.Entry<byte[], byte[]> ret = next;
next = null;
return ret;
}
@Override
public void close() throws IOException {
if (!closed) {
it.close();
closed = true;
next = null;
}
}
@Override
public void seek(byte[] key) {
it.seek(key);
}
private Map.Entry<byte[], byte[]> loadNext() {
if (it.isValid()) {
Map.Entry<byte[], byte[]> nextEntry =
new AbstractMap.SimpleEntry<>(it.key(), it.value());
it.next();
return nextEntry;
}
return null;
}
}
|
RocksDBIterator
|
java
|
elastic__elasticsearch
|
plugins/discovery-gce/src/test/java/org/elasticsearch/cloud/gce/GceInstancesServiceImplTests.java
|
{
"start": 1146,
"end": 2749
}
|
class ____ extends ESTestCase {
public void testHeaderContainsMetadataFlavor() throws Exception {
final AtomicBoolean addMetdataFlavor = new AtomicBoolean();
final MockHttpTransport transport = new MockHttpTransport() {
@Override
public LowLevelHttpRequest buildRequest(String method, final String url) {
return new MockLowLevelHttpRequest() {
@Override
public LowLevelHttpResponse execute() {
MockLowLevelHttpResponse response = new MockLowLevelHttpResponse();
response.setStatusCode(200);
response.setContentType(Json.MEDIA_TYPE);
response.setContent("value");
if (addMetdataFlavor.get()) {
response.addHeader("Metadata-Flavor", "Google");
}
return response;
}
};
}
};
final GceInstancesServiceImpl service = new GceInstancesServiceImpl(Settings.EMPTY) {
@Override
protected synchronized HttpTransport getGceHttpTransport() {
return transport;
}
};
final String serviceURL = "/computeMetadata/v1/project/project-id";
assertThat(service.getAppEngineValueFromMetadataServer(serviceURL), is(nullValue()));
addMetdataFlavor.set(true);
assertThat(service.getAppEngineValueFromMetadataServer(serviceURL), is("value"));
}
}
|
GceInstancesServiceImplTests
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java
|
{
"start": 26542,
"end": 27384
}
|
class ____ extends NewNodeInfo {
String[] hostnames;
public HostNameBasedNodes(String[] hostnames,
Set<String> nodesToBeExcluded, Set<String> nodesToBeIncluded) {
this.hostnames = hostnames;
this.nodesToBeExcluded = nodesToBeExcluded;
this.nodesToBeIncluded = nodesToBeIncluded;
}
@Override
String[] getNames() {
return hostnames;
}
@Override
int getNumberofNewNodes() {
return hostnames.length;
}
@Override
int getNumberofIncludeNodes() {
return nodesToBeIncluded.size();
}
@Override
int getNumberofExcludeNodes() {
return nodesToBeExcluded.size();
}
}
/**
* The number of data nodes to be started are specified.
* The data nodes will have same host name, but different port numbers.
*
*/
static
|
HostNameBasedNodes
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-rest-openapi/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/openapi/model/Parameter.java
|
{
"start": 1474,
"end": 7069
}
|
enum ____ {
MATRIX("matrix"),
LABEL("label"),
FORM("form"),
SIMPLE("simple"),
SPACE_DELIMITED("spaceDelimited"),
PIPE_DELIMITED("pipeDelimited"),
DEEP_OBJECT("deepObject");
private final String value;
Style(String value) {
this.value = value;
}
@Override
public String toString() {
return value;
}
}
private final String name;
private final In in;
private String description;
private Boolean required;
private Boolean deprecated;
private Boolean allowEmptyValue;
private Style style;
private Boolean explode;
private Boolean allowReserved;
private Schema schema;
private Object example;
private Map<String, Example> examples;
private Map<String, MediaType> contents;
private transient ParameterMeta meta;
public Parameter(String name, In in) {
this.name = Objects.requireNonNull(name);
this.in = Objects.requireNonNull(in);
}
public String getName() {
return name;
}
public In getIn() {
return in;
}
public String getDescription() {
return description;
}
public Parameter setDescription(String description) {
this.description = description;
return this;
}
public Boolean getRequired() {
return required;
}
public Parameter setRequired(Boolean required) {
this.required = required;
return this;
}
public Boolean getDeprecated() {
return deprecated;
}
public Parameter setDeprecated(Boolean deprecated) {
this.deprecated = deprecated;
return this;
}
public Boolean getAllowEmptyValue() {
return allowEmptyValue;
}
public Parameter setAllowEmptyValue(Boolean allowEmptyValue) {
this.allowEmptyValue = allowEmptyValue;
return this;
}
public Style getStyle() {
return style;
}
public Parameter setStyle(Style style) {
this.style = style;
return this;
}
public Boolean getExplode() {
return explode;
}
public Parameter setExplode(Boolean explode) {
this.explode = explode;
return this;
}
public Boolean getAllowReserved() {
return allowReserved;
}
public Parameter setAllowReserved(Boolean allowReserved) {
this.allowReserved = allowReserved;
return this;
}
public Schema getSchema() {
return schema;
}
public Parameter setSchema(Schema schema) {
this.schema = schema;
return this;
}
public Object getExample() {
return example;
}
public Parameter setExample(Object example) {
this.example = example;
return this;
}
public Map<String, Example> getExamples() {
return examples;
}
public Parameter setExamples(Map<String, Example> examples) {
this.examples = examples;
return this;
}
public Parameter addExample(String name, Example example) {
if (examples == null) {
examples = new LinkedHashMap<>();
}
examples.put(name, example);
return this;
}
public Parameter removeExample(String name) {
if (examples != null) {
examples.remove(name);
}
return this;
}
public Map<String, MediaType> getContents() {
return contents;
}
public Parameter setContents(Map<String, MediaType> contents) {
this.contents = contents;
return this;
}
public Parameter addContent(String name, MediaType content) {
if (contents == null) {
contents = new LinkedHashMap<>();
}
contents.put(name, content);
return this;
}
public Parameter removeContent(String name) {
if (contents != null) {
contents.remove(name);
}
return this;
}
public ParameterMeta getMeta() {
return meta;
}
public Parameter setMeta(ParameterMeta meta) {
this.meta = meta;
return this;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || obj.getClass() != Parameter.class) {
return false;
}
Parameter other = (Parameter) obj;
return name.equals(other.name) && in == other.in;
}
@Override
public int hashCode() {
return 31 * name.hashCode() + in.hashCode();
}
@Override
public Parameter clone() {
Parameter clone = super.clone();
clone.schema = clone(schema);
clone.examples = clone(examples);
clone.contents = clone(contents);
return clone;
}
@Override
public Map<String, Object> writeTo(Map<String, Object> node, Context context) {
write(node, "name", name);
write(node, "in", in.toString());
write(node, "description", description);
write(node, "required", required);
write(node, "deprecated", deprecated);
write(node, "allowEmptyValue", allowEmptyValue);
write(node, "style", style);
write(node, "explode", explode);
write(node, "allowReserved", allowReserved);
write(node, "schema", schema, context);
write(node, "example", example);
write(node, "examples", examples, context);
write(node, "content", contents, context);
writeExtensions(node);
return node;
}
}
|
Style
|
java
|
apache__camel
|
components/camel-zookeeper/src/main/java/org/apache/camel/component/zookeeper/ZooKeeperCuratorConfiguration.java
|
{
"start": 1225,
"end": 7876
}
|
class ____ implements Cloneable {
private CuratorFramework curatorFramework;
private List<String> nodes;
private String namespace;
private long reconnectBaseSleepTime;
private TimeUnit reconnectBaseSleepTimeUnit;
private int reconnectMaxRetries;
private long reconnectMaxSleepTime;
private TimeUnit reconnectMaxSleepTimeUnit;
private long sessionTimeout;
private TimeUnit sessionTimeoutUnit;
private long connectionTimeout;
private TimeUnit connectionTimeoutUnit;
private List<AuthInfo> authInfoList;
private long maxCloseWait;
private TimeUnit maxCloseWaitUnit;
private RetryPolicy retryPolicy;
private String basePath;
public ZooKeeperCuratorConfiguration() {
this.reconnectBaseSleepTime = 1000;
this.reconnectBaseSleepTimeUnit = TimeUnit.MILLISECONDS;
this.reconnectMaxSleepTime = Integer.MAX_VALUE;
this.reconnectMaxSleepTimeUnit = TimeUnit.MILLISECONDS;
this.reconnectMaxRetries = 3;
// from org.apache.curator.framework.CuratorFrameworkFactory
this.sessionTimeout = Integer.getInteger("curator-default-session-timeout", 60 * 1000);
this.sessionTimeoutUnit = TimeUnit.MILLISECONDS;
// from org.apache.curator.framework.CuratorFrameworkFactory
this.connectionTimeout = Integer.getInteger("curator-default-connection-timeout", 15 * 1000);
this.connectionTimeoutUnit = TimeUnit.MILLISECONDS;
// from org.apache.curator.framework.CuratorFrameworkFactory
this.maxCloseWait = 1000;
this.maxCloseWaitUnit = TimeUnit.MILLISECONDS;
}
// *******************************
// Properties
// *******************************
public CuratorFramework getCuratorFramework() {
return curatorFramework;
}
public void setCuratorFramework(CuratorFramework curatorFramework) {
this.curatorFramework = curatorFramework;
}
public List<String> getNodes() {
return nodes;
}
public void setNodes(String nodes) {
this.nodes = Arrays.stream(nodes.split(",")).collect(Collectors.toUnmodifiableList());
}
public void setNodes(List<String> nodes) {
this.nodes = Collections.unmodifiableList(new ArrayList<>(nodes));
}
public String getNamespace() {
return namespace;
}
public void setNamespace(String namespace) {
this.namespace = namespace;
}
public long getReconnectBaseSleepTime() {
return reconnectBaseSleepTime;
}
public void setReconnectBaseSleepTime(long reconnectBaseSleepTime) {
this.reconnectBaseSleepTime = reconnectBaseSleepTime;
}
public void setReconnectBaseSleepTime(long reconnectBaseSleepTime, TimeUnit reconnectBaseSleepTimeUnit) {
this.reconnectBaseSleepTime = reconnectBaseSleepTime;
this.reconnectBaseSleepTimeUnit = reconnectBaseSleepTimeUnit;
}
public TimeUnit getReconnectBaseSleepTimeUnit() {
return reconnectBaseSleepTimeUnit;
}
public void setReconnectBaseSleepTimeUnit(TimeUnit reconnectBaseSleepTimeUnit) {
this.reconnectBaseSleepTimeUnit = reconnectBaseSleepTimeUnit;
}
public long getReconnectMaxSleepTime() {
return reconnectMaxSleepTime;
}
public void setReconnectMaxSleepTime(long reconnectMaxSleepTime) {
this.reconnectMaxSleepTime = reconnectMaxSleepTime;
}
public void setReconnectMaxSleepTime(long reconnectMaxSleepTime, TimeUnit reconnectBaseSleepTimeUnit) {
this.reconnectMaxSleepTime = reconnectMaxSleepTime;
this.reconnectBaseSleepTimeUnit = reconnectBaseSleepTimeUnit;
}
public TimeUnit getReconnectMaxSleepTimeUnit() {
return reconnectMaxSleepTimeUnit;
}
public void setReconnectMaxSleepTimeUnit(TimeUnit reconnectMaxSleepTimeUnit) {
this.reconnectMaxSleepTimeUnit = reconnectMaxSleepTimeUnit;
}
public int getReconnectMaxRetries() {
return reconnectMaxRetries;
}
public void setReconnectMaxRetries(int reconnectMaxRetries) {
this.reconnectMaxRetries = reconnectMaxRetries;
}
public long getSessionTimeout() {
return sessionTimeout;
}
public void setSessionTimeout(long sessionTimeout) {
this.sessionTimeout = sessionTimeout;
}
public void setSessionTimeout(long sessionTimeout, TimeUnit sessionTimeoutUnit) {
this.sessionTimeout = sessionTimeout;
this.sessionTimeoutUnit = sessionTimeoutUnit;
}
public TimeUnit getSessionTimeoutUnit() {
return sessionTimeoutUnit;
}
public void setSessionTimeoutUnit(TimeUnit sessionTimeoutUnit) {
this.sessionTimeoutUnit = sessionTimeoutUnit;
}
public long getConnectionTimeout() {
return connectionTimeout;
}
public void setConnectionTimeout(long connectionTimeout) {
this.connectionTimeout = connectionTimeout;
}
public void setConnectionTimeout(long connectionTimeout, TimeUnit connectionTimeotUnit) {
this.connectionTimeout = connectionTimeout;
this.connectionTimeoutUnit = connectionTimeotUnit;
}
public TimeUnit getConnectionTimeoutUnit() {
return connectionTimeoutUnit;
}
public void setConnectionTimeoutUnit(TimeUnit connectionTimeoutUnit) {
this.connectionTimeoutUnit = connectionTimeoutUnit;
}
public List<AuthInfo> getAuthInfoList() {
return authInfoList;
}
public void setAuthInfoList(List<AuthInfo> authInfoList) {
this.authInfoList = authInfoList;
}
public long getMaxCloseWait() {
return maxCloseWait;
}
public void setMaxCloseWait(long maxCloseWait) {
this.maxCloseWait = maxCloseWait;
}
public TimeUnit getMaxCloseWaitUnit() {
return maxCloseWaitUnit;
}
public void setMaxCloseWaitUnit(TimeUnit maxCloseWaitUnit) {
this.maxCloseWaitUnit = maxCloseWaitUnit;
}
public RetryPolicy getRetryPolicy() {
return retryPolicy;
}
public void setRetryPolicy(RetryPolicy retryPolicy) {
this.retryPolicy = retryPolicy;
}
public String getBasePath() {
return basePath;
}
public void setBasePath(String basePath) {
this.basePath = basePath;
}
// *******************************
// Clone
// *******************************
public ZooKeeperCuratorConfiguration copy() {
try {
return (ZooKeeperCuratorConfiguration) clone();
} catch (CloneNotSupportedException e) {
throw new RuntimeCamelException(e);
}
}
}
|
ZooKeeperCuratorConfiguration
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/core/TestTokenManager.java
|
{
"start": 319,
"end": 1390
}
|
class ____ extends TokenManager {
private TokenListener listener;
public TestTokenManager(IdentityProvider identityProvider, TokenManagerConfig tokenManagerConfig) {
super(identityProvider, tokenManagerConfig);
}
@Override
public void start(TokenListener listener, boolean waitForToken) {
this.listener = listener;
}
@Override
public void stop() {
// Cleanup logic if needed
}
public void emitToken(SimpleToken token) {
if (listener != null) {
listener.onTokenRenewed(token);
}
}
public void emitError(Exception exception) {
if (listener != null) {
listener.onError(exception);
}
}
public void emitTokenWithDelay(SimpleToken token, long delayMillis) {
new Thread(() -> {
try {
Thread.sleep(delayMillis);
emitToken(token);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}).start();
}
}
|
TestTokenManager
|
java
|
bumptech__glide
|
annotation/compiler/test/src/test/resources/EmptyAppGlideModuleTest/GeneratedAppGlideModuleImpl.java
|
{
"start": 261,
"end": 1365
}
|
class ____ extends GeneratedAppGlideModule {
private final EmptyAppModule appGlideModule;
public GeneratedAppGlideModuleImpl(Context context) {
appGlideModule = new EmptyAppModule();
if (Log.isLoggable("Glide", Log.DEBUG)) {
Log.d("Glide", "Discovered AppGlideModule from annotation: com.bumptech.glide.test.EmptyAppModule");
}
}
@Override
public void applyOptions(@NonNull Context context, @NonNull GlideBuilder builder) {
appGlideModule.applyOptions(context, builder);
}
@Override
public void registerComponents(@NonNull Context context, @NonNull Glide glide,
@NonNull Registry registry) {
appGlideModule.registerComponents(context, glide, registry);
}
@Override
public boolean isManifestParsingEnabled() {
return appGlideModule.isManifestParsingEnabled();
}
@Override
@NonNull
public Set<Class<?>> getExcludedModuleClasses() {
return Collections.emptySet();
}
@Override
@NonNull
GeneratedRequestManagerFactory getRequestManagerFactory() {
return new GeneratedRequestManagerFactory();
}
}
|
GeneratedAppGlideModuleImpl
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/mock/handler/MySqlMockExecuteHandlerImpl.java
|
{
"start": 1447,
"end": 6393
}
|
class ____ implements MockExecuteHandler {
@Override
public ResultSet executeQuery(MockStatementBase statement, String sql) throws SQLException {
SQLStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> stmtList = parser.parseStatementList();
if (stmtList.size() > 1) {
throw new SQLException("not support multi-statment. " + sql);
}
if (stmtList.isEmpty()) {
throw new SQLException("executeQueryError : " + sql);
}
SQLStatement stmt = stmtList.get(0);
if (stmt instanceof CobarShowStatus) {
return showStatus(statement);
}
if (!(stmt instanceof SQLSelectStatement)) {
throw new SQLException("executeQueryError : " + sql);
}
SQLSelect select = ((SQLSelectStatement) stmt).getSelect();
SQLSelectQuery query = select.getQuery();
if (query instanceof SQLSelectQueryBlock) {
return executeQuery(statement, (SQLSelectQueryBlock) query);
}
throw new SQLException("TODO");
}
public ResultSet executeQuery(MockStatementBase statement, SQLSelectQueryBlock query) throws SQLException {
SQLTableSource from = query.getFrom();
if (from instanceof SQLExprTableSource) {
SQLExpr expr = ((SQLExprTableSource) from).getExpr();
if (expr instanceof SQLIdentifierExpr) {
String ident = ((SQLIdentifierExpr) expr).getName();
if ("dual".equalsIgnoreCase(ident)) {
return executeQueryFromDual(statement, query);
}
}
throw new SQLException("TODO : " + query);
} else if (from == null) {
return executeQueryFromDual(statement, query);
} else {
throw new SQLException("TODO");
}
}
public ResultSet showStatus(MockStatementBase statement) throws SQLException {
MockResultSet rs = new MockResultSet(statement);
MockResultSetMetaData metaData = rs.getMockMetaData();
Object[] row = new Object[]{"on"};
ColumnMetaData column = new ColumnMetaData();
column.setColumnType(Types.NVARCHAR);
metaData.getColumns().add(column);
rs.getRows().add(row);
return rs;
}
public ResultSet executeQueryFromDual(MockStatementBase statement, SQLSelectQueryBlock query) throws SQLException {
MockResultSet rs = statement.getConnection().getDriver().createMockResultSet(statement);
MockResultSetMetaData metaData = rs.getMockMetaData();
Object[] row = new Object[query.getSelectList().size()];
for (int i = 0, size = query.getSelectList().size(); i < size; ++i) {
ColumnMetaData column = new ColumnMetaData();
SQLSelectItem item = query.getSelectList().get(i);
SQLExpr expr = item.getExpr();
if (expr instanceof SQLIntegerExpr) {
row[i] = ((SQLNumericLiteralExpr) expr).getNumber();
column.setColumnType(Types.INTEGER);
} else if (expr instanceof SQLNumberExpr) {
row[i] = ((SQLNumericLiteralExpr) expr).getNumber();
column.setColumnType(Types.DECIMAL);
} else if (expr instanceof SQLCharExpr) {
row[i] = ((SQLCharExpr) expr).getText();
column.setColumnType(Types.VARCHAR);
} else if (expr instanceof SQLNCharExpr) {
row[i] = ((SQLNCharExpr) expr).getText();
column.setColumnType(Types.NVARCHAR);
} else if (expr instanceof SQLBooleanExpr) {
row[i] = ((SQLBooleanExpr) expr).getBooleanValue();
column.setColumnType(Types.NVARCHAR);
} else if (expr instanceof SQLNullExpr) {
row[i] = null;
} else if (expr instanceof SQLMethodInvokeExpr) {
SQLMethodInvokeExpr methodInvokeExpr = (SQLMethodInvokeExpr) expr;
if ("NOW".equalsIgnoreCase(methodInvokeExpr.getMethodName())) {
row[i] = new Timestamp(System.currentTimeMillis());
} else {
throw new SQLException("TODO");
}
} else if (expr instanceof SQLVariantRefExpr) {
SQLVariantRefExpr varExpr = (SQLVariantRefExpr) expr;
int varIndex = varExpr.getIndex();
if (statement instanceof MockPreparedStatement) {
MockPreparedStatement mockPstmt = (MockPreparedStatement) statement;
row[i] = mockPstmt.getParameters().get(varIndex);
} else {
row[i] = null;
}
} else {
row[i] = null;
}
metaData.getColumns().add(column);
}
rs.getRows().add(row);
return rs;
}
}
|
MySqlMockExecuteHandlerImpl
|
java
|
netty__netty
|
handler/src/main/java/io/netty/handler/ssl/OpenSslDefaultApplicationProtocolNegotiator.java
|
{
"start": 911,
"end": 1775
}
|
class ____ implements OpenSslApplicationProtocolNegotiator {
private final ApplicationProtocolConfig config;
public OpenSslDefaultApplicationProtocolNegotiator(ApplicationProtocolConfig config) {
this.config = checkNotNull(config, "config");
}
@Override
public List<String> protocols() {
return config.supportedProtocols();
}
@Override
public ApplicationProtocolConfig.Protocol protocol() {
return config.protocol();
}
@Override
public ApplicationProtocolConfig.SelectorFailureBehavior selectorFailureBehavior() {
return config.selectorFailureBehavior();
}
@Override
public ApplicationProtocolConfig.SelectedListenerFailureBehavior selectedListenerFailureBehavior() {
return config.selectedListenerFailureBehavior();
}
}
|
OpenSslDefaultApplicationProtocolNegotiator
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/serializer/CollectionSerializerTest.java
|
{
"start": 358,
"end": 2869
}
|
class ____ extends TestCase {
public void test_0() throws Exception {
SerializeWriter out = new SerializeWriter();
CollectionCodec listSerializer = new CollectionCodec();
listSerializer.write(new JSONSerializer(out), Collections.EMPTY_LIST, null, null, 0);
Assert.assertEquals("[]", out.toString());
}
public void test_1() throws Exception {
SerializeWriter out = new SerializeWriter();
CollectionCodec listSerializer = new CollectionCodec();
listSerializer.write(new JSONSerializer(out), Collections.singletonList(1), null, null, 0);
Assert.assertEquals("[1]", out.toString());
}
public void test_2_s() throws Exception {
SerializeWriter out = new SerializeWriter();
CollectionCodec listSerializer = new CollectionCodec();
List<Object> list = new ArrayList<Object>();
list.add(1);
list.add(2);
listSerializer.write(new JSONSerializer(out), list, null, null, 0);
Assert.assertEquals("[1,2]", out.toString());
}
public void test_3_s() throws Exception {
SerializeWriter out = new SerializeWriter();
CollectionCodec listSerializer = new CollectionCodec();
List<Object> list = new ArrayList<Object>();
list.add(1);
list.add(2);
list.add(3);
listSerializer.write(new JSONSerializer(out), list, null, null, 0);
Assert.assertEquals("[1,2,3]", out.toString());
}
public void test_4_s() throws Exception {
SerializeWriter out = new SerializeWriter();
CollectionCodec listSerializer = new CollectionCodec();
List<Object> list = new ArrayList<Object>();
list.add(1L);
list.add(2L);
list.add(3L);
list.add(Collections.emptyMap());
listSerializer.write(new JSONSerializer(out), list, null, null, 0);
Assert.assertEquals("[1,2,3,{}]", out.toString());
}
public void test_5_s() throws Exception {
SerializeWriter out = new SerializeWriter();
CollectionCodec listSerializer = new CollectionCodec();
List<Object> list = new ArrayList<Object>();
list.add(1L);
list.add(21474836480L);
list.add(null);
list.add(Collections.emptyMap());
list.add(21474836480L);
listSerializer.write(new JSONSerializer(out), list, null, null, 0);
Assert.assertEquals("[1,21474836480,null,{},21474836480]", out.toString());
}
}
|
CollectionSerializerTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/testFixtures/java/org/springframework/context/testfixture/beans/factory/FooRegistrar.java
|
{
"start": 858,
"end": 1046
}
|
class ____ implements BeanRegistrar {
@Override
public void register(BeanRegistry registry, Environment env) {
registry.registerBean(Foo.class);
}
public record Foo() {}
}
|
FooRegistrar
|
java
|
dropwizard__dropwizard
|
dropwizard-example/src/test/java/com/example/helloworld/core/PersonTest.java
|
{
"start": 355,
"end": 1089
}
|
class ____ {
private static final ObjectMapper MAPPER = newObjectMapper();
@Test
void serializesToJSON() throws Exception {
final Person person = new Person("Luther Blissett", "Lead Tester", 1902);
final String expected = MAPPER.writeValueAsString(
MAPPER.readValue(getClass().getResource("/person.json"), Person.class));
assertThat(MAPPER.writeValueAsString(person)).isEqualTo(expected);
}
@Test
public void deserializesFromJSON() throws Exception {
final Person person = new Person("Luther Blissett", "Lead Tester", 1902);
assertThat(MAPPER.readValue(getClass().getResource("/person.json"), Person.class))
.isEqualTo(person);
}
}
|
PersonTest
|
java
|
spring-projects__spring-boot
|
loader/spring-boot-loader-tools/src/test/java/org/springframework/boot/loader/tools/SizeCalculatingEntryWriterTests.java
|
{
"start": 995,
"end": 2195
}
|
class ____ {
@Test
void getWhenWithinThreshold() throws Exception {
TestEntryWriter original = new TestEntryWriter(SizeCalculatingEntryWriter.THRESHOLD - 1);
EntryWriter writer = SizeCalculatingEntryWriter.get(original);
assertThat(writer).isNotNull();
assertThat(writer.size()).isEqualTo(original.getBytes().length);
assertThat(writeBytes(writer)).isEqualTo(original.getBytes());
assertThat(writer).extracting("content").isNotInstanceOf(File.class);
}
@Test
void getWhenExceedingThreshold() throws Exception {
TestEntryWriter original = new TestEntryWriter(SizeCalculatingEntryWriter.THRESHOLD + 1);
EntryWriter writer = SizeCalculatingEntryWriter.get(original);
assertThat(writer).isNotNull();
assertThat(writer.size()).isEqualTo(original.getBytes().length);
assertThat(writeBytes(writer)).isEqualTo(original.getBytes());
assertThat(writer).extracting("content").isInstanceOf(File.class);
}
private byte[] writeBytes(EntryWriter writer) throws IOException {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
writer.write(outputStream);
outputStream.close();
return outputStream.toByteArray();
}
private static
|
SizeCalculatingEntryWriterTests
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/DoNotCallChecker.java
|
{
"start": 9439,
"end": 9785
}
|
class ____ the field represented by this Field using"
+ " getDeclaringClass")
.put(
instanceMethod().onExactClass("java.lang.reflect.Method").named("getClass"),
"Calling getClass on Method returns the Class object for Method, you probably meant"
+ " to retrieve the
|
containing
|
java
|
apache__flink
|
flink-datastream/src/main/java/org/apache/flink/datastream/impl/context/DefaultTaskInfo.java
|
{
"start": 974,
"end": 2039
}
|
class ____ implements TaskInfo {
private final int parallelism;
private final int maxParallelism;
private final String taskName;
private final int indexOfSubtask;
private final int attemptNumber;
public DefaultTaskInfo(
int parallelism,
int maxParallelism,
String taskName,
int indexOfSubtask,
int attemptNumber) {
this.parallelism = parallelism;
this.maxParallelism = maxParallelism;
this.taskName = taskName;
this.indexOfSubtask = indexOfSubtask;
this.attemptNumber = attemptNumber;
}
@Override
public int getParallelism() {
return parallelism;
}
@Override
public int getMaxParallelism() {
return maxParallelism;
}
@Override
public String getTaskName() {
return taskName;
}
@Override
public int getIndexOfThisSubtask() {
return indexOfSubtask;
}
@Override
public int getAttemptNumber() {
return attemptNumber;
}
}
|
DefaultTaskInfo
|
java
|
apache__maven
|
impl/maven-core/src/test/java/org/apache/maven/project/PluginConnectionSimpleTest.java
|
{
"start": 1418,
"end": 5166
}
|
class ____ {
@Test
void testPluginModificationPersistsInModel() {
// Create a test project with a plugin
Model model = new Model();
model.setGroupId("test.group");
model.setArtifactId("test-artifact");
model.setVersion("1.0.0");
Build build = new Build();
model.setBuild(build);
// Add a test plugin
Plugin originalPlugin = new Plugin();
originalPlugin.setGroupId("org.apache.maven.plugins");
originalPlugin.setArtifactId("maven-compiler-plugin");
originalPlugin.setVersion("3.8.1");
build.addPlugin(originalPlugin);
MavenProject project = new MavenProject(model);
// Get the plugin using getPlugin() method
Plugin retrievedPlugin = project.getPlugin("org.apache.maven.plugins:maven-compiler-plugin");
assertNotNull(retrievedPlugin, "Plugin should be found");
assertEquals("3.8.1", retrievedPlugin.getVersion(), "Initial version should match");
// Modify the plugin version
retrievedPlugin.setVersion("3.11.0");
// Verify the change persists when getting the plugin again
Plugin pluginAfterModification = project.getPlugin("org.apache.maven.plugins:maven-compiler-plugin");
assertEquals(
"3.11.0",
pluginAfterModification.getVersion(),
"Version change should persist - this verifies the plugin is connected to the model");
// Also verify the change is reflected in the build plugins list
Plugin pluginFromBuildList = project.getBuild().getPlugins().stream()
.filter(p -> "org.apache.maven.plugins:maven-compiler-plugin".equals(p.getKey()))
.findFirst()
.orElse(null);
assertNotNull(pluginFromBuildList, "Plugin should be found in build plugins list");
assertEquals(
"3.11.0", pluginFromBuildList.getVersion(), "Version change should be reflected in build plugins list");
}
@Test
void testPluginConnectionBeforeAndAfterFix() {
// This test demonstrates the difference between the old broken behavior and the new fixed behavior
Model model = new Model();
model.setGroupId("test.group");
model.setArtifactId("test-artifact");
model.setVersion("1.0.0");
Build build = new Build();
model.setBuild(build);
Plugin originalPlugin = new Plugin();
originalPlugin.setGroupId("org.apache.maven.plugins");
originalPlugin.setArtifactId("maven-surefire-plugin");
originalPlugin.setVersion("2.22.2");
build.addPlugin(originalPlugin);
MavenProject project = new MavenProject(model);
// The old broken implementation would have done:
// var plugin = getBuild().getDelegate().getPluginsAsMap().get(pluginKey);
// return plugin != null ? new Plugin(plugin) : null;
// This would create a disconnected Plugin that doesn't persist changes.
// The new fixed implementation does:
// Find the plugin in the connected plugins list
Plugin connectedPlugin = project.getPlugin("org.apache.maven.plugins:maven-surefire-plugin");
assertNotNull(connectedPlugin, "Plugin should be found");
// Test that modifications persist (this would fail with the old implementation)
connectedPlugin.setVersion("3.0.0-M7");
Plugin pluginAfterChange = project.getPlugin("org.apache.maven.plugins:maven-surefire-plugin");
assertEquals(
"3.0.0-M7",
pluginAfterChange.getVersion(),
"Plugin modifications should persist - this proves the fix is working");
}
}
|
PluginConnectionSimpleTest
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/server/handler/DefaultWebFilterChain.java
|
{
"start": 1531,
"end": 3656
}
|
class ____ implements WebFilterChain {
private final List<WebFilter> allFilters;
private final WebHandler handler;
private final @Nullable WebFilter currentFilter;
private final @Nullable DefaultWebFilterChain chain;
/**
* Public constructor with the list of filters and the target handler to use.
* @param handler the target handler
* @param filters the filters ahead of the handler
* @since 5.1
*/
public DefaultWebFilterChain(WebHandler handler, List<WebFilter> filters) {
Assert.notNull(handler, "WebHandler is required");
this.allFilters = Collections.unmodifiableList(filters);
this.handler = handler;
DefaultWebFilterChain chain = initChain(filters, handler);
this.currentFilter = chain.currentFilter;
this.chain = chain.chain;
}
private static DefaultWebFilterChain initChain(List<WebFilter> filters, WebHandler handler) {
DefaultWebFilterChain chain = new DefaultWebFilterChain(filters, handler, null, null);
ListIterator<? extends WebFilter> iterator = filters.listIterator(filters.size());
while (iterator.hasPrevious()) {
chain = new DefaultWebFilterChain(filters, handler, iterator.previous(), chain);
}
return chain;
}
/**
* Private constructor to represent one link in the chain.
*/
private DefaultWebFilterChain(List<WebFilter> allFilters, WebHandler handler,
@Nullable WebFilter currentFilter, @Nullable DefaultWebFilterChain chain) {
this.allFilters = allFilters;
this.currentFilter = currentFilter;
this.handler = handler;
this.chain = chain;
}
public List<WebFilter> getFilters() {
return this.allFilters;
}
public WebHandler getHandler() {
return this.handler;
}
@Override
public Mono<Void> filter(ServerWebExchange exchange) {
return Mono.defer(() ->
this.currentFilter != null && this.chain != null ?
invokeFilter(this.currentFilter, this.chain, exchange) :
this.handler.handle(exchange));
}
private Mono<Void> invokeFilter(WebFilter filter, DefaultWebFilterChain chain, ServerWebExchange exchange) {
return filter.filter(exchange, chain).checkpoint(filter.toString());
}
}
|
DefaultWebFilterChain
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/proxy/SqlStatisticTest.java
|
{
"start": 738,
"end": 1238
}
|
class ____ extends TestCase {
public void test_sql_stat() throws Exception {
JdbcSqlStat stat = new JdbcSqlStat("SELECT * FROM t_user");
assertEquals(null, stat.getExecuteLastStartTime());
assertEquals(null, stat.getExecuteNanoSpanMaxOccurTime());
assertEquals(null, stat.getExecuteErrorLastTime());
stat.error(new Exception());
assertNotNull(stat.getExecuteErrorLast());
assertNotNull(stat.getExecuteErrorLastTime());
}
}
|
SqlStatisticTest
|
java
|
spring-projects__spring-framework
|
spring-websocket/src/main/java/org/springframework/web/socket/server/support/AbstractHandshakeHandler.java
|
{
"start": 2088,
"end": 2753
}
|
class ____ {@link HandshakeHandler} implementations, independent of the Servlet API.
*
* <p>Performs initial validation of the WebSocket handshake request - possibly rejecting it
* through the appropriate HTTP status code - while also allowing its subclasses to override
* various parts of the negotiation process: for example, origin validation, sub-protocol
* negotiation, extensions negotiation, etc.
*
* <p>If the negotiation succeeds, the actual upgrade is delegated to a server-specific
* {@link org.springframework.web.socket.server.RequestUpgradeStrategy}, which will update
* the response as necessary and initialize the WebSocket. As of 7.0, this
|
for
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/formatstring/InlineFormatStringTest.java
|
{
"start": 938,
"end": 1403
}
|
class ____ {
private final BugCheckerRefactoringTestHelper refactoringHelper =
BugCheckerRefactoringTestHelper.newInstance(InlineFormatString.class, getClass());
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(InlineFormatString.class, getClass());
@Test
public void refactoring() {
refactoringHelper
.addInputLines(
"Test.java",
"""
|
InlineFormatStringTest
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java
|
{
"start": 739,
"end": 1583
}
|
class ____ extends AExpression {
private final AExpression childNode;
private final Operation operation;
public EUnary(int identifier, Location location, AExpression childNode, Operation operation) {
super(identifier, location);
this.childNode = Objects.requireNonNull(childNode);
this.operation = Objects.requireNonNull(operation);
}
public AExpression getChildNode() {
return childNode;
}
public Operation getOperation() {
return operation;
}
@Override
public <Scope> void visit(UserTreeVisitor<Scope> userTreeVisitor, Scope scope) {
userTreeVisitor.visitUnary(this, scope);
}
@Override
public <Scope> void visitChildren(UserTreeVisitor<Scope> userTreeVisitor, Scope scope) {
childNode.visit(userTreeVisitor, scope);
}
}
|
EUnary
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/dialect/function/json/PostgreSQLJsonArrayAppendFunction.java
|
{
"start": 679,
"end": 3808
}
|
class ____ extends AbstractJsonArrayAppendFunction {
private final boolean supportsLax;
public PostgreSQLJsonArrayAppendFunction(boolean supportsLax, TypeConfiguration typeConfiguration) {
super( typeConfiguration );
this.supportsLax = supportsLax;
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> arguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> translator) {
final Expression json = (Expression) arguments.get( 0 );
final Expression jsonPath = (Expression) arguments.get( 1 );
final SqlAstNode value = arguments.get( 2 );
sqlAppender.appendSql( "(select " );
if ( supportsLax ) {
sqlAppender.appendSql( "jsonb_set_lax" );
}
else {
sqlAppender.appendSql( "case when (t.d)#>t.p is not null then jsonb_set" );
}
sqlAppender.appendSql( "(t.d,t.p,(t.d)#>t.p||" );
if ( value instanceof Literal literal && literal.getLiteralValue() == null ) {
sqlAppender.appendSql( "null::jsonb" );
}
else {
sqlAppender.appendSql( "to_jsonb(" );
value.accept( translator );
if ( value instanceof Literal literal && literal.getJdbcMapping().getJdbcType().isString() ) {
// PostgreSQL until version 16 is not smart enough to infer the type of a string literal
sqlAppender.appendSql( "::text" );
}
sqlAppender.appendSql( ')' );
}
sqlAppender.appendSql( ",false" );
if ( supportsLax ) {
sqlAppender.appendSql( ",'return_target')" );
}
else {
sqlAppender.appendSql( ") else t.d end" );
}
sqlAppender.appendSql( " from (values(" );
final boolean needsCast = !isJsonType( json );
if ( needsCast ) {
sqlAppender.appendSql( "cast(" );
}
json.accept( translator );
if ( needsCast ) {
sqlAppender.appendSql( " as jsonb)" );
}
sqlAppender.appendSql( ',' );
List<JsonPathHelper.JsonPathElement> jsonPathElements =
JsonPathHelper.parseJsonPathElements( translator.getLiteralValue( jsonPath ) );
sqlAppender.appendSql( "array" );
char separator = '[';
for ( JsonPathHelper.JsonPathElement pathElement : jsonPathElements ) {
sqlAppender.appendSql( separator );
if ( pathElement instanceof JsonPathHelper.JsonAttribute attribute ) {
sqlAppender.appendSingleQuoteEscapedString( attribute.attribute() );
}
else if ( pathElement instanceof JsonPathHelper.JsonParameterIndexAccess ) {
final String parameterName = ( (JsonPathHelper.JsonParameterIndexAccess) pathElement ).parameterName();
throw new QueryException( "JSON path [" + jsonPath + "] uses parameter [" + parameterName + "] that is not passed" );
}
else {
sqlAppender.appendSql( '\'' );
sqlAppender.appendSql( ( (JsonPathHelper.JsonIndexAccess) pathElement ).index() + 1 );
sqlAppender.appendSql( '\'' );
}
separator = ',';
}
sqlAppender.appendSql( "]::text[]" );
sqlAppender.appendSql( ")) t(d,p))" );
}
private static boolean isJsonType(Expression expression) {
final JdbcMappingContainer expressionType = expression.getExpressionType();
return expressionType != null && expressionType.getSingleJdbcMapping().getJdbcType().isJson();
}
}
|
PostgreSQLJsonArrayAppendFunction
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/api/common/typeutils/SingleThreadAccessCheckingTypeSerializer.java
|
{
"start": 7594,
"end": 8103
}
|
class ____ implements AutoCloseable {
private final AtomicReference<Thread> currentThreadRef;
private SingleThreadAccessCheck(AtomicReference<Thread> currentThreadRef) {
this.currentThreadRef = currentThreadRef;
}
@Override
public void close() {
assert (currentThreadRef.compareAndSet(Thread.currentThread(), null))
: "The checker has concurrent access from " + currentThreadRef.get();
}
}
}
|
SingleThreadAccessCheck
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ShouldHaveParent.java
|
{
"start": 830,
"end": 2611
}
|
class ____ extends BasicErrorMessageFactory {
private static final String PATH_NO_PARENT = "%nExpecting path%n %s%nto have parent:%n %s%nbut did not have one.";
private static final String PATH_NOT_EXPECTED_PARENT = "%nExpecting path%n %s%nto have parent:%n %s%nbut had:%n %s.";
private static final String FILE_NO_PARENT = "%nExpecting file%n %s%nto have parent:%n %s%nbut did not have one.";
private static final String FILE_NOT_EXPECTED_PARENT = "%nExpecting file%n %s%nto have parent:%n %s%nbut had:%n %s.";
public static ShouldHaveParent shouldHaveParent(File actual, File expected) {
return actual.getParentFile() == null ? new ShouldHaveParent(actual, expected)
: new ShouldHaveParent(actual,
actual.getParentFile(), expected);
}
public static ShouldHaveParent shouldHaveParent(Path actual, Path expected) {
final Path actualParent = actual.getParent();
return actualParent == null
? new ShouldHaveParent(actual, expected)
: new ShouldHaveParent(actual, actualParent, expected);
}
public static ShouldHaveParent shouldHaveParent(Path actual, Path actualParent, Path expected) {
return new ShouldHaveParent(actual, actualParent, expected);
}
private ShouldHaveParent(File actual, File expected) {
super(FILE_NO_PARENT, actual, expected);
}
private ShouldHaveParent(File actual, File actualParent, File expected) {
super(FILE_NOT_EXPECTED_PARENT, actual, expected, actualParent);
}
private ShouldHaveParent(Path actual, Path expected) {
super(PATH_NO_PARENT, actual, expected);
}
private ShouldHaveParent(Path actual, Path actualParent, Path expected) {
super(PATH_NOT_EXPECTED_PARENT, actual, expected, actualParent);
}
}
|
ShouldHaveParent
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/error/future/ShouldBeCompleted_create_Test.java
|
{
"start": 1038,
"end": 1526
}
|
class ____ {
@Test
void should_create_error_message() {
// WHEN
String error = shouldBeCompleted(new CompletableFuture<>()).create(new TestDescription("TEST"));
// THEN
then(error).isEqualTo(format("[TEST] %n" +
"Expecting%n" +
" <CompletableFuture[Incomplete]>%n" +
"to be completed.%n%s",
WARNING));
}
}
|
ShouldBeCompleted_create_Test
|
java
|
quarkusio__quarkus
|
extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/webjar/WebJarResourcesFilter.java
|
{
"start": 651,
"end": 1323
}
|
class ____ implements Closeable {
private final InputStream stream;
private final boolean changed;
public FilterResult(InputStream stream, boolean changed) {
this.stream = stream;
this.changed = changed;
}
public InputStream getStream() {
return stream;
}
public boolean isChanged() {
return changed;
}
public boolean hasStream() {
return stream != null;
}
@Override
public void close() throws IOException {
if (hasStream()) {
stream.close();
}
}
}
}
|
FilterResult
|
java
|
grpc__grpc-java
|
alts/src/main/java/io/grpc/alts/internal/TsiPeer.java
|
{
"start": 2198,
"end": 2454
}
|
class ____ extends Property<Long> {
public SignedInt64Property(@Nonnull String name, @Nonnull Long value) {
super(name, value);
}
}
/** A peer property corresponding to an unsigned 64-bit integer. */
public static final
|
SignedInt64Property
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configuration/WebSecurityConfigurationTests.java
|
{
"start": 27243,
"end": 27616
}
|
class ____ {
@Order(1)
@Bean
public WebSecurityCustomizer webSecurityCustomizer1() {
return (web) -> web.ignoring().requestMatchers("/ignore1");
}
@Order(2)
@Bean
public WebSecurityCustomizer webSecurityCustomizer2() {
return (web) -> web.ignoring().requestMatchers("/ignore2");
}
}
@Configuration
@EnableWebSecurity
static
|
OrderedCustomizerConfig
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/common/serialization/SerializerConfigImplTest.java
|
{
"start": 10117,
"end": 11514
}
|
class ____.apache.flink.api.common.serialization.SerializerConfigImplTest");
}
@Test
void testCopyDefaultSerializationConfig() {
SerializerConfig config = new SerializerConfigImpl();
Configuration configuration = new Configuration();
config.configure(configuration, SerializerConfigImplTest.class.getClassLoader());
assertThat(config.copy()).isEqualTo(config);
}
@Test
void testCopySerializerConfig() {
SerializerConfigImpl serializerConfig = new SerializerConfigImpl();
Configuration configuration = new Configuration();
serializerConfig.configure(configuration, SerializerConfigImplTest.class.getClassLoader());
serializerConfig
.getDefaultKryoSerializerClasses()
.forEach(serializerConfig::registerTypeWithKryoSerializer);
assertThat(serializerConfig.copy()).isEqualTo(serializerConfig);
}
private SerializerConfig getConfiguredSerializerConfig(String serializationConfigStr) {
Configuration configuration = new Configuration();
configuration.setString(SERIALIZATION_CONFIG.key(), serializationConfigStr);
SerializerConfig serializerConfig = new SerializerConfigImpl();
serializerConfig.configure(configuration, Thread.currentThread().getContextClassLoader());
return serializerConfig;
}
private static
|
org
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/over/RowTimeRangeBoundedPrecedingFunction.java
|
{
"start": 2455,
"end": 12578
}
|
class ____<K>
extends KeyedProcessFunction<K, RowData, RowData> {
private static final long serialVersionUID = 1L;
private static final Logger LOG =
LoggerFactory.getLogger(RowTimeRangeBoundedPrecedingFunction.class);
private final GeneratedAggsHandleFunction genAggsHandler;
private final LogicalType[] accTypes;
private final LogicalType[] inputFieldTypes;
private final long precedingOffset;
private final int rowTimeIdx;
private transient JoinedRowData output;
// the state which keeps the last triggering timestamp
private transient ValueState<Long> lastTriggeringTsState;
// the state which used to materialize the accumulator for incremental calculation
private transient ValueState<RowData> accState;
// the state which keeps the safe timestamp to cleanup states
private transient ValueState<Long> cleanupTsState;
// the state which keeps all the data that are not expired.
// The first element (as the mapState key) of the tuple is the time stamp. Per each time stamp,
// the second element of tuple is a list that contains the entire data of all the rows belonging
// to this time stamp.
private transient MapState<Long, List<RowData>> inputState;
private transient AggsHandleFunction function;
// ------------------------------------------------------------------------
// Metrics
// ------------------------------------------------------------------------
private static final String LATE_ELEMENTS_DROPPED_METRIC_NAME = "numLateRecordsDropped";
private transient Counter numLateRecordsDropped;
@VisibleForTesting
protected Counter getCounter() {
return numLateRecordsDropped;
}
public RowTimeRangeBoundedPrecedingFunction(
GeneratedAggsHandleFunction genAggsHandler,
LogicalType[] accTypes,
LogicalType[] inputFieldTypes,
long precedingOffset,
int rowTimeIdx) {
Preconditions.checkNotNull(precedingOffset);
this.genAggsHandler = genAggsHandler;
this.accTypes = accTypes;
this.inputFieldTypes = inputFieldTypes;
this.precedingOffset = precedingOffset;
this.rowTimeIdx = rowTimeIdx;
}
@Override
public void open(OpenContext openContext) throws Exception {
function = genAggsHandler.newInstance(getRuntimeContext().getUserCodeClassLoader());
function.open(new PerKeyStateDataViewStore(getRuntimeContext()));
output = new JoinedRowData();
ValueStateDescriptor<Long> lastTriggeringTsDescriptor =
new ValueStateDescriptor<Long>("lastTriggeringTsState", Types.LONG);
lastTriggeringTsState = getRuntimeContext().getState(lastTriggeringTsDescriptor);
InternalTypeInfo<RowData> accTypeInfo = InternalTypeInfo.ofFields(accTypes);
ValueStateDescriptor<RowData> accStateDesc =
new ValueStateDescriptor<RowData>("accState", accTypeInfo);
accState = getRuntimeContext().getState(accStateDesc);
// input element are all binary row as they are came from network
InternalTypeInfo<RowData> inputType = InternalTypeInfo.ofFields(inputFieldTypes);
ListTypeInfo<RowData> rowListTypeInfo = new ListTypeInfo<RowData>(inputType);
MapStateDescriptor<Long, List<RowData>> inputStateDesc =
new MapStateDescriptor<Long, List<RowData>>(
"inputState", Types.LONG, rowListTypeInfo);
inputState = getRuntimeContext().getMapState(inputStateDesc);
ValueStateDescriptor<Long> cleanupTsStateDescriptor =
new ValueStateDescriptor<>("cleanupTsState", Types.LONG);
this.cleanupTsState = getRuntimeContext().getState(cleanupTsStateDescriptor);
// metrics
this.numLateRecordsDropped =
getRuntimeContext().getMetricGroup().counter(LATE_ELEMENTS_DROPPED_METRIC_NAME);
}
@Override
public void processElement(
RowData input,
KeyedProcessFunction<K, RowData, RowData>.Context ctx,
Collector<RowData> out)
throws Exception {
// triggering timestamp for trigger calculation
long triggeringTs = input.getLong(rowTimeIdx);
Long lastTriggeringTs = lastTriggeringTsState.value();
if (lastTriggeringTs == null) {
lastTriggeringTs = 0L;
}
// check if the data is expired, if not, save the data and register event time timer
if (triggeringTs > lastTriggeringTs) {
List<RowData> data = inputState.get(triggeringTs);
if (null != data) {
data.add(input);
inputState.put(triggeringTs, data);
} else {
data = new ArrayList<RowData>();
data.add(input);
inputState.put(triggeringTs, data);
// register event time timer
ctx.timerService().registerEventTimeTimer(triggeringTs);
}
registerCleanupTimer(ctx, triggeringTs);
} else {
numLateRecordsDropped.inc();
}
}
private void registerCleanupTimer(
KeyedProcessFunction<K, RowData, RowData>.Context ctx, long timestamp)
throws Exception {
// calculate safe timestamp to cleanup states
long minCleanupTimestamp = timestamp + precedingOffset + 1;
long maxCleanupTimestamp = timestamp + (long) (precedingOffset * 1.5) + 1;
// update timestamp and register timer if needed
Long curCleanupTimestamp = cleanupTsState.value();
if (curCleanupTimestamp == null || curCleanupTimestamp < minCleanupTimestamp) {
// we don't delete existing timer since it may delete timer for data processing
// TODO Use timer with namespace to distinguish timers
ctx.timerService().registerEventTimeTimer(maxCleanupTimestamp);
cleanupTsState.update(maxCleanupTimestamp);
}
}
@Override
public void onTimer(
long timestamp,
KeyedProcessFunction<K, RowData, RowData>.OnTimerContext ctx,
Collector<RowData> out)
throws Exception {
Long cleanupTimestamp = cleanupTsState.value();
// if cleanupTsState has not been updated then it is safe to cleanup states
if (cleanupTimestamp != null && cleanupTimestamp <= timestamp) {
inputState.clear();
accState.clear();
lastTriggeringTsState.clear();
cleanupTsState.clear();
function.cleanup();
return;
}
// gets all window data from state for the calculation
List<RowData> inputs = inputState.get(timestamp);
if (null != inputs) {
int dataListIndex = 0;
RowData accumulators = accState.value();
// initialize when first run or failover recovery per key
if (null == accumulators) {
accumulators = function.createAccumulators();
}
// set accumulators in context first
function.setAccumulators(accumulators);
// keep up timestamps of retract data
List<Long> retractTsList = new ArrayList<Long>();
// do retraction
Iterator<Map.Entry<Long, List<RowData>>> iter = inputState.entries().iterator();
while (iter.hasNext()) {
Map.Entry<Long, List<RowData>> data = iter.next();
Long dataTs = data.getKey();
Long offset = timestamp - dataTs;
if (offset > precedingOffset) {
List<RowData> retractDataList = data.getValue();
if (retractDataList != null) {
dataListIndex = 0;
while (dataListIndex < retractDataList.size()) {
RowData retractRow = retractDataList.get(dataListIndex);
function.retract(retractRow);
dataListIndex += 1;
}
retractTsList.add(dataTs);
} else {
// Does not retract values which are outside of window if the state is
// cleared already.
LOG.warn(
"The state is cleared because of state ttl. "
+ "This will result in incorrect result. "
+ "You can increase the state ttl to avoid this.");
}
}
}
// do accumulation
dataListIndex = 0;
while (dataListIndex < inputs.size()) {
RowData curRow = inputs.get(dataListIndex);
// accumulate current row
function.accumulate(curRow);
dataListIndex += 1;
}
// get aggregate result
RowData aggValue = function.getValue();
// copy forwarded fields to output row and emit output row
dataListIndex = 0;
while (dataListIndex < inputs.size()) {
RowData curRow = inputs.get(dataListIndex);
output.replace(curRow, aggValue);
out.collect(output);
dataListIndex += 1;
}
// remove the data that has been retracted
dataListIndex = 0;
while (dataListIndex < retractTsList.size()) {
inputState.remove(retractTsList.get(dataListIndex));
dataListIndex += 1;
}
// update the value of accumulators for future incremental computation
accumulators = function.getAccumulators();
accState.update(accumulators);
}
lastTriggeringTsState.update(timestamp);
}
@Override
public void close() throws Exception {
if (null != function) {
function.close();
}
}
}
|
RowTimeRangeBoundedPrecedingFunction
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/annotation/ProfileValueUtilsTests.java
|
{
"start": 11596,
"end": 11713
}
|
class ____
implements CustomProfileValueSourceTestInterface {
}
}
|
EnabledWithCustomProfileValueSourceOnTestInterface
|
java
|
spring-projects__spring-boot
|
module/spring-boot-devtools/src/test/java/org/springframework/boot/devtools/test/MockClientHttpRequestFactory.java
|
{
"start": 2591,
"end": 3220
}
|
class ____ extends MockClientHttpRequest {
MockRequest(URI uri, HttpMethod httpMethod) {
super(httpMethod, uri);
}
@Override
protected ClientHttpResponse executeInternal() throws IOException {
MockClientHttpRequestFactory.this.executedRequests.add(this);
Object response = MockClientHttpRequestFactory.this.responses.pollFirst();
if (response instanceof IOException ioException) {
throw ioException;
}
if (response == null) {
response = new Response(0, null, HttpStatus.GONE);
}
return ((Response) response).asHttpResponse(MockClientHttpRequestFactory.this.seq);
}
}
static
|
MockRequest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-micrometer-tracing-brave/src/test/java/org/springframework/boot/micrometer/tracing/brave/autoconfigure/CompositePropagationFactoryTests.java
|
{
"start": 1322,
"end": 2577
}
|
class ____ {
@Test
void supportsJoin() {
Propagation.Factory supported = Mockito.mock(Propagation.Factory.class);
given(supported.supportsJoin()).willReturn(true);
given(supported.get()).willReturn(new DummyPropagation("a"));
Propagation.Factory unsupported = Mockito.mock(Propagation.Factory.class);
given(unsupported.supportsJoin()).willReturn(false);
given(unsupported.get()).willReturn(new DummyPropagation("a"));
CompositePropagationFactory factory = new CompositePropagationFactory(List.of(supported), List.of(unsupported));
assertThat(factory.supportsJoin()).isFalse();
}
@Test
void requires128BitTraceId() {
Propagation.Factory required = Mockito.mock(Propagation.Factory.class);
given(required.requires128BitTraceId()).willReturn(true);
given(required.get()).willReturn(new DummyPropagation("a"));
Propagation.Factory notRequired = Mockito.mock(Propagation.Factory.class);
given(notRequired.requires128BitTraceId()).willReturn(false);
given(notRequired.get()).willReturn(new DummyPropagation("a"));
CompositePropagationFactory factory = new CompositePropagationFactory(List.of(required), List.of(notRequired));
assertThat(factory.requires128BitTraceId()).isTrue();
}
@Nested
|
CompositePropagationFactoryTests
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/security/token/ExceptionThrowingDelegationTokenReceiver.java
|
{
"start": 1096,
"end": 2457
}
|
class ____ implements DelegationTokenReceiver {
public static volatile ThreadLocal<Boolean> throwInInit =
ThreadLocal.withInitial(() -> Boolean.FALSE);
public static volatile ThreadLocal<Boolean> throwInUsage =
ThreadLocal.withInitial(() -> Boolean.FALSE);
public static volatile ThreadLocal<Boolean> constructed =
ThreadLocal.withInitial(() -> Boolean.FALSE);
public static volatile ThreadLocal<Integer> onNewTokensObtainedCallCount =
ThreadLocal.withInitial(() -> 0);
public static void reset() {
throwInInit.set(false);
throwInUsage.set(false);
constructed.set(false);
onNewTokensObtainedCallCount.set(0);
}
public ExceptionThrowingDelegationTokenReceiver() {
constructed.set(true);
}
@Override
public String serviceName() {
return "throw";
}
@Override
public void init(Configuration configuration) {
if (throwInInit.get()) {
throw new IllegalArgumentException();
}
}
@Override
public void onNewTokensObtained(byte[] tokens) throws Exception {
if (throwInUsage.get()) {
throw new IllegalArgumentException();
}
onNewTokensObtainedCallCount.set(onNewTokensObtainedCallCount.get() + 1);
}
}
|
ExceptionThrowingDelegationTokenReceiver
|
java
|
quarkusio__quarkus
|
extensions/hibernate-search-orm-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/orm/elasticsearch/runtime/HibernateSearchElasticsearchRuntimeConfigPersistenceUnit.java
|
{
"start": 6666,
"end": 7650
}
|
interface ____ {
/**
* Configuration for synchronization with the index when indexing automatically.
*
* @deprecated Use {@code quarkus.hibernate-search-orm.indexing.plan.synchronization.strategy} instead.
*/
AutomaticIndexingSynchronizationConfig synchronization();
/**
* Whether to check if dirty properties are relevant to indexing before actually reindexing an entity.
* <p>
* When enabled, re-indexing of an entity is skipped if the only changes are on properties that are not used when
* indexing.
*
* @deprecated This property is deprecated with no alternative to replace it.
* In the future, a dirty check will always be performed when considering whether to trigger reindexing.
*/
@WithDefault("true")
@Deprecated
boolean enableDirtyCheck();
}
@ConfigGroup
@Deprecated
|
AutomaticIndexingConfig
|
java
|
google__guava
|
guava-testlib/test/com/google/common/testing/NullPointerTesterTest.java
|
{
"start": 37249,
"end": 37808
}
|
class ____ extends DefaultValueChecker {
@SuppressWarnings("unused") // called by NullPointerTester
@Keep
public void checkArray(@SuppressWarnings("rawtypes") Class cls, String s) {
calledWith(cls, s);
}
void check() {
runTester();
Class<?> defaultClass = (Class<?>) getDefaultParameterValue(0);
assertEquals(Object.class, defaultClass);
}
}
public void testNonGenericClassDefaultValue() {
new NonGenericClassTypeDefaultValueChecker().check();
}
private static
|
NonGenericClassTypeDefaultValueChecker
|
java
|
google__guice
|
core/src/com/google/inject/internal/InjectionRequestProcessor.java
|
{
"start": 3576,
"end": 6521
}
|
class ____ {
final InjectorImpl injector;
final Object source;
final StaticInjectionRequest request;
ImmutableList<SingleMemberInjector> memberInjectors;
public StaticInjection(InjectorImpl injector, StaticInjectionRequest request) {
this.injector = injector;
this.source = request.getSource();
this.request = request;
}
void validate() {
Errors errorsForMember = errors.withSource(source);
Set<InjectionPoint> injectionPoints;
try {
injectionPoints = request.getInjectionPoints();
} catch (ConfigurationException e) {
errorsForMember.merge(e.getErrorMessages());
injectionPoints = e.getPartialValue();
}
if (injectionPoints != null) {
memberInjectors =
injector.membersInjectorStore.getInjectors(injectionPoints, errorsForMember);
} else {
memberInjectors = ImmutableList.of();
}
errors.merge(errorsForMember);
}
void injectMembers() {
InternalContext context = injector.enterContext();
try {
boolean isStageTool = injector.options.stage == Stage.TOOL;
for (SingleMemberInjector memberInjector : memberInjectors) {
// Run injections if we're not in tool stage (ie, PRODUCTION or DEV),
// or if we are in tool stage and the injection point is toolable.
if (!isStageTool || memberInjector.getInjectionPoint().isToolable()) {
if (InternalFlags.getUseMethodHandlesOption()) {
try {
// In theory, constructing the handle to invoke it exactly once is expensive and
// wasteful, and it is true for
// directly injecting the member this is probably slower than the reflective
// SingleFieldInjector. However,
// by taking this path we::
// 1. Don't need to construct fastclasses for method injections
// (SingleMethodInjector).
// 2. construct fast classes for transitive injections (constructors/@Provides
// methods).
// 3. Can leverage or initialize caches for transitive InternalFactory.getHandle
// calls.
memberInjector
.getInjectHandle(new LinkageContext())
.invokeExact((Object) null, context);
} catch (InternalProvisionException e) {
errors.merge(e);
} catch (Throwable t) {
// This will propagate unexpected Errors.
throw InternalMethodHandles.sneakyThrow(t);
}
} else {
try {
memberInjector.inject(context, null);
} catch (InternalProvisionException e) {
errors.merge(e);
}
}
}
}
} finally {
context.close();
}
}
}
}
|
StaticInjection
|
java
|
elastic__elasticsearch
|
modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4WriteThrottlingHandlerTests.java
|
{
"start": 1815,
"end": 10644
}
|
class ____ extends ESTestCase {
private ThreadWatchdog threadWatchdog = new ThreadWatchdog();
@Before
public void setFakeThreadName() {
// These tests interact with EmbeddedChannel instances directly on the test thread, so we rename it temporarily to satisfy checks
// that we're running on a transport thread
Thread.currentThread().setName(Transports.TEST_MOCK_TRANSPORT_THREAD_PREFIX + Thread.currentThread().getName());
}
@After
public void resetThreadName() {
final var threadName = Thread.currentThread().getName();
assertThat(threadName, startsWith(Transports.TEST_MOCK_TRANSPORT_THREAD_PREFIX));
Thread.currentThread().setName(threadName.substring(Transports.TEST_MOCK_TRANSPORT_THREAD_PREFIX.length()));
}
public void testThrottlesLargeMessage() {
final List<ByteBuf> seen = new CopyOnWriteArrayList<>();
final CapturingHandler capturingHandler = new CapturingHandler(seen);
final EmbeddedChannel embeddedChannel = new EmbeddedChannel(
capturingHandler,
new Netty4WriteThrottlingHandler(new ThreadContext(Settings.EMPTY), threadWatchdog.getActivityTrackerForCurrentThread())
);
// we assume that the channel outbound buffer is smaller than Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE
final int writeableBytes = Math.toIntExact(embeddedChannel.bytesBeforeUnwritable());
assertThat(writeableBytes, lessThan(Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE));
final int fullSizeChunks = randomIntBetween(2, 10);
final int extraChunkSize = randomIntBetween(0, 10);
final byte[] messageBytes = randomByteArrayOfLength(
Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE * fullSizeChunks + extraChunkSize
);
final Object message = wrapAsNettyOrEsBuffer(messageBytes);
final ChannelPromise promise = embeddedChannel.newPromise();
embeddedChannel.write(message, promise);
assertThat(seen, hasSize(1));
assertSliceEquals(seen.get(0), message, 0, Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE);
assertFalse(promise.isDone());
embeddedChannel.flush();
assertTrue(promise.isDone());
assertThat(seen, hasSize(fullSizeChunks + (extraChunkSize == 0 ? 0 : 1)));
assertTrue(capturingHandler.didWriteAfterThrottled);
if (extraChunkSize != 0) {
assertSliceEquals(
seen.get(seen.size() - 1),
message,
Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE * fullSizeChunks,
extraChunkSize
);
}
}
public void testThrottleLargeCompositeMessage() {
final List<ByteBuf> seen = new CopyOnWriteArrayList<>();
final CapturingHandler capturingHandler = new CapturingHandler(seen);
final EmbeddedChannel embeddedChannel = new EmbeddedChannel(
capturingHandler,
new Netty4WriteThrottlingHandler(new ThreadContext(Settings.EMPTY), threadWatchdog.getActivityTrackerForCurrentThread())
);
// we assume that the channel outbound buffer is smaller than Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE
final int writeableBytes = Math.toIntExact(embeddedChannel.bytesBeforeUnwritable());
assertThat(writeableBytes, lessThan(Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE));
final int fullSizeChunks = randomIntBetween(2, 10);
final int extraChunkSize = randomIntBetween(0, 10);
final byte[] messageBytes = randomByteArrayOfLength(
Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE * fullSizeChunks + extraChunkSize
);
int splitOffset = randomIntBetween(0, messageBytes.length);
int lastChunkSizeOfTheFirstSplit = splitOffset % Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE;
final BytesReference message = CompositeBytesReference.of(
new BytesArray(messageBytes, 0, splitOffset),
new BytesArray(messageBytes, splitOffset, messageBytes.length - splitOffset)
);
final ChannelPromise promise = embeddedChannel.newPromise();
embeddedChannel.write(message, promise);
assertThat(seen, hasSize(oneOf(1, 2)));
assertSliceEquals(seen.get(0), message, 0, seen.get(0).readableBytes());
assertFalse(promise.isDone());
embeddedChannel.flush();
assertTrue(promise.isDone());
// If the extra chunk size is greater than the last chunk size for the first half of the split, it means we will need to send
// (extraChunkSize - lastChunkSizeOfTheFirstSplit) bytes as the very last chunk of the entire message.
assertThat(seen, hasSize(oneOf(fullSizeChunks, fullSizeChunks + 1 + (extraChunkSize > lastChunkSizeOfTheFirstSplit ? 1 : 0))));
assertTrue(capturingHandler.didWriteAfterThrottled);
assertBufferEquals(Unpooled.compositeBuffer().addComponents(true, seen), message);
}
public void testPassesSmallMessageDirectly() {
final List<ByteBuf> seen = new CopyOnWriteArrayList<>();
final CapturingHandler capturingHandler = new CapturingHandler(seen);
final EmbeddedChannel embeddedChannel = new EmbeddedChannel(
capturingHandler,
new Netty4WriteThrottlingHandler(new ThreadContext(Settings.EMPTY), threadWatchdog.getActivityTrackerForCurrentThread())
);
final int writeableBytes = Math.toIntExact(embeddedChannel.bytesBeforeUnwritable());
assertThat(writeableBytes, lessThan(Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE));
final byte[] messageBytes = randomByteArrayOfLength(randomIntBetween(0, Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE));
final Object message = wrapAsNettyOrEsBuffer(messageBytes);
final ChannelPromise promise = embeddedChannel.newPromise();
embeddedChannel.write(message, promise);
assertThat(seen, hasSize(1)); // first message should be passed through straight away
assertBufferEquals(seen.get(0), message);
assertFalse(promise.isDone());
embeddedChannel.flush();
assertTrue(promise.isDone());
assertThat(seen, hasSize(1));
assertFalse(capturingHandler.didWriteAfterThrottled);
}
public void testThrottlesOnUnwritable() {
final List<ByteBuf> seen = new CopyOnWriteArrayList<>();
final EmbeddedChannel embeddedChannel = new EmbeddedChannel(
new CapturingHandler(seen),
new Netty4WriteThrottlingHandler(new ThreadContext(Settings.EMPTY), threadWatchdog.getActivityTrackerForCurrentThread())
);
final int writeableBytes = Math.toIntExact(embeddedChannel.bytesBeforeUnwritable());
assertThat(writeableBytes, lessThan(Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE));
final byte[] messageBytes = randomByteArrayOfLength(writeableBytes + randomIntBetween(0, 10));
final Object message = wrapAsNettyOrEsBuffer(messageBytes);
final ChannelPromise promise = embeddedChannel.newPromise();
embeddedChannel.write(message, promise);
assertThat(seen, hasSize(1)); // first message should be passed through straight away
assertBufferEquals(seen.get(0), message);
assertFalse(promise.isDone());
final Object messageToQueue = wrapAsNettyOrEsBuffer(
randomByteArrayOfLength(randomIntBetween(0, Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE))
);
final ChannelPromise promiseForQueued = embeddedChannel.newPromise();
embeddedChannel.write(messageToQueue, promiseForQueued);
assertThat(seen, hasSize(1));
assertFalse(promiseForQueued.isDone());
assertFalse(promise.isDone());
embeddedChannel.flush();
assertTrue(promise.isDone());
assertTrue(promiseForQueued.isDone());
}
private static void assertBufferEquals(ByteBuf expected, Object message) {
if (message instanceof ByteBuf buf) {
assertSame(expected, buf);
} else {
assertEquals(expected, Netty4Utils.toByteBuf(asInstanceOf(BytesReference.class, message)));
}
}
private static void assertSliceEquals(ByteBuf expected, Object message, int index, int length) {
assertEquals(
(message instanceof ByteBuf buf ? buf : Netty4Utils.toByteBuf(asInstanceOf(BytesReference.class, message))).slice(
index,
length
),
expected
);
}
private static Object wrapAsNettyOrEsBuffer(byte[] messageBytes) {
if (randomBoolean()) {
return Unpooled.wrappedBuffer(messageBytes);
}
return new BytesArray(messageBytes);
}
private
|
Netty4WriteThrottlingHandlerTests
|
java
|
quarkusio__quarkus
|
extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/runtime/MicrometerCounterInterceptorTest.java
|
{
"start": 852,
"end": 11428
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withConfigurationResource("test-logging.properties")
.overrideConfigKey("quarkus.micrometer.binder.mp-metrics.enabled", "false")
.overrideConfigKey("quarkus.micrometer.binder.vertx.enabled", "false")
.overrideConfigKey("quarkus.micrometer.registry-enabled-default", "false")
.overrideConfigKey("quarkus.redis.devservices.enabled", "false")
.withApplicationRoot((jar) -> jar
.addClass(TestValueResolver.class)
.addClass(CountedResource.class)
.addClass(TimedResource.class)
.addClass(GuardedResult.class));
@Inject
MeterRegistry registry;
@Inject
CountedResource counted;
@BeforeAll
static void addSimpleRegistry() {
Metrics.globalRegistry.add(new SimpleMeterRegistry());
}
@Test
void testCountFailuresOnly_NoMetricsOnSuccess() {
counted.onlyCountFailures();
Assertions.assertThrows(MeterNotFoundException.class, () -> registry.get("metric.none").counter());
}
@Test
void testCountAllMetrics_MetricsOnSuccess() {
counted.countAllInvocations(false);
Counter counter = registry.get("metric.all")
.tag("method", "countAllInvocations")
.tag("class", "io.quarkus.micrometer.test.CountedResource")
.tag("extra", "tag")
.tag("do_fail", "prefix_false")
.tag("exception", "none")
.tag("result", "success").counter();
Assertions.assertNotNull(counter);
Assertions.assertEquals(1, counter.count());
}
@Test
void testCountAllMetrics_MetricsOnFailure() {
Assertions.assertThrows(NullPointerException.class, () -> counted.countAllInvocations(true));
Counter counter = registry.get("metric.all")
.tag("method", "countAllInvocations")
.tag("class", "io.quarkus.micrometer.test.CountedResource")
.tag("extra", "tag")
.tag("do_fail", "prefix_true")
.tag("exception", "NullPointerException")
.tag("result", "failure").counter();
Assertions.assertNotNull(counter);
Assertions.assertEquals(1, counter.count());
Assertions.assertNull(counter.getId().getDescription());
}
@Test
void testCountEmptyMetricName_Success() {
counted.emptyMetricName(false);
Counter counter = registry.get("method.counted")
.tag("method", "emptyMetricName")
.tag("class", "io.quarkus.micrometer.test.CountedResource")
.tag("exception", "none")
.tag("fail", "false")
.tag("result", "success").counter();
Assertions.assertNotNull(counter);
Assertions.assertEquals(1, counter.count());
Assertions.assertEquals("nice description", counter.getId().getDescription());
}
@Test
void testCountEmptyMetricName_Failure() {
Assertions.assertThrows(NullPointerException.class, () -> counted.emptyMetricName(true));
Counter counter = registry.get("method.counted")
.tag("method", "emptyMetricName")
.tag("class", "io.quarkus.micrometer.test.CountedResource")
.tag("exception", "NullPointerException")
.tag("fail", "true")
.tag("result", "failure").counter();
Assertions.assertNotNull(counter);
Assertions.assertEquals(1, counter.count());
}
@Test
void testCountAsyncFailuresOnly_NoMetricsOnSuccess() {
GuardedResult guardedResult = new GuardedResult();
CompletableFuture<?> completableFuture = counted.onlyCountAsyncFailures(guardedResult);
guardedResult.complete();
completableFuture.join();
Assertions.assertThrows(MeterNotFoundException.class, () -> registry.get("async.none").counter());
}
@Test
void testCountAsyncAllMetrics_MetricsOnSuccess() {
GuardedResult guardedResult = new GuardedResult();
CompletableFuture<?> completableFuture = counted.countAllAsyncInvocations(guardedResult);
guardedResult.complete();
completableFuture.join();
Counter counter = registry.get("async.all")
.tag("method", "countAllAsyncInvocations")
.tag("class", "io.quarkus.micrometer.test.CountedResource")
.tag("extra", "tag")
.tag("exception", "none")
.tag("result", "success").counter();
Assertions.assertNotNull(counter);
Assertions.assertEquals(1, counter.count());
}
@Test
void testCountAsyncAllMetrics_MetricsOnFailure() {
GuardedResult guardedResult = new GuardedResult();
CompletableFuture<?> completableFuture = counted.countAllAsyncInvocations(guardedResult);
guardedResult.complete(new NullPointerException());
Assertions.assertThrows(java.util.concurrent.CompletionException.class, () -> completableFuture.join());
Counter counter = registry.get("async.all")
.tag("method", "countAllAsyncInvocations")
.tag("class", "io.quarkus.micrometer.test.CountedResource")
.tag("extra", "tag")
.tag("exception", "NullPointerException")
.tag("result", "failure").counter();
Assertions.assertNotNull(counter);
Assertions.assertEquals(1, counter.count());
Assertions.assertNull(counter.getId().getDescription());
}
@Test
void testCountAsyncEmptyMetricName_Success() {
GuardedResult guardedResult = new GuardedResult();
CompletableFuture<?> completableFuture = counted.emptyAsyncMetricName(guardedResult);
guardedResult.complete();
completableFuture.join();
Counter counter = registry.get("method.counted")
.tag("method", "emptyAsyncMetricName")
.tag("class", "io.quarkus.micrometer.test.CountedResource")
.tag("exception", "none")
.tag("result", "success").counter();
Assertions.assertNotNull(counter);
Assertions.assertEquals(1, counter.count());
}
@Test
void testCountAsyncEmptyMetricName_Failure() {
GuardedResult guardedResult = new GuardedResult();
CompletableFuture<?> completableFuture = counted.emptyAsyncMetricName(guardedResult);
guardedResult.complete(new NullPointerException());
Assertions.assertThrows(java.util.concurrent.CompletionException.class, () -> completableFuture.join());
Counter counter = registry.get("method.counted")
.tag("method", "emptyMetricName")
.tag("class", "io.quarkus.micrometer.test.CountedResource")
.tag("exception", "NullPointerException")
.tag("result", "failure").counter();
Assertions.assertNotNull(counter);
Assertions.assertEquals(1, counter.count());
}
@Test
void testCountUniFailuresOnly_NoMetricsOnSuccess() {
GuardedResult guardedResult = new GuardedResult();
Uni<?> uni = counted.onlyCountUniFailures(guardedResult);
guardedResult.complete();
uni.subscribe().asCompletionStage().join();
Assertions.assertThrows(MeterNotFoundException.class, () -> registry.get("uni.none").counter());
}
@Test
void testCountUniAllMetrics_MetricsOnSuccess() {
GuardedResult guardedResult = new GuardedResult();
Uni<?> uni = counted.countAllUniInvocations(guardedResult);
guardedResult.complete();
uni.subscribe().asCompletionStage().join();
Counter counter = registry.get("uni.all")
.tag("method", "countAllUniInvocations")
.tag("class", "io.quarkus.micrometer.test.CountedResource")
.tag("extra", "tag")
.tag("exception", "none")
.tag("result", "success").counter();
Assertions.assertNotNull(counter);
Assertions.assertEquals(1, counter.count());
}
@Test
void testCountUniAllMetrics_MetricsOnFailure() {
GuardedResult guardedResult = new GuardedResult();
Uni<?> uni = counted.countAllUniInvocations(guardedResult);
guardedResult.complete(new NullPointerException());
Assertions.assertThrows(java.util.concurrent.CompletionException.class,
() -> uni.subscribe().asCompletionStage().join());
Counter counter = registry.get("uni.all")
.tag("method", "countAllUniInvocations")
.tag("class", "io.quarkus.micrometer.test.CountedResource")
.tag("extra", "tag")
.tag("exception", "NullPointerException")
.tag("result", "failure").counter();
Assertions.assertNotNull(counter);
Assertions.assertEquals(1, counter.count());
Assertions.assertNull(counter.getId().getDescription());
}
@Test
void testCountUniEmptyMetricName_Success() {
GuardedResult guardedResult = new GuardedResult();
Uni<?> uni = counted.emptyUniMetricName(guardedResult);
guardedResult.complete();
uni.subscribe().asCompletionStage().join();
Counter counter = registry.get("method.counted")
.tag("method", "emptyUniMetricName")
.tag("class", "io.quarkus.micrometer.test.CountedResource")
.tag("exception", "none")
.tag("result", "success").counter();
Assertions.assertNotNull(counter);
Assertions.assertEquals(1, counter.count());
}
@Test
void testCountUniEmptyMetricName_Failure() {
GuardedResult guardedResult = new GuardedResult();
Uni<?> uni = counted.emptyUniMetricName(guardedResult);
guardedResult.complete(new NullPointerException());
Assertions.assertThrows(java.util.concurrent.CompletionException.class,
() -> uni.subscribe().asCompletionStage().join());
Counter counter = registry.get("method.counted")
.tag("method", "emptyMetricName")
.tag("class", "io.quarkus.micrometer.test.CountedResource")
.tag("exception", "NullPointerException")
.tag("result", "failure").counter();
Assertions.assertNotNull(counter);
Assertions.assertEquals(1, counter.count());
}
}
|
MicrometerCounterInterceptorTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/channel/ChannelStateWriteRequest.java
|
{
"start": 10507,
"end": 11434
}
|
class ____ extends ChannelStateWriteRequest {
private final ChannelStateWriteResult targetResult;
private final CheckpointStorageLocationReference locationReference;
CheckpointStartRequest(
JobVertexID jobVertexID,
int subtaskIndex,
long checkpointId,
ChannelStateWriteResult targetResult,
CheckpointStorageLocationReference locationReference) {
super(jobVertexID, subtaskIndex, checkpointId, "Start");
this.targetResult = checkNotNull(targetResult);
this.locationReference = checkNotNull(locationReference);
}
ChannelStateWriteResult getTargetResult() {
return targetResult;
}
public CheckpointStorageLocationReference getLocationReference() {
return locationReference;
}
@Override
public void cancel(Throwable cause) {
targetResult.fail(cause);
}
}
|
CheckpointStartRequest
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/taskmanager/TaskCancelAsyncProducerConsumerITCase.java
|
{
"start": 9414,
"end": 10305
}
|
class ____ extends Thread {
private final RecordWriter<LongValue> recordWriter;
public ProducerThread(ResultPartitionWriter partitionWriter) {
this.recordWriter = new RecordWriterBuilder<LongValue>().build(partitionWriter);
}
@Override
public void run() {
LongValue current = new LongValue(0);
try {
while (true) {
current.setValue(current.getValue() + 1);
recordWriter.emit(current);
recordWriter.flushAll();
}
} catch (Exception e) {
ASYNC_PRODUCER_EXCEPTION = e;
}
}
}
}
/** Invokable consuming buffers in a separate Thread (not the main Task thread). */
public static
|
ProducerThread
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ProductionComponentProcessorTest.java
|
{
"start": 9858,
"end": 9944
}
|
interface ____ {}",
"",
" @Module",
" static final
|
B
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/SubarraysShouldHaveSameSize.java
|
{
"start": 840,
"end": 3431
}
|
class ____ extends BasicErrorMessageFactory {
private static final String MESSAGE = "%n" +
"actual and expected 2d arrays should be deeply equal but rows at index %s differ:%n" +
"actual[%s] size is %s and expected[%s] is %s.%n" +
"actual[%s] was:%n" +
" %s%n" +
"expected[%s] was:%n" +
" %s%n" +
"actual was:%n" +
" %s%n" +
"expected was:%n" +
" %s";
/**
* Creates a new <code>{@link SubarraysShouldHaveSameSize}</code>.
* @param actual the actual 2D array in the failed assertion.
* @param expected the actual 2D array to compare actual with.
* @param actualSubArray actual[index] array
* @param actualSubArrayLength actual[index] length
* @param expectedSubArray expected[index]
* @param expectedSubArrayLength actual[index] length
* @param index index of {@code actualSubArray}, e.g. {@code 3} when checking size (length) of {@code actual[3]}
* @return the created {@code ErrorMessageFactory}
*/
public static ErrorMessageFactory subarraysShouldHaveSameSize(Object actual, Object expected, Object actualSubArray,
int actualSubArrayLength, Object expectedSubArray,
int expectedSubArrayLength, int index) {
return new SubarraysShouldHaveSameSize(actual, expected, actualSubArray, actualSubArrayLength, expectedSubArray,
expectedSubArrayLength, index);
}
private SubarraysShouldHaveSameSize(Object actual, Object expected, Object actualSubArray, int actualSubArrayLength,
Object expectedSubArray, int expectedSubArrayLength, int index) {
// reuse %s to let representation format the arrays but don't do it for integers as we want to keep the default toString of
// int (that would mot be the case if the representation was changed to hex representation for example).
super(MESSAGE.formatted(index, index, actualSubArrayLength, index, expectedSubArrayLength, index, "%s", index, "%s", "%s",
"%s"),
actualSubArray, expectedSubArray, actual, expected);
}
}
|
SubarraysShouldHaveSameSize
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/index/translog/SnapshotMatchers.java
|
{
"start": 831,
"end": 2096
}
|
class ____ {
private SnapshotMatchers() {
}
/**
* Consumes a snapshot and make sure it's size is as expected
*/
public static Matcher<Translog.Snapshot> size(int size) {
return new SizeMatcher(size);
}
/**
* Consumes a snapshot and make sure it's content is as expected
*/
public static Matcher<Translog.Snapshot> equalsTo(Translog.Operation... ops) {
return new EqualMatcher(ops);
}
/**
* Consumes a snapshot and make sure it's content is as expected
*/
public static Matcher<Translog.Snapshot> equalsTo(List<Translog.Operation> ops) {
return new EqualMatcher(ops.toArray(new Translog.Operation[ops.size()]));
}
public static Matcher<Translog.Snapshot> containsOperationsInAnyOrder(Collection<Translog.Operation> expectedOperations) {
return new ContainingInAnyOrderMatcher(expectedOperations);
}
/**
* Consumes a snapshot and makes sure that its operations have all seqno between minSeqNo(inclusive) and maxSeqNo(inclusive).
*/
public static Matcher<Translog.Snapshot> containsSeqNoRange(long minSeqNo, long maxSeqNo) {
return new ContainingSeqNoRangeMatcher(minSeqNo, maxSeqNo);
}
public static
|
SnapshotMatchers
|
java
|
quarkusio__quarkus
|
independent-projects/qute/core/src/main/java/io/quarkus/qute/SectionBlock.java
|
{
"start": 582,
"end": 8306
}
|
class ____ implements WithOrigin, ErrorInitializer {
public final Origin origin;
/**
* Id generated by the parser. {@value SectionHelperFactory#MAIN_BLOCK_NAME} for the main block.
*/
public final String id;
/**
* Label used for the given part. {@value SectionHelperFactory#MAIN_BLOCK_NAME} for the main block.
*/
public final String label;
/**
* An unmodifiable ordered map of parsed parameters.
* <p>
* Note that the order does not necessary reflect the original positions of the parameters but the parsing order.
*
* @see SectionHelperFactory#getParameters()
*/
public final Map<String, String> parameters;
/**
* An unmodifiable ordered map of parameter expressions.
*/
public final Map<String, Expression> expressions;
/**
* Section content - an immutable list of template nodes.
*/
public List<TemplateNode> nodes;
private final List<String> positionalParameters;
public SectionBlock(Origin origin, String id, String label, Map<String, String> parameters,
Map<String, Expression> expressions,
List<TemplateNode> nodes, List<String> positionalParameters) {
this.origin = origin;
this.id = id;
this.label = label;
this.parameters = parameters;
this.expressions = expressions;
this.nodes = ImmutableList.copyOf(nodes);
this.positionalParameters = positionalParameters;
}
public boolean isEmpty() {
return nodes.isEmpty();
}
/**
*
* @param position
* @return the parameter for the specified position, or {@code null} if no such parameter exists
*/
public String getParameter(int position) {
return positionalParameters.get(position);
}
List<Expression> getExpressions() {
List<Expression> expressions = new ArrayList<>();
expressions.addAll(this.expressions.values());
for (TemplateNode node : nodes) {
expressions.addAll(node.getExpressions());
}
return expressions;
}
Expression findExpression(Predicate<Expression> predicate) {
for (Expression e : this.expressions.values()) {
if (predicate.test(e)) {
return e;
}
}
for (TemplateNode node : nodes) {
if (node instanceof ExpressionNode) {
Expression e = ((ExpressionNode) node).expression;
if (predicate.test(e)) {
return e;
}
} else if (node instanceof SectionNode) {
Expression found = ((SectionNode) node).findExpression(predicate);
if (found != null) {
return found;
}
}
}
return null;
}
List<ParameterDeclaration> getParamDeclarations() {
List<ParameterDeclaration> declarations = null;
for (TemplateNode node : nodes) {
List<ParameterDeclaration> nodeDeclarations = node.getParameterDeclarations();
if (!nodeDeclarations.isEmpty()) {
if (declarations == null) {
declarations = new ArrayList<>();
}
declarations.addAll(nodeDeclarations);
}
}
return declarations != null ? declarations : Collections.emptyList();
}
TemplateNode findNode(Predicate<TemplateNode> predicate) {
for (TemplateNode node : nodes) {
if (predicate.test(node)) {
return node;
}
if (node.isSection()) {
SectionNode sectionNode = (SectionNode) node;
TemplateNode found = sectionNode.findNode(predicate);
if (found != null) {
return found;
}
}
}
return null;
}
List<TemplateNode> findNodes(Predicate<TemplateNode> predicate) {
List<TemplateNode> ret = null;
for (TemplateNode node : nodes) {
if (predicate.test(node)) {
if (ret == null) {
ret = new ArrayList<>();
}
ret.add(node);
}
if (node.isSection()) {
SectionNode sectionNode = (SectionNode) node;
List<TemplateNode> found = sectionNode.findNodes(predicate);
if (!found.isEmpty()) {
if (ret == null) {
ret = new ArrayList<>();
}
ret.addAll(found);
}
}
}
return ret == null ? Collections.emptyList() : ret;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("SectionBlock [origin=").append(origin).append(", id=").append(id).append(", label=").append(label)
.append("]");
return builder.toString();
}
@Override
public Origin getOrigin() {
return origin;
}
void optimizeNodes(Set<TemplateNode> nodesToRemove) {
List<TemplateNode> effectiveNodes = new ArrayList<>();
boolean hasLineSeparator = false;
boolean nodeIgnored = false;
for (TemplateNode node : nodes) {
if (node instanceof SectionNode) {
effectiveNodes.add(node);
((SectionNode) node).optimizeNodes(nodesToRemove);
} else if (node == Parser.COMMENT_NODE || nodesToRemove.contains(node)) {
// Ignore comments and nodes for removal
nodeIgnored = true;
} else {
effectiveNodes.add(node);
if (node instanceof LineSeparatorNode) {
hasLineSeparator = true;
}
}
}
if (!hasLineSeparator && !nodeIgnored) {
// No optimizations are possible
return;
}
if (hasLineSeparator) {
List<TemplateNode> finalNodes;
// Collapse adjacent text and line separator nodes
finalNodes = new ArrayList<>();
List<TextNode> textGroup = null;
for (TemplateNode node : effectiveNodes) {
if (node instanceof TextNode) {
if (textGroup == null) {
textGroup = new ArrayList<>();
}
textGroup.add((TextNode) node);
} else {
if (textGroup != null) {
collapseGroup(textGroup, finalNodes);
textGroup = null;
}
finalNodes.add(node);
}
}
if (textGroup != null) {
collapseGroup(textGroup, finalNodes);
}
nodes = ImmutableList.copyOf(finalNodes);
} else if (nodeIgnored) {
nodes = ImmutableList.copyOf(effectiveNodes);
}
}
private void collapseGroup(List<TextNode> group, List<TemplateNode> finalNodes) {
if (group.size() > 1) {
// Collapse the group...
StringBuilder val = new StringBuilder();
for (TextNode textNode : group) {
val.append(textNode.getValue());
}
finalNodes.add(new TextNode(val.toString(), group.get(0).getOrigin()));
} else {
finalNodes.add(group.get(0));
}
}
static SectionBlock.Builder builder(String id, Parser parser,
ErrorInitializer errorInitializer) {
return new Builder(id, parser, errorInitializer).setLabel(id);
}
static
|
SectionBlock
|
java
|
reactor__reactor-core
|
reactor-test/src/main/java/reactor/test/util/LoggerUtils.java
|
{
"start": 6521,
"end": 11875
}
|
class ____ implements reactor.util.Logger {
private final reactor.util.Logger delegate;
private final CapturingFactory parent;
DivertingLogger(Logger delegate, CapturingFactory parent) {
this.delegate = delegate;
this.parent = parent;
}
@Override
public String getName() {
return delegate.getName();
}
@Override
public boolean isTraceEnabled() {
Logger logger = parent.getCapturingLogger();
return delegate.isTraceEnabled() || (logger != null && logger.isTraceEnabled());
}
@Override
public void trace(String msg) {
Logger logger = parent.getCapturingLogger();
if (logger != null) {
logger.trace(msg);
}
if (parent.isRedirectToOriginal()) {
delegate.trace(msg);
}
}
@SuppressWarnings("NullAway") // NullAway issue with varargs and jspecify mode
@Override
public void trace(String format, @Nullable Object @Nullable... arguments) {
Logger logger = parent.getCapturingLogger();
if (logger != null) {
logger.trace(format, arguments);
}
if (parent.isRedirectToOriginal()) {
delegate.trace(format, arguments);
}
}
@Override
public void trace(String msg, Throwable t) {
Logger logger = parent.getCapturingLogger();
if (logger != null) {
logger.trace(msg, t);
}
if (parent.isRedirectToOriginal()) {
delegate.trace(msg, t);
}
}
@Override
public boolean isDebugEnabled() {
Logger logger = parent.getCapturingLogger();
return delegate.isDebugEnabled() || (logger != null && logger.isDebugEnabled());
}
@Override
public void debug(String msg) {
Logger logger = parent.getCapturingLogger();
if (logger != null) {
logger.debug(msg);
}
if (parent.isRedirectToOriginal()) {
delegate.debug(msg);
}
}
@SuppressWarnings("NullAway") // NullAway issue with varargs and jspecify mode
@Override
public void debug(String format, @Nullable Object @Nullable... arguments) {
Logger logger = parent.getCapturingLogger();
if (logger != null) {
logger.debug(format, arguments);
}
if (parent.isRedirectToOriginal()) {
delegate.debug(format, arguments);
}
}
@Override
public void debug(String msg, Throwable t) {
Logger logger = parent.getCapturingLogger();
if (logger != null) {
logger.debug(msg, t);
}
if (parent.isRedirectToOriginal()) {
delegate.debug(msg, t);
}
}
@Override
public boolean isInfoEnabled() {
Logger logger = parent.getCapturingLogger();
return delegate.isInfoEnabled() || (logger != null && logger.isInfoEnabled());
}
@Override
public void info(String msg) {
Logger logger = parent.getCapturingLogger();
if (logger != null) {
logger.info(msg);
}
if (parent.isRedirectToOriginal()) {
delegate.info(msg);
}
}
@SuppressWarnings("NullAway") // NullAway issue with varargs and jspecify mode
@Override
public void info(String format, @Nullable Object @Nullable... arguments) {
Logger logger = parent.getCapturingLogger();
if (logger != null) {
logger.info(format, arguments);
}
if (parent.isRedirectToOriginal()) {
delegate.info(format, arguments);
}
}
@Override
public void info(String msg, Throwable t) {
Logger logger = parent.getCapturingLogger();
if (logger != null) {
logger.info(msg, t);
}
if (parent.isRedirectToOriginal()) {
delegate.info(msg, t);
}
}
@Override
public boolean isWarnEnabled() {
Logger logger = parent.getCapturingLogger();
return delegate.isWarnEnabled() || (logger != null && logger.isWarnEnabled());
}
@Override
public void warn(String msg) {
Logger logger = parent.getCapturingLogger();
if (logger != null) {
logger.warn(msg);
}
if (parent.isRedirectToOriginal()) {
delegate.warn(msg);
}
}
@SuppressWarnings("NullAway") // NullAway issue with varargs and jspecify mode
@Override
public void warn(String format, @Nullable Object @Nullable... arguments) {
Logger logger = parent.getCapturingLogger();
if (logger != null) {
logger.warn(format, arguments);
}
if (parent.isRedirectToOriginal()) {
delegate.warn(format, arguments);
}
}
@Override
public void warn(String msg, Throwable t) {
Logger logger = parent.getCapturingLogger();
if (logger != null) {
logger.warn(msg, t);
}
if (parent.isRedirectToOriginal()) {
delegate.warn(msg, t);
}
}
@Override
public boolean isErrorEnabled() {
Logger logger = parent.getCapturingLogger();
return delegate.isErrorEnabled() || (logger != null && logger.isErrorEnabled());
}
@Override
public void error(String msg) {
Logger logger = parent.getCapturingLogger();
if (logger != null) {
logger.error(msg);
}
if (parent.isRedirectToOriginal()) {
delegate.error(msg);
}
}
@Override
public void error(String format, @Nullable Object @Nullable... arguments) {
Logger logger = parent.getCapturingLogger();
if (logger != null) {
logger.error(format, arguments);
}
if (parent.isRedirectToOriginal()) {
delegate.error(format, arguments);
}
}
@Override
public void error(String msg, Throwable t) {
Logger logger = parent.getCapturingLogger();
if (logger != null) {
logger.error(msg, t);
}
if (parent.isRedirectToOriginal()) {
delegate.error(msg, t);
}
}
}
}
|
DivertingLogger
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/embeddable/EmbeddableWithParentWithInheritance2Test.java
|
{
"start": 3269,
"end": 3607
}
|
class ____ {
Cheese cheese;
Integer intensity;
@Parent
public Cheese getCheese() {
return cheese;
}
public void setCheese(Cheese cheese) {
this.cheese = cheese;
}
public Integer getIntensity() {
return intensity;
}
public void setIntensity(Integer intensity) {
this.intensity = intensity;
}
}
}
|
SmellOf
|
java
|
elastic__elasticsearch
|
libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/AbstractExternalProcessorFactoryBridge.java
|
{
"start": 1472,
"end": 2593
}
|
class ____ implements Processor.Factory {
@Override
public Processor create(
final Map<String, Processor.Factory> processorFactories,
final String tag,
final String description,
final Map<String, Object> config,
final ProjectId projectId
) throws Exception {
final Map<String, ProcessorFactoryBridge> bridgedProcessorFactories = StableBridgeAPI.fromInternal(
processorFactories,
ProcessorFactoryBridge::fromInternal
);
final ProjectIdBridge bridgedProjectId = ProjectIdBridge.fromInternal(projectId);
final ProcessorBridge bridgedProcessor = AbstractExternalProcessorFactoryBridge.this.create(
bridgedProcessorFactories,
tag,
description,
config,
bridgedProjectId
);
return bridgedProcessor.toInternal();
}
ProcessorFactoryBridge toExternal() {
return AbstractExternalProcessorFactoryBridge.this;
}
}
}
|
InternalProxy
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/entities/converter/SexConverter.java
|
{
"start": 242,
"end": 960
}
|
class ____ implements AttributeConverter<Sex, String> {
@Override
public String convertToDatabaseColumn(Sex attribute) {
if (attribute == null) {
return null;
}
switch (attribute) {
case MALE: {
return "M";
}
case FEMALE: {
return "F";
}
default: {
throw new IllegalArgumentException( "Unexpected Sex model value [" + attribute + "]" );
}
}
}
@Override
public Sex convertToEntityAttribute(String dbData) {
if (dbData == null) {
return null;
}
if ( "M".equals( dbData ) ) {
return Sex.MALE;
}
else if ( "F".equals( dbData ) ) {
return Sex.FEMALE;
}
throw new IllegalArgumentException( "Unexpected Sex db value [" + dbData + "]" );
}
}
|
SexConverter
|
java
|
spring-projects__spring-security
|
saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/jackson2/Saml2RedirectAuthenticationRequestMixin.java
|
{
"start": 1162,
"end": 1895
}
|
class ____ in serialize/deserialize
* {@link Saml2RedirectAuthenticationRequest}.
*
* <pre>
* ObjectMapper mapper = new ObjectMapper();
* mapper.registerModule(new Saml2Jackson2Module());
* </pre>
*
* @author Ulrich Grave
* @since 5.7
* @see Saml2Jackson2Module
* @see SecurityJackson2Modules
* @deprecated as of 7.0 in favor of
* {@code org.springframework.security.saml2.jackson.Saml2RedirectAuthenticationRequestMixin}
* based on Jackson 3
*/
@SuppressWarnings("removal")
@Deprecated(forRemoval = true)
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS)
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY, getterVisibility = JsonAutoDetect.Visibility.NONE)
@JsonIgnoreProperties(ignoreUnknown = true)
|
helps
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/AbstractInformationExtractorImpl.java
|
{
"start": 60933,
"end": 61117
}
|
interface ____ {
ForeignKeyBuilder addColumnMapping(ColumnInformation referencing, ColumnInformation referenced);
ForeignKeyInformation build();
}
protected static
|
ForeignKeyBuilder
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/fixes/SuggestedFixesTest.java
|
{
"start": 71589,
"end": 71874
}
|
class ____ the compilation fail, so no suppression is added.
CompilationTestHelper.newInstance(
AddSuppressWarningsIfCompilationSucceedsOnlyInSameCompilationUnit.class, getClass())
.addSourceLines(
"OnlyInSameCompilationUnit.java", //
"
|
makes
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/plugins/ModuleSupport.java
|
{
"start": 5649,
"end": 6260
}
|
class ____ implements ModuleFinder {
private final ModuleReference mref;
private final String mn;
private InMemoryModuleFinder(ModuleReference mref) {
this.mref = mref;
this.mn = mref.descriptor().name();
}
@Override
public Optional<ModuleReference> find(String name) {
Objects.requireNonNull(name);
return Optional.ofNullable(mn.equals(name) ? mref : null);
}
@Override
public Set<ModuleReference> findAll() {
return Set.of(mref);
}
}
static
|
InMemoryModuleFinder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/dialect/function/json/PostgreSQLJsonTableFunction.java
|
{
"start": 1954,
"end": 8981
}
|
class ____ extends JsonTableFunction {
public PostgreSQLJsonTableFunction(TypeConfiguration typeConfiguration) {
super( typeConfiguration );
}
@Override
protected void renderJsonTable(
SqlAppender sqlAppender,
JsonTableArguments arguments,
AnonymousTupleTableGroupProducer tupleType,
String tableIdentifierVariable,
SqlAstTranslator<?> walker) {
if ( arguments.errorBehavior() == JsonTableErrorBehavior.NULL ) {
throw new QueryException( "Can't emulate null on error clause on PostgreSQL" );
}
sqlAppender.appendSql( "(select" );
renderColumns( sqlAppender, arguments.columnsClause(), 0, walker );
sqlAppender.appendSql( " from jsonb_path_query(" );
final boolean needsCast = !arguments.isJsonType() && AbstractSqlAstTranslator.isParameter( arguments.jsonDocument() );
if ( needsCast ) {
sqlAppender.appendSql( "cast(" );
}
arguments.jsonDocument().accept( walker );
if ( needsCast ) {
sqlAppender.appendSql( " as jsonb)" );
}
final SqlAstNode jsonPath = arguments.jsonPath();
if ( jsonPath != null ) {
sqlAppender.appendSql( ',' );
if ( jsonPath instanceof Literal ) {
jsonPath.accept( walker );
}
else {
sqlAppender.appendSql( "cast(" );
jsonPath.accept( walker );
sqlAppender.appendSql( " as jsonpath)" );
}
final JsonPathPassingClause passingClause = arguments.passingClause();
if ( passingClause != null ) {
sqlAppender.append( ",jsonb_build_object" );
char separator = '(';
for ( Map.Entry<String, Expression> entry : passingClause.getPassingExpressions().entrySet() ) {
sqlAppender.append( separator );
sqlAppender.appendSingleQuoteEscapedString( entry.getKey() );
sqlAppender.append( ',' );
entry.getValue().accept( walker );
separator = ',';
}
sqlAppender.append( ')' );
}
}
else {
sqlAppender.appendSql( ",'$[*]'" );
}
sqlAppender.appendSql( ") with ordinality t0(d,i)" );
renderNestedColumnJoins( sqlAppender, arguments.columnsClause(), 0, walker );
sqlAppender.appendSql( ')' );
}
protected int renderNestedColumnJoins(SqlAppender sqlAppender, JsonTableColumnsClause jsonTableColumnsClause, int clauseLevel, SqlAstTranslator<?> walker) {
int nextClauseLevel = clauseLevel;
for ( JsonTableColumnDefinition columnDefinition : jsonTableColumnsClause.getColumnDefinitions() ) {
if ( columnDefinition instanceof JsonTableNestedColumnDefinition nestedColumnDefinition ) {
sqlAppender.appendSql( " left join lateral jsonb_path_query(t" );
sqlAppender.appendSql( clauseLevel );
sqlAppender.appendSql( ".d," );
sqlAppender.appendSingleQuoteEscapedString( nestedColumnDefinition.jsonPath() );
sqlAppender.appendSql( ") with ordinality t" );
sqlAppender.appendSql( clauseLevel + 1 );
sqlAppender.appendSql( "(d,i) on true" );
nextClauseLevel = renderNestedColumnJoins( sqlAppender, nestedColumnDefinition.columns(), clauseLevel + 1, walker );
}
}
return nextClauseLevel;
}
@Override
protected int renderColumns(SqlAppender sqlAppender, JsonTableColumnsClause jsonTableColumnsClause, int clauseLevel, SqlAstTranslator<?> walker) {
return renderColumnDefinitions( sqlAppender, jsonTableColumnsClause, ' ', clauseLevel, walker );
}
@Override
protected int renderJsonNestedColumnDefinition(SqlAppender sqlAppender, JsonTableNestedColumnDefinition definition, int clauseLevel, SqlAstTranslator<?> walker) {
return renderColumns( sqlAppender, definition.columns(), clauseLevel, walker );
}
@Override
protected void renderJsonOrdinalityColumnDefinition(SqlAppender sqlAppender, JsonTableOrdinalityColumnDefinition definition, int clauseLevel, SqlAstTranslator<?> walker) {
sqlAppender.appendSql( 't' );
sqlAppender.appendSql( clauseLevel );
sqlAppender.appendSql( ".i " );
sqlAppender.appendSql( definition.name() );
}
@Override
protected void renderJsonValueColumnDefinition(SqlAppender sqlAppender, JsonTableValueColumnDefinition definition, int clauseLevel, SqlAstTranslator<?> walker) {
// jsonb_path_query_first errors by default
if ( definition.errorBehavior() != null && definition.errorBehavior() != JsonValueErrorBehavior.ERROR ) {
throw new QueryException( "Can't emulate on error clause on PostgreSQL" );
}
if ( definition.emptyBehavior() != null && definition.emptyBehavior() != JsonValueEmptyBehavior.NULL ) {
throw new QueryException( "Can't emulate on empty clause on PostgreSQL" );
}
final String jsonPath = definition.jsonPath() == null
? "$." + definition.name()
: definition.jsonPath();
PostgreSQLJsonValueFunction.appendJsonValue(
sqlAppender,
new ClauseLevelDocumentExpression( clauseLevel ),
new QueryLiteral<>(
jsonPath,
walker.getSessionFactory().getTypeConfiguration().getBasicTypeForJavaType( String.class )
),
true,
definition.type(),
null,
walker
);
sqlAppender.appendSql( ' ' );
sqlAppender.appendSql( definition.name() );
}
@Override
protected void renderJsonQueryColumnDefinition(SqlAppender sqlAppender, JsonTableQueryColumnDefinition definition, int clauseLevel, SqlAstTranslator<?> walker) {
// jsonb_path_query functions error by default
if ( definition.errorBehavior() != null && definition.errorBehavior() != JsonQueryErrorBehavior.ERROR ) {
throw new QueryException( "Can't emulate on error clause on PostgreSQL" );
}
if ( definition.emptyBehavior() != null && definition.emptyBehavior() != JsonQueryEmptyBehavior.NULL ) {
throw new QueryException( "Can't emulate on empty clause on PostgreSQL" );
}
final String jsonPath = definition.jsonPath() == null
? "$." + definition.name()
: definition.jsonPath();
PostgreSQLJsonQueryFunction.appendJsonQuery(
sqlAppender,
new ClauseLevelDocumentExpression( clauseLevel ),
new QueryLiteral<>(
jsonPath,
walker.getSessionFactory().getTypeConfiguration().getBasicTypeForJavaType( String.class )
),
true,
definition.wrapMode(),
null,
walker
);
sqlAppender.appendSql( ' ' );
sqlAppender.appendSql( definition.name() );
}
@Override
protected void renderJsonExistsColumnDefinition(SqlAppender sqlAppender, JsonTableExistsColumnDefinition definition, int clauseLevel, SqlAstTranslator<?> walker) {
// jsonb_path_exists errors by default
if ( definition.errorBehavior() != null && definition.errorBehavior() != JsonExistsErrorBehavior.ERROR ) {
throw new QueryException( "Can't emulate on error clause on PostgreSQL" );
}
final String jsonPath = definition.jsonPath() == null
? "$." + definition.name()
: definition.jsonPath();
PostgreSQLJsonExistsFunction.appendJsonExists(
sqlAppender,
walker,
new ClauseLevelDocumentExpression( clauseLevel ),
new QueryLiteral<>(
jsonPath,
walker.getSessionFactory().getTypeConfiguration().getBasicTypeForJavaType( String.class )
),
null
);
sqlAppender.appendSql( ' ' );
sqlAppender.appendSql( definition.name() );
}
protected static
|
PostgreSQLJsonTableFunction
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/transport/TripleIsolationExecutorSupportFactory.java
|
{
"start": 1024,
"end": 1260
}
|
class ____ implements IsolationExecutorSupportFactory {
@Override
public ExecutorSupport createIsolationExecutorSupport(URL url) {
return new TripleIsolationExecutorSupport(url);
}
}
|
TripleIsolationExecutorSupportFactory
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/javadoc/InvalidLinkTest.java
|
{
"start": 2897,
"end": 3239
}
|
interface ____<T> {
// BUG: Diagnostic contains: erasure
/** {@link #bar(Test<String>)} */
void foo(Test<String> foo);
}
""")
.doTest();
}
@Test
public void validLinks() {
helper
.addSourceLines(
"Test.java",
"""
import java.util.List;
|
Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/onetoone/Computer.java
|
{
"start": 519,
"end": 1542
}
|
class ____ {
private ComputerPk id;
private String cpu;
private SerialNumber serial;
@OneToOne(cascade = {CascadeType.PERSIST})
@JoinColumn(name = "serialbrand", referencedColumnName = "brand")
@JoinColumn(name = "serialmodel", referencedColumnName = "model")
public SerialNumber getSerial() {
return serial;
}
public void setSerial(SerialNumber serial) {
this.serial = serial;
}
public boolean equals(Object o) {
if ( this == o ) return true;
if ( !(o instanceof Computer computer) ) return false;
return id.equals( computer.id );
}
public int hashCode() {
return id.hashCode();
}
@EmbeddedId
@AttributeOverrides({
@AttributeOverride(name = "brand", column = @Column(name = "computer_brand")),
@AttributeOverride(name = "model", column = @Column(name = "computer_model"))
})
public ComputerPk getId() {
return id;
}
public void setId(ComputerPk id) {
this.id = id;
}
public String getCpu() {
return cpu;
}
public void setCpu(String cpu) {
this.cpu = cpu;
}
}
|
Computer
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/stream/StreamExecMiniBatchAssigner.java
|
{
"start": 3345,
"end": 6481
}
|
class ____ extends ExecNodeBase<RowData>
implements StreamExecNode<RowData>, SingleTransformationTranslator<RowData> {
public static final String MINI_BATCH_ASSIGNER_TRANSFORMATION = "mini-batch-assigner";
public static final String FIELD_NAME_MINI_BATCH_INTERVAL = "miniBatchInterval";
@JsonProperty(FIELD_NAME_MINI_BATCH_INTERVAL)
private final MiniBatchInterval miniBatchInterval;
public StreamExecMiniBatchAssigner(
ReadableConfig tableConfig,
MiniBatchInterval miniBatchInterval,
InputProperty inputProperty,
RowType outputType,
String description) {
this(
ExecNodeContext.newNodeId(),
ExecNodeContext.newContext(StreamExecMiniBatchAssigner.class),
ExecNodeContext.newPersistedConfig(StreamExecMiniBatchAssigner.class, tableConfig),
miniBatchInterval,
Collections.singletonList(inputProperty),
outputType,
description);
}
@JsonCreator
public StreamExecMiniBatchAssigner(
@JsonProperty(FIELD_NAME_ID) int id,
@JsonProperty(FIELD_NAME_TYPE) ExecNodeContext context,
@JsonProperty(FIELD_NAME_CONFIGURATION) ReadableConfig persistedConfig,
@JsonProperty(FIELD_NAME_MINI_BATCH_INTERVAL) MiniBatchInterval miniBatchInterval,
@JsonProperty(FIELD_NAME_INPUT_PROPERTIES) List<InputProperty> inputProperties,
@JsonProperty(FIELD_NAME_OUTPUT_TYPE) RowType outputType,
@JsonProperty(FIELD_NAME_DESCRIPTION) String description) {
super(id, context, persistedConfig, inputProperties, outputType, description);
this.miniBatchInterval = checkNotNull(miniBatchInterval);
}
@SuppressWarnings("unchecked")
@Override
protected Transformation<RowData> translateToPlanInternal(
PlannerBase planner, ExecNodeConfig config) {
final Transformation<RowData> inputTransform =
(Transformation<RowData>) getInputEdges().get(0).translateToPlan(planner);
final OneInputStreamOperator<RowData, RowData> operator;
if (miniBatchInterval.getMode() == MiniBatchMode.ProcTime) {
operator = new ProcTimeMiniBatchAssignerOperator(miniBatchInterval.getInterval());
} else if (miniBatchInterval.getMode() == MiniBatchMode.RowTime) {
operator = new RowTimeMiniBatchAssginerOperator(miniBatchInterval.getInterval());
} else {
throw new TableException(
String.format(
"MiniBatchAssigner shouldn't be in %s mode this is a bug, please file an issue.",
miniBatchInterval.getMode()));
}
return ExecNodeUtil.createOneInputTransformation(
inputTransform,
createTransformationMeta(MINI_BATCH_ASSIGNER_TRANSFORMATION, config),
operator,
InternalTypeInfo.of(getOutputType()),
inputTransform.getParallelism(),
false);
}
}
|
StreamExecMiniBatchAssigner
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/util/TraversableOnceException.java
|
{
"start": 1055,
"end": 1423
}
|
class ____ extends RuntimeException {
private static final long serialVersionUID = 7636881584773577290L;
/** Creates a new exception with a default message. */
public TraversableOnceException() {
super(
"The Iterable can be iterated over only once. Only the first call to 'iterator()' will succeed.");
}
}
|
TraversableOnceException
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/type/typedef/NamedEnumUserType.java
|
{
"start": 332,
"end": 631
}
|
class ____<T extends Enum<T>> extends EnumType<T> {
private static final long serialVersionUID = -4176945793071035928L;
@Override
public void setParameterValues(Properties parameters) {
parameters.setProperty(EnumType.NAMED, "true");
super.setParameterValues(parameters);
}
}
|
NamedEnumUserType
|
java
|
apache__camel
|
test-infra/camel-test-infra-couchdb/src/test/java/org/apache/camel/test/infra/couchdb/services/CouchDbServiceFactory.java
|
{
"start": 2746,
"end": 2847
}
|
class ____ extends CouchDbRemoteInfraService implements CouchDbService {
}
}
|
CouchDbRemoteTestService
|
java
|
spring-projects__spring-boot
|
module/spring-boot-graphql/src/main/java/org/springframework/boot/graphql/autoconfigure/rsocket/GraphQlRSocketAutoConfiguration.java
|
{
"start": 4599,
"end": 4834
}
|
class ____ extends AnyNestedCondition {
NoJacksonOrJackson2Preferred() {
super(ConfigurationPhase.PARSE_CONFIGURATION);
}
@ConditionalOnMissingClass("tools.jackson.databind.json.JsonMapper")
static
|
NoJacksonOrJackson2Preferred
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/GrpcProtocol.java
|
{
"start": 904,
"end": 1045
}
|
class ____ extends TripleProtocol {
public GrpcProtocol(FrameworkModel frameworkModel) {
super(frameworkModel);
}
}
|
GrpcProtocol
|
java
|
elastic__elasticsearch
|
x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsShardAction.java
|
{
"start": 3081,
"end": 4633
}
|
class ____ extends SingleShardRequest<Request> {
private final ShardId shardId;
private final boolean waitForAdvance;
private final long checkpoint;
private final TimeValue timeout;
Request(ShardId shardId, boolean waitForAdvance, long checkpoint, TimeValue timeout) {
super(shardId.getIndexName());
this.shardId = shardId;
this.waitForAdvance = waitForAdvance;
this.checkpoint = checkpoint;
this.timeout = timeout;
}
Request(StreamInput in) throws IOException {
super(in);
this.shardId = new ShardId(in);
this.waitForAdvance = in.readBoolean();
this.checkpoint = in.readLong();
this.timeout = in.readTimeValue();
}
@Override
public ActionRequestValidationException validate() {
return null;
}
public ShardId getShardId() {
return shardId;
}
public TimeValue timeout() {
return timeout;
}
public boolean waitForAdvance() {
return waitForAdvance;
}
public long checkpoint() {
return checkpoint;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
shardId.writeTo(out);
out.writeBoolean(waitForAdvance);
out.writeLong(checkpoint);
out.writeTimeValue(timeout);
}
}
public static
|
Request
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/impl/DummyLifecycleStrategy.java
|
{
"start": 1201,
"end": 3225
}
|
class ____ extends LifecycleStrategySupport {
private final List<String> events = new ArrayList<>();
@Override
public void onContextStarting(CamelContext context) {
events.add("onContextStarting");
}
@Override
public void onContextStopping(CamelContext context) {
events.add("onContextStopping");
}
@Override
public void onComponentAdd(String name, Component component) {
events.add("onComponentAdd");
}
@Override
public void onComponentRemove(String name, Component component) {
events.add("onComponentRemove");
}
@Override
public void onEndpointAdd(Endpoint endpoint) {
events.add("onEndpointAdd");
}
@Override
public void onEndpointRemove(Endpoint endpoint) {
events.add("onEndpointRemove");
}
@Override
public void onServiceAdd(CamelContext context, Service service, org.apache.camel.Route route) {
events.add("onServiceAdd");
}
@Override
public void onServiceRemove(CamelContext context, Service service, org.apache.camel.Route route) {
events.add("onServiceRemove");
}
@Override
public void onRoutesAdd(Collection<org.apache.camel.Route> routes) {
events.add("onRoutesAdd");
}
@Override
public void onRoutesRemove(Collection<org.apache.camel.Route> routes) {
events.add("onRoutesRemove");
}
@Override
public void onRouteContextCreate(Route route) {
events.add("onRouteContextCreate");
}
@Override
public void onThreadPoolAdd(
CamelContext camelContext, ThreadPoolExecutor threadPool, String id, String sourceId, String routeId,
String threadPoolProfileId) {
events.add("onThreadPoolAdd");
}
@Override
public void onThreadPoolRemove(CamelContext camelContext, ThreadPoolExecutor threadPool) {
events.add("onThreadPoolRemove");
}
public List<String> getEvents() {
return events;
}
}
|
DummyLifecycleStrategy
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.