language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
micronaut-projects__micronaut-core
|
http-server-netty/src/main/java/io/micronaut/http/server/netty/NettyHttpServer.java
|
{
"start": 5775,
"end": 42998
}
|
class ____ implements NettyEmbeddedServer {
@SuppressWarnings("WeakerAccess")
public static final String OUTBOUND_KEY = "-outbound-";
private static final Logger LOG = LoggerFactory.getLogger(NettyHttpServer.class);
private final NettyEmbeddedServices nettyEmbeddedServices;
private final NettyHttpServerConfiguration serverConfiguration;
private final ServerSslConfiguration sslConfiguration;
private final Environment environment;
private final RoutingInBoundHandler routingHandler;
private final boolean isDefault;
private final ApplicationContext applicationContext;
private final AtomicBoolean running = new AtomicBoolean(false);
private final ChannelGroup webSocketSessions = new DefaultChannelGroup(GlobalEventExecutor.INSTANCE);
private final HttpHostResolver hostResolver;
private boolean shutdownWorker = false;
private boolean shutdownParent = false;
private EventLoopGroup workerGroup;
private EventLoopGroup parentGroup;
private final Collection<ChannelPipelineListener> pipelineListeners = new ArrayList<>(2);
@Nullable
private volatile List<Listener> activeListeners = null;
private final List<NettyHttpServerConfiguration.NettyListenerConfiguration> listenerConfigurations;
private final CompositeNettyServerCustomizer rootCustomizer = new CompositeNettyServerCustomizer();
/**
* @param serverConfiguration The Netty HTTP server configuration
* @param nettyEmbeddedServices The embedded server context
* @param isDefault Is this the default server
*/
@SuppressWarnings("ParameterNumber")
public NettyHttpServer(
NettyHttpServerConfiguration serverConfiguration,
NettyEmbeddedServices nettyEmbeddedServices,
boolean isDefault) {
this.isDefault = isDefault;
this.serverConfiguration = serverConfiguration;
this.nettyEmbeddedServices = nettyEmbeddedServices;
Optional<File> location = this.serverConfiguration.getMultipart().getLocation();
location.ifPresent(dir -> DiskFileUpload.baseDirectory = dir.getAbsolutePath());
this.applicationContext = nettyEmbeddedServices.getApplicationContext();
this.environment = applicationContext.getEnvironment();
final ServerSslBuilder serverSslBuilder = nettyEmbeddedServices.getServerSslBuilder();
if (serverSslBuilder != null) {
this.sslConfiguration = serverSslBuilder.getSslConfiguration();
} else {
this.sslConfiguration = null;
}
ApplicationEventPublisher<HttpRequestTerminatedEvent> httpRequestTerminatedEventPublisher = nettyEmbeddedServices
.getEventPublisher(HttpRequestTerminatedEvent.class);
ApplicationEventPublisher<HttpRequestReceivedEvent> httpRequestReceivedEventPublisher = nettyEmbeddedServices
.getEventPublisher(HttpRequestReceivedEvent.class);
final Supplier<ExecutorService> ioExecutor = SupplierUtil.memoized(() ->
nettyEmbeddedServices.getExecutorSelector()
.select(TaskExecutors.BLOCKING).orElse(null)
);
this.routingHandler = new RoutingInBoundHandler(
serverConfiguration,
nettyEmbeddedServices,
ioExecutor,
httpRequestTerminatedEventPublisher,
httpRequestReceivedEventPublisher,
applicationContext.getConversionService()
);
this.hostResolver = new DefaultHttpHostResolver(serverConfiguration, () -> NettyHttpServer.this);
this.listenerConfigurations = buildListenerConfigurations();
}
private List<NettyHttpServerConfiguration.NettyListenerConfiguration> buildListenerConfigurations() {
List<NettyHttpServerConfiguration.NettyListenerConfiguration> explicit = serverConfiguration.getListeners();
if (explicit != null) {
if (explicit.isEmpty()) {
throw new IllegalArgumentException("When configuring listeners explicitly, must specify at least one");
}
return explicit;
} else {
String configuredHost = serverConfiguration.getHost().orElse(null);
List<NettyHttpServerConfiguration.NettyListenerConfiguration> implicit = new ArrayList<>(2);
final ServerSslBuilder serverSslBuilder = nettyEmbeddedServices.getServerSslBuilder();
if (serverSslBuilder != null && this.sslConfiguration.isEnabled()) {
implicit.add(NettyHttpServerConfiguration.NettyListenerConfiguration.createTcp(configuredHost, sslConfiguration.getPort(), true, sslConfiguration.getKeyName(), sslConfiguration.getTrustName()));
} else {
implicit.add(NettyHttpServerConfiguration.NettyListenerConfiguration.createTcp(configuredHost, getHttpPort(serverConfiguration), false, null, null));
}
if (isDefault) {
if (serverConfiguration.isDualProtocol()) {
implicit.add(NettyHttpServerConfiguration.NettyListenerConfiguration.createTcp(configuredHost, getHttpPort(serverConfiguration), false, null, null));
}
final Router router = this.nettyEmbeddedServices.getRouter();
final Set<Integer> exposedPorts = router.getExposedPorts();
for (int exposedPort : exposedPorts) {
if (exposedPort == -1 || exposedPort == 0 || implicit.stream().noneMatch(cfg -> cfg.getPort() == exposedPort)) {
NettyHttpServerConfiguration.NettyListenerConfiguration mgmt = NettyHttpServerConfiguration.NettyListenerConfiguration.createTcp(configuredHost, exposedPort, false, null, null);
mgmt.setExposeDefaultRoutes(false);
mgmt.setSupportGracefulShutdown(false);
implicit.add(mgmt);
}
}
}
return implicit;
}
}
/**
* Get the configured http port otherwise will default the value depending on the env.
*
* @param serverConfiguration configuration object for the server
* @return http port
*/
private int getHttpPort(NettyHttpServerConfiguration serverConfiguration) {
Integer configPort = serverConfiguration.getPort().orElse(null);
return getHttpPort(configPort);
}
private int getHttpPort(Integer configPort) {
if (configPort != null) {
return configPort;
} else {
if (environment.getActiveNames().contains(Environment.TEST)) {
return -1;
} else {
return HttpServerConfiguration.DEFAULT_PORT;
}
}
}
@Override
public boolean isKeepAlive() {
return false;
}
/**
* @return The configuration for the server
*/
@SuppressWarnings("WeakerAccess")
public NettyHttpServerConfiguration getServerConfiguration() {
return serverConfiguration;
}
@Override
public boolean isRunning() {
return running.get();
}
@Override
@NonNull
public synchronized NettyEmbeddedServer start() {
if (!isRunning()) {
if (isDefault && !applicationContext.isRunning()) {
applicationContext.start();
}
//suppress unused
//done here to prevent a blocking service loader in the event loop
EventLoopGroupConfiguration workerConfig = resolveWorkerConfiguration();
workerGroup = createWorkerEventLoopGroup(workerConfig);
parentGroup = createParentEventLoopGroup();
Supplier<ServerBootstrap> serverBootstrap = SupplierUtil.memoized(() -> {
ServerBootstrap sb = createServerBootstrap();
processOptions(serverConfiguration.getOptions(), sb::option);
processOptions(serverConfiguration.getChildOptions(), sb::childOption);
sb.group(parentGroup, workerGroup);
return sb;
});
Supplier<Bootstrap> udpBootstrap = SupplierUtil.memoized(() -> {
Bootstrap ub = new Bootstrap();
processOptions(serverConfiguration.getOptions(), ub::option);
ub.group(workerGroup);
return ub;
});
Supplier<Bootstrap> acceptedBootstrap = SupplierUtil.memoized(() -> {
Bootstrap ub = new Bootstrap();
processOptions(serverConfiguration.getChildOptions(), ub::option);
ub.group(workerGroup);
return ub;
});
List<Listener> listeners = new ArrayList<>();
for (NettyHttpServerConfiguration.NettyListenerConfiguration listenerConfiguration : listenerConfigurations) {
listeners.add(bind(serverBootstrap, udpBootstrap, acceptedBootstrap, listenerConfiguration, workerConfig));
}
this.activeListeners = Collections.unmodifiableList(listeners);
if (isDefault) {
final Router router = this.nettyEmbeddedServices.getRouter();
final Set<Integer> exposedPorts = router.getExposedPorts();
if (CollectionUtils.isNotEmpty(exposedPorts)) {
router.applyDefaultPorts(listeners.stream()
.filter(l -> l.config.isExposeDefaultRoutes())
.map(l -> l.serverChannel.localAddress())
.filter(InetSocketAddress.class::isInstance)
.map(addr -> ((InetSocketAddress) addr).getPort())
.toList());
}
}
fireStartupEvents();
running.set(true);
}
return this;
}
private EventLoopGroupConfiguration resolveWorkerConfiguration() {
EventLoopGroupConfiguration workerConfig = serverConfiguration.getWorker();
if (workerConfig == null) {
workerConfig = nettyEmbeddedServices.getEventLoopGroupRegistry()
.getEventLoopGroupConfiguration(EventLoopGroupConfiguration.DEFAULT).orElse(null);
} else {
final String eventLoopGroupName = workerConfig.getName();
if (!EventLoopGroupConfiguration.DEFAULT.equals(eventLoopGroupName)) {
workerConfig = nettyEmbeddedServices.getEventLoopGroupRegistry()
.getEventLoopGroupConfiguration(eventLoopGroupName).orElse(workerConfig);
}
}
return workerConfig;
}
@Override
@NonNull
public synchronized NettyEmbeddedServer stop() {
return stop(false);
}
@Override
@NonNull
public NettyEmbeddedServer stopServerOnly() {
return stop(true);
}
@NonNull
private NettyEmbeddedServer stop(boolean stopServerOnly) {
if (isRunning() && workerGroup != null) {
if (running.compareAndSet(true, false)) {
stopInternal(stopServerOnly);
}
}
return this;
}
@Override
public void register(@NonNull NettyServerCustomizer customizer) {
Objects.requireNonNull(customizer, "customizer");
rootCustomizer.add(customizer);
}
@Override
@SuppressWarnings("InnerAssignmentCheck")
public int getPort() {
List<Listener> listenersLocal = this.activeListeners;
// flags for determining failure reason
boolean hasRandom = false;
boolean hasUnix = false;
if (listenersLocal == null) {
// not started, try to infer from config
for (NettyHttpServerConfiguration.NettyListenerConfiguration listenerCfg : listenerConfigurations) {
switch (listenerCfg.getFamily()) {
case TCP, QUIC -> {
if (listenerCfg.getPort() == -1) {
hasRandom = true;
} else {
// found one \o/
return listenerCfg.getPort();
}
}
case UNIX -> hasUnix = true;
default -> {
// unknown
}
}
}
} else {
// started already, just use the localAddress() of each channel
for (Listener listener : listenersLocal) {
SocketAddress localAddress = listener.serverChannel.localAddress();
if (localAddress instanceof InetSocketAddress address) {
// found one \o/
return address.getPort();
} else {
hasUnix = true;
}
}
}
// no eligible port
if (hasRandom) {
throw new UnsupportedOperationException("Retrieving the port from the server before it has started is not supported when binding to a random port");
} else if (hasUnix) {
throw new UnsupportedOperationException("Retrieving the port from the server is not supported for unix domain sockets");
} else {
throw new UnsupportedOperationException("Could not retrieve server port");
}
}
@Override
public String getHost() {
return serverConfiguration.getHost()
.orElseGet(() -> Optional.ofNullable(CachedEnvironment.getenv(Environment.HOSTNAME)).orElse(SocketUtils.LOCALHOST));
}
@Override
public String getScheme() {
return (sslConfiguration != null && sslConfiguration.isEnabled())
? io.micronaut.http.HttpRequest.SCHEME_HTTPS
: io.micronaut.http.HttpRequest.SCHEME_HTTP;
}
@Override
public URL getURL() {
try {
return new URL(getScheme() + "://" + getHost() + ':' + getPort());
} catch (MalformedURLException e) {
throw new ConfigurationException("Invalid server URL: " + e.getMessage(), e);
}
}
@Override
public URI getURI() {
try {
return new URI(getScheme() + "://" + getHost() + ':' + getPort());
} catch (URISyntaxException e) {
throw new ConfigurationException("Invalid server URL: " + e.getMessage(), e);
}
}
@Override
public URI getContextURI() {
try {
String contextPath = serverConfiguration.getContextPath();
if (contextPath == null) {
return getURI();
}
return new URI(getScheme() + "://" + getHost() + ':' + getPort() + contextPath);
} catch (URISyntaxException e) {
throw new ConfigurationException("Invalid server URL: " + e.getMessage(), e);
}
}
@Override
public ApplicationContext getApplicationContext() {
return applicationContext;
}
@Override
public ApplicationConfiguration getApplicationConfiguration() {
return serverConfiguration.getApplicationConfiguration();
}
@Override
public final Set<Integer> getBoundPorts() {
List<Listener> listeners = activeListeners;
if (listeners == null) {
return Collections.emptySet();
}
return Collections.unmodifiableSet(listeners.stream()
.map(l -> l.serverChannel.localAddress())
.filter(InetSocketAddress.class::isInstance)
.map(addr -> ((InetSocketAddress) addr).getPort())
.collect(Collectors.<Integer, Set<Integer>>toCollection(LinkedHashSet::new)));
}
/**
* @return The parent event loop group
*/
@SuppressWarnings("WeakerAccess")
protected EventLoopGroup createParentEventLoopGroup() {
final NettyHttpServerConfiguration.Parent parent = serverConfiguration.getParent();
return nettyEmbeddedServices.getEventLoopGroupRegistry()
.getEventLoopGroup(parent != null ? parent.getName() : NettyHttpServerConfiguration.Parent.NAME)
.orElseGet(() -> {
final EventLoopGroup newGroup = newEventLoopGroup(parent);
shutdownParent = true;
return newGroup;
});
}
/**
* @param workerConfig The worker configuration
* @return The worker event loop group
*/
@SuppressWarnings("WeakerAccess")
protected EventLoopGroup createWorkerEventLoopGroup(@Nullable EventLoopGroupConfiguration workerConfig) {
String configName = workerConfig != null ? workerConfig.getName() : EventLoopGroupConfiguration.DEFAULT;
return nettyEmbeddedServices.getEventLoopGroupRegistry().getEventLoopGroup(configName)
.orElseGet(() -> {
LOG.warn("The configuration for 'micronaut.server.netty.worker.{}' is deprecated. Use 'micronaut.netty.event-loops.default' configuration instead.", configName);
final EventLoopGroup newGroup = newEventLoopGroup(workerConfig);
shutdownWorker = true;
return newGroup;
});
}
/**
* @return The Netty server bootstrap
*/
@SuppressWarnings("WeakerAccess")
protected ServerBootstrap createServerBootstrap() {
return new ServerBootstrap();
}
private Listener bind(Supplier<ServerBootstrap> serverBootstrap, Supplier<Bootstrap> udpBootstrap, Supplier<Bootstrap> acceptedBootstrap, NettyHttpServerConfiguration.NettyListenerConfiguration cfg, EventLoopGroupConfiguration workerConfig) {
logBind(cfg);
try {
Integer fd = cfg.getFd();
Listener listener;
if (cfg.getFamily() == NettyHttpServerConfiguration.NettyListenerConfiguration.Family.QUIC) {
ChannelFuture future;
listener = new UdpListener(cfg);
Bootstrap listenerBootstrap = udpBootstrap.get().clone()
.handler(listener)
.channelFactory(() -> {
if (fd != null) {
return nettyEmbeddedServices.getChannelInstance(NettyChannelType.DATAGRAM_SOCKET, workerConfig, null, fd);
} else {
return nettyEmbeddedServices.getChannelInstance(NettyChannelType.DATAGRAM_SOCKET, workerConfig);
}
});
int port = cfg.getPort();
if (port == -1) {
port = 0;
}
if (cfg.isBind()) {
if (cfg.getHost() == null) {
future = listenerBootstrap.bind(port);
} else {
future = listenerBootstrap.bind(cfg.getHost(), port);
}
} else {
future = listenerBootstrap.register();
}
future.syncUninterruptibly();
} else {
listener = new Listener(cfg);
Channel parent;
if (cfg.isServerSocket()) {
ChannelFuture future;
ServerBootstrap listenerBootstrap = serverBootstrap.get().clone()
// this initializer runs before the actual bind operation, so we can be sure
// setServerChannel has been called by the time bind runs.
.handler(new ChannelInitializer<Channel>() {
@Override
protected void initChannel(@NonNull Channel ch) {
listener.setServerChannel(ch);
}
})
.childHandler(listener);
switch (cfg.getFamily()) {
case TCP:
listenerBootstrap.channelFactory(() -> {
if (fd != null) {
return (ServerSocketChannel) nettyEmbeddedServices.getChannelInstance(NettyChannelType.SERVER_SOCKET, workerConfig, null, fd);
} else {
return (ServerSocketChannel) nettyEmbeddedServices.getChannelInstance(NettyChannelType.SERVER_SOCKET, workerConfig);
}
});
int port = cfg.getPort();
if (port == -1) {
port = 0;
}
if (cfg.isBind()) {
if (cfg.getHost() == null) {
future = listenerBootstrap.bind(port);
} else {
future = listenerBootstrap.bind(cfg.getHost(), port);
}
} else {
future = listenerBootstrap.register();
}
break;
case UNIX:
listenerBootstrap.channelFactory(() -> {
if (fd != null) {
return (ServerChannel) nettyEmbeddedServices.getChannelInstance(NettyChannelType.DOMAIN_SERVER_SOCKET, workerConfig, null, fd);
} else {
return (ServerChannel) nettyEmbeddedServices.getChannelInstance(NettyChannelType.DOMAIN_SERVER_SOCKET, workerConfig);
}
});
if (cfg.isBind()) {
if (((IoEventLoopGroup) listenerBootstrap.config().group()).isIoType(NioIoHandler.class)) {
// jdk UnixDomainSocketAddress
future = listenerBootstrap.bind(UnixDomainSocketAddress.of(cfg.getPath()));
} else {
// netty DomainSocketAddress (epoll/kqueue)
future = listenerBootstrap.bind(DomainSocketHolder.makeDomainSocketAddress(cfg.getPath()));
}
} else {
future = listenerBootstrap.register();
}
break;
default:
throw new UnsupportedOperationException("Unsupported family: " + cfg.getFamily());
}
future.syncUninterruptibly();
parent = future.channel();
} else {
parent = null;
}
Integer acceptedFd = cfg.getAcceptedFd();
if (acceptedFd != null) {
ChannelFactory<Channel> cf = switch (cfg.getFamily()) {
case TCP ->
() -> nettyEmbeddedServices.getChannelInstance(NettyChannelType.CLIENT_SOCKET, workerConfig, parent, acceptedFd);
case UNIX ->
() -> nettyEmbeddedServices.getChannelInstance(NettyChannelType.DOMAIN_SOCKET, workerConfig, parent, acceptedFd);
default ->
throw new UnsupportedOperationException("Unsupported family: " + cfg.getFamily());
};
if (parent == null) {
// if isServerSocket is false, use our connection channel as the "server channel".
ChannelFactory<Channel> innerFactory = cf;
cf = () -> {
Channel ch = innerFactory.newChannel();
listener.setServerChannel(ch);
return ch;
};
}
acceptedBootstrap.get().clone()
.handler(listener)
.channelFactory(cf)
.register()
.syncUninterruptibly();
}
}
return listener;
} catch (Exception e) {
// syncUninterruptibly will rethrow a checked BindException as unchecked, so this value can be true
@SuppressWarnings("ConstantConditions")
final boolean isBindError = e instanceof BindException;
if (LOG.isErrorEnabled()) {
//noinspection ConstantConditions
if (isBindError) {
LOG.error("Unable to start server. Port {} already in use.", displayAddress(cfg));
} else {
LOG.error("Error starting Micronaut server: {}", e.getMessage(), e);
}
}
stopInternal(false);
throw new ServerStartupException("Unable to start Micronaut server on " + displayAddress(cfg), e);
}
}
private void logBind(NettyHttpServerConfiguration.NettyListenerConfiguration cfg) {
Optional<String> applicationName = serverConfiguration.getApplicationConfiguration().getName();
if (applicationName.isPresent()) {
if (LOG.isTraceEnabled()) {
LOG.trace("Binding {} server to {}", applicationName.get(), displayAddress(cfg));
}
} else {
if (LOG.isTraceEnabled()) {
LOG.trace("Binding server to {}", displayAddress(cfg));
}
}
}
private static String displayAddress(NettyHttpServerConfiguration.NettyListenerConfiguration cfg) {
return switch (cfg.getFamily()) {
case TCP, QUIC -> cfg.getHost() == null ? "*:" + cfg.getPort() : cfg.getHost() + ":" + cfg.getPort();
case UNIX -> {
if (cfg.getPath() == null) {
yield cfg.getFd() == null ? "unix:accepted-fd:" + cfg.getAcceptedFd() : "unix:fd:" + cfg.getFd();
} else {
if (cfg.getPath().startsWith("\0")) {
yield "unix:@" + cfg.getPath().substring(1);
} else {
yield "unix:" + cfg.getPath();
}
}
}
};
}
public static <T> CompletionStage<T> toCompletionStage(Future<T> future) {
CompletableFuture<T> cf = new CompletableFuture<>();
if (future.isDone()) {
// addListener can fail when the underlying event loop is already shut down
if (future.isSuccess()) {
cf.complete(future.getNow());
} else {
cf.completeExceptionally(future.cause());
}
} else {
future.addListener((GenericFutureListener<Future<T>>) f -> {
if (f.isSuccess()) {
cf.complete(f.getNow());
} else {
cf.completeExceptionally(f.cause());
}
});
}
return cf;
}
private void fireStartupEvents() {
applicationContext.getEventPublisher(ServerStartupEvent.class)
.publishEvent(new ServerStartupEvent(this));
}
private void logShutdownErrorIfNecessary(Future<?> future) {
if (!future.isSuccess() && LOG.isWarnEnabled()) {
Throwable e = future.cause();
LOG.warn("Error stopping Micronaut server: {}", e.getMessage(), e);
}
}
private void stopInternal(boolean stopServerOnly) {
List<Future<?>> futures = new ArrayList<>(2);
try {
if (shutdownParent) {
EventLoopGroupConfiguration parent = serverConfiguration.getParent();
if (parent != null) {
long quietPeriod = parent.getShutdownQuietPeriod().toMillis();
long timeout = parent.getShutdownTimeout().toMillis();
futures.add(
parentGroup.shutdownGracefully(quietPeriod, timeout, TimeUnit.MILLISECONDS)
.addListener(this::logShutdownErrorIfNecessary)
);
} else {
futures.add(
parentGroup.shutdownGracefully()
.addListener(this::logShutdownErrorIfNecessary)
);
}
}
if (shutdownWorker) {
futures.add(
workerGroup.shutdownGracefully()
.addListener(this::logShutdownErrorIfNecessary)
);
}
webSocketSessions.close();
applicationContext.getEventPublisher(ServerShutdownEvent.class).publishEvent(new ServerShutdownEvent(this));
if (isDefault && applicationContext.isRunning() && !stopServerOnly) {
applicationContext.stop();
}
serverConfiguration.getMultipart().getLocation().ifPresent(dir -> DiskFileUpload.baseDirectory = null);
List<Listener> activeListeners = this.activeListeners;
if (activeListeners != null) {
for (Listener listener : activeListeners) {
listener.clean();
}
this.activeListeners = null;
}
// If we are only stopping the server, we need to wait for the futures to complete otherwise
// when CRaC is trying to take a snapshot it will capture objects in flow of shutting down.
if (stopServerOnly) {
if (LOG.isDebugEnabled()) {
LOG.debug("Waiting for graceful shutdown to complete");
}
for (Future<?> future : futures) {
future.awaitUninterruptibly();
}
if (LOG.isDebugEnabled()) {
LOG.debug("Done...");
}
}
} catch (Throwable e) {
if (LOG.isErrorEnabled()) {
LOG.error("Error stopping Micronaut server: {}", e.getMessage(), e);
}
}
}
private EventLoopGroup newEventLoopGroup(EventLoopGroupConfiguration config) {
if (config != null) {
ExecutorService executorService = config.getExecutorName()
.flatMap(name -> applicationContext.findBean(ExecutorService.class, Qualifiers.byName(name))).orElse(null);
if (executorService != null) {
return nettyEmbeddedServices.createEventLoopGroup(
DefaultEventLoopGroupRegistry.numThreads(config),
executorService,
config.getIoRatio().orElse(null)
);
} else {
return nettyEmbeddedServices.createEventLoopGroup(
config
);
}
} else {
return nettyEmbeddedServices.createEventLoopGroup(
new DefaultEventLoopGroupConfiguration()
);
}
}
private void processOptions(Map<ChannelOption, Object> options, BiConsumer<ChannelOption, Object> biConsumer) {
final ChannelOptionFactory channelOptionFactory = nettyEmbeddedServices.getChannelOptionFactory();
options.forEach((option, value) -> biConsumer.accept(option,
channelOptionFactory.convertValue(option, value, environment)));
}
@Override
public void addChannel(Channel channel) {
this.webSocketSessions.add(channel);
}
@Override
public void removeChannel(Channel channel) {
this.webSocketSessions.remove(channel);
}
@Override
public ChannelGroup getChannelGroup() {
return this.webSocketSessions;
}
/**
* @return {@link io.micronaut.http.server.netty.NettyHttpServer} which implements {@link WebSocketSessionRepository}
*/
public WebSocketSessionRepository getWebSocketSessionRepository() {
return this;
}
@Override
public boolean isClientChannel() {
return false;
}
@Override
public void doOnConnect(@NonNull ChannelPipelineListener listener) {
this.pipelineListeners.add(Objects.requireNonNull(listener, "The listener cannot be null"));
}
@Override
public Set<String> getObservedConfigurationPrefixes() {
return Set.of(HttpServerConfiguration.PREFIX, SslConfiguration.PREFIX);
}
@Override
public CompletionStage<?> shutdownGracefully() {
List<Listener> listeners = activeListeners;
if (listeners == null) {
return CompletableFuture.completedStage(null);
}
return GracefulShutdownCapable.shutdownAll(listeners.stream());
}
@Override
public OptionalLong reportActiveTasks() {
List<Listener> listeners = this.activeListeners;
if (listeners == null) {
return OptionalLong.empty();
}
return GracefulShutdownCapable.combineActiveTasks(listeners);
}
@Override
public void onApplicationEvent(RefreshEvent event) {
// if anything under HttpServerConfiguration.PREFIX changes re-build
// the NettyHttpServerInitializer in the server bootstrap to apply changes
// this will ensure re-configuration to HTTPS settings, read-timeouts, logging etc. apply
// configuration properties are auto-refreshed so will be visible automatically
List<Listener> listeners = activeListeners;
if (listeners != null) {
for (Listener listener : listeners) {
listener.refresh();
}
}
}
final void triggerPipelineListeners(ChannelPipeline pipeline) {
for (ChannelPipelineListener pipelineListener : pipelineListeners) {
pipelineListener.onConnect(pipeline);
}
}
private HttpPipelineBuilder createPipelineBuilder(NettyServerCustomizer customizer, boolean quic) {
Objects.requireNonNull(customizer, "customizer");
return new HttpPipelineBuilder(NettyHttpServer.this, nettyEmbeddedServices, sslConfiguration, routingHandler, hostResolver, customizer, quic);
}
/**
* Builds Embedded Channel.
*
* @param ssl whether to enable SSL
* @return The embedded channel with our server handlers
*/
@Internal
public EmbeddedChannel buildEmbeddedChannel(boolean ssl) {
EmbeddedChannel channel = new EmbeddedChannel(new ChannelDuplexHandler() {
// work around https://github.com/netty/netty/pull/13730
boolean reading = false;
ChannelPromise closePromise;
@Override
public void channelRead(@NonNull ChannelHandlerContext ctx, @NonNull Object msg) throws Exception {
reading = true;
ctx.fireChannelRead(msg);
reading = false;
ChannelPromise closePromise = this.closePromise;
if (closePromise != null) {
this.closePromise = null;
ctx.close(closePromise);
}
}
@Override
public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception {
if (reading) {
closePromise = promise;
} else {
ctx.close(promise);
}
}
});
buildEmbeddedChannel(channel, ssl);
return channel;
}
/**
* Builds Embedded Channel.
*
* @param prototype The embedded channel to add our handlers to
* @param ssl whether to enable SSL
*/
@Internal
public void buildEmbeddedChannel(EmbeddedChannel prototype, boolean ssl) {
HttpPipelineBuilder builder = createPipelineBuilder(rootCustomizer, false);
SslContextHolder sslContextHolder = ssl ? createLegacySslContextHolder(false) : null;
builder.new ConnectionPipeline(prototype, sslContextHolder).initChannel();
}
static Predicate<String> inclusionPredicate(NettyHttpServerConfiguration.AccessLogger config) {
List<String> exclusions = config.getExclusions();
if (CollectionUtils.isEmpty(exclusions)) {
return null;
} else {
// Don't do this inside the predicate to avoid compiling every request
List<Pattern> patterns = exclusions.stream().map(Pattern::compile).collect(Collectors.toList());
return uri -> patterns.stream().noneMatch(pattern -> pattern.matcher(uri).matches());
}
}
private SslContextHolder createLegacySslContextHolder(boolean quic) {
SslContext sslContext = nettyEmbeddedServices.getServerSslBuilder() != null && !quic ? nettyEmbeddedServices.getServerSslBuilder().build().orElse(null) : null;
QuicSslContext quicSslContext = quic ? nettyEmbeddedServices.getServerSslBuilder().buildQuic().orElse(null) : null;
return new SslContextHolder(sslContext, quicSslContext);
}
private
|
NettyHttpServer
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryParenthesesTest.java
|
{
"start": 7877,
"end": 8268
}
|
class ____ {
public void f() {
Double d = (Double) (-1.0);
d = (double) -1.0;
}
}
""")
.doTest();
}
@Test
public void annotationsWithoutTrailingParentheses() {
helper
.addSourceLines(
"Test.java",
"""
@Deprecated(forRemoval = true)
|
Test
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/ImportAutoConfigurationImportSelectorTests.java
|
{
"start": 10798,
"end": 10878
}
|
interface ____ {
}
@MetaImportAutoConfiguration
@UnrelatedOne
static
|
ImportTwo
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistReplicaRecovery.java
|
{
"start": 1693,
"end": 4358
}
|
class ____ extends LazyPersistTestCase {
@Test
public void testDnRestartWithSavedReplicas()
throws IOException, InterruptedException, TimeoutException {
getClusterBuilder().build();
FSNamesystem fsn = cluster.getNamesystem();
final DataNode dn = cluster.getDataNodes().get(0);
DatanodeDescriptor dnd =
NameNodeAdapter.getDatanode(fsn, dn.getDatanodeId());
final String METHOD_NAME = GenericTestUtils.getMethodName();
Path path1 = new Path("/" + METHOD_NAME + ".01.dat");
makeTestFile(path1, BLOCK_SIZE, true);
ensureFileReplicasOnStorageType(path1, RAM_DISK);
// Sleep for a short time to allow the lazy writer thread to do its job.
// However the block replica should not be evicted from RAM_DISK yet.
FsDatasetImpl fsDImpl = (FsDatasetImpl) DataNodeTestUtils.getFSDataset(dn);
GenericTestUtils
.waitFor(() -> fsDImpl.getNonPersistentReplicas() == 0, 10,
3 * LAZY_WRITER_INTERVAL_SEC * 1000);
ensureFileReplicasOnStorageType(path1, RAM_DISK);
LOG.info("Restarting the DataNode");
assertTrue(cluster.restartDataNode(0, true), "DN did not restart properly");
// wait for blockreport
waitForBlockReport(dn, dnd);
// Ensure that the replica is now on persistent storage.
ensureFileReplicasOnStorageType(path1, DEFAULT);
}
@Test
public void testDnRestartWithUnsavedReplicas()
throws IOException, InterruptedException, TimeoutException {
getClusterBuilder().build();
FsDatasetTestUtil.stopLazyWriter(cluster.getDataNodes().get(0));
final String METHOD_NAME = GenericTestUtils.getMethodName();
Path path1 = new Path("/" + METHOD_NAME + ".01.dat");
makeTestFile(path1, BLOCK_SIZE, true);
ensureFileReplicasOnStorageType(path1, RAM_DISK);
LOG.info("Restarting the DataNode");
cluster.restartDataNode(0, true);
cluster.waitActive();
// Ensure that the replica is still on transient storage.
ensureFileReplicasOnStorageType(path1, RAM_DISK);
}
private boolean waitForBlockReport(final DataNode dn,
final DatanodeDescriptor dnd) throws IOException, InterruptedException {
final DatanodeStorageInfo storage = dnd.getStorageInfos()[0];
final long lastCount = storage.getBlockReportCount();
dn.triggerBlockReport(
new BlockReportOptions.Factory().setIncremental(false).build());
try {
GenericTestUtils
.waitFor(() -> lastCount != storage.getBlockReportCount(), 10, 10000);
} catch (TimeoutException te) {
LOG.error("Timeout waiting for block report for {}", dnd);
return false;
}
return true;
}
}
|
TestLazyPersistReplicaRecovery
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-batch/src/test/java/org/apache/camel/component/spring/batch/SpringBatchEndpointTest.java
|
{
"start": 2258,
"end": 13182
}
|
class ____ extends CamelTestSupport {
// Fixtures
@Mock
JobLauncher jobLauncher;
@Mock
JobLauncher alternativeJobLauncher;
@Mock
JobRegistry jobRegistry;
@Mock
Job job;
@Mock
Job dynamicMockjob;
// Camel fixtures
@EndpointInject("mock:test")
MockEndpoint mockEndpoint;
@EndpointInject("mock:error")
MockEndpoint errorEndpoint;
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:start").to("spring-batch:mockJob").to("mock:test");
}
};
}
@Override
protected void bindToRegistry(Registry registry) {
registry.bind("jobLauncher", jobLauncher);
registry.bind("alternativeJobLauncher", alternativeJobLauncher);
registry.bind("mockJob", job);
registry.bind("dynamicMockjob", dynamicMockjob);
registry.bind("jobRegistry", jobRegistry);
}
// Tests
@Test
public void shouldInjectJobToEndpoint() throws IllegalAccessException {
SpringBatchEndpoint batchEndpoint = getMandatoryEndpoint("spring-batch:mockJob", SpringBatchEndpoint.class);
Job batchEndpointJob = (Job) FieldUtils.readField(batchEndpoint, "job", true);
assertSame(job, batchEndpointJob);
}
@Test
public void shouldRunJob() throws Exception {
// When
sendBody("direct:start", "Start the job, please.");
// Then
verify(jobLauncher).run(eq(job), any(JobParameters.class));
}
@Test
public void shouldReturnJobExecution() throws Exception {
// Given
JobExecution jobExecution = mock(JobExecution.class);
when(jobLauncher.run(eq(job), any(JobParameters.class))).thenReturn(jobExecution);
// When
sendBody("direct:start", "Start the job, please.");
// Then
mockEndpoint.expectedBodiesReceived(jobExecution);
}
@Test
public void shouldThrowExceptionIfUsedAsConsumer() {
RouteBuilder rb = new RouteBuilder() {
@Override
public void configure() {
from("spring-batch:mockJob").to("direct:emptyEndpoint");
}
};
final CamelContext context = context();
// When
assertThrows(FailedToStartRouteException.class,
() -> context.addRoutes(rb));
}
@Test
public void shouldConvertHeadersToJobParams() throws Exception {
// Given
String headerKey = "headerKey";
String headerValue = "headerValue";
// When
template.sendBodyAndHeader("direct:start", "Start the job, please.", headerKey, headerValue);
// Then
ArgumentCaptor<JobParameters> jobParameters = ArgumentCaptor.forClass(JobParameters.class);
verify(jobLauncher).run(any(Job.class), jobParameters.capture());
String parameter = jobParameters.getValue().getString(headerKey);
assertEquals(parameter, headerValue);
}
@Test
public void shouldConvertDateHeadersToJobParams() throws Exception {
// Given
String headerKey = "headerKey";
Date headerValue = new Date();
// When
template.sendBodyAndHeader("direct:start", "Start the job, please.", headerKey, headerValue);
// Then
ArgumentCaptor<JobParameters> jobParameters = ArgumentCaptor.forClass(JobParameters.class);
verify(jobLauncher).run(any(Job.class), jobParameters.capture());
Date parameter = jobParameters.getValue().getDate(headerKey);
assertEquals(parameter, headerValue);
}
@Test
public void shouldConvertLongHeadersToJobParams() throws Exception {
// Given
String headerKey = "headerKey";
Long headerValue = 1L;
// When
template.sendBodyAndHeader("direct:start", "Start the job, please.", headerKey, headerValue);
// Then
ArgumentCaptor<JobParameters> jobParameters = ArgumentCaptor.forClass(JobParameters.class);
verify(jobLauncher).run(any(Job.class), jobParameters.capture());
Long parameter = jobParameters.getValue().getLong(headerKey);
assertEquals(parameter, headerValue);
}
@Test
public void shouldConvertDoubleHeadersToJobParams() throws Exception {
// Given
String headerKey = "headerKey";
Double headerValue = 1.0;
// When
template.sendBodyAndHeader("direct:start", "Start the job, please.", headerKey, headerValue);
// Then
ArgumentCaptor<JobParameters> jobParameters = ArgumentCaptor.forClass(JobParameters.class);
verify(jobLauncher).run(any(Job.class), jobParameters.capture());
Double parameter = jobParameters.getValue().getDouble(headerKey);
assertEquals(parameter, headerValue);
}
@Test
public void shouldInjectJobLauncherByReferenceName() throws Exception {
// Given
context().addRoutes(new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:launcherRefTest").to("spring-batch:mockJob?jobLauncher=#alternativeJobLauncher");
}
});
// When
template.sendBody("direct:launcherRefTest", "Start the job, please.");
// Then
SpringBatchEndpoint batchEndpoint
= context().getEndpoint("spring-batch:mockJob?jobLauncher=#alternativeJobLauncher", SpringBatchEndpoint.class);
JobLauncher batchEndpointJobLauncher = (JobLauncher) FieldUtils.readField(batchEndpoint, "jobLauncher", true);
assertSame(alternativeJobLauncher, batchEndpointJobLauncher);
}
@Test
public void shouldFailWhenThereIsNoJobLauncher() throws Exception {
// Given
SimpleRegistry registry = new SimpleRegistry();
registry.bind("mockJob", job);
CamelContext camelContext = new DefaultCamelContext(registry);
camelContext.addRoutes(new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:start").to("spring-batch:mockJob");
}
});
// When
assertThrows(FailedToStartRouteException.class,
() -> camelContext.start());
}
@Test
public void shouldResolveAnyJobLauncher() throws Exception {
// Given
SimpleRegistry registry = new SimpleRegistry();
registry.bind("mockJob", job);
registry.bind("someRandomName", jobLauncher);
CamelContext camelContext = new DefaultCamelContext(registry);
camelContext.addRoutes(new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:start").to("spring-batch:mockJob");
}
});
// When
camelContext.start();
// Then
SpringBatchEndpoint batchEndpoint = camelContext.getEndpoint("spring-batch:mockJob", SpringBatchEndpoint.class);
JobLauncher batchEndpointJobLauncher = (JobLauncher) FieldUtils.readField(batchEndpoint, "jobLauncher", true);
assertSame(jobLauncher, batchEndpointJobLauncher);
}
@Test
public void shouldUseJobLauncherFromComponent() throws Exception {
// Given
SpringBatchComponent batchComponent = new SpringBatchComponent();
batchComponent.setJobLauncher(alternativeJobLauncher);
context.addComponent("customBatchComponent", batchComponent);
// When
context().addRoutes(new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:startCustom").to("customBatchComponent:mockJob");
}
});
// Then
SpringBatchEndpoint batchEndpoint = context().getEndpoint("customBatchComponent:mockJob", SpringBatchEndpoint.class);
JobLauncher batchEndpointJobLauncher = (JobLauncher) FieldUtils.readField(batchEndpoint, "jobLauncher", true);
assertSame(alternativeJobLauncher, batchEndpointJobLauncher);
}
@Test
public void shouldInjectJobRegistryByReferenceName() throws Exception {
// Given
Job mockJob = mock(Job.class);
when(jobRegistry.getJob(eq("mockJob"))).thenReturn(mockJob);
context().addRoutes(new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:jobRegistryRefTest").to("spring-batch:mockJob?jobRegistry=#jobRegistry");
}
});
// When
template.sendBody("direct:jobRegistryRefTest", "Start the job, please.");
// Then
SpringBatchEndpoint batchEndpoint
= context().getEndpoint("spring-batch:mockJob?jobRegistry=#jobRegistry", SpringBatchEndpoint.class);
JobRegistry batchEndpointJobRegistry = (JobRegistry) FieldUtils.readField(batchEndpoint, "jobRegistry", true);
assertSame(jobRegistry, batchEndpointJobRegistry);
}
@Test
public void shouldUseJobRegistryFromComponent() throws Exception {
// Given
SpringBatchComponent batchComponent = new SpringBatchComponent();
batchComponent.setJobRegistry(jobRegistry);
batchComponent.setJobLauncher(jobLauncher);
context.addComponent("customBatchComponent", batchComponent);
// When
context().addRoutes(new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:startCustom").to("customBatchComponent:mockJob");
}
});
// Then
SpringBatchEndpoint batchEndpoint = context().getEndpoint("customBatchComponent:mockJob", SpringBatchEndpoint.class);
JobRegistry batchEndpointJobRegistry = (JobRegistry) FieldUtils.readField(batchEndpoint, "jobRegistry", true);
assertSame(jobRegistry, batchEndpointJobRegistry);
}
@Test
public void shouldGetJobFromJobRegistry() throws Exception {
// Given
Job mockJobFromJobRegistry = mock(Job.class);
when(jobRegistry.getJob(eq("mockJobFromJobRegistry"))).thenReturn(mockJobFromJobRegistry);
// When
context().addRoutes(new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:jobRegistryTest").to("spring-batch:mockJobFromJobRegistry?jobRegistry=#jobRegistry");
}
});
// Then
SpringBatchEndpoint batchEndpoint = context()
.getEndpoint("spring-batch:mockJobFromJobRegistry?jobRegistry=#jobRegistry", SpringBatchEndpoint.class);
Job batchEndpointJob = (Job) FieldUtils.readField(batchEndpoint, "job", true);
assertSame(mockJobFromJobRegistry, batchEndpointJob);
}
}
|
SpringBatchEndpointTest
|
java
|
apache__camel
|
dsl/camel-kamelet-main/src/test/java/org/apache/camel/main/app/MySpringBean.java
|
{
"start": 1075,
"end": 1258
}
|
class ____ {
@Autowired(required = true)
private CamelContext camelContext;
@Bean({ "a1", "a2" })
public String cheese() {
return "cheese";
}
}
|
MySpringBean
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestNMReconnect.java
|
{
"start": 3833,
"end": 4292
}
|
class ____ extends ParameterizedSchedulerTestBase {
private static final RecordFactory recordFactory =
RecordFactoryProvider.getRecordFactory(null);
private List<RMNodeEvent> rmNodeEvents = new ArrayList<RMNodeEvent>();
private Dispatcher dispatcher;
private RMContextImpl context;
public void initTestNMReconnect(SchedulerType type) throws IOException {
initParameterizedSchedulerTestBase(type);
setUp();
}
private
|
TestNMReconnect
|
java
|
quarkusio__quarkus
|
extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/DefaultTokenStateManager.java
|
{
"start": 750,
"end": 15638
}
|
class ____ implements TokenStateManager {
private static final Logger LOG = Logger.getLogger(DefaultTokenStateManager.class);
@Override
public Uni<String> createTokenState(RoutingContext routingContext, OidcTenantConfig oidcConfig,
AuthorizationCodeTokens tokens, OidcRequestContext<String> requestContext) {
if (!oidcConfig.tokenStateManager().splitTokens()) {
// ID, access and refresh tokens are all represented by a single cookie.
// In this case they are all encrypted once all tokens have been added to the buffer.
StringBuilder sb = new StringBuilder();
// Add ID token
sb.append(tokens.getIdToken());
// By default, all three tokens are retained
if (oidcConfig.tokenStateManager().strategy() == Strategy.KEEP_ALL_TOKENS) {
// Add access and refresh tokens
sb.append(CodeAuthenticationMechanism.COOKIE_DELIM)
.append(tokens.getAccessToken())
.append(CodeAuthenticationMechanism.COOKIE_DELIM)
.append(tokens.getAccessTokenExpiresIn() != null ? tokens.getAccessTokenExpiresIn() : "")
.append(CodeAuthenticationMechanism.COOKIE_DELIM)
.append(tokens.getAccessTokenScope() != null ? encodeScopes(oidcConfig, tokens.getAccessTokenScope())
: "")
.append(CodeAuthenticationMechanism.COOKIE_DELIM)
.append(tokens.getRefreshToken());
} else if (oidcConfig.tokenStateManager().strategy() == Strategy.ID_REFRESH_TOKENS) {
// But sometimes the access token is not required.
// For example, when the Quarkus endpoint does not need to use it to access another service.
// Skip access token, access token expiry, access token scope, add refresh token
sb.append(CodeAuthenticationMechanism.COOKIE_DELIM)
.append("")
.append(CodeAuthenticationMechanism.COOKIE_DELIM)
.append("")
.append(CodeAuthenticationMechanism.COOKIE_DELIM)
.append("")
.append(CodeAuthenticationMechanism.COOKIE_DELIM)
.append(tokens.getRefreshToken());
}
// Now all three tokens are encrypted
String encryptedTokens = OidcUtils.encryptToken(sb.toString(), routingContext, oidcConfig);
return Uni.createFrom().item(encryptedTokens);
} else {
// ID, access and refresh tokens are represented as individual cookies
// Encrypt ID token
String encryptedIdToken = OidcUtils.encryptToken(tokens.getIdToken(), routingContext, oidcConfig);
// By default, all three tokens are retained
if (oidcConfig.tokenStateManager().strategy() == Strategy.KEEP_ALL_TOKENS) {
createSessionAccessTokenCookie(routingContext, oidcConfig, tokens);
// Encrypt refresh token and create a `q_session_rt` cookie.
if (tokens.getRefreshToken() != null) {
OidcUtils.createSessionCookie(routingContext,
oidcConfig,
getRefreshTokenCookieName(oidcConfig),
OidcUtils.encryptToken(tokens.getRefreshToken(), routingContext, oidcConfig),
routingContext.get(CodeAuthenticationMechanism.SESSION_MAX_AGE_PARAM));
}
} else if (oidcConfig.tokenStateManager().strategy() == Strategy.ID_REFRESH_TOKENS
&& tokens.getRefreshToken() != null) {
// Encrypt refresh token and create a `q_session_rt` cookie.
OidcUtils.createSessionCookie(routingContext,
oidcConfig,
getRefreshTokenCookieName(oidcConfig),
OidcUtils.encryptToken(tokens.getRefreshToken(), routingContext, oidcConfig),
routingContext.get(CodeAuthenticationMechanism.SESSION_MAX_AGE_PARAM));
}
// q_session cookie
return Uni.createFrom().item(encryptedIdToken);
}
}
@Override
public Uni<AuthorizationCodeTokens> getTokens(RoutingContext routingContext, OidcTenantConfig oidcConfig, String tokenState,
OidcRequestContext<AuthorizationCodeTokens> requestContext) {
String idToken = null;
String accessToken = null;
Long accessTokenExpiresIn = null;
String accessTokenScope = null;
String refreshToken = null;
if (!oidcConfig.tokenStateManager().splitTokens()) {
// ID, access and refresh tokens are all be represented by a single cookie.
String decryptedTokenState = OidcUtils.decryptToken(tokenState, routingContext, oidcConfig);
String[] tokens = CodeAuthenticationMechanism.COOKIE_PATTERN.split(decryptedTokenState);
try {
idToken = tokens[0];
if (oidcConfig.tokenStateManager().strategy() == Strategy.KEEP_ALL_TOKENS) {
accessToken = tokens[1];
accessTokenExpiresIn = tokens[2].isEmpty() ? null : parseAccessTokenExpiresIn(tokens[2]);
accessTokenScope = tokens[3].isEmpty() ? null : tokens[3];
refreshToken = tokens[4];
} else if (oidcConfig.tokenStateManager().strategy() == Strategy.ID_REFRESH_TOKENS) {
refreshToken = tokens[4];
}
} catch (ArrayIndexOutOfBoundsException ex) {
final String error = "Session cookie is malformed";
LOG.debug(ex);
return Uni.createFrom().failure(new AuthenticationFailedException(error));
} catch (AuthenticationFailedException ex) {
return Uni.createFrom().failure(ex);
}
} else {
// Decrypt ID token from the q_session cookie
idToken = OidcUtils.decryptToken(tokenState, routingContext, oidcConfig);
if (oidcConfig.tokenStateManager().strategy() == Strategy.KEEP_ALL_TOKENS) {
String atCookieValue = getAccessTokenCookie(routingContext, oidcConfig);
if (atCookieValue != null) {
// Decrypt access token from the q_session_at cookie
String accessTokenState = OidcUtils.decryptToken(atCookieValue, routingContext, oidcConfig);
String[] accessTokenData = CodeAuthenticationMechanism.COOKIE_PATTERN.split(accessTokenState);
accessToken = accessTokenData[0];
try {
accessTokenExpiresIn = accessTokenData[1].isEmpty() ? null
: parseAccessTokenExpiresIn(accessTokenData[1]);
if (accessTokenData.length == 3) {
accessTokenScope = accessTokenData[2].isEmpty() ? null
: decodeScopes(oidcConfig, accessTokenData[2]);
}
} catch (ArrayIndexOutOfBoundsException ex) {
final String error = "Session cookie is malformed";
LOG.debug(ex);
// Make this error message visible in the dev mode
return Uni.createFrom().failure(new AuthenticationFailedException(error));
} catch (AuthenticationFailedException ex) {
return Uni.createFrom().failure(ex);
}
}
Cookie rtCookie = getRefreshTokenCookie(routingContext, oidcConfig);
if (rtCookie != null) {
// Decrypt refresh token from the q_session_rt cookie
refreshToken = OidcUtils.decryptToken(rtCookie.getValue(), routingContext, oidcConfig);
}
} else if (oidcConfig.tokenStateManager().strategy() == Strategy.ID_REFRESH_TOKENS) {
Cookie rtCookie = getRefreshTokenCookie(routingContext, oidcConfig);
if (rtCookie != null) {
refreshToken = OidcUtils.decryptToken(rtCookie.getValue(), routingContext, oidcConfig);
}
}
}
return Uni.createFrom()
.item(new AuthorizationCodeTokens(idToken, accessToken, refreshToken, accessTokenExpiresIn, accessTokenScope));
}
@Override
public Uni<Void> deleteTokens(RoutingContext routingContext, OidcTenantConfig oidcConfig, String tokenState,
OidcRequestContext<Void> requestContext) {
if (oidcConfig.tokenStateManager().splitTokens()) {
getAccessTokenCookie(routingContext, oidcConfig);
List<String> atCookieNames = routingContext.get(OidcUtils.SESSION_AT_COOKIE_NAME);
if (atCookieNames != null) {
LOG.debugf("Remove session access cookie names: %s", atCookieNames);
for (String cookieName : atCookieNames) {
OidcUtils.removeCookie(routingContext, oidcConfig, cookieName);
}
}
OidcUtils.removeCookie(routingContext, getRefreshTokenCookie(routingContext, oidcConfig),
oidcConfig);
}
return CodeAuthenticationMechanism.VOID_UNI;
}
private static Long parseAccessTokenExpiresIn(String accessTokenExpiresInString) {
try {
return Long.valueOf(accessTokenExpiresInString);
} catch (NumberFormatException ex) {
final String error = """
Access token expires_in property in the session cookie must be a number, found %s
""".formatted(accessTokenExpiresInString);
LOG.debug(ex);
// Make this error message visible in the dev mode
throw new AuthenticationFailedException(error);
}
}
private static String getAccessTokenCookie(RoutingContext routingContext, OidcTenantConfig oidcConfig) {
final Map<String, Cookie> cookies = routingContext.request().cookieMap();
return OidcUtils.getSessionCookie(routingContext.data(), cookies, oidcConfig, OidcUtils.SESSION_AT_COOKIE_NAME,
getAccessTokenCookieName(oidcConfig));
}
private static ServerCookie getRefreshTokenCookie(RoutingContext routingContext, OidcTenantConfig oidcConfig) {
return (ServerCookie) routingContext.request().getCookie(getRefreshTokenCookieName(oidcConfig));
}
private static String getAccessTokenCookieName(OidcTenantConfig oidcConfig) {
String cookieSuffix = OidcUtils.getCookieSuffix(oidcConfig);
return OidcUtils.SESSION_AT_COOKIE_NAME + cookieSuffix;
}
private static String getRefreshTokenCookieName(OidcTenantConfig oidcConfig) {
String cookieSuffix = OidcUtils.getCookieSuffix(oidcConfig);
return OidcUtils.SESSION_RT_COOKIE_NAME + cookieSuffix;
}
private static String encodeScopes(OidcTenantConfig oidcConfig, String accessTokenScope) {
if (oidcConfig.tokenStateManager().encryptionRequired()) {
return accessTokenScope;
}
return OidcCommonUtils.base64UrlEncode(accessTokenScope.getBytes(StandardCharsets.UTF_8));
}
private static String decodeScopes(OidcTenantConfig oidcConfig, String accessTokenScope) {
if (oidcConfig.tokenStateManager().encryptionRequired()) {
return accessTokenScope;
}
return OidcCommonUtils.base64UrlDecode(accessTokenScope);
}
private static void createSessionAccessTokenCookie(RoutingContext routingContext, OidcTenantConfig oidcConfig,
AuthorizationCodeTokens tokens) {
String cookieName = getAccessTokenCookieName(oidcConfig);
StringBuilder sb = new StringBuilder();
// Add access token and its expires_in property
sb.append(tokens.getAccessToken())
.append(CodeAuthenticationMechanism.COOKIE_DELIM)
.append(tokens.getAccessTokenExpiresIn() != null ? tokens.getAccessTokenExpiresIn() : "")
.append(CodeAuthenticationMechanism.COOKIE_DELIM)
.append(tokens.getAccessTokenScope() != null ? encodeScopes(oidcConfig, tokens.getAccessTokenScope())
: "");
String cookieValue = OidcUtils.encryptToken(sb.toString(), routingContext, oidcConfig);
LOG.debugf("Session access token cookie length for the tenant %s is %d bytes.",
oidcConfig.tenantId().get(), cookieValue.length());
if (cookieValue.length() > OidcUtils.MAX_COOKIE_VALUE_LENGTH) {
LOG.debugf(
"Session access token cookie length for the tenant %s is greater than %d bytes."
+ " The cookie will be split to chunks to avoid browsers ignoring it."
+ " Alternative recommendations: 1. Set 'quarkus.oidc.token-state-manager.strategy=id-refresh-tokens' if you do not need to use the access token"
+ " as a source of roles or to request UserInfo or propagate it to the downstream services."
+ " 2. Decrease the encrypted session access token cookie's length by enabling a direct encryption algorithm"
+ " with 'quarkus.oidc.token-state-manager.encryption-algorithm=dir'."
+ " 3. Decrease the session access token cookie's length by disabling its encryption with 'quarkus.oidc.token-state-manager.encryption-required=false'"
+ " but only if it is considered to be safe in your application's network."
+ " 4. Use the 'quarkus-oidc-db-token-state-manager' extension or the 'quarkus-oidc-redis-token-state-manager' extension"
+ " or register a custom 'quarkus.oidc.TokenStateManager'"
+ " CDI bean with the alternative priority set to 1 and save the tokens on the server.",
oidcConfig.tenantId().get(), OidcUtils.MAX_COOKIE_VALUE_LENGTH);
OidcUtils.createChunkedCookie(routingContext, oidcConfig, cookieName, cookieValue,
routingContext.get(CodeAuthenticationMechanism.SESSION_MAX_AGE_PARAM));
} else {
// Create a `q_session_at` cookie.
OidcUtils.createSessionCookie(routingContext,
oidcConfig,
cookieName,
cookieValue,
routingContext.get(CodeAuthenticationMechanism.SESSION_MAX_AGE_PARAM));
}
}
}
|
DefaultTokenStateManager
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_1400/Issue1487.java
|
{
"start": 589,
"end": 1227
}
|
class ____ {
private Long _id;
private Long id;
@JSONField(name = "_id")
public long get_id() {
if (null != _id) {
return _id.longValue();
} else {
return 0L;
}
}
@JSONField(name = "_id")
public void set_id(Long _id) {
this._id = _id;
}
public long getId() {
if (null != id) {
return id.longValue();
} else {
return 0L;
}
}
public void setId(Long id) {
this.id = id;
}
}
}
|
Model
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/LoadBalancingKMSClientProvider.java
|
{
"start": 2918,
"end": 3071
}
|
interface ____<T> {
public T call(KMSClientProvider provider) throws IOException, Exception;
}
@SuppressWarnings("serial")
static
|
ProviderCallable
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/Metadata.java
|
{
"start": 37553,
"end": 38901
}
|
class ____ {
private static final LeaderAndEpoch NO_LEADER_OR_EPOCH = new LeaderAndEpoch(Optional.empty(), Optional.empty());
public final Optional<Node> leader;
public final Optional<Integer> epoch;
public LeaderAndEpoch(Optional<Node> leader, Optional<Integer> epoch) {
this.leader = Objects.requireNonNull(leader);
this.epoch = Objects.requireNonNull(epoch);
}
public static LeaderAndEpoch noLeaderOrEpoch() {
return NO_LEADER_OR_EPOCH;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
LeaderAndEpoch that = (LeaderAndEpoch) o;
if (!leader.equals(that.leader)) return false;
return epoch.equals(that.epoch);
}
@Override
public int hashCode() {
int result = leader.hashCode();
result = 31 * result + epoch.hashCode();
return result;
}
@Override
public String toString() {
return "LeaderAndEpoch{" +
"leader=" + leader +
", epoch=" + epoch.map(Number::toString).orElse("absent") +
'}';
}
}
public static
|
LeaderAndEpoch
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/util/ClassUtils.java
|
{
"start": 14188,
"end": 14315
}
|
class ____ present (including all of its
* superclasses and interfaces)
* @throws IllegalStateException if the corresponding
|
is
|
java
|
quarkusio__quarkus
|
extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcProcessor.java
|
{
"start": 12404,
"end": 39403
}
|
class ____ be ignored during bean discovery
transformationContext.transform().add(ADDITIONAL_BEAN).done();
}
}
}
});
builder.setComputingBeanArchiveIndex(index);
builder.setImmutableBeanArchiveIndex(beanArchiveIndex.getImmutableIndex());
builder.setApplicationIndex(combinedIndex.getIndex());
List<BeanDefiningAnnotation> beanDefiningAnnotations = additionalBeanDefiningAnnotations.stream()
.map((s) -> new BeanDefiningAnnotation(s.getName(), s.getDefaultScope())).collect(Collectors.toList());
beanDefiningAnnotations.add(new BeanDefiningAnnotation(ADDITIONAL_BEAN, null));
builder.setAdditionalBeanDefiningAnnotations(beanDefiningAnnotations);
builder.addResourceAnnotations(
resourceAnnotations.stream().map(ResourceAnnotationBuildItem::getName).collect(Collectors.toList()));
// register all annotation transformers
for (AnnotationsTransformerBuildItem transformer : annotationTransformers) {
builder.addAnnotationTransformation(transformer.getAnnotationTransformation());
}
// register all injection point transformers
for (InjectionPointTransformerBuildItem transformer : injectionPointTransformers) {
builder.addInjectionPointTransformer(transformer.getInjectionPointsTransformer());
}
// register all observer transformers
for (ObserverTransformerBuildItem transformer : observerTransformers) {
builder.addObserverTransformer(transformer.getInstance());
}
// register additional interceptor bindings
for (InterceptorBindingRegistrarBuildItem registrar : interceptorBindingRegistrars) {
builder.addInterceptorBindingRegistrar(registrar.getInterceptorBindingRegistrar());
}
// register additional qualifiers
for (QualifierRegistrarBuildItem registrar : qualifierRegistrars) {
builder.addQualifierRegistrar(registrar.getQualifierRegistrar());
}
// register additional stereotypes
for (StereotypeRegistrarBuildItem registrar : stereotypeRegistrars) {
builder.addStereotypeRegistrar(registrar.getStereotypeRegistrar());
}
builder.setRemoveUnusedBeans(arcConfig.shouldEnableBeanRemoval());
if (arcConfig.shouldOnlyKeepAppBeans()) {
builder.addRemovalExclusion(new AbstractCompositeApplicationClassesPredicate<BeanInfo>(
applicationClassesIndex, generatedClassNames, applicationClassPredicates, testClassPredicate) {
@Override
protected DotName getDotName(BeanInfo bean) {
return bean.getBeanClass();
}
});
}
builder.addRemovalExclusion(new BeanTypeExclusion(DotName.createSimple(TestApplicationClassPredicate.class.getName())));
for (AdditionalBeanBuildItem additionalBean : additionalBeans) {
if (!additionalBean.isRemovable()) {
for (String beanClass : additionalBean.getBeanClasses()) {
builder.addRemovalExclusion(new BeanClassNameExclusion(beanClass));
}
}
}
for (BeanDefiningAnnotationBuildItem annotation : additionalBeanDefiningAnnotations) {
if (!annotation.isRemovable()) {
builder.addRemovalExclusion(new BeanClassAnnotationExclusion(annotation.getName()));
}
}
// unremovable beans specified in application.properties
if (arcConfig.unremovableTypes().isPresent()) {
List<Predicate<ClassInfo>> classPredicates = initClassPredicates(arcConfig.unremovableTypes().get());
builder.addRemovalExclusion(new Predicate<BeanInfo>() {
@Override
public boolean test(BeanInfo beanInfo) {
ClassInfo beanClass = beanInfo.getImplClazz();
if (beanClass != null) {
// if any of the predicates match, we make the given bean unremovable
for (Predicate<ClassInfo> predicate : classPredicates) {
if (predicate.test(beanClass)) {
return true;
}
}
}
return false;
}
});
}
if (testClassPredicate.isPresent()) {
builder.addRemovalExclusion(new Predicate<BeanInfo>() {
@Override
public boolean test(BeanInfo bean) {
return testClassPredicate.get().getPredicate().test(bean.getBeanClass().toString());
}
});
}
builder.setTransformUnproxyableClasses(arcConfig.transformUnproxyableClasses());
builder.setTransformPrivateInjectedFields(arcConfig.transformPrivateInjectedFields());
builder.setFailOnInterceptedPrivateMethod(arcConfig.failOnInterceptedPrivateMethod());
builder.setJtaCapabilities(capabilities.isPresent(Capability.TRANSACTIONS));
builder.setGenerateSources(BootstrapDebug.debugSourcesDir() != null);
builder.setAllowMocking(launchModeBuildItem.getLaunchMode() == LaunchMode.TEST);
builder.setStrictCompatibility(arcConfig.strictCompatibility());
if (arcConfig.selectedAlternatives().isPresent()) {
final List<Predicate<ClassInfo>> selectedAlternatives = initClassPredicates(
arcConfig.selectedAlternatives().get());
builder.setAlternativePriorities(new AlternativePriorities() {
@Override
public Integer compute(AnnotationTarget target, Collection<StereotypeInfo> stereotypes) {
ClassInfo clazz;
switch (target.kind()) {
case CLASS:
clazz = target.asClass();
break;
case FIELD:
clazz = target.asField().declaringClass();
break;
case METHOD:
clazz = target.asMethod().declaringClass();
break;
default:
return null;
}
if (selectedAlternatives.stream().anyMatch(p -> p.test(clazz))) {
return Integer.MAX_VALUE;
}
if (!stereotypes.isEmpty()) {
for (StereotypeInfo stereotype : stereotypes) {
if (selectedAlternatives.stream().anyMatch(p -> p.test(stereotype.getTarget()))) {
return Integer.MAX_VALUE;
}
}
}
return null;
}
});
}
if (arcConfig.excludeTypes().isPresent()) {
for (Predicate<ClassInfo> predicate : initClassPredicates(
arcConfig.excludeTypes().get())) {
builder.addExcludeType(predicate);
}
}
if (!excludedTypes.isEmpty()) {
for (Predicate<ClassInfo> predicate : initClassPredicates(
excludedTypes.stream().map(ExcludedTypeBuildItem::getMatch).collect(Collectors.toList()))) {
builder.addExcludeType(predicate);
}
}
if (launchModeBuildItem.getLaunchMode() == LaunchMode.TEST) {
builder.addExcludeType(createQuarkusComponentTestExcludePredicate(index));
}
for (SuppressConditionGeneratorBuildItem generator : suppressConditionGenerators) {
builder.addSuppressConditionGenerator(generator.getGenerator());
}
builder.setBuildCompatibleExtensions(buildCompatibleExtensions.entrypoint);
builder.setOptimizeContexts(new Predicate<BeanDeployment>() {
@Override
public boolean test(BeanDeployment deployment) {
switch (arcConfig.optimizeContexts()) {
case TRUE:
return true;
case FALSE:
return false;
case AUTO:
// Optimize the context if there is less than 1000 beans in the app
// Note that removed beans are excluded
return deployment.getBeans().size() < 1000;
default:
throw new IllegalArgumentException("Unexpected value: " + arcConfig.optimizeContexts());
}
}
});
BeanProcessor beanProcessor = builder.build();
ContextRegistrar.RegistrationContext context = beanProcessor.registerCustomContexts();
return new ContextRegistrationPhaseBuildItem(context, beanProcessor);
}
// PHASE 2 - register all beans
@BuildStep
public BeanRegistrationPhaseBuildItem registerBeans(ContextRegistrationPhaseBuildItem contextRegistrationPhase,
List<ContextConfiguratorBuildItem> contextConfigurationRegistry,
BuildProducer<InterceptorResolverBuildItem> interceptorResolver,
BuildProducer<BeanDiscoveryFinishedBuildItem> beanDiscoveryFinished,
BuildProducer<TransformedAnnotationsBuildItem> transformedAnnotations,
BuildProducer<InvokerFactoryBuildItem> invokerFactory) {
for (ContextConfiguratorBuildItem contextConfigurator : contextConfigurationRegistry) {
for (ContextConfigurator value : contextConfigurator.getValues()) {
// Just make sure the configurator is processed
value.done();
}
}
BeanProcessor beanProcessor = contextRegistrationPhase.getBeanProcessor();
beanProcessor.registerScopes();
BeanRegistrar.RegistrationContext registrationContext = beanProcessor.registerBeans();
BeanDeployment beanDeployment = beanProcessor.getBeanDeployment();
interceptorResolver.produce(new InterceptorResolverBuildItem(beanDeployment));
beanDiscoveryFinished.produce(new BeanDiscoveryFinishedBuildItem(beanDeployment));
transformedAnnotations.produce(new TransformedAnnotationsBuildItem(beanDeployment));
invokerFactory.produce(new InvokerFactoryBuildItem(beanDeployment));
return new BeanRegistrationPhaseBuildItem(registrationContext, beanProcessor);
}
// PHASE 3 - register synthetic observers
@BuildStep
public ObserverRegistrationPhaseBuildItem registerSyntheticObservers(BeanRegistrationPhaseBuildItem beanRegistrationPhase,
List<BeanConfiguratorBuildItem> beanConfigurators,
BuildProducer<ReflectiveMethodBuildItem> reflectiveMethods,
BuildProducer<ReflectiveFieldBuildItem> reflectiveFields,
BuildProducer<UnremovableBeanBuildItem> unremovableBeans,
BuildProducer<ValidationPhaseBuildItem.ValidationErrorBuildItem> validationErrors) {
for (BeanConfiguratorBuildItem configurator : beanConfigurators) {
// Just make sure the configurator is processed
configurator.getValues().forEach(BeanConfigurator::done);
}
BeanProcessor beanProcessor = beanRegistrationPhase.getBeanProcessor();
beanProcessor.registerSyntheticInjectionPoints(beanRegistrationPhase.getContext());
// Initialize the type -> bean map
beanProcessor.getBeanDeployment().initBeanByTypeMap();
ObserverRegistrar.RegistrationContext registrationContext = beanProcessor.registerSyntheticObservers();
return new ObserverRegistrationPhaseBuildItem(registrationContext, beanProcessor);
}
// PHASE 4 - initialize and validate the bean deployment
@BuildStep
public ValidationPhaseBuildItem validate(ObserverRegistrationPhaseBuildItem observerRegistrationPhase,
        List<ObserverConfiguratorBuildItem> observerConfigurationRegistry,
        List<UnremovableBeanBuildItem> unremovableBeans,
        BuildProducer<BytecodeTransformerBuildItem> bytecodeTransformer,
        BuildProducer<SynthesisFinishedBuildItem> synthesisFinished) {
    // Force every synthetic observer configurator to be processed first
    observerConfigurationRegistry.forEach(item -> item.getValues().forEach(ObserverConfigurator::done));
    BeanProcessor processor = observerRegistrationPhase.getBeanProcessor();
    // Bean synthesis is complete at this point
    synthesisFinished.produce(new SynthesisFinishedBuildItem(processor.getBeanDeployment()));
    Consumer<BytecodeTransformer> transformerConsumer = new BytecodeTransformerConsumer(bytecodeTransformer);
    processor.initialize(transformerConsumer,
            unremovableBeans.stream().map(UnremovableBeanBuildItem::getPredicate).collect(Collectors.toList()));
    return new ValidationPhaseBuildItem(processor.validate(transformerConsumer), processor);
}
// PHASE 5 - generate resources
// PHASE 5 - generate resources
/**
 * Generates all ArC resources (client proxies, subclasses, bean metadata classes, service
 * providers) and routes them to the appropriate build producers. Also registers qualifiers
 * and interceptor bindings for reflection in native images.
 */
@BuildStep
@Produce(ResourcesGeneratedPhaseBuildItem.class)
public void generateResources(ArcConfig config,
        ValidationPhaseBuildItem validationPhase,
        List<ValidationPhaseBuildItem.ValidationErrorBuildItem> validationErrors,
        BuildProducer<ReflectiveClassBuildItem> reflectiveClasses,
        BuildProducer<ReflectiveMethodBuildItem> reflectiveMethods,
        BuildProducer<ReflectiveFieldBuildItem> reflectiveFields,
        BuildProducer<GeneratedClassBuildItem> generatedClass,
        LiveReloadBuildItem liveReloadBuildItem,
        BuildProducer<GeneratedResourceBuildItem> generatedResource,
        BuildProducer<BytecodeTransformerBuildItem> bytecodeTransformer,
        List<ReflectiveBeanClassBuildItem> reflectiveBeanClasses,
        ExecutorService buildExecutor) throws Exception {
    // Forward any validation errors collected by other build steps into the validation context
    for (ValidationErrorBuildItem validationError : validationErrors) {
        for (Throwable error : validationError.getValues()) {
            validationPhase.getContext().addDeploymentProblem(error);
        }
    }
    BeanProcessor beanProcessor = validationPhase.getBeanProcessor();
    beanProcessor.processValidationErrors(validationPhase.getContext());
    ExistingClasses existingClasses = liveReloadBuildItem.getContextObject(ExistingClasses.class);
    if (existingClasses == null || !liveReloadBuildItem.isLiveReload()) {
        // Reset the data if there is no context object or if the first start was unsuccessful
        existingClasses = new ExistingClasses();
        liveReloadBuildItem.setContextObject(ExistingClasses.class, existingClasses);
    }
    Consumer<BytecodeTransformer> bytecodeTransformerConsumer = new BytecodeTransformerConsumer(bytecodeTransformer);
    Set<DotName> reflectiveBeanClassesNames = reflectiveBeanClasses.stream().map(ReflectiveBeanClassBuildItem::getClassName)
            .collect(Collectors.toSet());
    // Parallel generation is on by default; this system property is an escape hatch
    boolean parallelResourceGeneration = Boolean
            .parseBoolean(System.getProperty("quarkus.arc.parallel-resource-generation", "true"));
    long start = System.nanoTime();
    ExecutorService executor = parallelResourceGeneration ? buildExecutor : null;
    List<ResourceOutput.Resource> resources;
    resources = beanProcessor.generateResources(new ReflectionRegistration() {
        @Override
        public void registerMethod(String declaringClass, String name, String... params) {
            reflectiveMethods.produce(new ReflectiveMethodBuildItem(getClass().getName(), declaringClass, name, params));
        }

        @Override
        public void registerMethod(MethodInfo methodInfo) {
            reflectiveMethods.produce(new ReflectiveMethodBuildItem(getClass().getName(), methodInfo));
        }

        @Override
        public void registerField(FieldInfo fieldInfo) {
            reflectiveFields.produce(new ReflectiveFieldBuildItem(getClass().getName(), fieldInfo));
        }

        @Override
        public void registerClientProxy(DotName beanClassName, String clientProxyName) {
            if (reflectiveBeanClassesNames.contains(beanClassName)) {
                // Fields should never be registered for client proxies
                reflectiveClasses
                        .produce(ReflectiveClassBuildItem.builder(clientProxyName)
                                .reason(getClass().getName())
                                .methods().build());
            }
        }

        @Override
        public void registerSubclass(DotName beanClassName, String subclassName) {
            if (reflectiveBeanClassesNames.contains(beanClassName)) {
                // Fields should never be registered for subclasses
                reflectiveClasses
                        .produce(ReflectiveClassBuildItem.builder(subclassName)
                                .reason(getClass().getName())
                                .methods().build());
            }
        }
    }, existingClasses.existingClasses, bytecodeTransformerConsumer,
            config.shouldEnableBeanRemoval() && config.detectUnusedFalsePositives(), executor);
    // Route each generated resource: classes go to the class output, service providers to META-INF/services
    for (ResourceOutput.Resource resource : resources) {
        switch (resource.getType()) {
            case JAVA_CLASS:
                LOGGER.debugf("Add %s class: %s", (resource.isApplicationClass() ? "APP" : "FWK"),
                        resource.getFullyQualifiedName());
                generatedClass.produce(new GeneratedClassBuildItem(resource.isApplicationClass(), resource.getName(),
                        resource.getData(), resource.getSource()));
                if (!resource.isApplicationClass()) {
                    // Framework classes survive live reload; remember them so they are not regenerated
                    existingClasses.existingClasses.add(resource.getName());
                }
                break;
            case SERVICE_PROVIDER:
                generatedResource.produce(
                        new GeneratedResourceBuildItem("META-INF/services/" + resource.getName(), resource.getData()));
                break;
            default:
                break;
        }
    }
    LOGGER.debugf("Generated %s resources in %s ms", resources.size(),
            TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start));
    // Register all qualifiers for reflection to support type-safe resolution at runtime in native image
    for (ClassInfo qualifier : beanProcessor.getBeanDeployment().getQualifiers()) {
        reflectiveClasses
                .produce(ReflectiveClassBuildItem.builder(qualifier.name().toString())
                        .reason(getClass().getName())
                        .methods().build());
    }
    // Register all interceptor bindings for reflection so that AnnotationLiteral.equals() works in a native image
    for (ClassInfo binding : beanProcessor.getBeanDeployment().getInterceptorBindings()) {
        reflectiveClasses
                .produce(ReflectiveClassBuildItem.builder(binding.name().toString())
                        .reason(getClass().getName())
                        .methods().build());
    }
}
// PHASE 6 - initialize the container
// Boot the ArC container during static init, once all resources have been generated.
@BuildStep
@Consume(ResourcesGeneratedPhaseBuildItem.class)
@Record(STATIC_INIT)
public ArcContainerBuildItem initializeContainer(ArcConfig config, ArcRecorder recorder,
        ShutdownContextBuildItem shutdown, Optional<CurrentContextFactoryBuildItem> currentContextFactory,
        LaunchModeBuildItem launchMode)
        throws Exception {
    // An absent custom context factory is passed through as null to the recorder
    ArcContainer container = recorder.initContainer(shutdown,
            currentContextFactory.map(CurrentContextFactoryBuildItem::getFactory).orElse(null),
            config.strictCompatibility(), launchMode.isTest());
    return new ArcContainerBuildItem(container);
}
// Wrap the ArC container in a BeanContainer and notify all registered listeners.
@BuildStep
@Record(STATIC_INIT)
public PreBeanContainerBuildItem notifyBeanContainerListeners(ArcContainerBuildItem container,
        List<BeanContainerListenerBuildItem> beanContainerListenerBuildItems, ArcRecorder recorder) throws Exception {
    var listeners = beanContainerListenerBuildItems.stream()
            .map(BeanContainerListenerBuildItem::getBeanContainerListener)
            .collect(Collectors.toList());
    BeanContainer beanContainer = recorder.initBeanContainer(container.getContainer(), listeners);
    return new PreBeanContainerBuildItem(beanContainer);
}
// Publish the final BeanContainerBuildItem; when a JVM startup-optimizer archive was
// requested, the recorder takes over generation (and may exit the process) first.
@Record(RUNTIME_INIT)
@BuildStep
public void signalBeanContainerReady(JvmStartupOptimizerArchiveRecorder recorder, PreBeanContainerBuildItem bi,
        Optional<JvmStartupOptimizerArchiveRequestedBuildItem> jvmStartupOptimizerArchiveRequested,
        BuildProducer<BeanContainerBuildItem> beanContainerProducer) {
    jvmStartupOptimizerArchiveRequested.ifPresent(ignored -> recorder.controlGenerationAndExit());
    beanContainerProducer.produce(new BeanContainerBuildItem(bi.getValue()));
}
// Archives that contain any of these marker resources take part in bean discovery.
@BuildStep
List<AdditionalApplicationArchiveMarkerBuildItem> marker() {
    AdditionalApplicationArchiveMarkerBuildItem beansXml = new AdditionalApplicationArchiveMarkerBuildItem(
            "META-INF/beans.xml");
    AdditionalApplicationArchiveMarkerBuildItem portableExtensions = new AdditionalApplicationArchiveMarkerBuildItem(
            "META-INF/services/jakarta.enterprise.inject.spi.Extension");
    AdditionalApplicationArchiveMarkerBuildItem buildCompatibleExtensions = new AdditionalApplicationArchiveMarkerBuildItem(
            "META-INF/services/jakarta.enterprise.inject.build.compatible.spi.BuildCompatibleExtension");
    return Arrays.asList(beansXml, portableExtensions, buildCompatibleExtensions);
}
// Hand the build-time executor proxy to the runtime so ArC can schedule async work on it.
@BuildStep
@Record(value = RUNTIME_INIT)
void setupExecutor(ExecutorBuildItem executor, ArcRecorder recorder) {
    recorder.initExecutor(executor.getExecutorProxy());
}
// Registers the producer that exposes the current launch mode as an injectable bean.
@BuildStep
AdditionalBeanBuildItem launchMode() {
    return new AdditionalBeanBuildItem(LaunchModeProducer.class);
}
// Registers the producer that makes loggers injectable.
@BuildStep
AdditionalBeanBuildItem loggerProducer() {
    return new AdditionalBeanBuildItem(LoggerProducer.class);
}
// Aggregates the annotation names of all registered custom scopes into a single build item.
@BuildStep
CustomScopeAnnotationsBuildItem exposeCustomScopeNames(List<CustomScopeBuildItem> customScopes) {
    Set<DotName> names = customScopes.stream()
            .map(CustomScopeBuildItem::getAnnotationName)
            .collect(Collectors.toSet());
    return new CustomScopeAnnotationsBuildItem(names);
}
/**
 * Builds one {@link Predicate} per pattern string. Four pattern forms are supported:
 * <ul>
 * <li>{@code com.acme.*} - exact package match</li>
 * <li>{@code com.acme.**} - package prefix match</li>
 * <li>{@code com.acme.Foo} - fully qualified class name match</li>
 * <li>{@code Foo} - simple class name match</li>
 * </ul>
 * Replaced the verbose anonymous {@code Predicate} classes with lambdas; the file already
 * relies on lambdas and method references elsewhere, and behavior is unchanged.
 *
 * @param types the raw pattern strings, never null
 * @return one predicate per input pattern, in input order
 */
private List<Predicate<ClassInfo>> initClassPredicates(List<String> types) {
    final String packMatch = ".*";
    final String packStarts = ".**";
    List<Predicate<ClassInfo>> predicates = new ArrayList<>();
    for (String val : types) {
        if (val.endsWith(packMatch)) {
            // Package matches exactly
            final String pack = val.substring(0, val.length() - packMatch.length());
            predicates.add(c -> DotNames.packageName(c.name()).equals(pack));
        } else if (val.endsWith(packStarts)) {
            // Package starts with the given prefix
            final String prefix = val.substring(0, val.length() - packStarts.length());
            predicates.add(c -> DotNames.packageName(c.name()).startsWith(prefix));
        } else if (val.contains(".")) {
            // Fully qualified name matches
            predicates.add(c -> c.name().toString().equals(val));
        } else {
            // Simple name matches
            predicates.add(c -> DotNames.simpleName(c).equals(val));
        }
    }
    return predicates;
}
// Classes annotated with @QuarkusMain become singleton beans.
@BuildStep
BeanDefiningAnnotationBuildItem quarkusMain() {
    return new BeanDefiningAnnotationBuildItem(DotName.createSimple(QuarkusMain.class.getName()), DotNames.SINGLETON);
}
// Make all classes implementing AsyncObserverExceptionHandler unremovable so the handler
// resolved at runtime is never pruned by bean-removal optimization.
@BuildStep
UnremovableBeanBuildItem unremovableAsyncObserverExceptionHandlers() {
    return UnremovableBeanBuildItem.beanTypes(Set.of(ASYNC_OBSERVER_EXCEPTION_HANDLER));
}
/**
 * Validates that exactly one {@code AsyncObserverExceptionHandler} bean is resolvable:
 * reports an error if none is found (should not happen thanks to the default implementation)
 * or if resolution is ambiguous.
 */
@BuildStep
void validateAsyncObserverExceptionHandlers(ValidationPhaseBuildItem validationPhase,
        BuildProducer<ValidationErrorBuildItem> errors) {
    BeanResolver resolver = validationPhase.getBeanProcessor().getBeanDeployment().getBeanResolver();
    try {
        BeanInfo bean = resolver.resolveAmbiguity(
                resolver.resolveBeans(Type.create(ASYNC_OBSERVER_EXCEPTION_HANDLER, org.jboss.jandex.Type.Kind.CLASS)));
        if (bean == null) {
            // This should never happen because of the default impl
            errors.produce(new ValidationErrorBuildItem(
                    new UnsatisfiedResolutionException("AsyncObserverExceptionHandler bean not found")));
        }
    } catch (AmbiguousResolutionException e) {
        // More than one handler with the same priority; surface as a deployment problem
        errors.produce(new ValidationErrorBuildItem(e));
    }
}
// Registers the ArC context provider with MicroProfile Context Propagation, unless disabled.
@BuildStep
void registerContextPropagation(ArcConfig config, BuildProducer<ThreadContextProviderBuildItem> threadContextProvider) {
    if (config.contextPropagation().enabled()) {
        threadContextProvider.produce(new ThreadContextProviderBuildItem(ArcContextProvider.class));
    }
}
// Registers an ArC shutdown listener when the shutdown delay feature is enabled at build time.
@BuildStep
void registerPreShutdownListener(ShutdownBuildTimeConfig shutdownBuildTimeConfig,
        BuildProducer<ShutdownListenerBuildItem> shutdownListenerBuildItemBuildProducer) {
    if (shutdownBuildTimeConfig.delayEnabled()) {
        shutdownListenerBuildItemBuildProducer.produce(new ShutdownListenerBuildItem(new ArcShutdownListener()));
    }
}
Predicate<ClassInfo> createQuarkusComponentTestExcludePredicate(IndexView index) {
// Exlude static nested classed declared on a QuarkusComponentTest:
// 1. Test
|
would
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/vectors/DiversifyingNearestChildrenKnnCollector.java
|
{
"start": 5555,
"end": 12608
}
|
/**
 * A bounded min-heap of (child, parent, score) entries that additionally caches, per parent
 * node id, the entry's current position in the heap. The cache lets an existing parent's
 * entry be updated in place (score improved) without a linear search.
 *
 * <p>The heap array is 1-based: {@code heapNodes[0]} is unused and the root lives at index 1.
 * Once {@link #popToDrain()} has been called the heap is "closed" and no further inserts
 * are accepted.
 *
 * <p>Not thread-safe.
 */
class ____ {
    private final int maxSize;
    private ParentChildScore[] heapNodes;
    private int size = 0;
    // Used to keep track of nodeId -> positionInHeap. This way when new scores are added for a
    // node, the heap can be
    // updated efficiently.
    private final IntIntHashMap nodeIdHeapIndex;
    private boolean closed = false;

    NodeIdCachingHeap(int maxSize) {
        final int heapSize;
        if (maxSize < 1 || maxSize >= ArrayUtil.MAX_ARRAY_LENGTH) {
            // Throw exception to prevent confusing OOME:
            throw new IllegalArgumentException("maxSize must be > 0 and < " + (ArrayUtil.MAX_ARRAY_LENGTH - 1) + "; got: " + maxSize);
        }
        // NOTE: we add +1 because all access to heap is 1-based not 0-based. heap[0] is unused.
        heapSize = maxSize + 1;
        this.maxSize = maxSize;
        this.nodeIdHeapIndex = new IntIntHashMap(maxSize);
        this.heapNodes = new ParentChildScore[heapSize];
    }

    /** Returns the child id of the minimum entry, or {@code Integer.MAX_VALUE} if the heap is empty. */
    public final int topNode() {
        if (size == 0) {
            return Integer.MAX_VALUE;
        }
        return heapNodes[1].child;
    }

    /** Returns the score of the minimum entry, or {@code Float.NEGATIVE_INFINITY} if the heap is empty. */
    public final float topScore() {
        if (size == 0) {
            return Float.NEGATIVE_INFINITY;
        }
        return heapNodes[1].score;
    }

    // Appends a new entry at the bottom of the heap (growing the array if needed) and
    // restores the heap invariant by bubbling it up.
    private void pushIn(int nodeId, int parentId, float score) {
        size++;
        if (size == heapNodes.length) {
            heapNodes = ArrayUtil.grow(heapNodes, (size * 3 + 1) / 2);
        }
        heapNodes[size] = new ParentChildScore(nodeId, parentId, score);
        upHeap(size);
    }

    // Replaces the entry at heapIndex (same parent, new child/score) and re-heapifies in
    // whichever direction the new score requires.
    private void updateElement(int heapIndex, int nodeId, int parentId, float score) {
        ParentChildScore oldValue = heapNodes[heapIndex];
        assert oldValue.parent == parentId : "attempted to update heap element value but with a different node id";
        float oldScore = heapNodes[heapIndex].score;
        heapNodes[heapIndex] = new ParentChildScore(nodeId, parentId, score);
        // Since we are a min heap, if the new value is less, we need to make sure to bubble it up
        if (score < oldScore) {
            upHeap(heapIndex);
        } else {
            downHeap(heapIndex);
        }
    }

    /**
     * Adds a value to a heap in log(size) time. If the number of values would exceed the heap's
     * maxSize, the least value is discarded.
     *
     * <p>If `node` already exists in the heap, this will return true if the stored score is updated
     * OR the heap is not currently at the maxSize.
     *
     * @return whether the value was added or updated
     */
    public boolean insertWithOverflow(int node, int parentNode, float score) {
        if (closed) {
            throw new IllegalStateException();
        }
        // Fast path: the parent already has an entry -> update in place if the new score is better
        int index = nodeIdHeapIndex.indexOf(parentNode);
        if (index >= 0) {
            int previousNodeIndex = nodeIdHeapIndex.indexGet(index);
            if (heapNodes[previousNodeIndex].score < score) {
                updateElement(previousNodeIndex, node, parentNode, score);
                return true;
            }
            return false;
        }
        if (size >= maxSize) {
            // Heap full: reject unless the new entry beats the current minimum
            // (ties broken by preferring the lower child id)
            if (score < heapNodes[1].score || (score == heapNodes[1].score && node > heapNodes[1].child)) {
                return false;
            }
            updateTop(node, parentNode, score);
            return true;
        }
        pushIn(node, parentNode, score);
        return true;
    }

    // Removes the minimum entry and closes the heap for further inserts. The parent-index
    // cache is intentionally NOT updated (the heap is drain-only from now on).
    private void popToDrain() {
        closed = true;
        if (size > 0) {
            heapNodes[1] = heapNodes[size]; // move last to first
            size--;
            downHeapWithoutCacheUpdate(1); // adjust heap
        } else {
            throw new IllegalStateException("The heap is empty");
        }
    }

    // Replaces the root entry (evicting the current minimum's parent from the cache) and sifts down.
    private void updateTop(int nodeId, int parentId, float score) {
        nodeIdHeapIndex.remove(heapNodes[1].parent);
        heapNodes[1] = new ParentChildScore(nodeId, parentId, score);
        downHeap(1);
    }

    /** Returns the number of elements currently stored in the PriorityQueue. */
    public final int size() {
        return size;
    }

    // Sifts the entry at origPos up toward the root, keeping the parent-index cache in sync.
    private void upHeap(int origPos) {
        int i = origPos;
        ParentChildScore bottomNode = heapNodes[i];
        int j = i >>> 1;
        while (j > 0 && bottomNode.compareTo(heapNodes[j]) < 0) {
            heapNodes[i] = heapNodes[j];
            nodeIdHeapIndex.put(heapNodes[i].parent, i);
            i = j;
            j = j >>> 1;
        }
        nodeIdHeapIndex.put(bottomNode.parent, i);
        heapNodes[i] = bottomNode;
    }

    // Sifts the entry at i down toward the leaves, keeping the parent-index cache in sync.
    private void downHeap(int i) {
        ParentChildScore node = heapNodes[i];
        int j = i << 1; // find smaller child
        int k = j + 1;
        if (k <= size && heapNodes[k].compareTo(heapNodes[j]) < 0) {
            j = k;
        }
        while (j <= size && heapNodes[j].compareTo(node) < 0) {
            heapNodes[i] = heapNodes[j];
            nodeIdHeapIndex.put(heapNodes[i].parent, i);
            i = j;
            j = i << 1;
            k = j + 1;
            if (k <= size && heapNodes[k].compareTo(heapNodes[j]) < 0) {
                j = k;
            }
        }
        nodeIdHeapIndex.put(node.parent, i);
        heapNodes[i] = node; // install saved value
    }

    // Same as downHeap but skips cache maintenance; only valid once the heap is closed
    // for draining (see popToDrain).
    private void downHeapWithoutCacheUpdate(int i) {
        ParentChildScore node = heapNodes[i];
        int j = i << 1; // find smaller child
        int k = j + 1;
        if (k <= size && heapNodes[k].compareTo(heapNodes[j]) < 0) {
            j = k;
        }
        while (j <= size && heapNodes[j].compareTo(node) < 0) {
            heapNodes[i] = heapNodes[j];
            i = j;
            j = i << 1;
            k = j + 1;
            if (k <= size && heapNodes[k].compareTo(heapNodes[j]) < 0) {
                j = k;
            }
        }
        heapNodes[i] = node; // install saved value
    }
}
/**
 * Immutable triple of child node id, parent node id and stored score. Ordering is by
 * ascending score; on score ties the entry with the HIGHER child id compares as smaller,
 * so lower child ids are preferred when entries are evicted from a min-heap.
 */
private record ParentChildScore(int child, int parent, float score) implements Comparable<ParentChildScore> {
    @Override
    public int compareTo(ParentChildScore other) {
        int byScore = Float.compare(score, other.score);
        // lower numbers are the tiebreakers, lower ids are preferred.
        return byScore == 0 ? Integer.compare(other.child, child) : byScore;
    }
}
}
|
NodeIdCachingHeap
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/steps/ConfigGenerationBuildStep.java
|
{
"start": 18544,
"end": 51788
}
|
class ____ instantiates MP Config and holds all the config objects
*/
/**
 * Generates the runtime configuration class from the build-time config read result.
 * Also reports unknown build properties (outside dev/test) and short-circuits on live reload.
 */
@BuildStep
void generateConfigClass(
        ConfigurationBuildItem configItem,
        LaunchModeBuildItem launchModeBuildItem,
        BuildProducer<GeneratedClassBuildItem> generatedClass,
        LiveReloadBuildItem liveReloadBuildItem) {
    // So it only reports during the build, because it is very likely that the property is available in runtime
    // and, it will be caught by the RuntimeConfig and log double warnings
    if (!launchModeBuildItem.getLaunchMode().isDevOrTest()) {
        ConfigDiagnostic.unknownProperties(configItem.getReadResult().getUnknownBuildProperties());
    }

    // TODO - Test live reload with ConfigSource
    if (liveReloadBuildItem.isLiveReload()) {
        // The generated class already exists from the previous start; regenerating is unnecessary
        return;
    }

    RunTimeConfigurationGenerator.GenerateOperation
            .builder()
            .setBuildTimeReadResult(configItem.getReadResult())
            .setClassOutput(new GeneratedClassGizmoAdaptor(generatedClass, false))
            .setLiveReloadPossible(launchModeBuildItem.getLaunchMode() == LaunchMode.DEVELOPMENT
                    || launchModeBuildItem.isAuxiliaryApplication())
            .build()
            .run();
}
// Config keys that legitimately differ between build time and runtime; suppress the
// "build time config changed" warning for each of them.
@BuildStep
public void suppressNonRuntimeConfigChanged(
        BuildProducer<SuppressNonRuntimeConfigChangedWarningBuildItem> suppressNonRuntimeConfigChanged) {
    String[] suppressedKeys = {
            "quarkus.profile",
            "quarkus.default-locale",
            "quarkus.locales",
            "quarkus.test.arg-line"
    };
    for (String key : suppressedKeys) {
        suppressNonRuntimeConfigChanged.produce(new SuppressNonRuntimeConfigChangedWarningBuildItem(key));
    }
}
// Releases the config instance when the application shuts down.
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
public void releaseConfigOnShutdown(ShutdownContextBuildItem shutdownContext,
        ConfigRecorder recorder) {
    recorder.releaseConfig(shutdownContext);
}
/**
 * Warns if build time config properties have been changed at runtime.
 * Records the build-time-run-time fixed values (minus suppressed keys) so the runtime
 * can compare them against the actual runtime configuration.
 */
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
public void checkForBuildTimeConfigChange(
        RecorderContext recorderContext,
        ConfigRecorder recorder,
        ConfigurationBuildItem configItem,
        List<SuppressNonRuntimeConfigChangedWarningBuildItem> suppressNonRuntimeConfigChangedWarningItems) {
    // Allow SmallRye ConfigValue instances to be recorded in bytecode
    recorderContext.registerSubstitution(io.smallrye.config.ConfigValue.class, QuarkusConfigValue.class,
            QuarkusConfigValue.Substitution.class);

    Set<String> excludedConfigKeys = new HashSet<>(suppressNonRuntimeConfigChangedWarningItems.size());
    for (SuppressNonRuntimeConfigChangedWarningBuildItem item : suppressNonRuntimeConfigChangedWarningItems) {
        excludedConfigKeys.add(item.getConfigKey());
    }

    List<ConfigValue> values = new ArrayList<>();
    for (Map.Entry<String, ConfigValue> entry : configItem.getReadResult().getBuildTimeRunTimeValues().entrySet()) {
        if (excludedConfigKeys.contains(entry.getKey())) {
            continue;
        }
        ConfigValue value = entry.getValue();
        // Only name, value and source ordinal are needed for the runtime comparison
        values.add(ConfigValue.builder()
                .withName(value.getName())
                .withValue(value.getValue())
                .withConfigSourceOrdinal(value.getConfigSourceOrdinal())
                .build());
    }
    recorder.handleConfigChange(values);
}
// Generates the holder class backing RuntimeOverrideConfigSource (dev/test only): a single
// public static volatile Map field that can be populated to override configuration at runtime.
@BuildStep(onlyIfNot = { IsProduction.class })
public void setupConfigOverride(
        BuildProducer<GeneratedClassBuildItem> generatedClassBuildItemBuildProducer) {

    ClassOutput classOutput = new GeneratedClassGizmoAdaptor(generatedClassBuildItemBuildProducer, true);
    try (ClassCreator clazz = ClassCreator.builder().classOutput(classOutput)
            .className(RuntimeOverrideConfigSource.GENERATED_CLASS_NAME).build()) {
        clazz.getFieldCreator(RuntimeOverrideConfigSource.FIELD_NAME, Map.class)
                .setModifiers(Modifier.STATIC | Modifier.PUBLIC | Modifier.VOLATILE);
    }
}
/**
 * Registers every configuration file that should trigger a hot redeploy when changed in
 * dev mode: the standard application/microprofile files, their profile-specific variants,
 * .env files, and anything referenced via smallrye.config.locations.
 */
@BuildStep
public void watchConfigFiles(BuildProducer<HotDeploymentWatchedFileBuildItem> watchedFiles) {
    List<String> configWatchedFiles = new ArrayList<>();
    SmallRyeConfig config = ConfigProvider.getConfig().unwrap(SmallRyeConfig.class);

    String userDir = System.getProperty("user.dir");

    // Main files
    configWatchedFiles.add("application.properties");
    configWatchedFiles.add("META-INF/microprofile-config.properties");
    configWatchedFiles.add(Paths.get(userDir, ".env").toAbsolutePath().toString());
    configWatchedFiles.add(Paths.get(userDir, "config", "application.properties").toAbsolutePath().toString());

    // Profiles
    for (String profile : config.getProfiles()) {
        configWatchedFiles.add(String.format("application-%s.properties", profile));
        configWatchedFiles.add(String.format("META-INF/microprofile-config-%s.properties", profile));
        configWatchedFiles.add(Paths.get(userDir, String.format(".env-%s", profile)).toAbsolutePath().toString());
        configWatchedFiles.add(Paths.get(userDir, "config", String.format("application-%s.properties", profile))
                .toAbsolutePath().toString());
    }

    // Additional locations configured via smallrye.config.locations (files or directories)
    Optional<List<URI>> optionalLocations = config.getOptionalValues(SMALLRYE_CONFIG_LOCATIONS, URI.class);
    optionalLocations.ifPresent(locations -> {
        for (URI location : locations) {
            Path path = location.getScheme() != null && location.getScheme().equals("file") ? Paths.get(location)
                    : Paths.get(location.getPath());
            if (Files.isRegularFile(path)) {
                configWatchedFiles.add(path.toAbsolutePath().toString());
                // Also watch the profile-specific sibling files
                for (String profile : config.getProfiles()) {
                    configWatchedFiles.add(appendProfileToFilename(path.toAbsolutePath(), profile));
                }
            } else if (Files.isDirectory(path)) {
                // For a directory location, watch every regular file directly inside it
                try (DirectoryStream<Path> files = Files.newDirectoryStream(path, Files::isRegularFile)) {
                    for (Path file : files) {
                        configWatchedFiles.add(file.toAbsolutePath().toString());
                    }
                } catch (IOException e) {
                    // Ignore
                }
            }
        }
    });

    for (String configWatchedFile : configWatchedFiles) {
        watchedFiles.produce(new HotDeploymentWatchedFileBuildItem(configWatchedFile));
    }
}
// Reports usage of @Deprecated config-mapping properties: build-time mappings are checked
// immediately at build, runtime mappings are recorded for checking at runtime init.
@BuildStep
@Record(RUNTIME_INIT)
void reportDeprecatedMappingProperties(ConfigRecorder configRecorder, ConfigurationBuildItem configBuildItem) {
    // Build Time
    List<ConfigClass> visibleBuildTimeMappings = new ArrayList<>();
    visibleBuildTimeMappings.addAll(configBuildItem.getReadResult().getBuildTimeMappings());
    visibleBuildTimeMappings.addAll(configBuildItem.getReadResult().getBuildTimeRunTimeMappings());
    Map<String, String> deprecatedProperties = deprecatedProperties(visibleBuildTimeMappings);
    ConfigDiagnostic.deprecatedProperties(deprecatedProperties);

    // Runtime
    Map<String, String> runtimeDeprecatedProperties = deprecatedProperties(
            configBuildItem.getReadResult().getRunTimeMappings());
    configRecorder.deprecatedProperties(runtimeDeprecatedProperties);
}
/**
 * Collects every config property whose accessor method is annotated with {@link Deprecated}
 * across the given config classes. The map value is reserved for a replacement/Javadoc
 * message and is currently always {@code null} (TODO - add javadoc message).
 */
private static Map<String, String> deprecatedProperties(List<ConfigClass> configClasses) {
    Map<String, String> result = new HashMap<>();
    for (ConfigClass configClass : configClasses) {
        for (Map.Entry<String, ConfigMappingInterface.Property> property : ConfigMappings.getProperties(configClass)
                .entrySet()) {
            if (property.getValue().getMethod().getAnnotation(Deprecated.class) != null) {
                result.put(property.getKey(), null);
            }
        }
    }
    return result;
}
/**
 * Detects configuration files that will never be read (e.g. misnamed files in config
 * locations). Checked at build time here; also checked at runtime init outside dev/test,
 * since the config folder and locations may differ between build and runtime.
 */
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
void unknownConfigFiles(
        ApplicationArchivesBuildItem applicationArchives,
        LaunchModeBuildItem launchModeBuildItem,
        ConfigRecorder configRecorder) throws Exception {
    Set<Path> buildTimeFiles = new HashSet<>();
    PathCollection rootDirectories = applicationArchives.getRootArchive().getRootDirectories();
    for (Path directory : rootDirectories) {
        buildTimeFiles.addAll(ConfigDiagnostic.configFiles(directory));
    }
    buildTimeFiles.addAll(ConfigDiagnostic.configFilesFromLocations());

    // Report always at build time since config folder and locations may differ from build to runtime
    ConfigDiagnostic.unknownConfigFiles(buildTimeFiles);

    // No need to include the application files, because they don't change
    if (!launchModeBuildItem.getLaunchMode().isDevOrTest()) {
        configRecorder.unknownConfigFiles();
    }
}
// Native image only: records the build-time profiles so the runtime can warn if the
// application is started with a different profile than it was built with.
@BuildStep(onlyIf = NativeOrNativeSourcesBuild.class)
@Record(ExecutionTime.RUNTIME_INIT)
void warnDifferentProfileUsedBetweenBuildAndRunTime(ConfigRecorder configRecorder) {
    SmallRyeConfig config = ConfigProvider.getConfig().unwrap(SmallRyeConfig.class);
    configRecorder.handleNativeProfileChange(config.getProfiles());
}
// Production builds only: persists a report of every config option read during the build.
// Writing is deferred to a closeable so it runs at the very end of the build, after all
// steps have had a chance to read configuration.
@BuildStep(onlyIf = IsProduction.class)
void persistReadConfigOptions(
        BuildProducer<ArtifactResultBuildItem> dummy,
        QuarkusBuildCloseablesBuildItem closeables,
        BuildSystemTargetBuildItem buildSystemTargetBuildItem,
        ConfigurationBuildItem configBuildItem,
        ConfigTrackingConfig configTrackingConfig) {
    var readOptionsProvider = configBuildItem.getReadResult().getReadOptionsProvider();
    if (readOptionsProvider != null) {
        closeables.add(new Closeable() {
            @Override
            public void close() throws IOException {
                ConfigTrackingWriter.write(
                        readOptionsProvider.getReadOptions(),
                        configTrackingConfig,
                        configBuildItem.getReadResult(),
                        ConfigUtils.getProfiles(),
                        buildSystemTargetBuildItem.getOutputDirectory());
            }
        });
    }
}
// Inserts the profile name before the file extension,
// e.g. /x/application.properties + "dev" -> /x/application-dev.properties.
private String appendProfileToFilename(Path path, String activeProfile) {
    return getPathWithoutExtension(path) + "-" + activeProfile + "." + getFileExtension(path);
}
// Returns the extension of the file name (text after the last '.'), or "" if there is none.
private static String getFileExtension(Path path) {
    Objects.requireNonNull(path, "path should not be null");
    String fileName = path.getFileName().toString();
    int dotIndex = fileName.lastIndexOf('.');
    if (dotIndex == -1) {
        return "";
    }
    return fileName.substring(dotIndex + 1);
}
/**
 * Returns the full path string with the file extension removed, or the path unchanged if
 * the file name has no extension.
 *
 * <p>Fix: the previous implementation searched for the last {@code '.'} in the WHOLE path
 * string, so a dot inside a directory name (e.g. {@code /home/user.name/config}) was
 * wrongly treated as an extension separator when the file name itself had none. The dot is
 * now located within the file name only, mirroring {@code getFileExtension}.
 */
private static String getPathWithoutExtension(Path path) {
    Objects.requireNonNull(path, "path should not be null");
    String fullPath = path.toString();
    String fileName = path.getFileName().toString();
    int dotIndex = fileName.lastIndexOf('.');
    if (dotIndex == -1) {
        return fullPath;
    }
    // Strip the extension (including the dot) from the tail of the full path
    return fullPath.substring(0, fullPath.length() - (fileName.length() - dotIndex));
}
// Gizmo MethodDescriptor/DotName constants used when generating config-builder bytecode.
// Each WITH_* descriptor points at a static helper on AbstractConfigBuilder that mutates a
// SmallRyeConfigBuilder.
private static final MethodDescriptor NEW_BUILDER = MethodDescriptor.ofConstructor(SmallRyeConfigBuilder.class);
private static final MethodDescriptor BUILD = MethodDescriptor.ofMethod(SmallRyeConfigBuilder.class,
        "build", SmallRyeConfig.class);
private static final MethodDescriptor GET_CONFIG_MAPPING = MethodDescriptor.ofMethod(SmallRyeConfig.class,
        "getConfigMapping", Object.class, Class.class, String.class);
private static final MethodDescriptor BUILDER_CUSTOMIZER = MethodDescriptor.ofMethod(SmallRyeConfigBuilderCustomizer.class,
        "configBuilder",
        void.class, SmallRyeConfigBuilder.class);
private static final MethodDescriptor WITH_DEFAULTS = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "withDefaultValues",
        void.class, SmallRyeConfigBuilder.class, Map.class);
private static final MethodDescriptor WITH_RUNTIME_VALUES = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "withRuntimeValues",
        void.class, SmallRyeConfigBuilder.class, Map.class);
private static final MethodDescriptor WITH_CONVERTER = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "withConverter",
        void.class, SmallRyeConfigBuilder.class, String.class, int.class, Converter.class);
private static final MethodDescriptor WITH_INTERCEPTOR = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "withInterceptor",
        void.class, SmallRyeConfigBuilder.class, ConfigSourceInterceptor.class);
private static final MethodDescriptor WITH_INTERCEPTOR_FACTORY = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "withInterceptorFactory",
        void.class, SmallRyeConfigBuilder.class, ConfigSourceInterceptorFactory.class);
private static final MethodDescriptor WITH_SOURCE = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "withSource",
        void.class, SmallRyeConfigBuilder.class, ConfigSource.class);
private static final MethodDescriptor WITH_SOURCE_PROVIDER = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "withSource",
        void.class, SmallRyeConfigBuilder.class, ConfigSourceProvider.class);
private static final MethodDescriptor WITH_SOURCE_FACTORY = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "withSource",
        void.class, SmallRyeConfigBuilder.class, ConfigSourceFactory.class);
private static final MethodDescriptor WITH_SECRET_HANDLER = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "withSecretKeyHandler",
        void.class, SmallRyeConfigBuilder.class, SecretKeysHandler.class);
private static final MethodDescriptor WITH_SECRET_HANDLER_FACTORY = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "withSecretKeyHandler",
        void.class, SmallRyeConfigBuilder.class, SecretKeysHandlerFactory.class);
private static final MethodDescriptor WITH_MAPPING = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "withMapping",
        void.class, SmallRyeConfigBuilder.class, ConfigClass.class);
private static final MethodDescriptor WITH_MAPPING_INSTANCE = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "withMappingInstance",
        void.class, SmallRyeConfigBuilder.class, ConfigClass.class, Object.class);
private static final MethodDescriptor WITH_MAPPING_INSTANCE_FROM_CONFIG = MethodDescriptor.ofMethod(
        AbstractConfigBuilder.class,
        "withMappingInstance",
        void.class, SmallRyeConfigBuilder.class, ConfigClass.class);
private static final MethodDescriptor WITH_MAPPING_IGNORE = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "withMappingIgnore",
        void.class, SmallRyeConfigBuilder.class, String.class);
private static final MethodDescriptor WITH_CUSTOMIZER = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "withCustomizer",
        void.class, SmallRyeConfigBuilder.class, SmallRyeConfigBuilderCustomizer.class);
private static final MethodDescriptor WITH_BUILDER = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "withBuilder",
        void.class, SmallRyeConfigBuilder.class, ConfigBuilder.class);
private static final MethodDescriptor CONFIG_CLASS = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "configClass",
        ConfigClass.class, String.class, String.class);
private static final MethodDescriptor ENSURE_LOADED = MethodDescriptor.ofMethod(AbstractConfigBuilder.class,
        "ensureLoaded",
        void.class, String.class);
// Plain HashMap construction/population, used when emitting default/runtime value maps
private static final MethodDescriptor MAP_NEW = MethodDescriptor.ofConstructor(HashMap.class, int.class);
private static final MethodDescriptor MAP_PUT = MethodDescriptor.ofMethod(HashMap.class,
        "put",
        Object.class, Object.class, Object.class);
private static final DotName CONVERTER_NAME = DotName.createSimple(Converter.class.getName());
private static final DotName PRIORITY_NAME = DotName.createSimple(Priority.class.getName());
private static Map<Object, FieldDescriptor> generateSharedConfig(
BuildProducer<GeneratedClassBuildItem> generatedClass,
CombinedIndexBuildItem combinedIndex,
Set<String> converters,
Set<ConfigClass> mappings,
Set<ConfigClass> staticMappings) {
Map<Object, FieldDescriptor> fields = new HashMap<>();
try (ClassCreator classCreator = ClassCreator.builder()
.classOutput(new GeneratedClassGizmoAdaptor(generatedClass, true))
.className("io.quarkus.runtime.generated.SharedConfig")
.superClass(AbstractConfigBuilder.class)
.setFinal(true)
.build()) {
MethodCreator clinit = classCreator.getMethodCreator("<clinit>", void.class);
clinit.setModifiers(ACC_STATIC);
int converterIndex = 0;
for (String converter : converters) {
String fieldName = "conv$" + converterIndex++;
FieldDescriptor converterField = classCreator.getFieldCreator(fieldName, Converter.class)
.setModifiers(ACC_STATIC).getFieldDescriptor();
clinit.writeStaticField(converterField, clinit.newInstance(MethodDescriptor.ofConstructor(converter)));
fields.put(converter, converterField);
}
int configClassIndex = 0;
for (ConfigClass mapping : mappings) {
FieldDescriptor configClassField = classCreator
.getFieldCreator("configClass$" + configClassIndex++, ConfigClass.class)
.setModifiers(ACC_STATIC).getFieldDescriptor();
clinit.writeStaticField(configClassField, clinit.invokeStaticMethod(CONFIG_CLASS,
clinit.load(mapping.getType().getName()), clinit.load(mapping.getPrefix())));
// Cache implementation types of nested elements
List<ConfigMappingMetadata> configMappingsMetadata = ConfigMappingLoader
.getConfigMappingsMetadata(mapping.getType());
for (ConfigMappingMetadata configMappingMetadata : configMappingsMetadata) {
clinit.invokeStaticMethod(ENSURE_LOADED, clinit.load(configMappingMetadata.getInterfaceType().getName()));
}
fields.put(mapping, configClassField);
}
// init build and runtime fixed mappings
ResultHandle configBuilder = clinit.newInstance(NEW_BUILDER);
clinit.invokeStaticMethod(MethodDescriptor.ofMethod(AbstractConfigBuilder.class, "withSharedBuilder", void.class,
SmallRyeConfigBuilder.class), configBuilder);
for (String converter : converters) {
ClassInfo converterClass = combinedIndex.getComputingIndex().getClassByName(converter);
Type type = getConverterType(converterClass, combinedIndex);
AnnotationInstance priorityAnnotation = converterClass.annotation(PRIORITY_NAME);
int priority = priorityAnnotation != null ? priorityAnnotation.value().asInt() : 100;
clinit.invokeStaticMethod(WITH_CONVERTER, configBuilder,
clinit.load(type.name().toString()),
clinit.load(priority),
clinit.readStaticField(fields.get(converter)));
}
for (ConfigClass mapping : staticMappings) {
clinit.invokeStaticMethod(WITH_MAPPING, configBuilder, clinit.readStaticField(fields.get(mapping)));
}
clinit.invokeStaticMethod(WITH_BUILDER, configBuilder, clinit.newInstance(
MethodDescriptor.ofConstructor("io.quarkus.runtime.generated.BuildTimeRunTimeFixedConfigSourceBuilder")));
ResultHandle config = clinit.invokeVirtualMethod(BUILD, configBuilder);
int mappingIndex = 0;
for (ConfigClass mapping : staticMappings) {
FieldDescriptor mappingField = classCreator.getFieldCreator("mapping$" + mappingIndex++, mapping.getType())
.setModifiers(ACC_STATIC).getFieldDescriptor();
clinit.writeStaticField(mappingField, clinit.invokeVirtualMethod(GET_CONFIG_MAPPING, config,
clinit.loadClass(mapping.getType()), clinit.load(mapping.getPrefix())));
fields.put(fields.get(mapping), mappingField);
}
clinit.returnVoid();
}
return fields;
}
private static void generateConfigBuilder(
BuildProducer<GeneratedClassBuildItem> generatedClass,
BuildProducer<ReflectiveClassBuildItem> reflectiveClass,
String className,
CombinedIndexBuildItem combinedIndex,
Map<Object, FieldDescriptor> sharedFields,
Map<String, String> defaultValues,
Map<String, String> runtimeValues,
Set<String> converters,
Set<String> interceptors,
Set<String> interceptorFactories,
Set<String> configSources,
Set<String> configSourceProviders,
Set<String> configSourceFactories,
Set<String> secretKeyHandlers,
Set<String> secretKeyHandlerFactories,
Set<ConfigClass> mappingsShared,
Set<ConfigClass> mappingsInstances,
Set<ConfigClass> mappings,
Set<String> mappingsIgnorePaths,
Set<String> configCustomizers,
Set<String> configBuilders) {
// First generate a customizer with all components to ensure order
try (ClassCreator classCreator = ClassCreator.builder()
.classOutput(new GeneratedClassGizmoAdaptor(generatedClass, true))
.className(className + "Customizer")
.superClass(AbstractConfigBuilder.class)
.interfaces(SmallRyeConfigBuilderCustomizer.class)
.setFinal(true)
.build()) {
MethodCreator clinit = classCreator.getMethodCreator("<clinit>", void.class);
clinit.setModifiers(ACC_STATIC);
MethodCreator method = classCreator.getMethodCreator(BUILDER_CUSTOMIZER);
ResultHandle configBuilder = method.getMethodParam(0);
FieldDescriptor defaultsField = classCreator.getFieldCreator("defaults", Map.class).setModifiers(ACC_STATIC)
.getFieldDescriptor();
clinit.writeStaticField(defaultsField,
clinit.newInstance(MAP_NEW, clinit.load((int) ((float) defaultValues.size() / 0.75f + 1.0f))));
for (Map.Entry<String, String> entry : defaultValues.entrySet()) {
clinit.invokeVirtualMethod(MAP_PUT, clinit.readStaticField(defaultsField), clinit.load(entry.getKey()),
clinit.load(entry.getValue()));
}
method.invokeStaticMethod(WITH_DEFAULTS, configBuilder, method.readStaticField(defaultsField));
FieldDescriptor runtimeValuesField = classCreator.getFieldCreator("runtimeValues", Map.class)
.setModifiers(ACC_STATIC).getFieldDescriptor();
clinit.writeStaticField(runtimeValuesField,
clinit.newInstance(MAP_NEW, clinit.load((int) ((float) runtimeValues.size() / 0.75f + 1.0f))));
for (Map.Entry<String, String> entry : runtimeValues.entrySet()) {
clinit.invokeVirtualMethod(MAP_PUT, clinit.readStaticField(runtimeValuesField), clinit.load(entry.getKey()),
clinit.load(entry.getValue()));
}
method.invokeStaticMethod(WITH_RUNTIME_VALUES, configBuilder, method.readStaticField(runtimeValuesField));
for (String converter : converters) {
ClassInfo converterClass = combinedIndex.getComputingIndex().getClassByName(converter);
Type type = getConverterType(converterClass, combinedIndex);
AnnotationInstance priorityAnnotation = converterClass.annotation(PRIORITY_NAME);
int priority = priorityAnnotation != null ? priorityAnnotation.value().asInt() : 100;
method.invokeStaticMethod(WITH_CONVERTER, configBuilder,
method.load(type.name().toString()),
method.load(priority),
method.readStaticField(sharedFields.get(converter)));
}
for (String interceptor : interceptors) {
method.invokeStaticMethod(WITH_INTERCEPTOR, configBuilder,
method.newInstance(MethodDescriptor.ofConstructor(interceptor)));
}
for (String interceptorFactory : interceptorFactories) {
method.invokeStaticMethod(WITH_INTERCEPTOR_FACTORY, configBuilder,
method.newInstance(MethodDescriptor.ofConstructor(interceptorFactory)));
}
for (String configSource : configSources) {
method.invokeStaticMethod(WITH_SOURCE, configBuilder,
method.newInstance(MethodDescriptor.ofConstructor(configSource)));
}
for (String configSourceProvider : configSourceProviders) {
method.invokeStaticMethod(WITH_SOURCE_PROVIDER, configBuilder,
method.newInstance(MethodDescriptor.ofConstructor(configSourceProvider)));
}
for (String configSourceFactory : configSourceFactories) {
method.invokeStaticMethod(WITH_SOURCE_FACTORY, configBuilder,
method.newInstance(MethodDescriptor.ofConstructor(configSourceFactory)));
}
for (String secretKeyHandler : secretKeyHandlers) {
method.invokeStaticMethod(WITH_SECRET_HANDLER, configBuilder,
method.newInstance(MethodDescriptor.ofConstructor(secretKeyHandler)));
}
for (String secretKeyHandlerFactory : secretKeyHandlerFactories) {
method.invokeStaticMethod(WITH_SECRET_HANDLER_FACTORY, configBuilder,
method.newInstance(MethodDescriptor.ofConstructor(secretKeyHandlerFactory)));
}
for (ConfigClass mappingShared : mappingsShared) {
FieldDescriptor configClassField = sharedFields.get(mappingShared);
FieldDescriptor mappingInstanceField = sharedFields.get(configClassField);
method.invokeStaticMethod(WITH_MAPPING_INSTANCE, configBuilder, method.readStaticField(configClassField),
method.readStaticField(mappingInstanceField));
}
for (ConfigClass mappingInstance : mappingsInstances) {
FieldDescriptor configClassField = sharedFields.get(mappingInstance);
method.invokeStaticMethod(WITH_MAPPING_INSTANCE_FROM_CONFIG, configBuilder,
method.readStaticField(configClassField));
}
mappings.removeAll(mappingsInstances);
for (ConfigClass mapping : mappings) {
method.invokeStaticMethod(WITH_MAPPING, configBuilder, method.readStaticField(sharedFields.get(mapping)));
}
for (String path : mappingsIgnorePaths) {
method.invokeStaticMethod(WITH_MAPPING_IGNORE, configBuilder, method.load(path));
}
clinit.returnVoid();
method.returnVoid();
}
configCustomizers.add(className + "Customizer");
try (ClassCreator classCreator = ClassCreator.builder()
.classOutput(new GeneratedClassGizmoAdaptor(generatedClass, true))
.className(className)
.superClass(AbstractConfigBuilder.class)
.interfaces(SmallRyeConfigBuilderCustomizer.class)
.setFinal(true)
.build()) {
MethodCreator method = classCreator.getMethodCreator(BUILDER_CUSTOMIZER);
ResultHandle configBuilder = method.getMethodParam(0);
for (String configCustomizer : configCustomizers) {
method.invokeStaticMethod(WITH_CUSTOMIZER, configBuilder,
method.newInstance(MethodDescriptor.ofConstructor(configCustomizer)));
}
for (String builder : configBuilders) {
method.invokeStaticMethod(WITH_BUILDER, configBuilder,
method.newInstance(MethodDescriptor.ofConstructor(builder)));
}
method.returnVoid();
}
reflectiveClass.produce(ReflectiveClassBuildItem.builder(className).build());
}
private static Set<String> discoverService(
Class<?> serviceClass,
BuildProducer<ReflectiveClassBuildItem> reflectiveClass) throws IOException {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
Set<String> services = new LinkedHashSet<>();
for (String service : classNamesNamedIn(classLoader, SERVICES_PREFIX + serviceClass.getName())) {
// The discovery includes deployment modules, so we only include services available at runtime
if (QuarkusClassLoader.isClassPresentAtRuntime(service)) {
services.add(service);
reflectiveClass.produce(ReflectiveClassBuildItem.builder(service).build());
}
}
return services;
}
private static Set<String> staticSafeServices(Set<String> services) {
ClassLoader classloader = Thread.currentThread().getContextClassLoader();
Set<String> staticSafe = new LinkedHashSet<>();
for (String service : services) {
// SmallRye Config services are always safe, but they cannot be annotated with @StaticInitSafe
if (service.startsWith("io.smallrye.config.")) {
staticSafe.add(service);
continue;
}
try {
Class<?> serviceClass = classloader.loadClass(service);
if (serviceClass.isAnnotationPresent(StaticInitSafe.class)) {
staticSafe.add(service);
}
} catch (ClassNotFoundException e) {
// Ignore
}
}
return staticSafe;
}
private static Set<ConfigClass> staticSafeConfigMappings(List<ConfigMappingBuildItem> configMappings) {
return configMappings.stream()
.filter(ConfigMappingBuildItem::isStaticInitSafe)
.map(ConfigMappingBuildItem::toConfigClass)
.collect(toSet());
}
private static Set<ConfigClass> runtimeConfigMappings(List<ConfigMappingBuildItem> configMappings) {
return configMappings.stream()
.map(ConfigMappingBuildItem::toConfigClass)
.collect(toSet());
}
private static Type getConverterType(final ClassInfo converter, final CombinedIndexBuildItem combinedIndex) {
if (converter.name().toString().equals(Object.class.getName())) {
throw new IllegalArgumentException(
"Can not add converter " + converter.name() + " that is not parameterized with a type");
}
for (Type type : converter.interfaceTypes()) {
if (type instanceof ParameterizedType) {
ParameterizedType parameterizedType = type.asParameterizedType();
if (parameterizedType.name().equals(CONVERTER_NAME)) {
List<Type> arguments = parameterizedType.arguments();
if (arguments.size() != 1) {
throw new IllegalArgumentException(
"Converter " + converter.name() + " must be parameterized with a single type");
}
return arguments.get(0);
}
}
}
return getConverterType(combinedIndex.getComputingIndex().getClassByName(converter.superName()), combinedIndex);
}
}
|
that
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/JsonProperties.java
|
{
"start": 3258,
"end": 3520
}
|
class ____ {
static {
Accessor.setAccessor(new JsonPropertiesAccessor() {
@Override
protected void addProp(JsonProperties props, String name, JsonNode value) {
props.addProp(name, value);
}
});
}
public static
|
JsonProperties
|
java
|
spring-projects__spring-boot
|
module/spring-boot-web-server/src/main/java/org/springframework/boot/web/server/servlet/context/ServletComponentRegisteringPostProcessor.java
|
{
"start": 2221,
"end": 6072
}
|
class ____
implements BeanFactoryPostProcessor, ApplicationContextAware, BeanFactoryInitializationAotProcessor {
private static final boolean MOCK_SERVLET_CONTEXT_AVAILABLE = ClassUtils
.isPresent("org.springframework.mock.web.MockServletContext", null);
private static final List<ServletComponentHandler> HANDLERS;
static {
List<ServletComponentHandler> servletComponentHandlers = new ArrayList<>();
servletComponentHandlers.add(new WebServletHandler());
servletComponentHandlers.add(new WebFilterHandler());
servletComponentHandlers.add(new WebListenerHandler());
HANDLERS = Collections.unmodifiableList(servletComponentHandlers);
}
private final Set<String> packagesToScan;
@SuppressWarnings("NullAway.Init")
private ApplicationContext applicationContext;
ServletComponentRegisteringPostProcessor(Set<String> packagesToScan) {
this.packagesToScan = packagesToScan;
}
@Override
public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
if (eligibleForServletComponentScanning()) {
ClassPathScanningCandidateComponentProvider componentProvider = createComponentProvider();
for (String packageToScan : this.packagesToScan) {
scanPackage(componentProvider, packageToScan);
}
}
}
private void scanPackage(ClassPathScanningCandidateComponentProvider componentProvider, String packageToScan) {
for (BeanDefinition candidate : componentProvider.findCandidateComponents(packageToScan)) {
if (candidate instanceof AnnotatedBeanDefinition annotatedBeanDefinition) {
for (ServletComponentHandler handler : HANDLERS) {
handler.handle(annotatedBeanDefinition, (BeanDefinitionRegistry) this.applicationContext);
}
}
}
}
private boolean eligibleForServletComponentScanning() {
return this.applicationContext instanceof WebApplicationContext webApplicationContext
&& (webApplicationContext.getServletContext() == null || (MOCK_SERVLET_CONTEXT_AVAILABLE
&& webApplicationContext.getServletContext() instanceof MockServletContext));
}
private ClassPathScanningCandidateComponentProvider createComponentProvider() {
ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider(
false);
componentProvider.setEnvironment(this.applicationContext.getEnvironment());
componentProvider.setResourceLoader(this.applicationContext);
for (ServletComponentHandler handler : HANDLERS) {
componentProvider.addIncludeFilter(handler.getTypeFilter());
}
return componentProvider;
}
Set<String> getPackagesToScan() {
return Collections.unmodifiableSet(this.packagesToScan);
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
@Override
public BeanFactoryInitializationAotContribution processAheadOfTime(ConfigurableListableBeanFactory beanFactory) {
return (generationContext, beanFactoryInitializationCode) -> {
for (String beanName : beanFactory.getBeanDefinitionNames()) {
BeanDefinition definition = beanFactory.getBeanDefinition(beanName);
if (Objects.equals(definition.getBeanClassName(),
WebListenerHandler.ServletComponentWebListenerRegistrar.class.getName())) {
ValueHolder firstArgument = definition.getConstructorArgumentValues()
.getArgumentValue(0, String.class);
Assert.notNull(firstArgument, "'firstArgument' must not be null");
String listenerClassName = (String) firstArgument.getValue();
Assert.state(listenerClassName != null, "'listenerClassName' must not be null");
generationContext.getRuntimeHints()
.reflection()
.registerType(TypeReference.of(listenerClassName), MemberCategory.INVOKE_DECLARED_CONSTRUCTORS);
}
}
};
}
}
|
ServletComponentRegisteringPostProcessor
|
java
|
quarkusio__quarkus
|
extensions/panache/hibernate-reactive-panache-common/runtime/src/main/java/io/quarkus/hibernate/reactive/panache/common/runtime/CommonPanacheQueryImpl.java
|
{
"start": 1086,
"end": 7232
}
|
class ____<Entity> {
private Object paramsArrayOrMap;
/**
* this is the HQL query expanded from the Panache-Query
*/
private String query;
/**
* this is the original Panache-Query, if any (can be null)
*/
private String originalQuery;
/**
* This is only used by the Spring Data JPA extension, due to Spring's Query annotation allowing a custom count query
* See https://docs.spring.io/spring-data/jpa/reference/jpa/query-methods.html#jpa.query-methods.at-query.native
* Otherwise we do not use this, and rely on ORM to generate count queries
*/
protected String customCountQueryForSpring;
private String orderBy;
private Uni<Mutiny.Session> em;
private Page page;
private Uni<Long> count;
private Range range;
private LockModeType lockModeType;
private Map<String, Object> hints;
private Map<String, Map<String, Object>> filters;
private Class<?> projectionType;
public CommonPanacheQueryImpl(Uni<Mutiny.Session> em, String query, String originalQuery, String orderBy,
Object paramsArrayOrMap) {
this.em = em;
this.query = query;
this.originalQuery = originalQuery;
this.orderBy = orderBy;
this.paramsArrayOrMap = paramsArrayOrMap;
}
private CommonPanacheQueryImpl(CommonPanacheQueryImpl<?> previousQuery, String newQueryString,
String customCountQueryForSpring,
Class<?> projectionType) {
this.em = previousQuery.em;
this.query = newQueryString;
this.customCountQueryForSpring = customCountQueryForSpring;
this.orderBy = previousQuery.orderBy;
this.paramsArrayOrMap = previousQuery.paramsArrayOrMap;
this.page = previousQuery.page;
this.count = previousQuery.count;
this.range = previousQuery.range;
this.lockModeType = previousQuery.lockModeType;
this.hints = previousQuery.hints;
this.filters = previousQuery.filters;
this.projectionType = projectionType;
}
// Builder
public <T> CommonPanacheQueryImpl<T> project(Class<T> type) {
String selectQuery = query;
if (PanacheJpaUtil.isNamedQuery(query)) {
selectQuery = NamedQueryUtil.getNamedQuery(query.substring(1));
}
String lowerCasedTrimmedQuery = PanacheJpaUtil.trimForAnalysis(selectQuery);
if (lowerCasedTrimmedQuery.startsWith("select new ")
|| lowerCasedTrimmedQuery.startsWith("select distinct new ")) {
throw new PanacheQueryException("Unable to perform a projection on a 'select [distinct]? new' query: " + query);
}
// If the query starts with a select clause, we pass it on to ORM which can handle that via a projection type
if (lowerCasedTrimmedQuery.startsWith("select ")) {
// I think projections do not change the result count, so we can keep the custom count query
return new CommonPanacheQueryImpl<>(this, query, customCountQueryForSpring, type);
}
// FIXME: this assumes the query starts with "FROM " probably?
// build select clause with a constructor expression
String selectClause = "SELECT " + getParametersFromClass(type, null);
// I think projections do not change the result count, so we can keep the custom count query
return new CommonPanacheQueryImpl<>(this, selectClause + selectQuery, customCountQueryForSpring, null);
}
private StringBuilder getParametersFromClass(Class<?> type, String parentParameter) {
StringBuilder selectClause = new StringBuilder();
Constructor<?> constructor = getConstructor(type);
selectClause.append("new ").append(type.getName()).append(" (");
String parametersListStr = Stream.of(constructor.getParameters())
.map(parameter -> getParameterName(type, parentParameter, parameter))
.collect(Collectors.joining(","));
selectClause.append(parametersListStr);
selectClause.append(") ");
return selectClause;
}
private Constructor<?> getConstructor(Class<?> type) {
Constructor<?>[] typeConstructors = type.getDeclaredConstructors();
//We start to look for constructors with @ProjectedConstructor
for (Constructor<?> typeConstructor : typeConstructors) {
if (typeConstructor.isAnnotationPresent(ProjectedConstructor.class)) {
return typeConstructor;
}
}
//If didn't find anything early,
//we try to find a constructor with parameters annotated with @ProjectedFieldName
for (Constructor<?> typeConstructor : typeConstructors) {
for (Parameter parameter : typeConstructor.getParameters()) {
if (parameter.isAnnotationPresent(ProjectedFieldName.class)) {
return typeConstructor;
}
}
}
//We fall back to the first constructor that has parameters
for (Constructor<?> typeConstructor : typeConstructors) {
Parameter[] parameters = typeConstructor.getParameters();
if (parameters.length == 0) {
continue;
}
return typeConstructor;
}
//If everything fails, we return the first available constructor
return typeConstructors[0];
}
private String getParameterName(Class<?> parentType, String parentParameter, Parameter parameter) {
String parameterName;
// Check if constructor param is annotated with ProjectedFieldName
if (hasProjectedFieldName(parameter)) {
parameterName = getNameFromProjectedFieldName(parameter);
} else if (!parameter.isNamePresent()) {
throw new PanacheQueryException(
"Your application must be built with parameter names, this should be the default if" +
" using Quarkus project generation. Check the Maven or Gradle compiler configuration to include '-parameters'.");
} else {
// Check if
|
CommonPanacheQueryImpl
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/security/inheritance/multiple/pathonbase/BaseResource_Second_Interface.java
|
{
"start": 951,
"end": 1677
}
|
interface ____
extends BaseResource_Third_Interface {
@POST
@Path(MULTIPLE_INHERITANCE + CLASS_PATH_ON_RESOURCE + IMPL_ON_BASE + SECOND_INTERFACE + INTERFACE_METHOD_WITH_PATH
+ METHOD_ROLES_ALLOWED_PATH)
String multipleInheritance_ClassPathOnBase_ImplOnBase_SecondInterface_InterfaceMethodWithPath_MethodRolesAllowed(
JsonObject array);
@POST
@Path(MULTIPLE_INHERITANCE + CLASS_PATH_ON_RESOURCE + IMPL_ON_BASE + SECOND_INTERFACE + INTERFACE_METHOD_WITH_PATH
+ NO_SECURITY_ANNOTATION_PATH)
String multipleInheritance_ClassPathOnBase_ImplOnBase_SecondInterface_InterfaceMethodWithPath_NoAnnotation(
JsonObject array);
}
|
BaseResource_Second_Interface
|
java
|
micronaut-projects__micronaut-core
|
core/src/main/java/io/micronaut/core/io/service/ServiceDefinition.java
|
{
"start": 855,
"end": 1584
}
|
class ____ of the service
*/
String getName();
/**
* @return is the service present
*/
default boolean isPresent() {
return false;
}
/**
* Load the service of throw the given exception.
*
* @param exceptionSupplier The exception supplier
* @param <X> The exception type
* @return The instance
* @throws X The exception concrete type
*/
default <X extends Throwable> T orElseThrow(Supplier<? extends X> exceptionSupplier) throws X {
try {
return load();
} catch (Throwable err) {
throw exceptionSupplier.get();
}
}
/**
* @return load the service
*/
T load();
}
|
name
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/utils/RangeInputFormat.java
|
{
"start": 1222,
"end": 2112
}
|
class ____ extends GenericInputFormat<RowData> implements NonParallelInput {
private static final long serialVersionUID = 1L;
private long start;
private long end;
private transient long current;
private transient BoxedWrapperRowData reuse;
public RangeInputFormat(long start, long end) {
this.start = start;
this.end = end;
}
@Override
public boolean reachedEnd() throws IOException {
return current >= end;
}
@Override
public void open(GenericInputSplit split) throws IOException {
super.open(split);
this.current = start;
}
@Override
public RowData nextRecord(RowData ignore) throws IOException {
if (reuse == null) {
reuse = new BoxedWrapperRowData(1);
}
reuse.setLong(0, current);
current++;
return reuse;
}
}
|
RangeInputFormat
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/proxy/MappedSuperclassWithEmbeddableTest.java
|
{
"start": 3064,
"end": 3456
}
|
class ____ extends BaseEntity {
@Id
private String id;
private String name;
public TestEntity() {
}
private TestEntity(String id, String name) {
this.id = id;
this.name = name;
EmbeddedValue value = new EmbeddedValue( "SUPER " + name );
setSuperField( value );
}
public String id() {
return id;
}
public String name() {
return name;
}
}
}
|
TestEntity
|
java
|
apache__camel
|
core/camel-management/src/test/java/org/apache/camel/management/ManagedRouteAutoStartupTest.java
|
{
"start": 1333,
"end": 2993
}
|
class ____ extends ManagementTestSupport {
@Test
public void testManagedCamelContext() throws Exception {
MBeanServer mbeanServer = getMBeanServer();
ObjectName on = getContextObjectName();
ObjectName onFoo = getCamelObjectName(TYPE_ROUTE, "foo");
ObjectName onBar = getCamelObjectName(TYPE_ROUTE, "bar");
assertTrue(mbeanServer.isRegistered(on), "Should be registered");
String name = (String) mbeanServer.getAttribute(on, "CamelId");
assertEquals(context.getManagementName(), name);
String state = (String) mbeanServer.getAttribute(onFoo, "State");
assertEquals("Stopped", state);
state = (String) mbeanServer.getAttribute(onBar, "State");
assertEquals("Started", state);
// start the route
mbeanServer.invoke(onFoo, "start", null, null);
state = (String) mbeanServer.getAttribute(onFoo, "State");
assertEquals("Started", state);
Object reply = mbeanServer.invoke(on, "requestBody", new Object[] { "direct:foo", "Hello World" },
new String[] { "java.lang.String", "java.lang.Object" });
assertEquals("Bye World", reply);
// stop Camel
mbeanServer.invoke(on, "stop", null, null);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:bar").routeId("bar").to("mock:bar");
from("direct:foo").routeId("foo").autoStartup(false).transform(constant("Bye World"));
}
};
}
}
|
ManagedRouteAutoStartupTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/validation/beanvalidation/BeanValidationBeanRegistrationAotProcessorTests.java
|
{
"start": 10137,
"end": 10231
}
|
class ____ {
Iterable<BeanWithRecursiveIterable> iterable;
}
static
|
BeanWithRecursiveIterable
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/visitors/data/Sort.java
|
{
"start": 1097,
"end": 2934
}
|
interface ____ {
/**
* Constant for unsorted.
*/
Sort UNSORTED = new DefaultSort();
/**
* @return Is sorting applied
*/
boolean isSorted();
/**
* Orders by the specified property name (defaults to ascending).
*
* @param propertyName The property name to order by
* @return A new sort with the order applied
*/
Sort order(String propertyName);
/**
* Adds an order object.
*
* @param order The order object
* @return A new sort with the order applied
*/
Sort order(Order order);
/**
* Orders by the specified property name and direction.
*
* @param propertyName The property name to order by
* @param direction Either "asc" for ascending or "desc" for descending
*
* @return A new sort with the order applied
*/
Sort order(String propertyName, Order.Direction direction);
/**
* @return The order definitions for this sort.
*/
List<Order> getOrderBy();
/**
* @return Default unsorted sort instance.
*/
static Sort unsorted() {
return UNSORTED;
}
/**
* Create a sort from the given list of orders.
*
* @param orderList The order list
* @return The sort
*/
static Sort of(List<Order> orderList) {
if (CollectionUtils.isEmpty(orderList)) {
return UNSORTED;
}
return new DefaultSort(orderList);
}
/**
* Creates a sort from an array orders.
* @param orders The orders
* @return The orders
*/
static Sort of(Order... orders) {
if (ArrayUtils.isEmpty(orders)) {
return UNSORTED;
} else {
return new DefaultSort(Arrays.asList(orders));
}
}
/**
* The ordering of results.
*/
|
Sort
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java
|
{
"start": 3041,
"end": 35114
}
|
class ____ extends AbstractNonTextScriptFieldTypeTestCase {
private static final Boolean MALFORMED_BOOLEAN = null;
private static final Boolean EMPTY_STR_BOOLEAN = false;
@Override
protected ScriptFactory parseFromSource() {
return BooleanFieldScript.PARSE_FROM_SOURCE;
}
@Override
protected ScriptFactory dummyScript() {
return BooleanFieldScriptTests.DUMMY;
}
@Override
public void testDocValues() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}"))));
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true, false]}"))));
List<Long> results = new ArrayList<>();
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
BooleanScriptFieldType ft = simpleMappedFieldType();
BooleanScriptFieldData ifd = ft.fielddataBuilder(mockFielddataContext()).build(null, null);
searcher.search(new MatchAllDocsQuery(), new Collector() {
@Override
public ScoreMode scoreMode() {
return ScoreMode.COMPLETE_NO_SCORES;
}
@Override
public LeafCollector getLeafCollector(LeafReaderContext context) {
SortedNumericLongValues dv = ifd.load(context).getLongValues();
return new LeafCollector() {
@Override
public void setScorer(Scorable scorer) {}
@Override
public void collect(int doc) throws IOException {
if (dv.advanceExact(doc)) {
for (int i = 0; i < dv.docValueCount(); i++) {
results.add(dv.nextValue());
}
}
}
};
}
});
assertThat(results, containsInAnyOrder(1L, 0L, 1L));
}
}
}
@Override
public void testSort() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}"))));
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
BooleanScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null);
SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false);
TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf));
StoredFields storedFields = reader.storedFields();
assertThat(
storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(),
equalTo("{\"foo\": [false]}")
);
assertThat(
storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(),
equalTo("{\"foo\": [true]}")
);
}
}
}
@Override
public void testUsedInScript() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}"))));
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
{
SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType());
assertThat(
searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() {
@Override
public boolean needs_score() {
return false;
}
@Override
public boolean needs_termStats() {
return false;
}
@Override
public ScoreScript newInstance(DocReader docReader) {
return new ScoreScript(Map.of(), searchContext.lookup(), docReader) {
@Override
public double execute(ExplanationHolder explanation) {
ScriptDocValues.Booleans booleans = (ScriptDocValues.Booleans) getDoc().get("test");
return booleans.get(0) ? 3 : 0;
}
};
}
}, searchContext.lookup(), 2.5f, "test", 0, IndexVersion.current())),
equalTo(1)
);
}
{
SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType());
assertThat(
searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() {
@Override
public boolean needs_score() {
return false;
}
@Override
public boolean needs_termStats() {
return false;
}
@Override
public ScoreScript newInstance(DocReader docReader) {
return new ScoreScript(Map.of(), searchContext.lookup(), docReader) {
@Override
public double execute(ExplanationHolder explanation) {
BooleanDocValuesField booleans = (BooleanDocValuesField) field("test");
return booleans.getInternal(0) ? 3 : 0;
}
};
}
}, searchContext.lookup(), 2.5f, "test", 0, IndexVersion.current())),
equalTo(1)
);
}
}
}
}
@Override
public void testExistsQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}"))));
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}"))));
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true, false]}"))));
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": []}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(3));
}
}
}
@Override
public void testRangeQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
MappedFieldType ft = simpleMappedFieldType();
assertThat(searcher.count(ft.rangeQuery(true, true, true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(false, true, true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(false, true, false, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(false, false, true, true, null, null, null, mockContext())), equalTo(0));
}
}
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
MappedFieldType ft = simpleMappedFieldType();
assertThat(searcher.count(ft.rangeQuery(false, false, true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(false, true, true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(false, true, true, false, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(true, true, true, true, null, null, null, mockContext())), equalTo(0));
}
}
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}"))));
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
MappedFieldType ft = simpleMappedFieldType();
assertThat(searcher.count(ft.rangeQuery(false, false, true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(true, true, true, true, null, null, null, mockContext())), equalTo(1));
assertThat(searcher.count(ft.rangeQuery(false, true, true, true, null, null, null, mockContext())), equalTo(2));
assertThat(searcher.count(ft.rangeQuery(false, false, false, false, null, null, null, mockContext())), equalTo(0));
assertThat(searcher.count(ft.rangeQuery(true, true, false, false, null, null, null, mockContext())), equalTo(0));
}
}
}
public void testRangeQueryDegeneratesIntoNotExpensive() throws IOException {
assertThat(
simpleMappedFieldType().rangeQuery(true, true, false, false, null, null, null, mockContext()),
instanceOf(MatchNoDocsQuery.class)
);
assertThat(
simpleMappedFieldType().rangeQuery(false, false, false, false, null, null, null, mockContext()),
instanceOf(MatchNoDocsQuery.class)
);
// Even if the running the field would blow up because it loops the query *still* just returns none.
assertThat(
loopFieldType().rangeQuery(true, true, false, false, null, null, null, mockContext()),
instanceOf(MatchNoDocsQuery.class)
);
assertThat(
loopFieldType().rangeQuery(false, false, false, false, null, null, null, mockContext()),
instanceOf(MatchNoDocsQuery.class)
);
}
@Override
protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) {
// Builds a random range query that doesn't degenerate into match none
return switch (randomInt(2)) {
case 0 -> ft.rangeQuery(true, true, true, true, null, null, null, ctx);
case 1 -> ft.rangeQuery(false, true, true, true, null, null, null, ctx);
case 2 -> ft.rangeQuery(false, true, false, true, null, null, null, ctx);
default -> throw new UnsupportedOperationException();
};
}
@Override
public void testTermQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().termQuery(true, mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery("true", mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery(false, mockContext())), equalTo(0));
assertThat(
searcher.count(build("xor_param", Map.of("param", false), OnScriptError.FAIL).termQuery(true, mockContext())),
equalTo(1)
);
}
}
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().termQuery(false, mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery("false", mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery(null, mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termQuery(true, mockContext())), equalTo(0));
assertThat(
searcher.count(build("xor_param", Map.of("param", false), OnScriptError.FAIL).termQuery(false, mockContext())),
equalTo(1)
);
}
}
}
@Override
protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) {
return ft.termQuery(randomBoolean(), ctx);
}
@Override
public void testTermsQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(true, true), mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of("true", "true"), mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(false, false), mockContext())), equalTo(0));
assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(true, false), mockContext())), equalTo(1));
}
}
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(false, false), mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of("false", "false"), mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termsQuery(singletonList(null), mockContext())), equalTo(1));
assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(true, true), mockContext())), equalTo(0));
assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(true, false), mockContext())), equalTo(1));
}
}
}
public void testEmptyTermsQueryDegeneratesIntoMatchNone() throws IOException {
assertThat(simpleMappedFieldType().termsQuery(List.of(), mockContext()), instanceOf(MatchNoDocsQuery.class));
}
@Override
protected Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx) {
return switch (randomInt(2)) {
case 0 -> ft.termsQuery(List.of(true), ctx);
case 1 -> ft.termsQuery(List.of(false), ctx);
case 2 -> ft.termsQuery(List.of(false, true), ctx);
default -> throw new UnsupportedOperationException();
};
}
public void testDualingQueries() throws IOException {
BooleanFieldMapper ootb = new BooleanFieldMapper.Builder("foo", ScriptCompiler.NONE, defaultIndexSettings()).build(
MapperBuilderContext.root(false, false)
);
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
List<Boolean> values = randomList(0, 2, ESTestCase::randomBoolean);
String source = "{\"foo\": " + values + "}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON);
DocumentParserContext ctx = new TestDocumentParserContext(MappingLookup.EMPTY, sourceToParse) {
@Override
public XContentParser parser() {
return parser;
}
};
ctx.doc().add(new StoredField("_source", new BytesRef(source)));
ctx.parser().nextToken();
ctx.parser().nextToken();
ctx.parser().nextToken();
while (ctx.parser().nextToken() != Token.END_ARRAY) {
ootb.parse(ctx);
}
addDocument(iw, ctx.doc());
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
assertSameCount(
searcher,
source,
"*",
simpleMappedFieldType().existsQuery(mockContext()),
ootb.fieldType().existsQuery(mockContext())
);
boolean term = randomBoolean();
assertSameCount(
searcher,
source,
term,
simpleMappedFieldType().termQuery(term, mockContext()),
ootb.fieldType().termQuery(term, mockContext())
);
List<Boolean> terms = randomList(0, 3, ESTestCase::randomBoolean);
assertSameCount(
searcher,
source,
terms,
simpleMappedFieldType().termsQuery(terms, mockContext()),
ootb.fieldType().termsQuery(terms, mockContext())
);
boolean low;
boolean high;
if (randomBoolean()) {
low = high = randomBoolean();
} else {
low = false;
high = true;
}
boolean includeLow = randomBoolean();
boolean includeHigh = randomBoolean();
assertSameCount(
searcher,
source,
(includeLow ? "[" : "(") + low + "," + high + (includeHigh ? "]" : ")"),
simpleMappedFieldType().rangeQuery(low, high, includeLow, includeHigh, null, null, null, mockContext()),
ootb.fieldType().rangeQuery(low, high, includeLow, includeHigh, null, null, null, mockContext())
);
}
}
}
}
public void testBlockLoader() throws IOException {
try (
Directory directory = newDirectory();
RandomIndexWriter iw = new RandomIndexWriter(random(), directory, newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE))
) {
iw.addDocuments(
List.of(
List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}"))),
List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))
)
);
try (DirectoryReader reader = iw.getReader()) {
BooleanScriptFieldType fieldType = build("xor_param", Map.of("param", false), OnScriptError.FAIL);
List<Boolean> expected = List.of(false, true);
assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType, 0), equalTo(expected));
assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType, 1), equalTo(expected.subList(1, 2)));
assertThat(blockLoaderReadValuesFromRowStrideReader(reader, fieldType), equalTo(expected));
}
}
}
public void testBlockLoaderSourceOnlyRuntimeField() throws IOException {
try (
Directory directory = newDirectory();
RandomIndexWriter iw = new RandomIndexWriter(random(), directory, newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE))
) {
// given
// try multiple variations of boolean as they're all encoded slightly differently
iw.addDocuments(
List.of(
List.of(new StoredField("_source", new BytesRef("{\"test\": [false]}"))),
List.of(new StoredField("_source", new BytesRef("{\"test\": [true]}"))),
List.of(new StoredField("_source", new BytesRef("{\"test\": [\"false\"]}"))),
List.of(new StoredField("_source", new BytesRef("{\"test\": [\"true\"]}"))),
List.of(new StoredField("_source", new BytesRef("{\"test\": [\"\"]}"))),
// ensure a malformed value doesn't crash
List.of(new StoredField("_source", new BytesRef("{\"test\": [\"potato\"]}")))
)
);
BooleanScriptFieldType fieldType = simpleSourceOnlyMappedFieldType();
List<Boolean> expected = Arrays.asList(false, true, false, true, EMPTY_STR_BOOLEAN, MALFORMED_BOOLEAN);
try (DirectoryReader reader = iw.getReader()) {
// when
BlockLoader loader = fieldType.blockLoader(blContext(Settings.EMPTY, true));
// then
// assert loader is of expected instance type
assertThat(loader, instanceOf(BooleanScriptBlockDocValuesReader.BooleanScriptBlockLoader.class));
// ignored source doesn't support column at a time loading:
var columnAtATimeLoader = loader.columnAtATimeReader(reader.leaves().getFirst());
assertThat(columnAtATimeLoader, instanceOf(BooleanScriptBlockDocValuesReader.class));
var rowStrideReader = loader.rowStrideReader(reader.leaves().getFirst());
assertThat(rowStrideReader, instanceOf(BooleanScriptBlockDocValuesReader.class));
// assert values
assertThat(blockLoaderReadValuesFromColumnAtATimeReader(reader, fieldType, 0), equalTo(expected));
assertThat(blockLoaderReadValuesFromRowStrideReader(reader, fieldType), equalTo(expected));
}
}
}
public void testBlockLoaderSourceOnlyRuntimeFieldWithSyntheticSource() throws IOException {
try (
Directory directory = newDirectory();
RandomIndexWriter iw = new RandomIndexWriter(random(), directory, newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE))
) {
// given
// try multiple variations of boolean as they're all encoded slightly differently
iw.addDocuments(
List.of(
createDocumentWithIgnoredSource("false"),
createDocumentWithIgnoredSource("true"),
createDocumentWithIgnoredSource("[false]"),
createDocumentWithIgnoredSource("[true]"),
createDocumentWithIgnoredSource("[\"false\"]"),
createDocumentWithIgnoredSource("[\"true\"]"),
createDocumentWithIgnoredSource("[\"\"]"),
// ensure a malformed value doesn't crash
createDocumentWithIgnoredSource("[\"potato\"]")
)
);
Settings settings = Settings.builder().put("index.mapping.source.mode", "synthetic").build();
BooleanScriptFieldType fieldType = simpleSourceOnlyMappedFieldType();
List<Boolean> expected = Arrays.asList(false, true, false, true, false, true, EMPTY_STR_BOOLEAN, MALFORMED_BOOLEAN);
try (DirectoryReader reader = iw.getReader()) {
// when
BlockLoader loader = fieldType.blockLoader(blContext(settings, true));
// then
// assert loader is of expected instance type
assertThat(loader, instanceOf(FallbackSyntheticSourceBlockLoader.class));
// ignored source doesn't support column at a time loading:
var columnAtATimeLoader = loader.columnAtATimeReader(reader.leaves().getFirst());
assertThat(columnAtATimeLoader, nullValue());
var rowStrideReader = loader.rowStrideReader(reader.leaves().getFirst());
assertThat(
rowStrideReader.getClass().getName(),
equalTo("org.elasticsearch.index.mapper.FallbackSyntheticSourceBlockLoader$IgnoredSourceRowStrideReader")
);
// assert values
assertThat(blockLoaderReadValuesFromRowStrideReader(settings, reader, fieldType, true), equalTo(expected));
}
}
}
/**
* Returns a source only mapped field type. This is useful, since the available build() function doesn't override isParsedFromSource()
*/
private BooleanScriptFieldType simpleSourceOnlyMappedFieldType() {
Script script = new Script(ScriptType.INLINE, "test", "", emptyMap());
BooleanFieldScript.Factory factory = new BooleanFieldScript.Factory() {
@Override
public BooleanFieldScript.LeafFactory newFactory(
String fieldName,
Map<String, Object> params,
SearchLookup searchLookup,
OnScriptError onScriptError
) {
return ctx -> new BooleanFieldScript(fieldName, params, searchLookup, onScriptError, ctx) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object foo : (List<?>) source.get("test")) {
try {
emit(Booleans.parseBoolean(foo.toString(), false));
} catch (Exception e) {
// skip
}
}
}
};
}
@Override
public boolean isParsedFromSource() {
return true;
}
};
return new BooleanScriptFieldType("test", factory, script, emptyMap(), OnScriptError.FAIL);
}
private void assertSameCount(IndexSearcher searcher, String source, Object queryDescription, Query scriptedQuery, Query ootbQuery)
throws IOException {
assertThat(
"source=" + source + ",query=" + queryDescription + ",scripted=" + scriptedQuery + ",ootb=" + ootbQuery,
searcher.count(scriptedQuery),
equalTo(searcher.count(ootbQuery))
);
}
@Override
protected BooleanScriptFieldType simpleMappedFieldType() {
return build("read_foo", Map.of(), OnScriptError.FAIL);
}
@Override
protected MappedFieldType loopFieldType() {
return build("loop", Map.of(), OnScriptError.FAIL);
}
@Override
protected String typeName() {
return "boolean";
}
protected BooleanScriptFieldType build(String code, Map<String, Object> params, OnScriptError onScriptError) {
Script script = new Script(ScriptType.INLINE, "test", code, params);
return new BooleanScriptFieldType("test", factory(script), script, emptyMap(), onScriptError);
}
private static BooleanFieldScript.Factory factory(Script script) {
return switch (script.getIdOrCode()) {
case "read_foo" -> (fieldName, params, lookup, onScriptError) -> (ctx) -> new BooleanFieldScript(
fieldName,
params,
lookup,
onScriptError,
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object foo : (List<?>) source.get("foo")) {
emit((Boolean) foo);
}
}
};
case "xor_param" -> (fieldName, params, lookup, onScriptError) -> (ctx) -> new BooleanFieldScript(
fieldName,
params,
lookup,
onScriptError,
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object foo : (List<?>) source.get("foo")) {
emit((Boolean) foo ^ ((Boolean) getParams().get("param")));
}
}
};
case "loop" -> (fieldName, params, lookup, onScriptError) -> {
// Indicate that this script wants the field call "test", which *is* the name of this field
lookup.forkAndTrackFieldReferences("test");
throw new IllegalStateException("should have thrown on the line above");
};
case "error" -> (fieldName, params, lookup, onScriptError) -> ctx -> new BooleanFieldScript(
fieldName,
params,
lookup,
onScriptError,
ctx
) {
@Override
public void execute() {
throw new RuntimeException("test error");
}
};
default -> throw new IllegalArgumentException("unsupported script [" + script.getIdOrCode() + "]");
};
}
}
|
BooleanScriptFieldTypeTests
|
java
|
netty__netty
|
transport/src/main/java/io/netty/channel/PendingBytesTracker.java
|
{
"start": 2806,
"end": 3491
}
|
class ____ extends PendingBytesTracker {
private final ChannelOutboundBuffer buffer;
ChannelOutboundBufferPendingBytesTracker(
ChannelOutboundBuffer buffer, MessageSizeEstimator.Handle estimatorHandle) {
super(estimatorHandle);
this.buffer = buffer;
}
@Override
public void incrementPendingOutboundBytes(long bytes) {
buffer.incrementPendingOutboundBytes(bytes);
}
@Override
public void decrementPendingOutboundBytes(long bytes) {
buffer.decrementPendingOutboundBytes(bytes);
}
}
private static final
|
ChannelOutboundBufferPendingBytesTracker
|
java
|
apache__camel
|
components/camel-netty/src/test/java/org/apache/camel/component/netty/NettyConsumerClientModeReconnectTest.java
|
{
"start": 5007,
"end": 5732
}
|
class ____ extends SimpleChannelInboundHandler<String> {
@Override
public void channelActive(ChannelHandlerContext ctx) {
ctx.write("Willem\r\n");
ctx.flush();
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
LOG.warn("Unhandled exception caught: {}", cause.getMessage(), cause);
ctx.close();
}
@Override
protected void channelRead0(ChannelHandlerContext ctx, String msg) {
// Do nothing here
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
ctx.flush();
}
}
private static
|
ServerHandler
|
java
|
netty__netty
|
buffer/src/main/java/io/netty/buffer/ReadOnlyByteBuf.java
|
{
"start": 1263,
"end": 10624
}
|
class ____ extends AbstractDerivedByteBuf {
private final ByteBuf buffer;
public ReadOnlyByteBuf(ByteBuf buffer) {
super(buffer.maxCapacity());
if (buffer instanceof ReadOnlyByteBuf || buffer instanceof DuplicatedByteBuf) {
this.buffer = buffer.unwrap();
} else {
this.buffer = buffer;
}
setIndex(buffer.readerIndex(), buffer.writerIndex());
}
@Override
public boolean isReadOnly() {
return true;
}
@Override
public boolean isWritable() {
return false;
}
@Override
public boolean isWritable(int numBytes) {
return false;
}
@Override
public int ensureWritable(int minWritableBytes, boolean force) {
return 1;
}
@Override
public ByteBuf ensureWritable(int minWritableBytes) {
throw new ReadOnlyBufferException();
}
@Override
public ByteBuf unwrap() {
return buffer;
}
@Override
public ByteBufAllocator alloc() {
return unwrap().alloc();
}
@Override
@Deprecated
public ByteOrder order() {
return unwrap().order();
}
@Override
public boolean isDirect() {
return unwrap().isDirect();
}
@Override
public boolean hasArray() {
return false;
}
@Override
public byte[] array() {
throw new ReadOnlyBufferException();
}
@Override
public int arrayOffset() {
throw new ReadOnlyBufferException();
}
@Override
public boolean hasMemoryAddress() {
return unwrap().hasMemoryAddress();
}
@Override
public long memoryAddress() {
return unwrap().memoryAddress();
}
@Override
public ByteBuf discardReadBytes() {
throw new ReadOnlyBufferException();
}
@Override
public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
throw new ReadOnlyBufferException();
}
@Override
public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) {
throw new ReadOnlyBufferException();
}
@Override
public ByteBuf setBytes(int index, ByteBuffer src) {
throw new ReadOnlyBufferException();
}
@Override
public ByteBuf setByte(int index, int value) {
throw new ReadOnlyBufferException();
}
@Override
protected void _setByte(int index, int value) {
throw new ReadOnlyBufferException();
}
@Override
public ByteBuf setShort(int index, int value) {
throw new ReadOnlyBufferException();
}
@Override
protected void _setShort(int index, int value) {
throw new ReadOnlyBufferException();
}
@Override
public ByteBuf setShortLE(int index, int value) {
throw new ReadOnlyBufferException();
}
@Override
protected void _setShortLE(int index, int value) {
throw new ReadOnlyBufferException();
}
@Override
public ByteBuf setMedium(int index, int value) {
throw new ReadOnlyBufferException();
}
@Override
protected void _setMedium(int index, int value) {
throw new ReadOnlyBufferException();
}
@Override
public ByteBuf setMediumLE(int index, int value) {
throw new ReadOnlyBufferException();
}
@Override
protected void _setMediumLE(int index, int value) {
throw new ReadOnlyBufferException();
}
@Override
public ByteBuf setInt(int index, int value) {
throw new ReadOnlyBufferException();
}
@Override
protected void _setInt(int index, int value) {
throw new ReadOnlyBufferException();
}
@Override
public ByteBuf setIntLE(int index, int value) {
throw new ReadOnlyBufferException();
}
@Override
protected void _setIntLE(int index, int value) {
throw new ReadOnlyBufferException();
}
@Override
public ByteBuf setLong(int index, long value) {
throw new ReadOnlyBufferException();
}
@Override
protected void _setLong(int index, long value) {
throw new ReadOnlyBufferException();
}
@Override
public ByteBuf setLongLE(int index, long value) {
throw new ReadOnlyBufferException();
}
@Override
protected void _setLongLE(int index, long value) {
throw new ReadOnlyBufferException();
}
@Override
public int setBytes(int index, InputStream in, int length) {
throw new ReadOnlyBufferException();
}
@Override
public int setBytes(int index, ScatteringByteChannel in, int length) {
throw new ReadOnlyBufferException();
}
@Override
public int setBytes(int index, FileChannel in, long position, int length) {
throw new ReadOnlyBufferException();
}
@Override
public int getBytes(int index, GatheringByteChannel out, int length)
throws IOException {
return unwrap().getBytes(index, out, length);
}
@Override
public int getBytes(int index, FileChannel out, long position, int length)
throws IOException {
return unwrap().getBytes(index, out, position, length);
}
@Override
public ByteBuf getBytes(int index, OutputStream out, int length)
throws IOException {
unwrap().getBytes(index, out, length);
return this;
}
@Override
public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) {
unwrap().getBytes(index, dst, dstIndex, length);
return this;
}
@Override
public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) {
unwrap().getBytes(index, dst, dstIndex, length);
return this;
}
@Override
public ByteBuf getBytes(int index, ByteBuffer dst) {
unwrap().getBytes(index, dst);
return this;
}
@Override
public ByteBuf duplicate() {
return new ReadOnlyByteBuf(this);
}
@Override
public ByteBuf copy(int index, int length) {
return unwrap().copy(index, length);
}
@Override
public ByteBuf slice(int index, int length) {
return new ReadOnlyByteBuf(unwrap().slice(index, length));
}
@Override
public byte getByte(int index) {
return unwrap().getByte(index);
}
@Override
protected byte _getByte(int index) {
return unwrap().getByte(index);
}
@Override
public short getShort(int index) {
return unwrap().getShort(index);
}
@Override
protected short _getShort(int index) {
return unwrap().getShort(index);
}
@Override
public short getShortLE(int index) {
return unwrap().getShortLE(index);
}
@Override
protected short _getShortLE(int index) {
return unwrap().getShortLE(index);
}
@Override
public int getUnsignedMedium(int index) {
return unwrap().getUnsignedMedium(index);
}
@Override
protected int _getUnsignedMedium(int index) {
return unwrap().getUnsignedMedium(index);
}
@Override
public int getUnsignedMediumLE(int index) {
return unwrap().getUnsignedMediumLE(index);
}
@Override
protected int _getUnsignedMediumLE(int index) {
return unwrap().getUnsignedMediumLE(index);
}
@Override
public int getInt(int index) {
return unwrap().getInt(index);
}
@Override
protected int _getInt(int index) {
return unwrap().getInt(index);
}
@Override
public int getIntLE(int index) {
return unwrap().getIntLE(index);
}
@Override
protected int _getIntLE(int index) {
return unwrap().getIntLE(index);
}
@Override
public long getLong(int index) {
return unwrap().getLong(index);
}
@Override
protected long _getLong(int index) {
return unwrap().getLong(index);
}
@Override
public long getLongLE(int index) {
return unwrap().getLongLE(index);
}
@Override
protected long _getLongLE(int index) {
return unwrap().getLongLE(index);
}
@Override
public int nioBufferCount() {
return unwrap().nioBufferCount();
}
@Override
public ByteBuffer nioBuffer(int index, int length) {
return unwrap().nioBuffer(index, length).asReadOnlyBuffer();
}
@Override
public ByteBuffer[] nioBuffers(int index, int length) {
ByteBuffer[] buffers = unwrap().nioBuffers(index, length);
for (int i = 0; i < buffers.length; i++) {
ByteBuffer buf = buffers[i];
if (!buf.isReadOnly()) {
buffers[i] = buf.asReadOnlyBuffer();
}
}
return buffers;
}
@Override
public int forEachByte(int index, int length, ByteProcessor processor) {
return unwrap().forEachByte(index, length, processor);
}
@Override
public int forEachByteDesc(int index, int length, ByteProcessor processor) {
return unwrap().forEachByteDesc(index, length, processor);
}
@Override
public int capacity() {
return unwrap().capacity();
}
@Override
public ByteBuf capacity(int newCapacity) {
throw new ReadOnlyBufferException();
}
@Override
public ByteBuf asReadOnly() {
return this;
}
}
|
ReadOnlyByteBuf
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AbstractEvent.java
|
{
"start": 981,
"end": 1074
}
|
class ____ all the events. All events extend this class.
*/
@Public
@Evolving
public abstract
|
of
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/kryo/KryoClearedBufferTest.java
|
{
"start": 1641,
"end": 3703
}
|
class ____ {
/**
* Tests that the kryo output buffer is cleared in case of an exception. Flink uses the
* EOFException to signal that a buffer is full. In such a case, the record which was tried to
* be written will be rewritten. Therefore, eventually buffered data of this record has to be
* cleared.
*/
@Test
void testOutputBufferedBeingClearedInCaseOfException() throws Exception {
SerializerConfigImpl serializerConfigImpl = new SerializerConfigImpl();
serializerConfigImpl.registerTypeWithKryoSerializer(
TestRecord.class, new TestRecordSerializer());
serializerConfigImpl.registerKryoType(TestRecord.class);
KryoSerializer<TestRecord> kryoSerializer =
new KryoSerializer<TestRecord>(TestRecord.class, serializerConfigImpl);
int size = 94;
int bufferSize = 150;
TestRecord testRecord = new TestRecord(size);
TestDataOutputView target = new TestDataOutputView(bufferSize);
kryoSerializer.serialize(testRecord, target);
assertThatThrownBy(() -> kryoSerializer.serialize(testRecord, target))
.isInstanceOf(EOFException.class);
TestRecord actualRecord =
kryoSerializer.deserialize(
new DataInputViewStreamWrapper(
new ByteArrayInputStream(target.getBuffer())));
assertThat(actualRecord).isEqualTo(testRecord);
target.clear();
// if the kryo output has been cleared then we can serialize our test record into the target
// because the target buffer 150 bytes can host one TestRecord (total serialization size
// 100)
kryoSerializer.serialize(testRecord, target);
byte[] buffer = target.getBuffer();
int counter = 0;
for (int i = 0; i < buffer.length; i++) {
if (buffer[i] == 42) {
counter++;
}
}
assertThat(counter).isEqualTo(size);
}
public static
|
KryoClearedBufferTest
|
java
|
micronaut-projects__micronaut-core
|
core/src/main/java/io/micronaut/core/annotation/AnnotationMetadataResolver.java
|
{
"start": 772,
"end": 1569
}
|
interface ____ {
/**
* The default resolver.
*/
AnnotationMetadataResolver DEFAULT = new AnnotationMetadataResolver() {
};
/**
* Resolve the {@link AnnotationMetadata} for the given type.
*
* @param type The type
* @return The {@link AnnotationMetadata}
*/
default @NonNull AnnotationMetadata resolveMetadata(@Nullable Class<?> type) {
return AnnotationMetadata.EMPTY_METADATA;
}
/**
* Resolve the {@link AnnotationMetadata} for the given object.
*
* @param object The object
* @return The {@link AnnotationMetadata}
*/
default @NonNull AnnotationMetadata resolveMetadata(Object object) {
return resolveMetadata(object != null ? object.getClass() : null);
}
}
|
AnnotationMetadataResolver
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/web/ServletTestExecutionListenerJUnitIntegrationTests.java
|
{
"start": 1508,
"end": 2482
}
|
class ____ {
/* no beans required for this test */
}
@Autowired
private MockHttpServletRequest servletRequest;
/**
* Verifies bug fix for <a href="https://jira.spring.io/browse/SPR-11626">SPR-11626</a>.
*
* @see #ensureMocksAreReinjectedBetweenTests_2
*/
@Test
void ensureMocksAreReinjectedBetweenTests_1() {
assertInjectedServletRequestEqualsRequestInRequestContextHolder();
}
/**
* Verifies bug fix for <a href="https://jira.spring.io/browse/SPR-11626">SPR-11626</a>.
*
* @see #ensureMocksAreReinjectedBetweenTests_1
*/
@Test
void ensureMocksAreReinjectedBetweenTests_2() {
assertInjectedServletRequestEqualsRequestInRequestContextHolder();
}
private void assertInjectedServletRequestEqualsRequestInRequestContextHolder() {
assertThat(((ServletRequestAttributes) RequestContextHolder.getRequestAttributes()).getRequest()).as("Injected ServletRequest must be stored in the RequestContextHolder").isEqualTo(servletRequest);
}
}
|
Config
|
java
|
elastic__elasticsearch
|
client/sniffer/src/test/java/org/elasticsearch/client/sniff/SnifferTests.java
|
{
"start": 25708,
"end": 26687
}
|
class ____ implements Runnable {
final Sniffer.Task task;
final CountDownLatch completionLatch = new CountDownLatch(1);
TaskWrapper(Sniffer.Task task) {
this.task = task;
}
@Override
public void run() {
try {
task.run();
} finally {
completionLatch.countDown();
}
}
boolean await() throws InterruptedException {
return completionLatch.await(1000, TimeUnit.MILLISECONDS);
}
}
/**
* Mock {@link NodesSniffer} implementation used for testing, which most of the times return a fixed node.
* It rarely throws exception or return an empty list of nodes, to make sure that such situations are properly handled.
* It also asserts that it never gets called concurrently, based on the assumption that only one sniff run can be run
* at a given point in time.
*/
private static
|
TaskWrapper
|
java
|
quarkusio__quarkus
|
extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/vertx/HttpInstrumenterVertxTracer.java
|
{
"start": 17657,
"end": 17965
}
|
class ____ implements TextMapSetter<HttpRequest> {
@Override
public void set(final HttpRequest carrier, final String key, final String value) {
if (carrier != null) {
carrier.headers().set(key, value);
}
}
}
static
|
HttpRequestTextMapSetter
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java
|
{
"start": 11159,
"end": 11522
}
|
class ____ implements Runnable {
@Override
public void run() {
samplerLock.lock();
try {
for (Sampler sampler : samplersList) {
sampler.sample();
}
} finally {
samplerLock.unlock();
}
}
}
@Override
public Map<String, Map<String, ?>> getSnapshot() {
return all;
}
}
|
SamplersRunnable
|
java
|
apache__camel
|
components/camel-cometd/src/test/java/org/apache/camel/component/cometd/CometBindingTest.java
|
{
"start": 1646,
"end": 5307
}
|
class ____ {
private static final Object FOO = new Object();
private static final Long THIRTY_FOUR = Long.valueOf(34L);
private static final Double TWO_POINT_ONE = Double.valueOf(2.1);
private static final Integer EIGHT = Integer.valueOf(8);
private static final String HELLO = "hello";
private static final String FOO_ATTR_NAME = "foo";
private static final String LONG_ATTR_NAME = "long";
private static final String DOUBLE_ATTR_NAME = "double";
private static final String INTEGER_ATTR_NAME = "integer";
private static final String STRING_ATTR_NAME = "string";
private static final String BOOLEAN_ATT_NAME = "boolean";
private CometdBinding testObj;
@Mock
private BayeuxServerImpl bayeux;
@Mock
private ServerSession remote;
@Mock
private ServerMessage cometdMessage;
private final CamelContext camelContext = new DefaultCamelContext();
@BeforeEach
public void before() {
testObj = new CometdBinding(bayeux);
}
@Test
void testBindingTransfersSessionAttributtes() {
// setup
Set<String> attributeNames = new HashSet<>(
Arrays.asList(STRING_ATTR_NAME, INTEGER_ATTR_NAME,
LONG_ATTR_NAME, DOUBLE_ATTR_NAME,
FOO_ATTR_NAME, BOOLEAN_ATT_NAME));
when(remote.getAttributeNames()).thenReturn(attributeNames);
when(remote.getAttribute(STRING_ATTR_NAME)).thenReturn(HELLO);
when(remote.getAttribute(INTEGER_ATTR_NAME)).thenReturn(EIGHT);
when(remote.getAttribute(LONG_ATTR_NAME)).thenReturn(THIRTY_FOUR);
when(remote.getAttribute(DOUBLE_ATTR_NAME)).thenReturn(TWO_POINT_ONE);
when(remote.getAttribute(FOO_ATTR_NAME)).thenReturn(FOO);
when(remote.getAttribute(BOOLEAN_ATT_NAME)).thenReturn(Boolean.TRUE);
testObj = new CometdBinding(bayeux, true);
// act
Message result = testObj.createCamelMessage(camelContext, remote, cometdMessage, null);
// assert
assertEquals(6, result.getHeaders().size());
assertEquals(HELLO, result.getHeader(STRING_ATTR_NAME));
assertEquals(EIGHT, result.getHeader(INTEGER_ATTR_NAME));
assertEquals(THIRTY_FOUR, result.getHeader(LONG_ATTR_NAME));
assertEquals(TWO_POINT_ONE, result.getHeader(DOUBLE_ATTR_NAME));
assertNull(result.getHeader(FOO_ATTR_NAME));
assertTrue((Boolean) result.getHeader(BOOLEAN_ATT_NAME));
}
@Test
void testBindingHonorsFlagForSessionAttributtes() {
// act
Message result = testObj.createCamelMessage(camelContext, remote, cometdMessage, null);
// assert
assertEquals(1, result.getHeaders().size());
assertNull(result.getHeader(STRING_ATTR_NAME));
assertNull(result.getHeader(INTEGER_ATTR_NAME));
assertNull(result.getHeader(LONG_ATTR_NAME));
assertNull(result.getHeader(FOO_ATTR_NAME));
assertNull(result.getHeader(DOUBLE_ATTR_NAME));
assertNull(result.getHeader(BOOLEAN_ATT_NAME));
}
@Test
void testSubscriptionHeadersPassed() {
// setup
String expectedSubscriptionInfo = "subscriptionInfo";
when(cometdMessage.get(CometdBinding.COMETD_SUBSCRIPTION_HEADER_NAME))
.thenReturn(expectedSubscriptionInfo);
// act
Message result = testObj.createCamelMessage(camelContext, remote, cometdMessage, null);
// assert
assertEquals(2, result.getHeaders().size());
assertEquals(expectedSubscriptionInfo,
result.getHeader(CometdBinding.COMETD_SUBSCRIPTION_HEADER_NAME));
}
}
|
CometBindingTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/ingest/SamplingService.java
|
{
"start": 36718,
"end": 46597
}
|
class ____ implements Writeable, ToXContent {
// These are all non-private for the sake of unit testing
final LongAdder samples = new LongAdder();
final LongAdder potentialSamples = new LongAdder();
final LongAdder samplesRejectedForMaxSamplesExceeded = new LongAdder();
final LongAdder samplesRejectedForCondition = new LongAdder();
final LongAdder samplesRejectedForRate = new LongAdder();
final LongAdder samplesRejectedForException = new LongAdder();
final LongAdder samplesRejectedForSize = new LongAdder();
final LongAdder timeSamplingInNanos = new LongAdder();
final LongAdder timeEvaluatingConditionInNanos = new LongAdder();
final LongAdder timeCompilingConditionInNanos = new LongAdder();
Exception lastException = null;
public SampleStats() {}
public SampleStats(SampleStats other) {
addAllFields(other, this);
}
/*
* This constructor is only meant for constructing arbitrary SampleStats for testing
*/
public SampleStats(
long samples,
long potentialSamples,
long samplesRejectedForMaxSamplesExceeded,
long samplesRejectedForCondition,
long samplesRejectedForRate,
long samplesRejectedForException,
long samplesRejectedForSize,
TimeValue timeSampling,
TimeValue timeEvaluatingCondition,
TimeValue timeCompilingCondition,
Exception lastException
) {
this.samples.add(samples);
this.potentialSamples.add(potentialSamples);
this.samplesRejectedForMaxSamplesExceeded.add(samplesRejectedForMaxSamplesExceeded);
this.samplesRejectedForCondition.add(samplesRejectedForCondition);
this.samplesRejectedForRate.add(samplesRejectedForRate);
this.samplesRejectedForException.add(samplesRejectedForException);
this.samplesRejectedForSize.add(samplesRejectedForSize);
this.timeSamplingInNanos.add(timeSampling.nanos());
this.timeEvaluatingConditionInNanos.add(timeEvaluatingCondition.nanos());
this.timeCompilingConditionInNanos.add(timeCompilingCondition.nanos());
this.lastException = lastException;
}
public SampleStats(StreamInput in) throws IOException {
potentialSamples.add(in.readLong());
samplesRejectedForMaxSamplesExceeded.add(in.readLong());
samplesRejectedForCondition.add(in.readLong());
samplesRejectedForRate.add(in.readLong());
samplesRejectedForException.add(in.readLong());
samplesRejectedForSize.add(in.readLong());
samples.add(in.readLong());
timeSamplingInNanos.add(in.readLong());
timeEvaluatingConditionInNanos.add(in.readLong());
timeCompilingConditionInNanos.add(in.readLong());
if (in.readBoolean()) {
lastException = in.readException();
} else {
lastException = null;
}
}
public long getSamples() {
return samples.longValue();
}
public long getPotentialSamples() {
return potentialSamples.longValue();
}
public long getSamplesRejectedForMaxSamplesExceeded() {
return samplesRejectedForMaxSamplesExceeded.longValue();
}
public long getSamplesRejectedForCondition() {
return samplesRejectedForCondition.longValue();
}
public long getSamplesRejectedForRate() {
return samplesRejectedForRate.longValue();
}
public long getSamplesRejectedForException() {
return samplesRejectedForException.longValue();
}
public long getSamplesRejectedForSize() {
return samplesRejectedForSize.longValue();
}
public TimeValue getTimeSampling() {
return TimeValue.timeValueNanos(timeSamplingInNanos.longValue());
}
public TimeValue getTimeEvaluatingCondition() {
return TimeValue.timeValueNanos(timeEvaluatingConditionInNanos.longValue());
}
public TimeValue getTimeCompilingCondition() {
return TimeValue.timeValueNanos(timeCompilingConditionInNanos.longValue());
}
public Exception getLastException() {
return lastException;
}
@Override
public String toString() {
return "potential_samples: "
+ potentialSamples
+ ", samples_rejected_for_max_samples_exceeded: "
+ samplesRejectedForMaxSamplesExceeded
+ ", samples_rejected_for_condition: "
+ samplesRejectedForCondition
+ ", samples_rejected_for_rate: "
+ samplesRejectedForRate
+ ", samples_rejected_for_exception: "
+ samplesRejectedForException
+ ", samples_accepted: "
+ samples
+ ", time_sampling: "
+ TimeValue.timeValueNanos(timeSamplingInNanos.longValue())
+ ", time_evaluating_condition: "
+ TimeValue.timeValueNanos(timeEvaluatingConditionInNanos.longValue())
+ ", time_compiling_condition: "
+ TimeValue.timeValueNanos(timeCompilingConditionInNanos.longValue());
}
public SampleStats combine(SampleStats other) {
SampleStats result = new SampleStats(this);
addAllFields(other, result);
return result;
}
private static void addAllFields(SampleStats source, SampleStats dest) {
dest.potentialSamples.add(source.potentialSamples.longValue());
dest.samplesRejectedForMaxSamplesExceeded.add(source.samplesRejectedForMaxSamplesExceeded.longValue());
dest.samplesRejectedForCondition.add(source.samplesRejectedForCondition.longValue());
dest.samplesRejectedForRate.add(source.samplesRejectedForRate.longValue());
dest.samplesRejectedForException.add(source.samplesRejectedForException.longValue());
dest.samplesRejectedForSize.add(source.samplesRejectedForSize.longValue());
dest.samples.add(source.samples.longValue());
dest.timeSamplingInNanos.add(source.timeSamplingInNanos.longValue());
dest.timeEvaluatingConditionInNanos.add(source.timeEvaluatingConditionInNanos.longValue());
dest.timeCompilingConditionInNanos.add(source.timeCompilingConditionInNanos.longValue());
if (dest.lastException == null) {
dest.lastException = source.lastException;
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
builder.field("potential_samples", potentialSamples.longValue());
builder.field("samples_rejected_for_max_samples_exceeded", samplesRejectedForMaxSamplesExceeded.longValue());
builder.field("samples_rejected_for_condition", samplesRejectedForCondition.longValue());
builder.field("samples_rejected_for_rate", samplesRejectedForRate.longValue());
builder.field("samples_rejected_for_exception", samplesRejectedForException.longValue());
builder.field("samples_rejected_for_size", samplesRejectedForSize.longValue());
builder.field("samples_accepted", samples.longValue());
builder.humanReadableField("time_sampling_millis", "time_sampling", TimeValue.timeValueNanos(timeSamplingInNanos.longValue()));
builder.humanReadableField(
"time_evaluating_condition_millis",
"time_evaluating_condition",
TimeValue.timeValueNanos(timeEvaluatingConditionInNanos.longValue())
);
builder.humanReadableField(
"time_compiling_condition_millis",
"time_compiling_condition",
TimeValue.timeValueNanos(timeCompilingConditionInNanos.longValue())
);
if (lastException != null) {
Throwable unwrapped = ExceptionsHelper.unwrapCause(lastException);
builder.startObject("last_exception");
builder.field("type", ElasticsearchException.getExceptionName(unwrapped));
builder.field("message", unwrapped.getMessage());
builder.field("stack_trace", ExceptionsHelper.limitedStackTrace(unwrapped, 5));
builder.endObject();
}
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeLong(potentialSamples.longValue());
out.writeLong(samplesRejectedForMaxSamplesExceeded.longValue());
out.writeLong(samplesRejectedForCondition.longValue());
out.writeLong(samplesRejectedForRate.longValue());
out.writeLong(samplesRejectedForException.longValue());
out.writeLong(samplesRejectedForSize.longValue());
out.writeLong(samples.longValue());
out.writeLong(timeSamplingInNanos.longValue());
out.writeLong(timeEvaluatingConditionInNanos.longValue());
out.writeLong(timeCompilingConditionInNanos.longValue());
if (lastException == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeException(lastException);
}
}
/*
* equals and hashCode are implemented for the sake of testing serialization. Since this
|
SampleStats
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/writeAsArray/WriteAsArray_char_public.java
|
{
"start": 204,
"end": 551
}
|
class ____ extends TestCase {
public void test_0 () throws Exception {
VO vo = new VO();
vo.setId('x');
vo.setName("wenshao");
String text = JSON.toJSONString(vo, SerializerFeature.BeanToArray);
Assert.assertEquals("[\"x\",\"wenshao\"]", text);
}
public static
|
WriteAsArray_char_public
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UngroupedOverloadsTest.java
|
{
"start": 1098,
"end": 1792
}
|
class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(UngroupedOverloads.class, getClass());
private final BugCheckerRefactoringTestHelper refactoringHelper =
BugCheckerRefactoringTestHelper.newInstance(UngroupedOverloads.class, getClass());
@Test
public void ungroupedOverloadsPositiveCasesSingle() {
compilationHelper
.addSourceLines(
"UngroupedOverloadsPositiveCasesSingle.java",
"""
package com.google.errorprone.bugpatterns.testdata;
/**
* @author hanuszczak@google.com (Łukasz Hanuszczak)
*/
public
|
UngroupedOverloadsTest
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/builder/ParentContextCloserApplicationListener.java
|
{
"start": 1556,
"end": 2898
}
|
class ____
implements ApplicationListener<ParentContextAvailableEvent>, ApplicationContextAware, Ordered {
private static final int ORDER = Ordered.LOWEST_PRECEDENCE - 10;
@SuppressWarnings("NullAway.Init")
private ApplicationContext context;
@Override
public int getOrder() {
return ORDER;
}
@Override
public void setApplicationContext(ApplicationContext context) throws BeansException {
this.context = context;
}
@Override
public void onApplicationEvent(ParentContextAvailableEvent event) {
maybeInstallListenerInParent(event.getApplicationContext());
}
private void maybeInstallListenerInParent(ConfigurableApplicationContext child) {
if (child == this.context && child.getParent() instanceof ConfigurableApplicationContext parent) {
parent.addApplicationListener(createContextCloserListener(child));
}
}
/**
* Subclasses may override to create their own subclass of ContextCloserListener. This
* still enforces the use of a weak reference.
* @param child the child context
* @return the {@link ContextCloserListener} to use
*/
protected ContextCloserListener createContextCloserListener(ConfigurableApplicationContext child) {
return new ContextCloserListener(child);
}
/**
* {@link ApplicationListener} to close the context.
*/
protected static
|
ParentContextCloserApplicationListener
|
java
|
apache__flink
|
flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/state/rocksdb/RocksDBKeyedStateBackendBuilder.java
|
{
"start": 5024,
"end": 34167
}
|
class ____<K> extends AbstractKeyedStateBackendBuilder<K> {
static final String DB_INSTANCE_DIR_STRING = "db";
/** String that identifies the operator that owns this backend. */
private final String operatorIdentifier;
/** The configuration of rocksDB priorityQueue state. */
private final RocksDBPriorityQueueConfig priorityQueueConfig;
/** The configuration of local recovery. */
private final LocalRecoveryConfig localRecoveryConfig;
/** Factory function to create column family options from state name. */
private final Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory;
/** The container of RocksDB option factory and predefined options. */
private final RocksDBResourceContainer optionsContainer;
/** Path where this configured instance stores its data directory. */
private final File instanceBasePath;
/** Path where this configured instance stores its RocksDB database. */
private final File instanceRocksDBPath;
private final MetricGroup metricGroup;
private final StateBackend.CustomInitializationMetrics customInitializationMetrics;
/** True if incremental checkpointing is enabled. */
private boolean enableIncrementalCheckpointing;
/** RocksDB property-based and statistics-based native metrics options. */
private RocksDBNativeMetricOptions nativeMetricOptions;
private int numberOfTransferingThreads;
private long writeBatchSize =
RocksDBConfigurableOptions.WRITE_BATCH_SIZE.defaultValue().getBytes();
private RocksDB injectedTestDB; // for testing
private boolean incrementalRestoreAsyncCompactAfterRescale =
INCREMENTAL_RESTORE_ASYNC_COMPACT_AFTER_RESCALE.defaultValue();
private boolean rescalingUseDeleteFilesInRange =
USE_DELETE_FILES_IN_RANGE_DURING_RESCALING.defaultValue();
private double overlapFractionThreshold = RESTORE_OVERLAP_FRACTION_THRESHOLD.defaultValue();
private boolean useIngestDbRestoreMode = USE_INGEST_DB_RESTORE_MODE.defaultValue();
private ColumnFamilyHandle injectedDefaultColumnFamilyHandle; // for testing
private RocksDBStateUploader injectRocksDBStateUploader; // for testing
private RocksDBManualCompactionConfig manualCompactionConfig =
RocksDBManualCompactionConfig.getDefault();
private ExecutorService ioExecutor;
private AsyncExceptionHandler asyncExceptionHandler;
public RocksDBKeyedStateBackendBuilder(
String operatorIdentifier,
ClassLoader userCodeClassLoader,
File instanceBasePath,
RocksDBResourceContainer optionsContainer,
Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory,
TaskKvStateRegistry kvStateRegistry,
TypeSerializer<K> keySerializer,
int numberOfKeyGroups,
KeyGroupRange keyGroupRange,
ExecutionConfig executionConfig,
LocalRecoveryConfig localRecoveryConfig,
RocksDBPriorityQueueConfig priorityQueueConfig,
TtlTimeProvider ttlTimeProvider,
LatencyTrackingStateConfig latencyTrackingStateConfig,
SizeTrackingStateConfig sizeTrackingStateConfig,
MetricGroup metricGroup,
StateBackend.CustomInitializationMetrics customInitializationMetrics,
@Nonnull Collection<KeyedStateHandle> stateHandles,
StreamCompressionDecorator keyGroupCompressionDecorator,
CloseableRegistry cancelStreamRegistry) {
super(
kvStateRegistry,
keySerializer,
userCodeClassLoader,
numberOfKeyGroups,
keyGroupRange,
executionConfig,
ttlTimeProvider,
latencyTrackingStateConfig,
sizeTrackingStateConfig,
stateHandles,
keyGroupCompressionDecorator,
cancelStreamRegistry);
this.operatorIdentifier = operatorIdentifier;
this.priorityQueueConfig = priorityQueueConfig;
this.localRecoveryConfig = localRecoveryConfig;
// ensure that we use the right merge operator, because other code relies on this
this.columnFamilyOptionsFactory = Preconditions.checkNotNull(columnFamilyOptionsFactory);
this.optionsContainer = optionsContainer;
this.instanceBasePath = instanceBasePath;
this.instanceRocksDBPath = getInstanceRocksDBPath(instanceBasePath);
this.metricGroup = metricGroup;
this.customInitializationMetrics = customInitializationMetrics;
this.enableIncrementalCheckpointing = false;
this.nativeMetricOptions = new RocksDBNativeMetricOptions();
this.numberOfTransferingThreads =
RocksDBOptions.CHECKPOINT_TRANSFER_THREAD_NUM.defaultValue();
}
@VisibleForTesting
RocksDBKeyedStateBackendBuilder(
String operatorIdentifier,
ClassLoader userCodeClassLoader,
File instanceBasePath,
RocksDBResourceContainer optionsContainer,
Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory,
TaskKvStateRegistry kvStateRegistry,
TypeSerializer<K> keySerializer,
int numberOfKeyGroups,
KeyGroupRange keyGroupRange,
ExecutionConfig executionConfig,
LocalRecoveryConfig localRecoveryConfig,
RocksDBPriorityQueueConfig rocksDBPriorityQueueConfig,
TtlTimeProvider ttlTimeProvider,
LatencyTrackingStateConfig latencyTrackingStateConfig,
SizeTrackingStateConfig sizeTrackingStateConfig,
MetricGroup metricGroup,
@Nonnull Collection<KeyedStateHandle> stateHandles,
StreamCompressionDecorator keyGroupCompressionDecorator,
RocksDB injectedTestDB,
ColumnFamilyHandle injectedDefaultColumnFamilyHandle,
CloseableRegistry cancelStreamRegistry) {
this(
operatorIdentifier,
userCodeClassLoader,
instanceBasePath,
optionsContainer,
columnFamilyOptionsFactory,
kvStateRegistry,
keySerializer,
numberOfKeyGroups,
keyGroupRange,
executionConfig,
localRecoveryConfig,
rocksDBPriorityQueueConfig,
ttlTimeProvider,
latencyTrackingStateConfig,
sizeTrackingStateConfig,
metricGroup,
(key, value) -> {},
stateHandles,
keyGroupCompressionDecorator,
cancelStreamRegistry);
this.injectedTestDB = injectedTestDB;
this.injectedDefaultColumnFamilyHandle = injectedDefaultColumnFamilyHandle;
}
RocksDBKeyedStateBackendBuilder<K> setEnableIncrementalCheckpointing(
boolean enableIncrementalCheckpointing) {
this.enableIncrementalCheckpointing = enableIncrementalCheckpointing;
return this;
}
RocksDBKeyedStateBackendBuilder<K> setNativeMetricOptions(
RocksDBNativeMetricOptions nativeMetricOptions) {
this.nativeMetricOptions = nativeMetricOptions;
return this;
}
RocksDBKeyedStateBackendBuilder<K> setNumberOfTransferingThreads(
int numberOfTransferingThreads) {
Preconditions.checkState(
injectRocksDBStateUploader == null,
"numberOfTransferingThreads can be set only when injectRocksDBStateUploader is null.");
this.numberOfTransferingThreads = numberOfTransferingThreads;
return this;
}
RocksDBKeyedStateBackendBuilder<K> setWriteBatchSize(long writeBatchSize) {
checkArgument(writeBatchSize >= 0, "Write batch size should be non negative.");
this.writeBatchSize = writeBatchSize;
return this;
}
RocksDBKeyedStateBackendBuilder<K> setRocksDBStateUploader(
RocksDBStateUploader rocksDBStateUploader) {
Preconditions.checkState(
injectRocksDBStateUploader == null, "rocksDBStateUploader can be only set once");
Preconditions.checkState(
numberOfTransferingThreads
== RocksDBOptions.CHECKPOINT_TRANSFER_THREAD_NUM.defaultValue(),
"RocksDBStateUploader can only be set if numberOfTransferingThreads has not been manually set.");
this.injectRocksDBStateUploader = rocksDBStateUploader;
return this;
}
RocksDBKeyedStateBackendBuilder<K> setOverlapFractionThreshold(
double overlapFractionThreshold) {
this.overlapFractionThreshold = overlapFractionThreshold;
return this;
}
RocksDBKeyedStateBackendBuilder<K> setIncrementalRestoreAsyncCompactAfterRescale(
boolean incrementalRestoreAsyncCompactAfterRescale) {
this.incrementalRestoreAsyncCompactAfterRescale =
incrementalRestoreAsyncCompactAfterRescale;
return this;
}
RocksDBKeyedStateBackendBuilder<K> setUseIngestDbRestoreMode(boolean useIngestDbRestoreMode) {
this.useIngestDbRestoreMode = useIngestDbRestoreMode;
return this;
}
RocksDBKeyedStateBackendBuilder<K> setRescalingUseDeleteFilesInRange(
boolean rescalingUseDeleteFilesInRange) {
this.rescalingUseDeleteFilesInRange = rescalingUseDeleteFilesInRange;
return this;
}
RocksDBKeyedStateBackendBuilder<K> setIOExecutor(ExecutorService ioExecutor) {
this.ioExecutor = ioExecutor;
return this;
}
public static File getInstanceRocksDBPath(File instanceBasePath) {
return new File(instanceBasePath, DB_INSTANCE_DIR_STRING);
}
private static void checkAndCreateDirectory(File directory) throws IOException {
if (directory.exists()) {
if (!directory.isDirectory()) {
throw new IOException("Not a directory: " + directory);
}
} else if (!directory.mkdirs()) {
throw new IOException(
String.format("Could not create RocksDB data directory at %s.", directory));
}
}
@Override
public RocksDBKeyedStateBackend<K> build() throws BackendBuildingException {
RocksDBWriteBatchWrapper writeBatchWrapper = null;
ColumnFamilyHandle defaultColumnFamilyHandle = null;
RocksDBNativeMetricMonitor nativeMetricMonitor = null;
CloseableRegistry cancelRegistryForBackend = new CloseableRegistry();
LinkedHashMap<String, RocksDBKeyedStateBackend.RocksDbKvStateInfo> kvStateInformation =
new LinkedHashMap<>();
LinkedHashMap<String, HeapPriorityQueueSnapshotRestoreWrapper<?>> registeredPQStates =
new LinkedHashMap<>();
RocksDB db = null;
RocksDBRestoreOperation restoreOperation = null;
CompletableFuture<Void> asyncCompactAfterRestoreFuture = null;
RocksDbTtlCompactFiltersManager ttlCompactFiltersManager =
new RocksDbTtlCompactFiltersManager(
ttlTimeProvider,
optionsContainer.getQueryTimeAfterNumEntries(),
optionsContainer.getPeriodicCompactionTime());
ResourceGuard rocksDBResourceGuard = new ResourceGuard();
RocksDBSnapshotStrategyBase<K, ?> checkpointStrategy = null;
PriorityQueueSetFactory priorityQueueFactory;
SerializedCompositeKeyBuilder<K> sharedRocksKeyBuilder;
// Number of bytes required to prefix the key groups.
int keyGroupPrefixBytes =
CompositeKeySerializationUtils.computeRequiredBytesInKeyGroupPrefix(
numberOfKeyGroups);
RocksDBManualCompactionManager manualCompactionManager;
try {
// Variables for snapshot strategy when incremental checkpoint is enabled
UUID backendUID = UUID.randomUUID();
SortedMap<Long, Collection<HandleAndLocalPath>> materializedSstFiles = new TreeMap<>();
long lastCompletedCheckpointId = -1L;
if (injectedTestDB != null) {
db = injectedTestDB;
defaultColumnFamilyHandle = injectedDefaultColumnFamilyHandle;
nativeMetricMonitor =
nativeMetricOptions.isEnabled()
? new RocksDBNativeMetricMonitor(
nativeMetricOptions, metricGroup, db, null)
: null;
} else {
prepareDirectories();
restoreOperation =
getRocksDBRestoreOperation(
keyGroupPrefixBytes,
rocksDBResourceGuard,
cancelStreamRegistry,
cancelRegistryForBackend,
kvStateInformation,
registeredPQStates,
ttlCompactFiltersManager);
RocksDBRestoreResult restoreResult = restoreOperation.restore();
db = restoreResult.getDb();
defaultColumnFamilyHandle = restoreResult.getDefaultColumnFamilyHandle();
nativeMetricMonitor = restoreResult.getNativeMetricMonitor();
if (ioExecutor != null) {
asyncCompactAfterRestoreFuture =
restoreResult
.getAsyncCompactTaskAfterRestore()
.map((task) -> CompletableFuture.runAsync(task, ioExecutor))
.orElse(null);
}
if (restoreOperation instanceof RocksDBIncrementalRestoreOperation) {
backendUID = restoreResult.getBackendUID();
materializedSstFiles = restoreResult.getRestoredSstFiles();
lastCompletedCheckpointId = restoreResult.getLastCompletedCheckpointId();
}
}
writeBatchWrapper =
new RocksDBWriteBatchWrapper(
db, optionsContainer.getWriteOptions(), writeBatchSize);
// it is important that we only create the key builder after the restore, and not
// before;
// restore operations may reconfigure the key serializer, so accessing the key
// serializer
// only now we can be certain that the key serializer used in the builder is final.
sharedRocksKeyBuilder =
new SerializedCompositeKeyBuilder<>(
keySerializerProvider.currentSchemaSerializer(),
keyGroupPrefixBytes,
32);
// init snapshot strategy after db is assured to be initialized
checkpointStrategy =
initializeSavepointAndCheckpointStrategies(
rocksDBResourceGuard,
kvStateInformation,
keyGroupPrefixBytes,
db,
backendUID,
materializedSstFiles,
lastCompletedCheckpointId);
// init priority queue factory
manualCompactionManager =
RocksDBManualCompactionManager.create(db, manualCompactionConfig, ioExecutor);
priorityQueueFactory =
initPriorityQueueFactory(
keyGroupPrefixBytes,
kvStateInformation,
db,
writeBatchWrapper,
nativeMetricMonitor,
manualCompactionManager);
} catch (Throwable e) {
boolean cancelled = cancelStreamRegistry.isClosed();
// log ASAP because close can block or fail too
if (cancelled) {
logger.info("RocksDB state backend build cancelled");
} else {
logger.warn("Failed to build RocksDB state backend", e);
}
// Do clean up
List<ColumnFamilyOptions> columnFamilyOptions =
new ArrayList<>(kvStateInformation.values().size());
IOUtils.closeQuietly(cancelRegistryForBackend);
IOUtils.closeQuietly(writeBatchWrapper);
IOUtils.closeQuietly(rocksDBResourceGuard);
RocksDBOperationUtils.addColumnFamilyOptionsToCloseLater(
columnFamilyOptions, defaultColumnFamilyHandle);
IOUtils.closeQuietly(defaultColumnFamilyHandle);
IOUtils.closeQuietly(nativeMetricMonitor);
for (RocksDBKeyedStateBackend.RocksDbKvStateInfo kvStateInfo :
kvStateInformation.values()) {
RocksDBOperationUtils.addColumnFamilyOptionsToCloseLater(
columnFamilyOptions, kvStateInfo.columnFamilyHandle);
IOUtils.closeQuietly(kvStateInfo.columnFamilyHandle);
}
IOUtils.closeQuietly(db);
// it's possible that db has been initialized but later restore steps failed
IOUtils.closeQuietly(restoreOperation);
IOUtils.closeAllQuietly(columnFamilyOptions);
IOUtils.closeQuietly(optionsContainer);
ttlCompactFiltersManager.disposeAndClearRegisteredCompactionFactories();
kvStateInformation.clear();
IOUtils.closeQuietly(checkpointStrategy);
try {
FileUtils.deleteDirectory(instanceBasePath);
} catch (Exception ex) {
logger.warn("Failed to delete base path for RocksDB: " + instanceBasePath, ex);
}
// Log and rethrow
if (e instanceof BackendBuildingException) {
throw (BackendBuildingException) e;
} else if (cancelled) {
throw new BackendBuildingException(
"Task was cancelled", new CancelTaskException(e));
} else {
String errMsg = "Caught unexpected exception.";
logger.error(errMsg, e);
throw new BackendBuildingException(errMsg, e);
}
}
InternalKeyContext<K> keyContext =
new InternalKeyContextImpl<>(keyGroupRange, numberOfKeyGroups);
logger.info("Finished building RocksDB keyed state-backend at {}.", instanceBasePath);
return new RocksDBKeyedStateBackend<>(
this.userCodeClassLoader,
this.instanceBasePath,
this.optionsContainer,
columnFamilyOptionsFactory,
this.kvStateRegistry,
this.keySerializerProvider.currentSchemaSerializer(),
this.executionConfig,
this.ttlTimeProvider,
latencyTrackingStateConfig,
sizeTrackingStateConfig,
db,
kvStateInformation,
registeredPQStates,
keyGroupPrefixBytes,
cancelRegistryForBackend,
this.keyGroupCompressionDecorator,
rocksDBResourceGuard,
checkpointStrategy,
writeBatchWrapper,
defaultColumnFamilyHandle,
nativeMetricMonitor,
sharedRocksKeyBuilder,
priorityQueueFactory,
ttlCompactFiltersManager,
keyContext,
writeBatchSize,
asyncCompactAfterRestoreFuture,
manualCompactionManager);
}
private RocksDBRestoreOperation getRocksDBRestoreOperation(
int keyGroupPrefixBytes,
ResourceGuard rocksDBResourceGuard,
CloseableRegistry cancelStreamRegistryForRestore,
CloseableRegistry cancelRegistryForBackend,
LinkedHashMap<String, RocksDBKeyedStateBackend.RocksDbKvStateInfo> kvStateInformation,
LinkedHashMap<String, HeapPriorityQueueSnapshotRestoreWrapper<?>> registeredPQStates,
RocksDbTtlCompactFiltersManager ttlCompactFiltersManager) {
DBOptions dbOptions = optionsContainer.getDbOptions();
if (CollectionUtil.isEmptyOrAllElementsNull(restoreStateHandles)) {
return new RocksDBNoneRestoreOperation<>(
kvStateInformation,
instanceRocksDBPath,
dbOptions,
columnFamilyOptionsFactory,
nativeMetricOptions,
metricGroup,
ttlCompactFiltersManager,
optionsContainer.getWriteBufferManagerCapacity());
}
KeyedStateHandle firstStateHandle = restoreStateHandles.iterator().next();
if (firstStateHandle instanceof IncrementalKeyedStateHandle) {
return new RocksDBIncrementalRestoreOperation<>(
operatorIdentifier,
keyGroupRange,
keyGroupPrefixBytes,
numberOfTransferingThreads,
rocksDBResourceGuard,
cancelStreamRegistryForRestore,
cancelRegistryForBackend,
userCodeClassLoader,
kvStateInformation,
keySerializerProvider,
instanceBasePath,
instanceRocksDBPath,
dbOptions,
columnFamilyOptionsFactory,
nativeMetricOptions,
metricGroup,
customInitializationMetrics,
CollectionUtil.checkedSubTypeCast(
restoreStateHandles, IncrementalKeyedStateHandle.class),
ttlCompactFiltersManager,
writeBatchSize,
optionsContainer.getWriteBufferManagerCapacity(),
overlapFractionThreshold,
useIngestDbRestoreMode,
incrementalRestoreAsyncCompactAfterRescale,
rescalingUseDeleteFilesInRange,
ioExecutor,
asyncExceptionHandler);
} else if (priorityQueueConfig.getPriorityQueueStateType()
== EmbeddedRocksDBStateBackend.PriorityQueueStateType.HEAP) {
return new RocksDBHeapTimersFullRestoreOperation<>(
keyGroupRange,
numberOfKeyGroups,
userCodeClassLoader,
kvStateInformation,
registeredPQStates,
createHeapQueueFactory(),
keySerializerProvider,
instanceRocksDBPath,
dbOptions,
columnFamilyOptionsFactory,
nativeMetricOptions,
metricGroup,
restoreStateHandles,
ttlCompactFiltersManager,
writeBatchSize,
optionsContainer.getWriteBufferManagerCapacity(),
cancelStreamRegistry);
} else {
return new RocksDBFullRestoreOperation<>(
keyGroupRange,
userCodeClassLoader,
kvStateInformation,
keySerializerProvider,
instanceRocksDBPath,
dbOptions,
columnFamilyOptionsFactory,
nativeMetricOptions,
metricGroup,
restoreStateHandles,
ttlCompactFiltersManager,
writeBatchSize,
optionsContainer.getWriteBufferManagerCapacity(),
cancelStreamRegistry);
}
}
private RocksDBSnapshotStrategyBase<K, ?> initializeSavepointAndCheckpointStrategies(
ResourceGuard rocksDBResourceGuard,
LinkedHashMap<String, RocksDBKeyedStateBackend.RocksDbKvStateInfo> kvStateInformation,
int keyGroupPrefixBytes,
RocksDB db,
UUID backendUID,
SortedMap<Long, Collection<HandleAndLocalPath>> materializedSstFiles,
long lastCompletedCheckpointId) {
RocksDBSnapshotStrategyBase<K, ?> checkpointSnapshotStrategy;
RocksDBStateUploader stateUploader =
injectRocksDBStateUploader == null
? new RocksDBStateUploader(
RocksDBStateDataTransferHelper.forThreadNumIfSpecified(
numberOfTransferingThreads, ioExecutor))
: injectRocksDBStateUploader;
if (enableIncrementalCheckpointing) {
checkpointSnapshotStrategy =
new RocksIncrementalSnapshotStrategy<>(
db,
rocksDBResourceGuard,
keySerializerProvider.currentSchemaSerializer(),
kvStateInformation,
keyGroupRange,
keyGroupPrefixBytes,
localRecoveryConfig,
instanceBasePath,
backendUID,
materializedSstFiles,
stateUploader,
lastCompletedCheckpointId);
} else {
checkpointSnapshotStrategy =
new RocksNativeFullSnapshotStrategy<>(
db,
rocksDBResourceGuard,
keySerializerProvider.currentSchemaSerializer(),
kvStateInformation,
keyGroupRange,
keyGroupPrefixBytes,
localRecoveryConfig,
instanceBasePath,
backendUID,
stateUploader);
}
return checkpointSnapshotStrategy;
}
private PriorityQueueSetFactory initPriorityQueueFactory(
int keyGroupPrefixBytes,
Map<String, RocksDBKeyedStateBackend.RocksDbKvStateInfo> kvStateInformation,
RocksDB db,
RocksDBWriteBatchWrapper writeBatchWrapper,
RocksDBNativeMetricMonitor nativeMetricMonitor,
RocksDBManualCompactionManager manualCompactionManager) {
PriorityQueueSetFactory priorityQueueFactory;
switch (priorityQueueConfig.getPriorityQueueStateType()) {
case HEAP:
priorityQueueFactory = createHeapQueueFactory();
break;
case ROCKSDB:
priorityQueueFactory =
new RocksDBPriorityQueueSetFactory(
keyGroupRange,
keyGroupPrefixBytes,
numberOfKeyGroups,
kvStateInformation,
db,
optionsContainer.getReadOptions(),
writeBatchWrapper,
nativeMetricMonitor,
columnFamilyOptionsFactory,
optionsContainer.getWriteBufferManagerCapacity(),
priorityQueueConfig.getRocksDBPriorityQueueSetCacheSize(),
manualCompactionManager);
break;
default:
throw new IllegalArgumentException(
"Unknown priority queue state type: "
+ priorityQueueConfig.getPriorityQueueStateType());
}
return priorityQueueFactory;
}
private HeapPriorityQueueSetFactory createHeapQueueFactory() {
return new HeapPriorityQueueSetFactory(keyGroupRange, numberOfKeyGroups, 128);
}
private void prepareDirectories() throws IOException {
checkAndCreateDirectory(instanceBasePath);
if (instanceRocksDBPath.exists()) {
// Clear the base directory when the backend is created
// in case something crashed and the backend never reached dispose()
FileUtils.deleteDirectory(instanceBasePath);
}
}
public RocksDBKeyedStateBackendBuilder<K> setManualCompactionConfig(
RocksDBManualCompactionConfig manualCompactionConfig) {
this.manualCompactionConfig = checkNotNull(manualCompactionConfig);
return this;
}
public RocksDBKeyedStateBackendBuilder<K> setAsyncExceptionHandler(
AsyncExceptionHandler asyncExceptionHandler) {
this.asyncExceptionHandler = asyncExceptionHandler;
return this;
}
}
|
RocksDBKeyedStateBackendBuilder
|
java
|
micronaut-projects__micronaut-core
|
http/src/main/java/io/micronaut/http/resource/ResourceLoaderFactory.java
|
{
"start": 1966,
"end": 3259
}
|
class ____ resource loader
*/
@Singleton
@BootstrapContextCompatible
protected @NonNull ClassPathResourceLoader getClassPathResourceLoader() {
return new DefaultClassPathResourceLoader(classLoader);
}
/**
* @return The file system resource loader
*/
@Singleton
@BootstrapContextCompatible
protected @NonNull FileSystemResourceLoader fileSystemResourceLoader() {
return new DefaultFileSystemResourceLoader();
}
/**
* @return The string resource loader
*/
@Singleton
@BootstrapContextCompatible
protected @NonNull ResourceLoader getStringResourceLoader() {
return StringResourceLoader.getInstance();
}
/**
* @return The base64 resource loader
*/
@Singleton
@BootstrapContextCompatible
protected @NonNull ResourceLoader getBase64ResourceLoader() {
return Base64ResourceLoader.getInstance();
}
/**
* @param resourceLoaders The resource loaders
* @return The resource resolver
*/
@Singleton
@BootstrapContextCompatible
@Indexed(ResourceResolver.class)
protected @NonNull ResourceResolver resourceResolver(@NonNull List<ResourceLoader> resourceLoaders) {
return new ResourceResolver(resourceLoaders);
}
}
|
path
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/index/HiddenIndexIT.java
|
{
"start": 1770,
"end": 11597
}
|
class ____ extends ESIntegTestCase {
public void testHiddenIndexSearch() {
assertAcked(indicesAdmin().prepareCreate("hidden-index").setSettings(Settings.builder().put("index.hidden", true).build()).get());
prepareIndex("hidden-index").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get();
// default not visible to wildcard expansion
assertResponse(
prepareSearch(randomFrom("*", "_all", "h*", "*index")).setSize(1000).setQuery(QueryBuilders.matchAllQuery()),
response -> {
boolean matchedHidden = Arrays.stream(response.getHits().getHits()).anyMatch(hit -> "hidden-index".equals(hit.getIndex()));
assertFalse(matchedHidden);
}
);
// direct access allowed
assertResponse(prepareSearch("hidden-index").setSize(1000).setQuery(QueryBuilders.matchAllQuery()), response -> {
boolean matchedHidden = Arrays.stream(response.getHits().getHits()).anyMatch(hit -> "hidden-index".equals(hit.getIndex()));
assertTrue(matchedHidden);
});
// with indices option to include hidden
assertResponse(
prepareSearch(randomFrom("*", "_all", "h*", "*index")).setSize(1000)
.setQuery(QueryBuilders.matchAllQuery())
.setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN),
response -> {
boolean matchedHidden = Arrays.stream(response.getHits().getHits()).anyMatch(hit -> "hidden-index".equals(hit.getIndex()));
assertTrue(matchedHidden);
}
);
// implicit based on use of pattern starting with . and a wildcard
assertAcked(indicesAdmin().prepareCreate(".hidden-index").setSettings(Settings.builder().put("index.hidden", true).build()).get());
prepareIndex(".hidden-index").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get();
assertResponse(prepareSearch(randomFrom(".*", ".hidden-*")).setSize(1000).setQuery(QueryBuilders.matchAllQuery()), response -> {
boolean matchedHidden = Arrays.stream(response.getHits().getHits()).anyMatch(hit -> ".hidden-index".equals(hit.getIndex()));
assertTrue(matchedHidden);
});
// make index not hidden
updateIndexSettings(Settings.builder().put("index.hidden", false), "hidden-index");
assertResponse(
prepareSearch(randomFrom("*", "_all", "h*", "*index")).setSize(1000).setQuery(QueryBuilders.matchAllQuery()),
response -> {
boolean matchedHidden = Arrays.stream(response.getHits().getHits()).anyMatch(hit -> "hidden-index".equals(hit.getIndex()));
assertTrue(matchedHidden);
}
);
}
public void testGlobalTemplatesDoNotApply() {
assertAcked(indicesAdmin().preparePutTemplate("a_global_template").setPatterns(List.of("*")).setMapping("foo", "type=text"));
assertAcked(indicesAdmin().preparePutTemplate("not_global_template").setPatterns(List.of("a*")).setMapping("bar", "type=text"));
assertAcked(
indicesAdmin().preparePutTemplate("specific_template").setPatterns(List.of("a_hidden_index")).setMapping("baz", "type=text")
);
assertAcked(
indicesAdmin().preparePutTemplate("unused_template").setPatterns(List.of("not_used")).setMapping("foobar", "type=text")
);
assertAcked(indicesAdmin().prepareCreate("a_hidden_index").setSettings(Settings.builder().put("index.hidden", true).build()));
GetMappingsResponse mappingsResponse = indicesAdmin().prepareGetMappings(TEST_REQUEST_TIMEOUT, "a_hidden_index").get();
assertThat(mappingsResponse.mappings().size(), is(1));
MappingMetadata mappingMetadata = mappingsResponse.mappings().get("a_hidden_index");
assertNotNull(mappingMetadata);
@SuppressWarnings("unchecked")
Map<String, Object> propertiesMap = (Map<String, Object>) mappingMetadata.getSourceAsMap().get("properties");
assertNotNull(propertiesMap);
assertThat(propertiesMap.size(), is(2));
@SuppressWarnings("unchecked")
Map<String, Object> barMap = (Map<String, Object>) propertiesMap.get("bar");
assertNotNull(barMap);
assertThat(barMap.get("type"), is("text"));
@SuppressWarnings("unchecked")
Map<String, Object> bazMap = (Map<String, Object>) propertiesMap.get("baz");
assertNotNull(bazMap);
assertThat(bazMap.get("type"), is("text"));
}
public void testGlobalTemplateCannotMakeIndexHidden() {
InvalidIndexTemplateException invalidIndexTemplateException = expectThrows(
InvalidIndexTemplateException.class,
indicesAdmin().preparePutTemplate("a_global_template")
.setPatterns(List.of("*"))
.setSettings(Settings.builder().put("index.hidden", randomBoolean()).build())
);
assertThat(invalidIndexTemplateException.getMessage(), containsString("global templates may not specify the setting index.hidden"));
}
public void testNonGlobalTemplateCanMakeIndexHidden() {
assertAcked(
indicesAdmin().preparePutTemplate("a_global_template")
.setPatterns(List.of("my_hidden_pattern*"))
.setMapping("foo", "type=text")
.setSettings(Settings.builder().put("index.hidden", true).build())
);
assertAcked(indicesAdmin().prepareCreate("my_hidden_pattern1").get());
GetSettingsResponse getSettingsResponse = indicesAdmin().prepareGetSettings(TEST_REQUEST_TIMEOUT, "my_hidden_pattern1").get();
assertThat(getSettingsResponse.getSetting("my_hidden_pattern1", "index.hidden"), is("true"));
}
public void testAliasesForHiddenIndices() {
final String hiddenIndex = "hidden-index";
final String visibleAlias = "alias-visible";
final String hiddenAlias = "alias-hidden";
final String dotHiddenAlias = ".alias-hidden";
assertAcked(indicesAdmin().prepareCreate(hiddenIndex).setSettings(Settings.builder().put("index.hidden", true).build()).get());
assertAcked(
indicesAdmin().prepareAliases(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT)
.addAliasAction(IndicesAliasesRequest.AliasActions.add().index(hiddenIndex).alias(visibleAlias))
);
// The index should be returned here when queried by name or by wildcard because the alias is visible
final GetAliasesRequestBuilder req = indicesAdmin().prepareGetAliases(TEST_REQUEST_TIMEOUT, visibleAlias);
GetAliasesResponse response = req.get();
assertThat(response.getAliases().get(hiddenIndex), hasSize(1));
assertThat(response.getAliases().get(hiddenIndex).get(0).alias(), equalTo(visibleAlias));
assertThat(response.getAliases().get(hiddenIndex).get(0).isHidden(), nullValue());
response = indicesAdmin().prepareGetAliases(TEST_REQUEST_TIMEOUT, "alias*").get();
assertThat(response.getAliases().get(hiddenIndex), hasSize(1));
assertThat(response.getAliases().get(hiddenIndex).get(0).alias(), equalTo(visibleAlias));
assertThat(response.getAliases().get(hiddenIndex).get(0).isHidden(), nullValue());
// Now try with a hidden alias
assertAcked(
indicesAdmin().prepareAliases(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT)
.addAliasAction(IndicesAliasesRequest.AliasActions.remove().index(hiddenIndex).alias(visibleAlias))
.addAliasAction(IndicesAliasesRequest.AliasActions.add().index(hiddenIndex).alias(hiddenAlias).isHidden(true))
);
// Querying by name directly should get the right result
response = indicesAdmin().prepareGetAliases(TEST_REQUEST_TIMEOUT, hiddenAlias).get();
assertThat(response.getAliases().get(hiddenIndex), hasSize(1));
assertThat(response.getAliases().get(hiddenIndex).get(0).alias(), equalTo(hiddenAlias));
assertThat(response.getAliases().get(hiddenIndex).get(0).isHidden(), equalTo(true));
// querying by wildcard should get the right result because the indices options include hidden by default
response = indicesAdmin().prepareGetAliases(TEST_REQUEST_TIMEOUT, "alias*").get();
assertThat(response.getAliases().get(hiddenIndex), hasSize(1));
assertThat(response.getAliases().get(hiddenIndex).get(0).alias(), equalTo(hiddenAlias));
assertThat(response.getAliases().get(hiddenIndex).get(0).isHidden(), equalTo(true));
// But we should get no results if we specify indices options that don't include hidden
response = indicesAdmin().prepareGetAliases(TEST_REQUEST_TIMEOUT, "alias*")
.setIndicesOptions(IndicesOptions.strictExpandOpen())
.get();
assertThat(response.getAliases().get(hiddenIndex), nullValue());
// Now try with a hidden alias that starts with a dot
assertAcked(
indicesAdmin().prepareAliases(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT)
.addAliasAction(IndicesAliasesRequest.AliasActions.remove().index(hiddenIndex).alias(hiddenAlias))
.addAliasAction(IndicesAliasesRequest.AliasActions.add().index(hiddenIndex).alias(dotHiddenAlias).isHidden(true))
);
// Check that querying by dot-prefixed pattern returns the alias
response = indicesAdmin().prepareGetAliases(TEST_REQUEST_TIMEOUT, ".alias*").get();
assertThat(response.getAliases().get(hiddenIndex), hasSize(1));
assertThat(response.getAliases().get(hiddenIndex).get(0).alias(), equalTo(dotHiddenAlias));
assertThat(response.getAliases().get(hiddenIndex).get(0).isHidden(), equalTo(true));
}
}
|
HiddenIndexIT
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/Spr16217Tests.java
|
{
"start": 2159,
"end": 2576
}
|
class ____ implements ConfigurationCondition {
@Override
public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) {
return false;
}
@Override
public ConfigurationPhase getConfigurationPhase() {
return ConfigurationPhase.PARSE_CONFIGURATION;
}
}
@Import({RegisterBeanPhaseConditionConfiguration.class, BarConfiguration.class})
public static
|
ParseConfigurationPhaseCondition
|
java
|
google__auto
|
value/src/it/functional/src/test/java/com/google/auto/value/AutoBuilderTest.java
|
{
"start": 3110,
"end": 3507
}
|
class ____ {
abstract int getFoo();
abstract String getBar();
static Builder builder() {
return new AutoBuilder_AutoBuilderTest_SimpleAuto_Builder();
}
// There's no particular reason to do this since @AutoValue.Builder works just as well, but
// let's check anyway.
@AutoBuilder(ofClass = AutoValue_AutoBuilderTest_SimpleAuto.class)
abstract static
|
SimpleAuto
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java
|
{
"start": 13779,
"end": 14451
}
|
interface ____ query (e.g. eth0).
* Must not be null.
* @return The default host name associated with IPs bound to the network
* interface
* @throws UnknownHostException
* If one is encountered while querying the default interface
*/
public static String getDefaultHost(@Nullable String strInterface)
throws UnknownHostException {
return getDefaultHost(strInterface, null, false);
}
/**
* @return Returns the default (first) host name associated by the provided
* nameserver with the address bound to the specified network interface.
*
* @param strInterface
* The name of the network
|
to
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/specific/SpecificData.java
|
{
"start": 13545,
"end": 14372
}
|
class ____ must mangle just the
String[] packageWords = word.split("\\.");
String[] newPackageWords = new String[packageWords.length];
for (int i = 0; i < packageWords.length; i++) {
String oldName = packageWords[i];
newPackageWords[i] = mangle(oldName, reservedWords, false);
}
return String.join(".", newPackageWords);
}
if (reservedWords.contains(word) || (isMethod && reservedWords
.contains(Character.toLowerCase(word.charAt(0)) + ((word.length() > 1) ? word.substring(1) : "")))) {
return word + "$";
}
return word;
}
/** Undoes mangling for reserved words. */
protected static String unmangle(String word) {
while (word.endsWith("$")) {
word = word.substring(0, word.length() - 1);
}
return word;
}
/** Return the
|
we
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/event/spi/LoadEventListener.java
|
{
"start": 292,
"end": 1795
}
|
interface ____ {
/**
* Handle the given load event.
*
* @param event The load event to be handled.
*
*/
void onLoad(LoadEvent event, LoadType loadType) throws HibernateException;
LoadType RELOAD = new LoadType( "RELOAD" )
.setAllowNulls( false )
.setAllowProxyCreation( false )
.setCheckDeleted( true )
.setNakedEntityReturned( false );
LoadType GET = new LoadType( "GET" )
.setAllowNulls( true )
.setAllowProxyCreation( false )
.setCheckDeleted( true )
.setNakedEntityReturned( false );
LoadType LOAD = new LoadType( "LOAD" )
.setAllowNulls( false )
.setAllowProxyCreation( true )
.setCheckDeleted( true )
.setNakedEntityReturned( false );
LoadType IMMEDIATE_LOAD = new LoadType( "IMMEDIATE_LOAD" )
.setAllowNulls( true )
.setAllowProxyCreation( false )
.setCheckDeleted( false )
.setNakedEntityReturned( true );
LoadType INTERNAL_LOAD_EAGER = new LoadType( "INTERNAL_LOAD_EAGER" )
.setAllowNulls( false )
.setAllowProxyCreation( false )
.setCheckDeleted( false )
.setNakedEntityReturned( false );
LoadType INTERNAL_LOAD_LAZY = new LoadType( "INTERNAL_LOAD_LAZY" )
.setAllowNulls( false )
.setAllowProxyCreation( true )
.setCheckDeleted( false )
.setNakedEntityReturned( false );
LoadType INTERNAL_LOAD_NULLABLE = new LoadType( "INTERNAL_LOAD_NULLABLE" )
.setAllowNulls( true )
.setAllowProxyCreation( false )
.setCheckDeleted( false )
.setNakedEntityReturned( false );
final
|
LoadEventListener
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/MonoCacheInvalidateIf.java
|
{
"start": 11334,
"end": 11964
}
|
class ____<T> extends Operators.MonoSubscriber<T, T> {
@Nullable CoordinatorSubscriber<T> coordinator;
CacheMonoSubscriber(CoreSubscriber<? super T> actual) {
super(actual);
}
@Override
public void cancel() {
super.cancel();
CoordinatorSubscriber<T> coordinator = this.coordinator;
if (coordinator != null) {
coordinator.remove(this);
}
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.PARENT) return coordinator == null ? null : coordinator.main;
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return super.scanUnsafe(key);
}
}
}
|
CacheMonoSubscriber
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/query/results/Dto.java
|
{
"start": 236,
"end": 479
}
|
class ____ {
private final Integer key;
private final String text;
public Dto(Integer key, String text) {
this.key = key;
this.text = text;
}
public Integer getKey() {
return key;
}
public String getText() {
return text;
}
}
|
Dto
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_2673/Issue2673Mapper.java
|
{
"start": 1070,
"end": 1523
}
|
class ____ {
private final int primitive;
private final Optional<String> nonPrimitive;
public Source(int primitive, Optional<String> nonPrimitive) {
this.primitive = primitive;
this.nonPrimitive = nonPrimitive;
}
public int getPrimitive() {
return primitive;
}
public Optional<String> getNonPrimitive() {
return nonPrimitive;
}
}
}
|
Source
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java
|
{
"start": 18864,
"end": 20713
}
|
enum ____ the same term and retrieving
// the term ordinal. This is cheaper than calling lookupTerm for every term.
while (globalTermsEnum.term().compareTo(term) < 0) {
BytesRef nextGlobalTerm = globalTermsEnum.next();
assert nextGlobalTerm != null;
}
assert globalTermsEnum.term().equals(term);
final long globalOrd = globalTermsEnum.ord();
postings.add(new PostingsEnumAndOrd(termsEnum.postings(null, PostingsEnum.NONE), globalOrd));
}
}
}
}
disjunction = new PriorityQueue<>(size) {
@Override
protected boolean lessThan(PostingsEnumAndOrd a, PostingsEnumAndOrd b) {
return a.postings.docID() < b.postings.docID();
}
};
disjunction.addAll(postings);
}
public void updateBounds() throws IOException {
if (highestCompetitiveValueGlobalOrd == null) {
return;
}
// TODO If this is the only source, we know we are done with the buckets of the after_key.
// We could optimize even further by skipping to the next global ordinal after the after_key.
long lowOrd;
if (afterValueGlobalOrd != null && afterValueGlobalOrd != MISSING_VALUE_FLAG) {
lowOrd = afterValueGlobalOrd;
} else {
lowOrd = reverseMul == 1 ? 0 : lookup.getValueCount() - 1;
}
update(Math.min(highestCompetitiveValueGlobalOrd, lowOrd), Math.max(highestCompetitiveValueGlobalOrd, lowOrd));
}
}
}
|
to
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/BlockBlobAppendStream.java
|
{
"start": 35031,
"end": 35872
}
|
class ____ implements Runnable {
private final UploadCommand command;
WriteRequest(UploadCommand command) {
this.command = command;
}
@Override
public void run() {
try {
command.dump();
long startTime = System.nanoTime();
command.execute();
command.setCompleted();
LOG.debug("command finished for {} ms",
TimeUnit.NANOSECONDS.toMillis(
System.nanoTime() - startTime));
} catch (InterruptedException ie) {
Thread.currentThread().interrupt();
} catch (Exception ex) {
LOG.debug(
"Encountered exception during execution of command for Blob :"
+ " {} Exception : {}", key, ex);
firstError.compareAndSet(null, new AzureException(ex));
}
}
}
}
|
WriteRequest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-micrometer-metrics/src/test/java/org/springframework/boot/micrometer/metrics/autoconfigure/task/TaskExecutorMetricsAutoConfigurationTests.java
|
{
"start": 8665,
"end": 9058
}
|
class ____ {
@Bean
ThreadPoolTaskExecutor standardTaskExecutor() {
return new ThreadPoolTaskExecutor();
}
@Bean(defaultCandidate = false)
ThreadPoolTaskExecutor nonDefault() {
return new ThreadPoolTaskExecutor();
}
@Bean(autowireCandidate = false)
ThreadPoolTaskExecutor nonAutowire() {
return new ThreadPoolTaskExecutor();
}
}
}
|
MultipleTaskExecutorsConfiguration
|
java
|
apache__maven
|
impl/maven-core/src/main/java/org/apache/maven/lifecycle/MojoExecutionConfigurator.java
|
{
"start": 1226,
"end": 1347
}
|
interface ____ part of work in progress and can be changed or removed without notice.
* @since 3.3.1, MNG-5753
*/
public
|
is
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/resource/ResourceHttpRequestHandlerTests.java
|
{
"start": 23827,
"end": 35008
}
|
class ____ {
private ResourceHttpRequestHandler handler;
private MockHttpServletRequest request;
private MockHttpServletResponse response;
@BeforeEach
void setup() {
TestServletContext servletContext = new TestServletContext();
this.handler = new ResourceHttpRequestHandler();
this.handler.setLocations(List.of(testResource, testAlternatePathResource, webjarsResource));
this.handler.setServletContext(servletContext);
this.request = new MockHttpServletRequest(servletContext, "GET", "");
this.response = new MockHttpServletResponse();
}
@Test
void servesResourcesFromAlternatePath() throws Exception {
this.handler.afterPropertiesSet();
this.request.setAttribute(HandlerMapping.PATH_WITHIN_HANDLER_MAPPING_ATTRIBUTE, "baz.css");
this.handler.handleRequest(this.request, this.response);
assertThat(this.response.getContentType()).isEqualTo("text/css");
assertThat(this.response.getContentLength()).isEqualTo(17);
assertThat(this.response.getContentAsString()).isEqualTo("h1 { color:red; }");
}
@Test
void servesResourcesFromSubDirectory() throws Exception {
this.handler.afterPropertiesSet();
this.request.setAttribute(HandlerMapping.PATH_WITHIN_HANDLER_MAPPING_ATTRIBUTE, "js/foo.js");
this.handler.handleRequest(this.request, this.response);
assertThat(this.response.getContentType()).isEqualTo("text/javascript");
assertThat(this.response.getContentAsString()).isEqualTo("function foo() { console.log(\"hello world\"); }");
}
@Test
void servesResourcesFromSubDirectoryOfAlternatePath() throws Exception {
this.handler.afterPropertiesSet();
this.request.setAttribute(HandlerMapping.PATH_WITHIN_HANDLER_MAPPING_ATTRIBUTE, "js/baz.js");
this.handler.handleRequest(this.request, this.response);
assertThat(this.response.getContentType()).isEqualTo("text/javascript");
assertThat(this.response.getContentAsString()).isEqualTo("function foo() { console.log(\"hello world\"); }");
}
@Test // gh-27538, gh-27624
void filterNonExistingLocations() throws Exception {
List<Resource> inputLocations = List.of(testResource, testAlternatePathResource,
new ClassPathResource("nosuchpath/", ResourceHttpRequestHandlerTests.class));
this.handler.setLocations(inputLocations);
this.handler.setOptimizeLocations(true);
this.handler.afterPropertiesSet();
List<Resource> actual = handler.getLocations();
assertThat(actual).hasSize(2);
assertThat(actual.get(0).getURL().toString()).endsWith("test/");
assertThat(actual.get(1).getURL().toString()).endsWith("testalternatepath/");
}
@Test
void shouldRejectInvalidPath() throws Exception {
// Use mock ResourceResolver: i.e. we're only testing upfront validations...
Resource resource = mock();
given(resource.getFilename()).willThrow(new AssertionError("Resource should not be resolved"));
given(resource.getInputStream()).willThrow(new AssertionError("Resource should not be resolved"));
ResourceResolver resolver = mock();
given(resolver.resolveResource(any(), any(), any(), any())).willReturn(resource);
this.handler.setLocations(List.of(testResource));
this.handler.setResourceResolvers(List.of(resolver));
this.handler.setServletContext(new TestServletContext());
this.handler.afterPropertiesSet();
testInvalidPath("../testsecret/secret.txt");
testInvalidPath("test/../../testsecret/secret.txt");
testInvalidPath(":/../../testsecret/secret.txt");
testInvalidPath("/testsecret/test/../secret.txt");
Resource location = new UrlResource(ResourceHttpRequestHandlerTests.class.getResource("./test/"));
this.handler.setLocations(List.of(location));
Resource secretResource = new UrlResource(ResourceHttpRequestHandlerTests.class.getResource("testsecret/secret.txt"));
String secretPath = secretResource.getURL().getPath();
testInvalidPath("file:" + secretPath);
testInvalidPath("/file:" + secretPath);
testInvalidPath("url:" + secretPath);
testInvalidPath("/url:" + secretPath);
testInvalidPath("/../.." + secretPath);
testInvalidPath("/%2E%2E/testsecret/secret.txt");
testInvalidPath("/%2E%2E/testsecret/secret.txt");
}
private void testInvalidPath(String requestPath) {
this.request.setAttribute(HandlerMapping.PATH_WITHIN_HANDLER_MAPPING_ATTRIBUTE, requestPath);
this.response = new MockHttpServletResponse();
assertNotFound();
}
private void assertNotFound() {
assertThatThrownBy(() -> this.handler.handleRequest(this.request, this.response))
.isInstanceOf(NoResourceFoundException.class);
}
@Test
void shouldRejectPathWithTraversal() throws Exception {
this.handler.afterPropertiesSet();
for (HttpMethod method : HttpMethod.values()) {
this.request = new MockHttpServletRequest("GET", "");
this.response = new MockHttpServletResponse();
shouldRejectPathWithTraversal(method);
}
}
private void shouldRejectPathWithTraversal(HttpMethod httpMethod) throws Exception {
this.request.setMethod(httpMethod.name());
Resource location = new ClassPathResource("test/", getClass());
this.handler.setLocations(List.of(location));
testResolvePathWithTraversal(location, "../testsecret/secret.txt");
testResolvePathWithTraversal(location, "test/../../testsecret/secret.txt");
testResolvePathWithTraversal(location, ":/../../testsecret/secret.txt");
location = new UrlResource(ResourceHttpRequestHandlerTests.class.getResource("./test/"));
this.handler.setLocations(List.of(location));
Resource secretResource = new UrlResource(ResourceHttpRequestHandlerTests.class.getResource("testsecret/secret.txt"));
String secretPath = secretResource.getURL().getPath();
testResolvePathWithTraversal(location, "file:" + secretPath);
testResolvePathWithTraversal(location, "/file:" + secretPath);
testResolvePathWithTraversal(location, "url:" + secretPath);
testResolvePathWithTraversal(location, "/url:" + secretPath);
testResolvePathWithTraversal(location, "/" + secretPath);
testResolvePathWithTraversal(location, "////../.." + secretPath);
testResolvePathWithTraversal(location, "/%2E%2E/testsecret/secret.txt");
testResolvePathWithTraversal(location, "%2F%2F%2E%2E%2F%2Ftestsecret/secret.txt");
testResolvePathWithTraversal(location, "/ " + secretPath);
}
private void testResolvePathWithTraversal(Resource location, String requestPath) {
this.request.setAttribute(HandlerMapping.PATH_WITHIN_HANDLER_MAPPING_ATTRIBUTE, requestPath);
this.response = new MockHttpServletResponse();
assertNotFound();
}
@Test
void ignoreInvalidEscapeSequence() throws Exception {
this.handler.afterPropertiesSet();
this.request.setAttribute(HandlerMapping.PATH_WITHIN_HANDLER_MAPPING_ATTRIBUTE, "/%foo%/bar.txt");
this.response = new MockHttpServletResponse();
assertNotFound();
}
@Test
void processPath() {
// Unchanged
assertThat(this.handler.processPath("/foo/bar")).isSameAs("/foo/bar");
assertThat(this.handler.processPath("foo/bar")).isSameAs("foo/bar");
// leading whitespace control characters (00-1F)
assertThat(this.handler.processPath(" /foo/bar")).isEqualTo("/foo/bar");
assertThat(this.handler.processPath((char) 1 + "/foo/bar")).isEqualTo("/foo/bar");
assertThat(this.handler.processPath((char) 31 + "/foo/bar")).isEqualTo("/foo/bar");
assertThat(this.handler.processPath(" foo/bar")).isEqualTo("foo/bar");
assertThat(this.handler.processPath((char) 31 + "foo/bar")).isEqualTo("foo/bar");
// leading control character 0x7F (DEL)
assertThat(this.handler.processPath((char) 127 + "/foo/bar")).isEqualTo("/foo/bar");
assertThat(this.handler.processPath((char) 127 + "/foo/bar")).isEqualTo("/foo/bar");
// leading control and '/' characters
assertThat(this.handler.processPath(" / foo/bar")).isEqualTo("/foo/bar");
assertThat(this.handler.processPath(" / / foo/bar")).isEqualTo("/foo/bar");
assertThat(this.handler.processPath(" // /// //// foo/bar")).isEqualTo("/foo/bar");
assertThat(this.handler.processPath((char) 1 + " / " + (char) 127 + " // foo/bar")).isEqualTo("/foo/bar");
// root or empty path
assertThat(this.handler.processPath(" ")).isEmpty();
assertThat(this.handler.processPath("/")).isEqualTo("/");
assertThat(this.handler.processPath("///")).isEqualTo("/");
assertThat(this.handler.processPath("/ / / ")).isEqualTo("/");
assertThat(this.handler.processPath("\\/ \\/ \\/ ")).isEqualTo("/");
// duplicate slash or backslash
assertThat(this.handler.processPath("//foo/ /bar//baz//")).isEqualTo("/foo/ /bar/baz/");
assertThat(this.handler.processPath("\\\\foo\\ \\bar\\\\baz\\\\")).isEqualTo("/foo/ /bar/baz/");
assertThat(this.handler.processPath("foo\\\\/\\////bar")).isEqualTo("foo/bar");
}
@Test
void initAllowedLocations() throws Exception {
this.handler.afterPropertiesSet();
PathResourceResolver resolver = (PathResourceResolver) this.handler.getResourceResolvers().get(0);
Resource[] locations = resolver.getAllowedLocations();
assertThat(locations).containsExactly(testResource, testAlternatePathResource, webjarsResource);
}
@Test
void initAllowedLocationsWithExplicitConfiguration() throws Exception {
PathResourceResolver pathResolver = new PathResourceResolver();
pathResolver.setAllowedLocations(testResource);
this.handler.setResourceResolvers(List.of(pathResolver));
this.handler.setLocations(List.of(testResource, testAlternatePathResource));
this.handler.afterPropertiesSet();
assertThat(pathResolver.getAllowedLocations()).containsExactly(testResource);
}
@Test
void shouldNotServeDirectory() throws Exception {
this.handler.afterPropertiesSet();
this.request.setAttribute(HandlerMapping.PATH_WITHIN_HANDLER_MAPPING_ATTRIBUTE, "js/");
assertNotFound();
}
@Test
void shouldNotServeDirectoryInJarFile() throws Exception {
this.handler.afterPropertiesSet();
this.request.setAttribute(HandlerMapping.PATH_WITHIN_HANDLER_MAPPING_ATTRIBUTE, "underscorejs/");
assertNotFound();
}
@Test
void shouldNotServeMissingResourcePath() throws Exception {
this.handler.afterPropertiesSet();
this.request.setAttribute(HandlerMapping.PATH_WITHIN_HANDLER_MAPPING_ATTRIBUTE, "");
assertNotFound();
}
@Test
void noPathWithinHandlerMappingAttribute() throws Exception {
this.handler.afterPropertiesSet();
assertThatIllegalStateException().isThrownBy(() ->
this.handler.handleRequest(this.request, this.response));
}
@Test
void servletContextRootValidation() {
StaticWebApplicationContext context = new StaticWebApplicationContext() {
@Override
public Resource getResource(String location) {
return new FileSystemResource("/");
}
};
ResourceHttpRequestHandler handler = new ResourceHttpRequestHandler();
handler.setLocationValues(List.of("/"));
handler.setApplicationContext(context);
assertThatIllegalStateException().isThrownBy(handler::afterPropertiesSet)
.withMessage("The String-based location \"/\" should be relative to the web application root but " +
"resolved to a Resource of type:
|
ResourceLocationTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/transaction/TransactionRolledBackInDifferentThreadTest.java
|
{
"start": 1431,
"end": 5921
}
|
class ____ {
@Test
public void testTransactionRolledBackInDifferentThreadFailure(EntityManagerFactoryScope scope) throws Exception {
/*
* The three test threads share the same entity manager.
* The main test thread creates an EntityManager, joins it to the transaction and ends the transaction.
* Test thread 1 joins the EntityManager to its transaction, sets rollbackonly and ends the transaction.
* Test thread 2 attempts to join the EntityManager to its transaction but will fail with a
* HibernateException("Transaction was rolled back in a different thread!")
*/
// main test thread
TransactionManager transactionManager = TestingJtaPlatformImpl.INSTANCE.getTransactionManager();
transactionManager.begin();
final EntityManager em = scope.getEntityManagerFactory().createEntityManager();
try {
em.joinTransaction();
transactionManager.commit();
// will be set to the failing exception
final HibernateException[] transactionRolledBackInDifferentThreadException = new HibernateException[2];
transactionRolledBackInDifferentThreadException[0] = transactionRolledBackInDifferentThreadException[1] = null;
// background test thread 1
final Runnable run1 = () -> {
try {
transactionManager.begin();
em.joinTransaction();
transactionManager.setRollbackOnly();
transactionManager.commit();
}
catch (jakarta.persistence.PersistenceException e) {
if ( e.getCause() instanceof HibernateException &&
e.getCause().getMessage().equals( "Transaction was rolled back in a different thread!" ) ) {
/*
* Save the exception for the main test thread to fail
*/
e.printStackTrace(); // show the error first
transactionRolledBackInDifferentThreadException[0] = (HibernateException) e.getCause();
}
}
catch (RollbackException ignored) {
// expected to see RollbackException: ARJUNA016053: Could not commit transaction.
}
catch (Throwable throwable) {
throwable.printStackTrace();
}
finally {
try {
if ( transactionManager
.getStatus() != Status.STATUS_NO_TRANSACTION ) {
transactionManager.rollback();
}
}
catch (SystemException ignore) {
}
}
};
// test thread 2
final Runnable run2 = () -> {
try {
transactionManager.begin();
/*
* the following call to em.joinTransaction() will throw:
* org.hibernate.HibernateException: Transaction was rolled back in a different thread!
*/
em.joinTransaction();
transactionManager.commit();
}
catch (jakarta.persistence.PersistenceException e) {
if ( e.getCause() instanceof HibernateException &&
e.getCause().getMessage().equals( "Transaction was rolled back in a different thread!" ) ) {
/*
* Save the exception for the main test thread to fail
*/
e.printStackTrace(); // show the error first
transactionRolledBackInDifferentThreadException[1] = (HibernateException) e.getCause();
}
}
catch (Throwable throwable) {
throwable.printStackTrace();
}
finally {
try {
if ( transactionManager
.getStatus() != Status.STATUS_NO_TRANSACTION ) {
transactionManager.rollback();
}
}
catch (SystemException ignore) {
}
}
};
Thread thread = new Thread( run1, "test thread1" );
thread.start();
thread.join();
Thread thread2 = new Thread( run2, "test thread2" );
thread2.start();
thread2.join();
// show failure for exception caught in run2.run()
if ( transactionRolledBackInDifferentThreadException[0] != null
|| transactionRolledBackInDifferentThreadException[1] != null ) {
fail(
"failure in test thread 1 = " +
( transactionRolledBackInDifferentThreadException[0] != null ?
transactionRolledBackInDifferentThreadException[0].getMessage() :
"(none)" )
+ ", failure in test thread 2 = " +
( transactionRolledBackInDifferentThreadException[1] != null ?
transactionRolledBackInDifferentThreadException[1].getMessage() :
"(none)" )
);
}
}
finally {
try {
switch ( transactionManager.getStatus() ) {
case Status.STATUS_ACTIVE:
case Status.STATUS_MARKED_ROLLBACK:
transactionManager.rollback();
}
}
catch (Exception exception) {
//ignore exception
}
em.close();
}
}
}
|
TransactionRolledBackInDifferentThreadTest
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
|
{
"start": 2318,
"end": 5825
}
|
class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(TestFileSystem.class);
private static Configuration conf = new Configuration();
private static int BUFFER_SIZE = conf.getInt("io.file.buffer.size", 4096);
private static final long MEGA = 1024 * 1024;
private static final int SEEKS_PER_FILE = 4;
private static String ROOT = System.getProperty("test.build.data","fs_test");
private static Path CONTROL_DIR = new Path(ROOT, "fs_control");
private static Path WRITE_DIR = new Path(ROOT, "fs_write");
private static Path READ_DIR = new Path(ROOT, "fs_read");
private static Path DATA_DIR = new Path(ROOT, "fs_data");
@Test
public void testFs() throws Exception {
testFs(10 * MEGA, 100, 0);
}
public static void testFs(long megaBytes, int numFiles, long seed)
throws Exception {
FileSystem fs = FileSystem.get(conf);
if (seed == 0)
seed = new Random().nextLong();
LOG.info("seed = "+seed);
createControlFile(fs, megaBytes, numFiles, seed);
writeTest(fs, false);
readTest(fs, false);
seekTest(fs, false);
fs.delete(CONTROL_DIR, true);
fs.delete(DATA_DIR, true);
fs.delete(WRITE_DIR, true);
fs.delete(READ_DIR, true);
}
@Test
public void testCommandFormat() throws Exception {
// This should go to TestFsShell.java when it is added.
CommandFormat cf;
cf= new CommandFormat("copyToLocal", 2,2,"crc","ignoreCrc");
assertThat(cf.parse(new String[] {"-get", "file", "-"}, 1).get(1))
.isEqualTo("-");
try {
cf.parse(new String[] {"-get","file","-ignoreCrc","/foo"}, 1);
fail("Expected parsing to fail as it should stop at first non-option");
}
catch (Exception e) {
// Expected
}
cf = new CommandFormat("tail", 1, 1, "f");
assertThat(cf.parse(new String[] {"-tail", "fileName"}, 1).get(0))
.isEqualTo("fileName");
assertThat(cf.parse(new String[] {"-tail", "-f", "fileName"}, 1).get(0))
.isEqualTo("fileName");
cf = new CommandFormat("setrep", 2, 2, "R", "w");
assertThat(cf.parse(new String[] {"-setrep", "-R", "2", "/foo/bar"}, 1)
.get(1)).isEqualTo("/foo/bar");
cf = new CommandFormat("put", 2, 10000);
assertThat(cf.parse(new String[] {"-put", "-", "dest"}, 1).get(1))
.isEqualTo("dest");
}
public static void createControlFile(FileSystem fs,
long megaBytes, int numFiles,
long seed) throws Exception {
LOG.info("creating control file: "+megaBytes+" bytes, "+numFiles+" files");
Path controlFile = new Path(CONTROL_DIR, "files");
fs.delete(controlFile, true);
Random random = new Random(seed);
SequenceFile.Writer writer =
SequenceFile.createWriter(fs, conf, controlFile,
Text.class, LongWritable.class, CompressionType.NONE);
long totalSize = 0;
long maxSize = ((megaBytes / numFiles) * 2) + 1;
try {
while (totalSize < megaBytes) {
Text name = new Text(Long.toString(random.nextLong()));
long size = random.nextLong();
if (size < 0)
size = -size;
size = size % maxSize;
//LOG.info(" adding: name="+name+" size="+size);
writer.append(name, new LongWritable(size));
totalSize += size;
}
} finally {
writer.close();
}
LOG.info("created control file for: "+totalSize+" bytes");
}
public static
|
TestFileSystem
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/state/internals/InMemoryWindowBytesStoreSupplier.java
|
{
"start": 1017,
"end": 2919
}
|
class ____ implements WindowBytesStoreSupplier {
private final String name;
private final long retentionPeriod;
private final long windowSize;
private final boolean retainDuplicates;
public InMemoryWindowBytesStoreSupplier(final String name,
final long retentionPeriod,
final long windowSize,
final boolean retainDuplicates) {
this.name = name;
this.retentionPeriod = retentionPeriod;
this.windowSize = windowSize;
this.retainDuplicates = retainDuplicates;
}
@Override
public String name() {
return name;
}
@Override
public WindowStore<Bytes, byte[]> get() {
return new InMemoryWindowStore(name,
retentionPeriod,
windowSize,
retainDuplicates,
metricsScope());
}
@Override
public String metricsScope() {
return "in-memory-window";
}
@Override
public long retentionPeriod() {
return retentionPeriod;
}
@Override
public long windowSize() {
return windowSize;
}
// In-memory window store is not *really* segmented, so just say size is 1 ms
@Override
public long segmentIntervalMs() {
return 1;
}
@Override
public boolean retainDuplicates() {
return retainDuplicates;
}
@Override
public String toString() {
return "InMemoryWindowBytesStoreSupplier{" +
"name='" + name + '\'' +
", retentionPeriod=" + retentionPeriod +
", windowSize=" + windowSize +
", retainDuplicates=" + retainDuplicates +
'}';
}
}
|
InMemoryWindowBytesStoreSupplier
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/foreignkeys/crossschema/SchemaTwoEntity.java
|
{
"start": 466,
"end": 626
}
|
class ____ {
@Id
private String id;
@OneToMany
@JoinColumn
private Set<SchemaOneEntity> schemaOneEntities = new HashSet<SchemaOneEntity>();
}
|
SchemaTwoEntity
|
java
|
google__error-prone
|
check_api/src/main/java/com/google/errorprone/util/ASTHelpers.java
|
{
"start": 26929,
"end": 33349
}
|
class ____ that matches the given symbol within the compilation unit, or null if
* none was found.
*/
public static @Nullable ClassTree findClass(ClassSymbol symbol, VisitorState state) {
return JavacTrees.instance(state.context).getTree(symbol);
}
// TODO(ghm): Using a comparison of tsym here appears to be a behaviour change.
@SuppressWarnings("TypeEquals")
public static @Nullable MethodSymbol findSuperMethodInType(
MethodSymbol methodSymbol, Type superType, Types types) {
if (methodSymbol.isStatic() || superType.equals(methodSymbol.owner.type)) {
return null;
}
Scope scope = superType.tsym.members();
for (Symbol sym : scope.getSymbolsByName(methodSymbol.name)) {
if (sym != null
&& !isStatic(sym)
&& ((sym.flags() & Flags.SYNTHETIC) == 0)
&& methodSymbol.overrides(
sym, (TypeSymbol) methodSymbol.owner, types, /* checkResult= */ true)) {
return (MethodSymbol) sym;
}
}
return null;
}
/**
* Finds supermethods of {@code methodSymbol}, not including {@code methodSymbol} itself, and
* including interfaces.
*/
public static Set<MethodSymbol> findSuperMethods(MethodSymbol methodSymbol, Types types) {
return streamSuperMethods(methodSymbol, types).collect(toCollection(LinkedHashSet::new));
}
/** See {@link #findSuperMethods(MethodSymbol, Types)}. */
public static Stream<MethodSymbol> streamSuperMethods(MethodSymbol methodSymbol, Types types) {
TypeSymbol owner = (TypeSymbol) methodSymbol.owner;
return types.closure(owner.type).stream()
.map(type -> findSuperMethodInType(methodSymbol, type, types))
.filter(Objects::nonNull);
}
/**
* Finds all methods in any superclass of {@code startClass} with a certain {@code name} that
* match the given {@code predicate}.
*
* @return The (possibly empty) list of methods in any superclass that match {@code predicate} and
* have the given {@code name}. Results are returned least-abstract first, i.e., starting in
* the {@code startClass} itself, progressing through its superclasses, and finally interfaces
* in an unspecified order.
*/
public static Stream<MethodSymbol> matchingMethods(
Name name, Predicate<MethodSymbol> predicate, Type startClass, Types types) {
Predicate<Symbol> matchesMethodPredicate =
sym -> sym instanceof MethodSymbol methodSymbol && predicate.test(methodSymbol);
// Iterate over all classes and interfaces that startClass inherits from.
return types.closure(startClass).stream()
.flatMap(
(Type superClass) -> {
// Iterate over all the methods declared in superClass.
TypeSymbol superClassSymbol = superClass.tsym;
Scope superClassSymbols = superClassSymbol.members();
if (superClassSymbols == null) { // Can be null if superClass is a type variable
return Stream.empty();
}
return stream(
superClassSymbols.getSymbolsByName(
name, matchesMethodPredicate, NON_RECURSIVE))
// By definition of the filter, we know that the symbol is a MethodSymbol.
.map(symbol -> (MethodSymbol) symbol);
});
}
/**
* Finds all methods in any superclass of {@code startClass} with a certain {@code name} that
* match the given {@code predicate}.
*
* @return The (possibly empty) set of methods in any superclass that match {@code predicate} and
* have the given {@code name}. The set's iteration order will be the same as the order
* documented in {@link #matchingMethods(Name, java.util.function.Predicate, Type, Types)}.
*/
public static ImmutableSet<MethodSymbol> findMatchingMethods(
Name name, Predicate<MethodSymbol> predicate, Type startClass, Types types) {
return matchingMethods(name, predicate, startClass, types).collect(toImmutableSet());
}
/**
* Determines whether a method can be overridden.
*
* @return true if the method can be overridden.
*/
public static boolean methodCanBeOverridden(MethodSymbol methodSymbol) {
if (methodSymbol.getModifiers().contains(Modifier.ABSTRACT)) {
return true;
}
if (methodSymbol.isStatic()
|| methodSymbol.isPrivate()
|| isFinal(methodSymbol)
|| methodSymbol.isConstructor()) {
return false;
}
ClassSymbol classSymbol = (ClassSymbol) methodSymbol.owner;
return !isFinal(classSymbol) && !classSymbol.isAnonymous();
}
private static boolean isFinal(Symbol symbol) {
return (symbol.flags() & Flags.FINAL) == Flags.FINAL;
}
/**
* Flag for record types, canonical record constructors and type members that are part of a
* record's state vector. Can be replaced by {@code com.sun.tools.javac.code.Flags.RECORD} once
* the minimum JDK version is 14.
*/
private static final long RECORD_FLAG = 1L << 61;
/**
* Returns whether the given {@link Symbol} is a record, a record's canonical constructor or a
* member that is part of a record's state vector.
*
* <p>Health warning: some things are flagged within a compilation, but won't be flagged across
* compilation boundaries, like canonical constructors.
*/
public static boolean isRecord(Symbol symbol) {
return (symbol.flags() & RECORD_FLAG) == RECORD_FLAG;
}
/** Finds the canonical constructor on a record. */
public static MethodSymbol canonicalConstructor(ClassSymbol record, VisitorState state) {
var fieldTypes =
record.getRecordComponents().stream().map(rc -> rc.type).collect(toImmutableList());
return stream(record.members().getSymbols(s -> s.getKind() == CONSTRUCTOR))
.map(c -> (MethodSymbol) c)
.filter(
c ->
c.getParameters().size() == fieldTypes.size()
&& zip(
c.getParameters().stream(),
fieldTypes.stream(),
(a, b) -> isSameType(a.type, b, state))
.allMatch(x -> x))
.collect(onlyElement());
}
/**
* Determines whether a symbol has an annotation of the given type. This includes annotations
* inherited from superclasses due to {@code @Inherited}.
*
* @param annotationClass the binary
|
tree
|
java
|
elastic__elasticsearch
|
modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java
|
{
"start": 12374,
"end": 19741
}
|
class ____ {
Exception exception = null;
private int count = 0;
void useOrMaybeSuppress(Exception e) {
if (count < MAX_DELETE_EXCEPTIONS) {
exception = ExceptionsHelper.useOrSuppress(exception, e);
count++;
}
}
}
void deleteBlobs(OperationPurpose purpose, Iterator<String> blobNames) throws IOException {
if (blobNames.hasNext() == false) {
return;
}
final List<ObjectIdentifier> partition = new ArrayList<>();
try {
// S3 API only allows 1k blobs per delete so we split up the given blobs into requests of max. 1k deletes
final var deletionExceptions = new DeletionExceptions();
blobNames.forEachRemaining(key -> {
partition.add(ObjectIdentifier.builder().key(key).build());
if (partition.size() == bulkDeletionBatchSize) {
deletePartition(purpose, partition, deletionExceptions);
partition.clear();
}
});
if (partition.isEmpty() == false) {
deletePartition(purpose, partition, deletionExceptions);
}
if (deletionExceptions.exception != null) {
throw deletionExceptions.exception;
}
} catch (Exception e) {
throw new IOException("Failed to delete blobs " + partition.stream().limit(10).toList(), e);
}
}
/**
* Delete one partition of a batch of blobs
*
* @param purpose The {@link OperationPurpose} of the deletion
* @param partition The list of blobs to delete
* @param deletionExceptions A holder for any exception(s) thrown during the deletion
*/
private void deletePartition(OperationPurpose purpose, List<ObjectIdentifier> partition, DeletionExceptions deletionExceptions) {
final Iterator<TimeValue> retries = retryThrottledDeleteBackoffPolicy.iterator();
int retryCounter = 0;
while (true) {
try (AmazonS3Reference clientReference = clientReference()) {
final var response = clientReference.client().deleteObjects(bulkDelete(purpose, this, partition));
if (response.hasErrors()) {
final var exception = new ElasticsearchException(buildDeletionErrorMessage(response.errors()));
logger.warn(exception.getMessage(), exception);
deletionExceptions.useOrMaybeSuppress(exception);
return;
}
s3RepositoriesMetrics.retryDeletesHistogram().record(retryCounter);
return;
} catch (SdkException e) {
if (shouldRetryDelete(purpose) && RetryUtils.isThrottlingException(e)) {
// S3 is asking us to slow down. Pause for a bit and retry
if (maybeDelayAndRetryDelete(retries)) {
retryCounter++;
} else {
s3RepositoriesMetrics.retryDeletesHistogram().record(retryCounter);
deletionExceptions.useOrMaybeSuppress(e);
return;
}
} else {
// The AWS client threw any unexpected exception and did not execute the request at all so we do not
// remove any keys from the outstanding deletes set.
deletionExceptions.useOrMaybeSuppress(e);
return;
}
}
}
}
private String buildDeletionErrorMessage(List<S3Error> errors) {
final var sb = new StringBuilder("Failed to delete some blobs ");
for (int i = 0; i < errors.size() && i < MAX_DELETE_EXCEPTIONS; i++) {
final var err = errors.get(i);
sb.append("[").append(err.key()).append("][").append(err.code()).append("][").append(err.message()).append("]");
if (i < errors.size() - 1) {
sb.append(",");
}
}
if (errors.size() > MAX_DELETE_EXCEPTIONS) {
sb.append("... (")
.append(errors.size())
.append(" in total, ")
.append(errors.size() - MAX_DELETE_EXCEPTIONS)
.append(" omitted)");
}
return sb.toString();
}
/**
* If there are remaining retries, pause for the configured interval then return true
*
* @param retries The retries iterator
* @return true to try the deletion again, false otherwise
*/
private boolean maybeDelayAndRetryDelete(Iterator<TimeValue> retries) {
if (retries.hasNext()) {
try {
Thread.sleep(retries.next().millis());
return true;
} catch (InterruptedException iex) {
Thread.currentThread().interrupt();
// If we're interrupted, record the exception and abort retries
logger.warn("Aborting tenacious snapshot delete retries due to interrupt");
}
} else {
logger.warn(
"Exceeded maximum tenacious snapshot delete retries, aborting. Using back-off policy "
+ retryThrottledDeleteBackoffPolicy
+ ", see the throttled_delete_retry.* S3 repository properties to configure the back-off parameters"
);
}
return false;
}
private boolean shouldRetryDelete(OperationPurpose operationPurpose) {
return operationPurpose == OperationPurpose.SNAPSHOT_DATA || operationPurpose == OperationPurpose.SNAPSHOT_METADATA;
}
private static DeleteObjectsRequest bulkDelete(OperationPurpose purpose, S3BlobStore blobStore, List<ObjectIdentifier> blobs) {
final var requestBuilder = DeleteObjectsRequest.builder().bucket(blobStore.bucket()).delete(b -> b.quiet(true).objects(blobs));
configureRequestForMetrics(requestBuilder, blobStore, Operation.DELETE_OBJECTS, purpose);
return requestBuilder.build();
}
@Override
public void close() throws IOException {
service.onBlobStoreClose(projectId);
}
@Override
public Map<String, BlobStoreActionStats> stats() {
return statsCollectors.statsMap(service.isStateless);
}
// Package private for testing
StatsCollectors getStatsCollectors() {
return statsCollectors;
}
public ObjectCannedACL getCannedACL() {
return cannedACL;
}
public StorageClass getStorageClass() {
return storageClass;
}
public TimeValue getGetRegisterRetryDelay() {
return getRegisterRetryDelay;
}
public static StorageClass initStorageClass(String storageClassName) {
if ((storageClassName == null) || storageClassName.equals("")) {
return StorageClass.STANDARD;
}
final StorageClass storageClass;
try {
storageClass = StorageClass.fromValue(storageClassName.toUpperCase(Locale.ENGLISH));
} catch (final Exception e) {
throw new BlobStoreException("`" + storageClassName + "` is not a valid S3 Storage Class.", e);
}
if (storageClass.equals(StorageClass.GLACIER)) {
throw new BlobStoreException("Glacier storage
|
DeletionExceptions
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
|
{
"start": 9697,
"end": 11324
}
|
enum ____ newly appended value should
// be handled by NameNodeLayoutVersion#Feature.
private static final ReplicaState[] cachedValues = ReplicaState.values();
private final int value;
ReplicaState(int v) {
value = v;
}
public int getValue() {
return value;
}
/**
* Retrieve ReplicaState corresponding to given index.
*
* @param v Index to retrieve {@link ReplicaState}.
* @return {@link ReplicaState} object.
* @throws IndexOutOfBoundsException if the index is invalid.
*/
public static ReplicaState getState(int v) {
Validate.validIndex(cachedValues, v, "Index Expected range: [0, "
+ (cachedValues.length - 1) + "]. Actual value: " + v);
return cachedValues[v];
}
/**
* Retrieve ReplicaState corresponding to index provided in binary stream.
*
* @param in Index value provided as bytes in given binary stream.
* @return {@link ReplicaState} object.
* @throws IOException if an I/O error occurs while reading bytes.
* @throws IndexOutOfBoundsException if the index is invalid.
*/
public static ReplicaState read(DataInput in) throws IOException {
byte idx = in.readByte();
Validate.validIndex(cachedValues, idx, "Index Expected range: [0, "
+ (cachedValues.length - 1) + "]. Actual value: " + idx);
return cachedValues[idx];
}
/** Write to out */
public void write(DataOutput out) throws IOException {
out.writeByte(ordinal());
}
}
/**
* States, which a block can go through while it is under construction.
*/
|
or
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/RestResponse.java
|
{
"start": 34205,
"end": 35262
}
|
class ____ to build RestResponse instances that contain metadata instead
* of or in addition to an entity. An initial instance may be obtained via
* static methods of the RestResponse.ResponseBuilder class, instance methods provide the
* ability to set metadata. E.g. to create a response that indicates the
* creation of a new resource:
*
* <pre>
* @POST
* RestResponse<Void> addWidget(...) {
* Widget w = ...
* URI widgetId = UriBuilder.fromResource(Widget.class)...
* return RestResponse.ResponseBuilder.created(widgetId).build();
* }
* </pre>
*
* <p>
* Several methods have parameters of type URI, {@link UriBuilder} provides
* convenient methods to create such values as does {@code URI.create()}.
* </p>
*
* <p>
* Where multiple variants of the same method are provided, the type of
* the supplied parameter is retained in the metadata of the built
* {@code RestResponse}.
* </p>
*/
public static abstract
|
used
|
java
|
google__guice
|
extensions/persist/test/com/google/inject/persist/jpa/ManualLocalTransactionsConfidenceTest.java
|
{
"start": 2524,
"end": 3060
}
|
class ____ {
@Inject Provider<EntityManager> emProvider;
@Transactional
public void runOperationInTxn() {
EntityManager em = emProvider.get();
JpaParentTestEntity entity = new JpaParentTestEntity();
JpaTestEntity child = new JpaTestEntity();
child.setText(UNIQUE_TEXT_3);
em.persist(child);
entity.getChildren().add(child);
em.persist(entity);
entity = new JpaParentTestEntity();
entity.getChildren().add(child);
em.persist(entity);
}
}
}
|
TransactionalObject
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/query/internal/property/EntityPropertyName.java
|
{
"start": 371,
"end": 633
}
|
class ____ implements PropertyNameGetter {
private final String propertyName;
public EntityPropertyName(String propertyName) {
this.propertyName = propertyName;
}
public String get(Configuration configuration) {
return propertyName;
}
}
|
EntityPropertyName
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_doesNotContain_Test.java
|
{
"start": 967,
"end": 1322
}
|
class ____ extends LongArrayAssertBaseTest {
@Override
protected LongArrayAssert invoke_api_method() {
return assertions.doesNotContain(6L, 8L);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertDoesNotContain(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L));
}
}
|
LongArrayAssert_doesNotContain_Test
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/jdk8/LocalDateTimeTest.java
|
{
"start": 2395,
"end": 2620
}
|
class ____ {
private LocalDateTime date;
public LocalDateTime getDate() {
return date;
}
public void setDate(LocalDateTime date) {
this.date = date;
}
}
}
|
VO
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/features/GetSnapshottableFeaturesResponse.java
|
{
"start": 2563,
"end": 4197
}
|
class ____ implements Writeable, ToXContentObject {
private final String featureName;
private final String description;
public SnapshottableFeature(String featureName, String description) {
this.featureName = featureName;
this.description = description;
}
public SnapshottableFeature(StreamInput in) throws IOException {
featureName = in.readString();
description = in.readString();
}
public String getFeatureName() {
return featureName;
}
public String getDescription() {
return description;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(featureName);
out.writeString(description);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("name", featureName);
builder.field("description", description);
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if ((o instanceof SnapshottableFeature) == false) return false;
SnapshottableFeature feature = (SnapshottableFeature) o;
return Objects.equals(getFeatureName(), feature.getFeatureName());
}
@Override
public int hashCode() {
return Objects.hash(getFeatureName());
}
}
}
|
SnapshottableFeature
|
java
|
google__dagger
|
javatests/dagger/functional/subcomponent/ParentGetters.java
|
{
"start": 698,
"end": 807
}
|
interface ____ {
Provider<UnscopedType> getUnscopedTypeProvider();
Set<Object> objectSet();
}
|
ParentGetters
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/bytecode/spi/ClassTransformer.java
|
{
"start": 603,
"end": 696
}
|
class ____ defined by the JVM.
*
* @author Bill Burke
* @author Emmanuel Bernard
*/
public
|
is
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/common/typeutils/base/DoubleComparatorTest.java
|
{
"start": 1074,
"end": 2061
}
|
class ____ extends ComparatorTestBase<Double> {
@Override
protected TypeComparator<Double> createComparator(boolean ascending) {
return new DoubleComparator(ascending);
}
@Override
protected TypeSerializer<Double> createSerializer() {
return new DoubleSerializer();
}
@Override
protected Double[] getSortedTestData() {
Random rnd = new Random(874597969123412338L);
double rndDouble = rnd.nextDouble();
if (rndDouble < 0) {
rndDouble = -rndDouble;
}
if (rndDouble == Double.MAX_VALUE) {
rndDouble -= 3;
}
if (rndDouble <= 2) {
rndDouble += 3;
}
return new Double[] {
Double.valueOf(-rndDouble),
Double.valueOf(-1.0D),
Double.valueOf(0.0D),
Double.valueOf(2.0D),
Double.valueOf(rndDouble),
Double.valueOf(Double.MAX_VALUE)
};
}
}
|
DoubleComparatorTest
|
java
|
elastic__elasticsearch
|
build-tools/src/main/java/org/elasticsearch/gradle/ReaperPlugin.java
|
{
"start": 757,
"end": 2129
}
|
class ____ implements Plugin<Project> {
/**
* The unique identifier to register the reaper shared service within a gradle build
* */
public static final String REAPER_SERVICE_NAME = "reaper";
private final ProjectLayout projectLayout;
@Inject
ReaperPlugin(ProjectLayout projectLayout) {
this.projectLayout = projectLayout;
}
@Override
public void apply(Project project) {
registerReaperService(project, projectLayout, false);
}
public static void registerReaperService(Project project, ProjectLayout projectLayout, boolean internal) {
if (project != project.getRootProject()) {
throw new IllegalArgumentException("ReaperPlugin can only be applied to the root project of a build");
}
File inputDir = projectLayout.getProjectDirectory()
.dir(".gradle")
.dir("reaper")
.dir("build-" + ProcessHandle.current().pid())
.getAsFile();
project.getGradle().getSharedServices().registerIfAbsent(REAPER_SERVICE_NAME, ReaperService.class, spec -> {
// Provide some parameters
spec.getParameters().getInputDir().set(inputDir);
spec.getParameters().getBuildDir().set(projectLayout.getBuildDirectory());
spec.getParameters().setInternal(internal);
});
}
}
|
ReaperPlugin
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/EndpointProducerResolver.java
|
{
"start": 840,
"end": 936
}
|
interface ____ represent an object that can be resolved as a producer {@link Endpoint}
*/
public
|
to
|
java
|
apache__camel
|
components/camel-telegram/src/main/java/org/apache/camel/component/telegram/model/InlineQueryResultVenue.java
|
{
"start": 1194,
"end": 2013
}
|
class ____ extends InlineQueryResult {
private static final String TYPE = "venue";
private Float latitude;
private Float longitude;
@JsonProperty("thumb_url")
private String thumbUrl;
@JsonProperty("thumb_width")
private String thumbWidth;
@JsonProperty("thumb_height")
private String thumbHeight;
private String title;
private String address;
@JsonProperty("foursquare_id")
private String foursquareId;
@JsonProperty("foursquare_type")
private String foursquareType;
@JsonProperty("input_message_content")
private InputMessageContent inputMessageContext;
public InlineQueryResultVenue() {
super(TYPE);
}
public static Builder builder() {
return new Builder();
}
public static final
|
InlineQueryResultVenue
|
java
|
quarkusio__quarkus
|
integration-tests/mongodb-panache/src/main/java/io/quarkus/it/mongodb/panache/person/Person.java
|
{
"start": 103,
"end": 250
}
|
class ____ {
@BsonId
public Long id;
public String firstname;
public String lastname;
public Status status = Status.ALIVE;
}
|
Person
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/FluxBufferWhen.java
|
{
"start": 14314,
"end": 16677
}
|
class ____<T, BUFFER extends Collection<? super T>>
implements Disposable, InnerConsumer<Object> {
volatile @Nullable Subscription subscription;
// https://github.com/uber/NullAway/issues/1157
@SuppressWarnings({"rawtypes", "DataFlowIssue"})
static final AtomicReferenceFieldUpdater<BufferWhenCloseSubscriber, @Nullable Subscription> SUBSCRIPTION =
AtomicReferenceFieldUpdater.newUpdater(BufferWhenCloseSubscriber.class, Subscription.class, "subscription");
final BufferWhenMainSubscriber<T, ?, ?, BUFFER> parent;
final long index;
BufferWhenCloseSubscriber(BufferWhenMainSubscriber<T, ?, ?, BUFFER> parent, long index) {
this.parent = parent;
this.index = index;
}
@Override
public Context currentContext() {
return parent.currentContext();
}
@Override
public void onSubscribe(Subscription s) {
if (Operators.setOnce(SUBSCRIPTION, this, s)) {
s.request(Long.MAX_VALUE);
}
}
@Override
public void dispose() {
Operators.terminate(SUBSCRIPTION, this);
}
@Override
public boolean isDisposed() {
return subscription == Operators.cancelledSubscription();
}
@Override
public void onNext(Object t) {
Subscription s = subscription;
if (s != Operators.cancelledSubscription()) {
SUBSCRIPTION.lazySet(this, Operators.cancelledSubscription());
assert s != null : "subscription can not be null when onNext happens";
s.cancel();
parent.close(this, index);
}
}
@Override
public void onError(Throwable t) {
if (subscription != Operators.cancelledSubscription()) {
SUBSCRIPTION.lazySet(this, Operators.cancelledSubscription());
parent.boundaryError(this, t);
}
else {
Operators.onErrorDropped(t, parent.ctx);
}
}
@Override
public void onComplete() {
if (subscription != Operators.cancelledSubscription()) {
SUBSCRIPTION.lazySet(this, Operators.cancelledSubscription());
parent.close(this, index);
}
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.ACTUAL) return parent;
if (key == Attr.PARENT) return subscription;
if (key == Attr.REQUESTED_FROM_DOWNSTREAM) return Long.MAX_VALUE;
if (key == Attr.CANCELLED) return isDisposed();
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return null;
}
}
}
|
BufferWhenCloseSubscriber
|
java
|
apache__camel
|
components/camel-kubernetes/src/main/java/org/apache/camel/component/kubernetes/properties/SecretPropertiesFunction.java
|
{
"start": 978,
"end": 1126
}
|
class ____ extends BaseSecretPropertiesFunction {
@Override
public String getName() {
return "secret";
}
}
|
SecretPropertiesFunction
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/SpringThrottlerTest.java
|
{
"start": 1925,
"end": 2124
}
|
class ____ implements Processor {
@Override
public void process(Exchange exchange) throws Exception {
throw new RuntimeException();
}
}
}
|
RuntimeExceptionProcessor
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/index/ApplicationArchiveBuildStep.java
|
{
"start": 2545,
"end": 2940
}
|
interface ____ {
/**
* Artifacts on the classpath that should also be indexed.
* <p>
* Their classes will be in the index and processed by Quarkus processors.
*/
@WithParentName
@ConfigDocSection
@ConfigDocMapKey("dependency-name")
Map<String, IndexDependencyConfig> indexDependency();
|
IndexDependencyConfiguration
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/validator/PredicateValidatorDefinition.java
|
{
"start": 1432,
"end": 2331
}
|
class ____ extends ValidatorDefinition {
@XmlElementRef
private ExpressionDefinition expression;
public PredicateValidatorDefinition() {
}
protected PredicateValidatorDefinition(PredicateValidatorDefinition source) {
super(source);
this.expression = source.expression != null ? source.expression.copyDefinition() : null;
}
@Override
public ValidatorDefinition copyDefinition() {
return new PredicateValidatorDefinition(this);
}
public ExpressionDefinition getExpression() {
return expression;
}
public void setExpression(ExpressionDefinition expression) {
// favour using the helper to set the expression as it can unwrap some
// unwanted builders when using Java DSL
this.expression = ExpressionNodeHelper.toExpressionDefinition((Expression) expression);
}
}
|
PredicateValidatorDefinition
|
java
|
elastic__elasticsearch
|
x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringMigrateAlertsResponseTests.java
|
{
"start": 980,
"end": 1124
}
|
class ____ extends AbstractWireSerializingTestCase<MonitoringMigrateAlertsResponse> {
private static final
|
MonitoringMigrateAlertsResponseTests
|
java
|
alibaba__nacos
|
config/src/test/java/com/alibaba/nacos/config/server/utils/ResponseUtilTest.java
|
{
"start": 1579,
"end": 12883
}
|
class ____ {
String lineSeparator = System.lineSeparator();
@Test
void testWriteErrMsg() {
MockHttpServletResponse response = new MockHttpServletResponse();
ResponseUtil.writeErrMsg(response, 404, "test");
assertEquals(404, response.getStatus());
try {
assertEquals("test" + lineSeparator, response.getContentAsString());
} catch (UnsupportedEncodingException e) {
System.out.println(e.toString());
}
}
@Test
void testTransferToConfigDetailInfo() {
ConfigAllInfo configAllInfo = new ConfigAllInfo();
configAllInfo.setId(1L);
configAllInfo.setTenant("testNs");
configAllInfo.setGroup(Constants.DEFAULT_GROUP);
configAllInfo.setDataId("testDs");
configAllInfo.setMd5("testMd5");
configAllInfo.setEncryptedDataKey("testEncryptedDataKey");
configAllInfo.setContent("testContent");
configAllInfo.setDesc("testDesc");
configAllInfo.setType("text");
configAllInfo.setAppName("testAppName");
configAllInfo.setCreateIp("1.1.1.1");
configAllInfo.setCreateUser("testCreateUser");
configAllInfo.setCreateTime(System.currentTimeMillis());
configAllInfo.setModifyTime(System.currentTimeMillis());
configAllInfo.setConfigTags("testConfigTag1,testConfigTag2");
configAllInfo.setUse("testUse");
configAllInfo.setEffect("testEffect");
configAllInfo.setSchema("testSchema");
ConfigDetailInfo configDetailInfo = ResponseUtil.transferToConfigDetailInfo(configAllInfo);
assertEquals(configAllInfo.getId(), configDetailInfo.getId());
assertEquals(configAllInfo.getTenant(), configDetailInfo.getNamespaceId());
assertEquals(configAllInfo.getGroup(), configDetailInfo.getGroupName());
assertEquals(configAllInfo.getDataId(), configDetailInfo.getDataId());
assertEquals(configAllInfo.getMd5(), configDetailInfo.getMd5());
assertEquals(configAllInfo.getEncryptedDataKey(), configDetailInfo.getEncryptedDataKey());
assertEquals(configAllInfo.getContent(), configDetailInfo.getContent());
assertEquals(configAllInfo.getDesc(), configDetailInfo.getDesc());
assertEquals(configAllInfo.getType(), configDetailInfo.getType());
assertEquals(configAllInfo.getAppName(), configDetailInfo.getAppName());
assertEquals(configAllInfo.getCreateIp(), configDetailInfo.getCreateIp());
assertEquals(configAllInfo.getCreateUser(), configDetailInfo.getCreateUser());
assertEquals(configAllInfo.getCreateTime(), configDetailInfo.getCreateTime());
assertEquals(configAllInfo.getModifyTime(), configDetailInfo.getModifyTime());
assertEquals(configAllInfo.getConfigTags(), configDetailInfo.getConfigTags());
}
@Test
void testTransferToConfigBasicInfo() {
ConfigInfo configInfo = new ConfigInfo();
configInfo.setId(1L);
configInfo.setTenant("testNs");
configInfo.setGroup(Constants.DEFAULT_GROUP);
configInfo.setDataId("testDs");
configInfo.setMd5("testMd5");
configInfo.setEncryptedDataKey("testEncryptedDataKey");
configInfo.setContent("testContent");
configInfo.setType("text");
configInfo.setAppName("testAppName");
configInfo.setDesc("testDesc");
configInfo.setConfigTags("tag1,tag2");
ConfigBasicInfo configBasicInfo = ResponseUtil.transferToConfigBasicInfo(configInfo);
assertEquals(configInfo.getId(), configBasicInfo.getId());
assertEquals(configInfo.getTenant(), configBasicInfo.getNamespaceId());
assertEquals(configInfo.getGroup(), configBasicInfo.getGroupName());
assertEquals(configInfo.getDataId(), configBasicInfo.getDataId());
assertEquals(configInfo.getMd5(), configBasicInfo.getMd5());
assertEquals(configInfo.getType(), configBasicInfo.getType());
assertEquals(configInfo.getAppName(), configBasicInfo.getAppName());
assertEquals(configInfo.getDesc(), configBasicInfo.getDesc());
assertEquals(configInfo.getConfigTags(), configBasicInfo.getConfigTags());
assertEquals(0L, configBasicInfo.getCreateTime());
assertEquals(0L, configBasicInfo.getModifyTime());
}
@Test
void testTransferToConfigBasicInfoFromWrapper() {
ConfigInfoWrapper configInfo = new ConfigInfoWrapper();
configInfo.setId(1L);
configInfo.setTenant("testNs");
configInfo.setGroup(Constants.DEFAULT_GROUP);
configInfo.setDataId("testDs");
configInfo.setMd5("testMd5");
configInfo.setEncryptedDataKey("testEncryptedDataKey");
configInfo.setContent("testContent");
configInfo.setType("text");
configInfo.setAppName("testAppName");
configInfo.setDesc("testDesc");
configInfo.setConfigTags("tag1,tag2");
configInfo.setLastModified(System.currentTimeMillis());
ConfigBasicInfo configBasicInfo = ResponseUtil.transferToConfigBasicInfo(configInfo);
assertEquals(configInfo.getId(), configBasicInfo.getId());
assertEquals(configInfo.getTenant(), configBasicInfo.getNamespaceId());
assertEquals(configInfo.getGroup(), configBasicInfo.getGroupName());
assertEquals(configInfo.getDataId(), configBasicInfo.getDataId());
assertEquals(configInfo.getMd5(), configBasicInfo.getMd5());
assertEquals(configInfo.getType(), configBasicInfo.getType());
assertEquals(configInfo.getAppName(), configBasicInfo.getAppName());
assertEquals(configInfo.getDesc(), configBasicInfo.getDesc());
assertEquals(configInfo.getConfigTags(), configBasicInfo.getConfigTags());
assertEquals(0L, configBasicInfo.getCreateTime());
assertEquals(configInfo.getLastModified(), configBasicInfo.getModifyTime());
}
@Test
void testTransferToConfigGrayInfo() {
ConfigInfoGrayWrapper configInfoGray = new ConfigInfoGrayWrapper();
configInfoGray.setId(1L);
configInfoGray.setTenant("testNs");
configInfoGray.setGroup(Constants.DEFAULT_GROUP);
configInfoGray.setDataId("testDs");
configInfoGray.setMd5("testMd5");
configInfoGray.setEncryptedDataKey("testEncryptedDataKey");
configInfoGray.setContent("testContent");
configInfoGray.setType("text");
configInfoGray.setAppName("testAppName");
configInfoGray.setGrayName("testGrayName");
configInfoGray.setGrayRule("testGrayRule");
configInfoGray.setSrcUser("testSrcUser");
configInfoGray.setLastModified(System.currentTimeMillis());
ConfigGrayInfo configGrayInfo = ResponseUtil.transferToConfigGrayInfo(configInfoGray);
assertEquals(configInfoGray.getId(), configGrayInfo.getId());
assertEquals(configInfoGray.getTenant(), configGrayInfo.getNamespaceId());
assertEquals(configInfoGray.getGroup(), configGrayInfo.getGroupName());
assertEquals(configInfoGray.getDataId(), configGrayInfo.getDataId());
assertEquals(configInfoGray.getMd5(), configGrayInfo.getMd5());
assertEquals(configInfoGray.getType(), configGrayInfo.getType());
assertEquals(configInfoGray.getEncryptedDataKey(), configGrayInfo.getEncryptedDataKey());
assertEquals(configInfoGray.getAppName(), configGrayInfo.getAppName());
assertEquals(0, configGrayInfo.getCreateTime());
assertEquals(configInfoGray.getLastModified(), configGrayInfo.getModifyTime());
assertEquals(configInfoGray.getSrcUser(), configGrayInfo.getCreateUser());
assertEquals(configInfoGray.getGrayName(), configGrayInfo.getGrayName());
assertEquals(configInfoGray.getGrayRule(), configGrayInfo.getGrayRule());
}
@Test
void testTransferToConfigHistoryBasicInfo() {
ConfigHistoryInfo configHistoryInfo = mockConfigHistoryInfo();
ConfigHistoryBasicInfo configHistoryBasicInfo = ResponseUtil.transferToConfigHistoryBasicInfo(
configHistoryInfo);
assertConfigHistoryBasicInfo(configHistoryInfo, configHistoryBasicInfo);
}
@Test
void testTransferToConfigHistoryDetialInfo() {
ConfigHistoryInfo configHistoryInfo = mockConfigHistoryInfo();
ConfigHistoryDetailInfo configHistoryBasicInfo = ResponseUtil.transferToConfigHistoryDetailInfo(
configHistoryInfo);
assertConfigHistoryBasicInfo(configHistoryInfo, configHistoryBasicInfo);
assertEquals(configHistoryInfo.getContent(), configHistoryBasicInfo.getContent());
assertEquals(configHistoryInfo.getEncryptedDataKey(), configHistoryBasicInfo.getEncryptedDataKey());
assertEquals(configHistoryInfo.getGrayName(), configHistoryBasicInfo.getGrayName());
assertEquals(configHistoryInfo.getExtInfo(), configHistoryBasicInfo.getExtInfo());
}
private ConfigHistoryInfo mockConfigHistoryInfo() {
ConfigHistoryInfo configHistoryInfo = new ConfigHistoryInfo();
configHistoryInfo.setId(1L);
configHistoryInfo.setTenant("testNs");
configHistoryInfo.setGroup(Constants.DEFAULT_GROUP);
configHistoryInfo.setDataId("testDs");
configHistoryInfo.setAppName("testAppName");
configHistoryInfo.setMd5("testMd5");
configHistoryInfo.setContent("testContent");
configHistoryInfo.setSrcIp("1.1.1.1");
configHistoryInfo.setSrcUser("testSrcUser");
configHistoryInfo.setOpType("I");
configHistoryInfo.setPublishType("formal");
configHistoryInfo.setGrayName("testGrayName");
configHistoryInfo.setExtInfo("{\"type\":\"text\"}");
configHistoryInfo.setCreatedTime(new Timestamp(System.currentTimeMillis()));
configHistoryInfo.setLastModifiedTime(new Timestamp(System.currentTimeMillis()));
configHistoryInfo.setEncryptedDataKey("testEncryptedDataKey");
return configHistoryInfo;
}
private void assertConfigHistoryBasicInfo(ConfigHistoryInfo configHistoryInfo,
ConfigHistoryBasicInfo configHistoryBasicInfo) {
assertEquals(configHistoryInfo.getId(), configHistoryBasicInfo.getId());
assertEquals(configHistoryInfo.getTenant(), configHistoryBasicInfo.getNamespaceId());
assertEquals(configHistoryInfo.getGroup(), configHistoryBasicInfo.getGroupName());
assertEquals(configHistoryInfo.getDataId(), configHistoryBasicInfo.getDataId());
assertEquals(configHistoryInfo.getAppName(), configHistoryBasicInfo.getAppName());
assertEquals(configHistoryInfo.getMd5(), configHistoryBasicInfo.getMd5());
assertEquals(configHistoryInfo.getSrcIp(), configHistoryBasicInfo.getSrcIp());
assertEquals(configHistoryInfo.getSrcUser(), configHistoryBasicInfo.getSrcUser());
assertEquals(configHistoryInfo.getOpType(), configHistoryBasicInfo.getOpType());
assertEquals(configHistoryInfo.getPublishType(), configHistoryBasicInfo.getPublishType());
assertEquals(configHistoryInfo.getCreatedTime().getTime(), configHistoryBasicInfo.getCreateTime());
assertEquals(configHistoryInfo.getLastModifiedTime().getTime(), configHistoryBasicInfo.getModifyTime());
}
}
|
ResponseUtilTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/cache/Continent.java
|
{
"start": 445,
"end": 870
}
|
class ____ {
private Long id;
private String code;
private String name;
@Id
@GeneratedValue
public Long getId() {
return id;
}
public void setId(final Long id) {
this.id = id;
}
public String getCode() {
return code;
}
public void setCode(final String code) {
this.code = code;
}
public String getName() {
return name;
}
public void setName(final String name) {
this.name = name;
}
}
|
Continent
|
java
|
apache__thrift
|
lib/java/src/test/java/org/apache/thrift/transport/TestTSaslTransports.java
|
{
"start": 12072,
"end": 13281
}
|
class ____ implements SaslClient {
private final String username;
private boolean hasProvidedInitialResponse;
public AnonymousClient(String username) {
this.username = username;
}
@Override
public String getMechanismName() {
return "ANONYMOUS";
}
@Override
public boolean hasInitialResponse() {
return true;
}
@Override
public byte[] evaluateChallenge(byte[] challenge) throws SaslException {
if (hasProvidedInitialResponse) {
throw new SaslException("Already complete!");
}
hasProvidedInitialResponse = true;
return username.getBytes(StandardCharsets.UTF_8);
}
@Override
public boolean isComplete() {
return hasProvidedInitialResponse;
}
@Override
public byte[] unwrap(byte[] incoming, int offset, int len) {
throw new UnsupportedOperationException();
}
@Override
public byte[] wrap(byte[] outgoing, int offset, int len) {
throw new UnsupportedOperationException();
}
@Override
public Object getNegotiatedProperty(String propName) {
return null;
}
@Override
public void dispose() {}
}
private static
|
AnonymousClient
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/EncryptionS3ClientFactory.java
|
{
"start": 1912,
"end": 1990
}
|
class ____ create encrypted s3 client and encrypted async s3 client.
*/
public
|
to
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/indices/cluster/ResolveClusterTimeoutIT.java
|
{
"start": 1343,
"end": 4591
}
|
class ____ extends AbstractMultiClustersTestCase {
private static final String REMOTE_CLUSTER_1 = "cluster-a";
@Override
protected List<String> remoteClusterAlias() {
return List.of(REMOTE_CLUSTER_1);
}
public void testTimeoutParameter() {
long maxTimeoutInMillis = 500;
// First part: we query _resolve/cluster without stalling a remote.
ResolveClusterActionRequest resolveClusterActionRequest;
if (randomBoolean()) {
resolveClusterActionRequest = new ResolveClusterActionRequest(new String[0], IndicesOptions.DEFAULT, true, true);
} else {
resolveClusterActionRequest = new ResolveClusterActionRequest(new String[] { "*:*" });
}
// We set a timeout but since we don't stall any cluster, we should always get back response just fine before the timeout.
resolveClusterActionRequest.setTimeout(TimeValue.timeValueSeconds(10));
ResolveClusterActionResponse clusterActionResponse = safeGet(
client().execute(TransportResolveClusterAction.TYPE, resolveClusterActionRequest)
);
Map<String, ResolveClusterInfo> clusterInfo = clusterActionResponse.getResolveClusterInfo();
// Remote is connected and error message is null.
assertThat(clusterInfo.get(REMOTE_CLUSTER_1).isConnected(), equalTo(true));
assertThat(clusterInfo.get(REMOTE_CLUSTER_1).getError(), is(nullValue()));
// Second part: now we stall the remote and utilise the timeout feature.
CountDownLatch latch = new CountDownLatch(1);
// Add an override so that the remote cluster receives the TransportResolveClusterAction request but stalls.
for (var nodes : cluster(REMOTE_CLUSTER_1).getNodeNames()) {
((MockTransportService) cluster(REMOTE_CLUSTER_1).getInstance(TransportService.class, nodes)).addRequestHandlingBehavior(
TransportResolveClusterAction.REMOTE_TYPE.name(),
(requestHandler, transportRequest, transportChannel, transportTask) -> {
// Wait until the TransportResolveRequestAction times out following which the latch is released.
latch.await();
requestHandler.messageReceived(transportRequest, transportChannel, transportTask);
}
);
}
long randomlyChosenTimeout = randomLongBetween(100, maxTimeoutInMillis);
// We now randomly choose a timeout which is guaranteed to hit since the remote is stalled.
resolveClusterActionRequest.setTimeout(TimeValue.timeValueMillis(randomlyChosenTimeout));
clusterActionResponse = safeGet(client().execute(TransportResolveClusterAction.TYPE, resolveClusterActionRequest));
latch.countDown();
clusterInfo = clusterActionResponse.getResolveClusterInfo();
// Ensure that the request timed out and that the remote is marked as not connected.
assertThat(clusterInfo.get(REMOTE_CLUSTER_1).isConnected(), equalTo(false));
assertThat(
clusterInfo.get(REMOTE_CLUSTER_1).getError(),
equalTo("Request timed out before receiving a response from the remote cluster")
);
}
}
|
ResolveClusterTimeoutIT
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/serialization/EntityProxySerializationTest.java
|
{
"start": 7096,
"end": 7474
}
|
class ____ {
private Long id;
private SimpleEntity parent;
@Id
public Long getId() {
return id;
}
public void setId(final Long id) {
this.id = id;
}
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn
public SimpleEntity getParent() {
return parent;
}
public void setParent(final SimpleEntity parent) {
this.parent = parent;
}
}
}
|
ChildEntity
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/StringSplitterTest.java
|
{
"start": 10055,
"end": 10464
}
|
class ____ {
void f() {
String[] xs = "".split("c");
xs[0] = null;
System.err.println(xs[0]);
}
}
""")
.addOutputLines(
"Test.java",
"""
import com.google.common.base.Splitter;
import java.util.ArrayList;
import java.util.List;
|
Test
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/EnvironmentInfo.java
|
{
"start": 1214,
"end": 2486
}
|
class ____ implements ResponseBody {
private static final String FIELD_NAME_JVM_INFO = "jvm";
private static final String FIELD_NAME_CLASSPATH = "classpath";
@JsonProperty(FIELD_NAME_JVM_INFO)
private final JVMInfo jvmInfo;
@JsonProperty(FIELD_NAME_CLASSPATH)
private final List<String> classpath;
@JsonCreator
public EnvironmentInfo(
@JsonProperty(FIELD_NAME_JVM_INFO) JVMInfo jvmInfo,
@JsonProperty(FIELD_NAME_CLASSPATH) List<String> classpath) {
this.jvmInfo = jvmInfo;
this.classpath = classpath;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EnvironmentInfo that = (EnvironmentInfo) o;
return jvmInfo.equals(that.jvmInfo) && classpath.equals(that.classpath);
}
@Override
public int hashCode() {
return Objects.hash(jvmInfo, classpath);
}
public static EnvironmentInfo create() {
return new EnvironmentInfo(
JVMInfo.create(), Arrays.asList(System.getProperty("java.class.path").split(":")));
}
/** JVM information. */
private static
|
EnvironmentInfo
|
java
|
mapstruct__mapstruct
|
processor/src/main/java/org/mapstruct/ap/internal/util/NativeTypes.java
|
{
"start": 2411,
"end": 7698
}
|
class ____ {
int radix;
String val;
boolean isIntegralType;
boolean isLong;
boolean isFloat;
NumberRepresentation(String in, boolean isIntegralType, boolean isLong, boolean isFloat) {
this.isLong = isLong;
this.isFloat = isFloat;
this.isIntegralType = isIntegralType;
String valWithoutSign;
boolean isNegative = in.startsWith( "-" );
boolean hasSign = PTRN_SIGN.matcher( in ).find();
if ( hasSign ) {
valWithoutSign = in.substring( 1 );
}
else {
valWithoutSign = in;
}
if ( PTRN_HEX.matcher( valWithoutSign ).matches() ) {
// hex
radix = 16;
val = (isNegative ? "-" : "") + valWithoutSign.substring( 2 );
}
else if ( PTRN_BIN.matcher( valWithoutSign ).matches() ) {
// binary
radix = 2;
val = (isNegative ? "-" : "") + valWithoutSign.substring( 2 );
}
else if ( PTRN_OCT.matcher( valWithoutSign ).matches() ) {
// octal
radix = 8;
val = (isNegative ? "-" : "") + valWithoutSign.substring( 1 );
}
else {
// decimal
radix = 10;
val = (isNegative ? "-" : "") + valWithoutSign;
}
}
abstract void parse(String val, int radix);
void validate() {
strip();
parse( val, radix );
}
void strip() {
if ( isIntegralType ) {
removeAndValidateIntegerLiteralSuffix();
removeAndValidateIntegerLiteralUnderscore();
}
else {
removeAndValidateFloatingPointLiteralSuffix();
removeAndValidateFloatingPointLiteralUnderscore();
}
}
/**
* remove java7+ underscores from the input
*/
void removeAndValidateIntegerLiteralUnderscore() {
if ( PTRN_FAULTY_UNDERSCORE_INT.matcher( val ).find() ) {
throw new NumberFormatException( "improperly placed underscores" );
}
else {
val = val.replace( "_", "" );
}
}
/**
* remove java7+ underscores from the input
*/
void removeAndValidateFloatingPointLiteralUnderscore() {
boolean isHex = radix == 16;
if ( PTRN_FAULTY_UNDERSCORE_FLOAT.matcher( val ).find()
|| !isHex && PTRN_FAULTY_DEC_UNDERSCORE_FLOAT.matcher( val ).find()
|| isHex && PTRN_FAULTY_HEX_UNDERSCORE_FLOAT.matcher( val ).find() ) {
throw new NumberFormatException( "improperly placed underscores" );
}
else {
val = val.replace( "_", "" );
}
}
/**
*
*/
void removeAndValidateIntegerLiteralSuffix() {
boolean endsWithLSuffix = PTRN_LONG.matcher( val ).find();
// error handling
if ( endsWithLSuffix && !isLong ) {
throw new NumberFormatException( "L/l not allowed for non-long types" );
}
if ( !endsWithLSuffix && isLong ) {
throw new NumberFormatException( "L/l mandatory for long types" );
}
// remove suffix
if ( endsWithLSuffix ) {
val = val.substring( 0, val.length() - 1 );
}
}
/**
* Double suffix forbidden for float.
*
*/
void removeAndValidateFloatingPointLiteralSuffix() {
boolean endsWithLSuffix = PTRN_LONG.matcher( val ).find();
boolean endsWithFSuffix = PTRN_FLOAT.matcher( val ).find();
boolean endsWithDSuffix = PTRN_DOUBLE.matcher( val ).find();
// error handling
if ( isFloat && endsWithDSuffix ) {
throw new NumberFormatException( "Assigning double to a float" );
}
// remove suffix
if ( endsWithLSuffix || endsWithFSuffix || endsWithDSuffix ) {
val = val.substring( 0, val.length() - 1 );
}
}
boolean floatHasBecomeZero(float parsed) {
if ( parsed == 0f ) {
return floatHasBecomeZero();
}
else {
return false;
}
}
boolean doubleHasBecomeZero(double parsed) {
if ( parsed == 0d ) {
return floatHasBecomeZero();
}
else {
return false;
}
}
private boolean floatHasBecomeZero() {
if ( radix == 10 ) {
// decimal, should be at least some number before exponent (eE) unequal to 0.
return PTRN_FLOAT_DEC_ZERO.matcher( val ).matches();
}
else {
// hex, should be at least some number before exponent (pP) unequal to 0.
return PTRN_FLOAT_HEX_ZERO.matcher( val ).matches();
}
}
}
private static
|
NumberRepresentation
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/sql/results/jdbc/internal/StandardJdbcValuesMapping.java
|
{
"start": 1703,
"end": 5127
}
|
class ____ implements JdbcValuesMapping {
private final List<SqlSelection> sqlSelections;
private final List<DomainResult<?>> domainResults;
private final boolean needsResolve;
private final int[] valueIndexesToCacheIndexes;
// Is only meaningful if valueIndexesToCacheIndexes is not null
// Contains the size of the row to cache, or if the value is negative,
// represents the inverted index of the single value to cache
private final int rowToCacheSize;
private JdbcValuesMappingResolutionImpl resolution;
public StandardJdbcValuesMapping(
List<SqlSelection> sqlSelections,
List<DomainResult<?>> domainResults) {
this.sqlSelections = sqlSelections;
this.domainResults = domainResults;
final int rowSize = sqlSelections.size();
final BitSet valueIndexesToCache = new BitSet( rowSize );
for ( DomainResult<?> domainResult : domainResults ) {
domainResult.collectValueIndexesToCache( valueIndexesToCache );
}
final int[] valueIndexesToCacheIndexes = new int[rowSize];
int cacheIndex = 0;
boolean needsResolve = false;
for ( int i = 0; i < valueIndexesToCacheIndexes.length; i++ ) {
final SqlSelection sqlSelection = sqlSelections.get( i );
needsResolve = needsResolve
|| sqlSelection instanceof SqlSelectionImpl selection && selection.needsResolve();
if ( valueIndexesToCache.get( i ) ) {
valueIndexesToCacheIndexes[i] = cacheIndex++;
}
else {
valueIndexesToCacheIndexes[i] = -1;
}
}
this.needsResolve = needsResolve;
this.valueIndexesToCacheIndexes = cacheIndex == 0 ? EMPTY_INT_ARRAY : valueIndexesToCacheIndexes;
this.rowToCacheSize = cacheIndex;
}
@Override
public List<SqlSelection> getSqlSelections() {
return sqlSelections;
}
@Override
public List<DomainResult<?>> getDomainResults() {
return domainResults;
}
@Override
public int getRowSize() {
return sqlSelections.size();
}
@Override
public int[] getValueIndexesToCacheIndexes() {
return valueIndexesToCacheIndexes;
}
@Override
public int getRowToCacheSize() {
return rowToCacheSize;
}
public boolean needsResolve() {
return needsResolve;
}
@Override
public JdbcValuesMappingResolution resolveAssemblers(SessionFactoryImplementor sessionFactory) {
if ( resolution != null ) {
return resolution;
}
else {
final AssemblerCreationStateImpl creationState =
new AssemblerCreationStateImpl( this,
sessionFactory.getSqlTranslationEngine() );
final var domainResultAssemblers = resolveAssemblers( creationState );
creationState.initializerMap.logInitializers();
resolution = new JdbcValuesMappingResolutionImpl(
domainResultAssemblers,
creationState.hasCollectionInitializers,
creationState.initializerListBuilder.build()
);
return resolution;
}
}
private DomainResultAssembler<?>[] resolveAssemblers(AssemblerCreationState creationState) {
final int size = domainResults.size();
final List<DomainResultAssembler<?>> assemblers = arrayList( size );
for ( int i = 0; i < size; i++ ) {
final DomainResultAssembler<?> resultAssembler =
domainResults.get( i )
.createResultAssembler( null, creationState );
assemblers.add( resultAssembler );
}
return assemblers.toArray( new DomainResultAssembler[0] );
}
@Override
public LockMode determineDefaultLockMode(String alias, LockMode defaultLockMode) {
return defaultLockMode;
}
private static
|
StandardJdbcValuesMapping
|
java
|
quarkusio__quarkus
|
extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/root/ApplicationTest.java
|
{
"start": 6368,
"end": 6706
}
|
class ____ implements ContainerResponseFilter {
@Override
public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext)
throws IOException {
responseContext.getHeaders().add("X-RF-4", "Value");
}
}
@Provider
public static
|
ResponseFilter4
|
java
|
apache__camel
|
core/camel-main/src/main/java/org/apache/camel/main/ProfileConfigurer.java
|
{
"start": 1166,
"end": 3880
}
|
class ____ {
protected static final Logger LOG = LoggerFactory.getLogger(ProfileConfigurer.class);
/**
* Configures camel-main to run in given profile
*
* @param camelContext the camel context
* @param profile the profile
* @param config the main configuration
*/
public static void configureMain(CamelContext camelContext, String profile, MainConfigurationProperties config) {
if (profile == null || profile.isBlank()) {
// no profile is active
return;
}
if ("dev".equals(profile)) {
// make tracing at least standby so we can use it in dev-mode
boolean enabled = config.tracerConfig().isEnabled();
if (!enabled) {
config.tracerConfig().withStandby(true);
}
}
configureCommon(camelContext, profile, config);
}
/**
* Configures camel in general (standalone, quarkus, spring-boot etc) to run in given profile
*
* @param camelContext the camel context
* @param profile the profile
* @param config the core configuration
*/
public static void configureCommon(CamelContext camelContext, String profile, DefaultConfigurationProperties<?> config) {
camelContext.getCamelContextExtension().setProfile(profile);
if (profile == null || profile.isBlank()) {
// no profile is active
return;
}
if ("dev".equals(profile)) {
// always enable developer console as it is needed by camel-cli-connector
config.setDevConsoleEnabled(true);
// and enable a bunch of other stuff that gives more details for developers
config.setCamelEventsTimestampEnabled(true);
config.setLoadHealthChecks(true);
config.setSourceLocationEnabled(true);
config.setModeline(true);
config.setLoadStatisticsEnabled(true);
config.setMessageHistory(true);
config.setInflightRepositoryBrowseEnabled(true);
config.setEndpointRuntimeStatisticsEnabled(true);
config.setJmxManagementStatisticsLevel(ManagementStatisticsLevel.Extended);
config.setJmxUpdateRouteEnabled(true);
config.setShutdownLogInflightExchangesOnTimeout(false);
config.setShutdownTimeout(10);
config.setStartupRecorder("backlog");
}
if ("prod".equals(profile)) {
profile = "production"; // use nicer name
}
// no special configuration for other kind of profiles
LOG.info("The application is starting with profile: {}", profile);
}
}
|
ProfileConfigurer
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/example/HelloWorld.java
|
{
"start": 1658,
"end": 2028
}
|
class ____ extends HtmlPage {
@Override protected void render(Page.HTML<__> html) {
html. // produces valid html 4.01 strict
title($("title")).
p("#hello-for-css").
__($("title")).__().__();
}
}
public static void main(String[] args) {
WebApps.$for(new HelloWorld()).at(8888).inDevMode().start().joinThread();
}
}
|
HelloView
|
java
|
google__truth
|
core/src/main/java/com/google/common/truth/ActualValueInference.java
|
{
"start": 41516,
"end": 41636
}
|
class ____ represent a frame. */
@AutoValue
@CopyAnnotations
@GwtIncompatible
@J2ktIncompatible
abstract static
|
to
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/convert/ApplicationConversionService.java
|
{
"start": 19375,
"end": 20308
}
|
class ____ extends BeanAdapter<Parser<?>> {
ParserBeanAdapter(Parser<?> bean, ResolvableType beanType) {
super(bean, beanType);
}
@Override
protected ResolvableTypePair getResolvableTypePair(ResolvableType[] generics) {
return new ResolvableTypePair(STRING, generics[0]);
}
@Override
public @Nullable Object convert(@Nullable Object source, TypeDescriptor sourceType, TypeDescriptor targetType) {
String text = (String) source;
return (!StringUtils.hasText(text)) ? null : parse(text);
}
private Object parse(String text) {
try {
return bean().parse(text, LocaleContextHolder.getLocale());
}
catch (IllegalArgumentException ex) {
throw ex;
}
catch (Throwable ex) {
throw new IllegalArgumentException("Parse attempt failed for value [" + text + "]", ex);
}
}
}
/**
* Adapts a {@link Converter} bean to a {@link GenericConverter}.
*/
static final
|
ParserBeanAdapter
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/IgnoreProvisionKeyWildcardsTest.java
|
{
"start": 10929,
"end": 11250
}
|
interface ____ {",
" @Provides @IntoSet static Foo<? extends Bar> setExtends() { return null; }",
" @Multibinds Set<Foo<Bar>> mulitbindSet();",
"}"),
/* kotlinComponentClass = */
NEW_LINES.join(
"@Component(modules = [MyModule::class])",
"
|
MyModule
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/type/SqlxmlTypeHandlerTest.java
|
{
"start": 5611,
"end": 5893
}
|
interface ____ {
@Select("select id, content from mbtest.test_sqlxml where id = #{id}")
XmlBean select(Integer id);
@Insert("insert into mbtest.test_sqlxml (id, content) values (#{id}, #{content,jdbcType=SQLXML})")
void insert(XmlBean bean);
}
public static
|
Mapper
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.