language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__dubbo | dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/router/file/FileStateRouterFactory.java | {
"start": 1507,
"end": 3252
} | class ____ implements StateRouterFactory {
public static final String NAME = "file";
private StateRouterFactory routerFactory;
public void setRouterFactory(StateRouterFactory routerFactory) {
this.routerFactory = routerFactory;
}
@Override
public <T> StateRouter<T> getRouter(Class<T> interfaceClass, URL url) {
try {
// Transform File URL into Script Route URL, and Load
// file:///d:/path/to/route.js?router=script ==> script:///d:/path/to/route.js?type=js&rule=<file-content>
String protocol = url.getParameter(
ROUTER_KEY,
ScriptStateRouterFactory.NAME); // Replace original protocol (maybe 'file') with 'script'
String type = null; // Use file suffix to config script type, e.g., js, groovy ...
String path = url.getPath();
if (path != null) {
int i = path.lastIndexOf('.');
if (i > 0) {
type = path.substring(i + 1);
}
}
String rule = IOUtils.read(new FileReader(url.getAbsolutePath()));
// FIXME: this code looks useless
boolean runtime = url.getParameter(RUNTIME_KEY, false);
URL script = URLBuilder.from(url)
.setProtocol(protocol)
.addParameter(TYPE_KEY, type)
.addParameter(RUNTIME_KEY, runtime)
.addParameterAndEncoded(RULE_KEY, rule)
.build();
return routerFactory.getRouter(interfaceClass, script);
} catch (IOException e) {
throw new IllegalStateException(e.getMessage(), e);
}
}
}
| FileStateRouterFactory |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/contexts/singleton/SingletonDestructionTest.java | {
"start": 2382,
"end": 2782
} | class ____ {
static final AtomicInteger createdCounter = new AtomicInteger(0);
static final AtomicInteger destroyedCounter = new AtomicInteger(0);
@PostConstruct
void postConstruct() {
createdCounter.incrementAndGet();
}
@PreDestroy
void preDestroy() {
destroyedCounter.incrementAndGet();
}
}
}
| MySingletonBean |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/support/BoundedAsyncPool.java | {
"start": 1172,
"end": 16061
} | class ____<T> extends BasePool implements AsyncPool<T> {
private static final CompletableFuture<Void> COMPLETED = CompletableFuture.completedFuture(null);
private static final IllegalStateException POOL_SHUTDOWN = unknownStackTrace(
new IllegalStateException("AsyncPool is closed"), BoundedAsyncPool.class, "acquire()");
private static final NoSuchElementException POOL_EXHAUSTED = unknownStackTrace(new NoSuchElementException("Pool exhausted"),
BoundedAsyncPool.class, "acquire()");
private static final IllegalStateException NOT_PART_OF_POOL = unknownStackTrace(
new IllegalStateException("Returned object not currently part of this pool"), BoundedAsyncPool.class, "release()");
public static final CompletableFuture<Object> COMPLETED_FUTURE = CompletableFuture.completedFuture(null);
private final int maxTotal;
private final int maxIdle;
private final int minIdle;
private final AsyncObjectFactory<T> factory;
private final Queue<T> cache;
private final Queue<T> all;
private final AtomicInteger objectCount = new AtomicInteger();
private final AtomicInteger objectsInCreationCount = new AtomicInteger();
private final AtomicInteger idleCount = new AtomicInteger();
private final CompletableFuture<Void> closeFuture = new CompletableFuture<>();
private volatile State state = State.ACTIVE;
/**
* Create a new {@link BoundedAsyncPool} given {@link BasePoolConfig} and {@link AsyncObjectFactory}. The factory creates
* idle objects upon construction and requires {@link #closeAsync() termination} once it's no longer in use.
* <p>
* Please note that pre-initialization cannot be awaited when using this constructor. Please use
* {@link #create(AsyncObjectFactory, BoundedPoolConfig)} instead.
*
* @param factory must not be {@code null}.
* @param poolConfig must not be {@code null}.
*/
public BoundedAsyncPool(AsyncObjectFactory<T> factory, BoundedPoolConfig poolConfig) {
this(factory, poolConfig, true);
}
/**
* Create a new {@link BoundedAsyncPool} given {@link BasePoolConfig} and {@link AsyncObjectFactory}.
*
* @param factory must not be {@code null}.
* @param poolConfig must not be {@code null}.
* @param createIdle whether to pre-initialize the pool.
* @since 5.3.2
*/
BoundedAsyncPool(AsyncObjectFactory<T> factory, BoundedPoolConfig poolConfig, boolean createIdle) {
super(poolConfig);
LettuceAssert.notNull(factory, "AsyncObjectFactory must not be null");
this.maxTotal = poolConfig.getMaxTotal();
this.maxIdle = poolConfig.getMaxIdle();
this.minIdle = poolConfig.getMinIdle();
this.factory = factory;
this.cache = new ConcurrentLinkedQueue<>();
this.all = new ConcurrentLinkedQueue<>();
if (createIdle) {
createIdle();
}
}
/**
* Create and initialize {@link BoundedAsyncPool} asynchronously.
*
* @param factory must not be {@code null}.
* @param poolConfig must not be {@code null}.
* @param <T> object type that is managed by the pool.
* @return a {@link CompletionStage} that completes with the {@link BoundedAsyncPool} when created and pre-initialized
* successfully. Completes exceptionally if the pool initialization failed.
* @since 5.3.3
*/
public static <T> CompletionStage<BoundedAsyncPool<T>> create(AsyncObjectFactory<T> factory, BoundedPoolConfig poolConfig) {
BoundedAsyncPool<T> pool = new BoundedAsyncPool<>(factory, poolConfig, false);
CompletableFuture<BoundedAsyncPool<T>> future = new CompletableFuture<>();
pool.createIdle().whenComplete((v, throwable) -> {
if (throwable == null) {
future.complete(pool);
} else {
pool.closeAsync().whenComplete((v1, throwable1) -> {
future.completeExceptionally(new RedisConnectionException("Could not create pool", throwable));
});
}
});
return future;
}
@SuppressWarnings("rawtypes")
CompletableFuture<Void> createIdle() {
int potentialIdle = getMinIdle() - getIdle();
if (potentialIdle <= 0 || !isPoolActive()) {
return (CompletableFuture) COMPLETED_FUTURE;
}
long totalLimit = getAvailableCapacity();
int toCreate = Math.toIntExact(Math.min(Math.max(0, totalLimit), potentialIdle));
CompletableFuture[] futures = new CompletableFuture[toCreate];
for (int i = 0; i < toCreate; i++) {
if (getAvailableCapacity() <= 0) {
futures[i] = COMPLETED_FUTURE;
continue;
}
CompletableFuture<T> future = new CompletableFuture<>();
futures[i] = future;
makeObject0(future);
future.thenAccept(it -> {
if (isPoolActive()) {
idleCount.incrementAndGet();
cache.add(it);
} else {
factory.destroy(it);
}
});
}
return CompletableFuture.allOf(futures);
}
private long getAvailableCapacity() {
return getActualMaxTotal() - (getCreationInProgress() + getObjectCount());
}
@Override
public CompletableFuture<T> acquire() {
T object = cache.poll();
CompletableFuture<T> res = new CompletableFuture<>();
acquire0(object, res);
return res;
}
private void acquire0(T object, CompletableFuture<T> res) {
if (object != null) {
idleCount.decrementAndGet();
if (isTestOnAcquire()) {
factory.validate(object).whenComplete((state, throwable) -> {
if (!isPoolActive()) {
res.completeExceptionally(POOL_SHUTDOWN);
return;
}
if (state != null && state) {
completeAcquire(res, object);
return;
}
destroy0(object).whenComplete((aVoid, th) -> makeObject0(res));
});
return;
}
if (isPoolActive()) {
completeAcquire(res, object);
} else {
res.completeExceptionally(POOL_SHUTDOWN);
}
createIdle();
return;
}
long objects = (long) (getObjectCount() + getCreationInProgress());
if ((long) getActualMaxTotal() >= (objects + 1)) {
makeObject0(res);
return;
}
res.completeExceptionally(POOL_EXHAUSTED);
}
private void makeObject0(CompletableFuture<T> res) {
long total = getObjectCount();
long creations = objectsInCreationCount.incrementAndGet();
if (((long) getActualMaxTotal()) < total + creations) {
res.completeExceptionally(POOL_EXHAUSTED);
objectsInCreationCount.decrementAndGet();
return;
}
factory.create().whenComplete((o, t) -> {
if (t != null) {
objectsInCreationCount.decrementAndGet();
res.completeExceptionally(new IllegalStateException("Cannot allocate object", t));
return;
}
if (isTestOnCreate()) {
factory.validate(o).whenComplete((state, throwable) -> {
try {
if (isPoolActive() && state != null && state) {
objectCount.incrementAndGet();
all.add(o);
completeAcquire(res, o);
return;
}
if (!isPoolActive()) {
rejectPoolClosed(res, o);
return;
}
factory.destroy(o).whenComplete((v, th) -> res.completeExceptionally(
new IllegalStateException("Cannot allocate object: Validation failed", throwable)));
} catch (Exception e) {
factory.destroy(o).whenComplete((v, th) -> res.completeExceptionally(
new IllegalStateException("Cannot allocate object: Validation failed", throwable)));
} finally {
objectsInCreationCount.decrementAndGet();
}
});
return;
}
try {
if (isPoolActive()) {
objectCount.incrementAndGet();
all.add(o);
completeAcquire(res, o);
} else {
rejectPoolClosed(res, o);
}
} catch (Exception e) {
objectCount.decrementAndGet();
all.remove(o);
factory.destroy(o).whenComplete((v, th) -> res.completeExceptionally(e));
} finally {
objectsInCreationCount.decrementAndGet();
}
});
}
private void completeAcquire(CompletableFuture<T> res, T o) {
if (res.isCancelled()) {
return0(o);
} else {
res.complete(o);
}
}
private void rejectPoolClosed(CompletableFuture<T> res, T o) {
factory.destroy(o);
res.completeExceptionally(POOL_SHUTDOWN);
}
@Override
public CompletableFuture<Void> release(T object) {
if (!all.contains(object)) {
return Futures.failed(NOT_PART_OF_POOL);
}
if (idleCount.get() >= getActualMaxIdle()) {
return destroy0(object);
}
if (isTestOnRelease()) {
CompletableFuture<Boolean> valid = factory.validate(object);
CompletableFuture<Void> res = new CompletableFuture<>();
valid.whenComplete((state1, throwable) -> {
if (state1 != null && state1) {
return0(object).whenComplete((x, y) -> res.complete(null));
} else {
destroy0(object).whenComplete((x, y) -> res.complete(null));
}
});
return res;
}
return return0(object);
}
private CompletableFuture<Void> return0(T object) {
int idleCount = this.idleCount.incrementAndGet();
if (idleCount > getActualMaxIdle()) {
this.idleCount.decrementAndGet();
return destroy0(object);
}
cache.add(object);
return COMPLETED;
}
private CompletableFuture<Void> destroy0(T object) {
objectCount.decrementAndGet();
all.remove(object);
return factory.destroy(object);
}
@Override
public void clear() {
clearAsync().join();
}
@Override
public CompletableFuture<Void> clearAsync() {
List<CompletableFuture<Void>> futures = new ArrayList<>(all.size());
T cached;
while ((cached = cache.poll()) != null) {
idleCount.decrementAndGet();
objectCount.decrementAndGet();
all.remove(cached);
futures.add(factory.destroy(cached));
}
return Futures.allOf(futures);
}
@Override
public void close() {
closeAsync().join();
}
@Override
public CompletableFuture<Void> closeAsync() {
if (!isPoolActive()) {
return closeFuture;
}
state = State.TERMINATING;
CompletableFuture<Void> clear = clearAsync();
state = State.TERMINATED;
clear.whenComplete((aVoid, throwable) -> {
if (throwable != null) {
closeFuture.completeExceptionally(throwable);
} else {
closeFuture.complete(aVoid);
}
});
return closeFuture;
}
/**
* Returns the maximum number of objects that can be allocated by the pool (checked out to clients, or idle awaiting
* checkout) at a given time. When negative, there is no limit to the number of objects that can be managed by the pool at
* one time.
*
* @return the cap on the total number of object instances managed by the pool. Unlimited max objects are capped at
* {@link Integer#MAX_VALUE Integer#MAX_VALUE}.
* @see BoundedPoolConfig#getMaxTotal()
*/
public int getMaxTotal() {
return maxTotal;
}
private int getActualMaxTotal() {
return maxOrActual(maxTotal);
}
/**
* Returns the cap on the number of "idle" instances in the pool. If {@code maxIdle} is set too low on heavily loaded
* systems it is possible you will see objects being destroyed and almost immediately new objects being created. This is a
* result of the active threads momentarily returning objects faster than they are requesting them, causing the number of
* idle objects to rise above maxIdle. The best value for maxIdle for heavily loaded system will vary but the default is a
* good starting point.
*
* @return the maximum number of "idle" instances that can be held in the pool. Unlimited idle objects are capped at
* {@link Integer#MAX_VALUE Integer#MAX_VALUE}.
* @see BoundedPoolConfig#getMaxIdle()
*/
public int getMaxIdle() {
return maxIdle;
}
private int getActualMaxIdle() {
return maxOrActual(maxIdle);
}
/**
* Returns the target for the minimum number of idle objects to maintain in the pool. If this is the case, an attempt is
* made to ensure that the pool has the required minimum number of instances during idle object eviction runs.
* <p>
* If the configured value of minIdle is greater than the configured value for {@code maxIdle} then the value of
* {@code maxIdle} will be used instead.
*
* @return The minimum number of objects.
* @see BoundedPoolConfig#getMinIdle()
*/
public int getMinIdle() {
int maxIdleSave = getActualMaxIdle();
if (this.minIdle > maxIdleSave) {
return maxIdleSave;
} else {
return minIdle;
}
}
public int getIdle() {
return idleCount.get();
}
public int getObjectCount() {
return objectCount.get();
}
public int getCreationInProgress() {
return objectsInCreationCount.get();
}
private boolean isPoolActive() {
return this.state == State.ACTIVE;
}
private static int maxOrActual(int count) {
return count > -1 ? count : Integer.MAX_VALUE;
}
| BoundedAsyncPool |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/model/FixedHandlerChainCustomizer.java | {
"start": 241,
"end": 1272
} | class ____ implements HandlerChainCustomizer {
private ServerRestHandler handler;
private Phase phase;
public FixedHandlerChainCustomizer(ServerRestHandler handler, Phase phase) {
this.handler = handler;
this.phase = phase;
}
public FixedHandlerChainCustomizer() {
}
@Override
public List<ServerRestHandler> handlers(Phase phase, ResourceClass resourceClass,
ServerResourceMethod serverResourceMethod) {
if (this.phase == phase) {
return Collections.singletonList(handler);
}
return Collections.emptyList();
}
public ServerRestHandler getHandler() {
return handler;
}
public FixedHandlerChainCustomizer setHandler(ServerRestHandler handler) {
this.handler = handler;
return this;
}
public Phase getPhase() {
return phase;
}
public FixedHandlerChainCustomizer setPhase(Phase phase) {
this.phase = phase;
return this;
}
}
| FixedHandlerChainCustomizer |
java | quarkusio__quarkus | extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/health/InfinispanHealthCheck.java | {
"start": 2740,
"end": 3047
} | class ____ {
String state;
String servers;
int cachesCount;
public HealthInfo(String state, String servers, int cachesCount) {
this.state = state;
this.servers = servers;
this.cachesCount = cachesCount;
}
}
private | HealthInfo |
java | playframework__playframework | documentation/manual/working/javaGuide/main/async/code/javaguide/async/JavaWebSockets.java | {
"start": 1089,
"end": 1397
} | class ____ extends AbstractActor {
@Override
public Receive createReceive() {
return receiveBuilder()
// match() messages here
.build();
}
{
// #actor-stop
self().tell(PoisonPill.getInstance(), self());
// #actor-stop
}
}
public static | Actor2 |
java | apache__rocketmq | remoting/src/main/java/org/apache/rocketmq/remoting/rpc/RpcResponse.java | {
"start": 910,
"end": 1929
} | class ____ {
private int code;
private CommandCustomHeader header;
private Object body;
public RpcException exception;
public RpcResponse() {
}
public RpcResponse(int code, CommandCustomHeader header, Object body) {
this.code = code;
this.header = header;
this.body = body;
}
public RpcResponse(RpcException rpcException) {
this.code = rpcException.getErrorCode();
this.exception = rpcException;
}
public int getCode() {
return code;
}
public CommandCustomHeader getHeader() {
return header;
}
public void setHeader(CommandCustomHeader header) {
this.header = header;
}
public Object getBody() {
return body;
}
public void setBody(Object body) {
this.body = body;
}
public RpcException getException() {
return exception;
}
public void setException(RpcException exception) {
this.exception = exception;
}
}
| RpcResponse |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AStore.java | {
"start": 3359,
"end": 3615
} | interface ____ the Hadoop {@link Service} interface
* and follows its lifecycle: it MUST NOT be used until
* {@link Service#init(Configuration)} has been invoked.
*/
@InterfaceAudience.LimitedPrivate("Extensions")
@InterfaceStability.Unstable
public | extends |
java | quarkusio__quarkus | extensions/websockets-next/runtime/src/main/java/io/quarkus/websockets/next/BasicWebSocketConnector.java | {
"start": 1677,
"end": 7023
} | interface ____ {
/**
* Obtains a new basic connector. An alternative to {@code @Inject BasicWebSocketConnector}.
*
* @return a new basic connector
*/
static BasicWebSocketConnector create() {
return Arc.container().instance(BasicWebSocketConnector.class).get();
}
/**
* Set the base URI.
*
* @param uri
* @return self
*/
BasicWebSocketConnector baseUri(URI uri);
/**
* Set the base URI.
*
* @param baseUri
* @return self
*/
default BasicWebSocketConnector baseUri(String baseUri) {
return baseUri(URI.create(baseUri));
}
/**
* Set the name of the {@link TlsConfiguration}.
*
* @param tlsConfigurationName
* @return self
* @see io.quarkus.tls.TlsConfigurationRegistry#get(String)
*/
BasicWebSocketConnector tlsConfigurationName(String tlsConfigurationName);
/**
* Set the path that should be appended to the path of the URI set by {@link #baseUri(URI)}.
* <p>
* The path may contain path parameters as defined by {@link WebSocketClient#path()}. In this case, the
* {@link #pathParam(String, String)} method must be used to pass path param values.
*
* @param path
* @return self
*/
BasicWebSocketConnector path(String path);
/**
* Set the path param.
* <p>
* The value is encoded using {@link URLEncoder#encode(String, java.nio.charset.Charset)} before it's used to build the
* target URI.
*
* @param name
* @param value
* @return self
* @throws IllegalArgumentException If the path set by {@link #path(String)} does not contain a parameter with the given
* name
*/
BasicWebSocketConnector pathParam(String name, String value);
/**
* Add a header used during the initial handshake request.
*
* @param name
* @param value
* @return self
* @see HandshakeRequest
*/
BasicWebSocketConnector addHeader(String name, String value);
/**
* Add the subprotocol.
*
* @param name
* @param value
* @return self
*/
BasicWebSocketConnector addSubprotocol(String value);
/**
* Add a value to the connection user data.
*
* @param key
* @param value
* @param <VALUE>
* @return self
* @see UserData#put(TypedKey, Object)
* @see WebSocketClientConnection#userData()
*/
<VALUE> BasicWebSocketConnector userData(TypedKey<VALUE> key, VALUE value);
/**
* Set the execution model for callback handlers.
* <p>
* By default, {@link ExecutionModel#BLOCKING} is used.
*
* @return self
* @see #onTextMessage(BiConsumer)
* @see #onBinaryMessage(BiConsumer)
* @see #onPong(BiConsumer)
* @see #onOpen(Consumer)
* @see #onClose(BiConsumer)
* @see #onError(BiConsumer)
*/
BasicWebSocketConnector executionModel(ExecutionModel model);
/**
* Set a callback to be invoked when a connection to the server is open.
*
* @param consumer
* @return self
* @see #executionModel(ExecutionModel)
*/
BasicWebSocketConnector onOpen(Consumer<WebSocketClientConnection> consumer);
/**
* Set a callback to be invoked when a text message is received from the server.
*
* @param consumer
* @return self
* @see #executionModel(ExecutionModel)
*/
BasicWebSocketConnector onTextMessage(BiConsumer<WebSocketClientConnection, String> consumer);
/**
* Set a callback to be invoked when a binary message is received from the server.
*
* @param consumer
* @return self
* @see #executionModel(ExecutionModel)
*/
BasicWebSocketConnector onBinaryMessage(BiConsumer<WebSocketClientConnection, Buffer> consumer);
/**
* Set a callback to be invoked when a ping message is received from the server.
*
* @param consumer
* @return self
* @see #executionModel(ExecutionModel)
*/
BasicWebSocketConnector onPing(BiConsumer<WebSocketClientConnection, Buffer> consumer);
/**
* Set a callback to be invoked when a pong message is received from the server.
*
* @param consumer
* @return self
* @see #executionModel(ExecutionModel)
*/
BasicWebSocketConnector onPong(BiConsumer<WebSocketClientConnection, Buffer> consumer);
/**
* Set a callback to be invoked when a connection to the server is closed.
*
* @param consumer
* @return self
* @see #executionModel(ExecutionModel)
*/
BasicWebSocketConnector onClose(BiConsumer<WebSocketClientConnection, CloseReason> consumer);
/**
* Set a callback to be invoked when an error occurs.
*
* @param consumer
* @return self
* @see #executionModel(ExecutionModel)
*/
BasicWebSocketConnector onError(BiConsumer<WebSocketClientConnection, Throwable> consumer);
/**
*
* @return a new {@link Uni} with a {@link WebSocketClientConnection} item
*/
@CheckReturnValue
Uni<WebSocketClientConnection> connect();
/**
*
* @return the client connection
*/
default WebSocketClientConnection connectAndAwait() {
return connect().await().indefinitely();
}
| BasicWebSocketConnector |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/lazyonetoone/Person.java | {
"start": 178,
"end": 547
} | class ____ {
private String name;
private Employee employee;
Person() {}
public Person(String name) {
this.name = name;
}
public Employee getEmployee() {
return employee;
}
public void setEmployee(Employee employee) {
this.employee = employee;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
| Person |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java | {
"start": 1816,
"end": 8440
} | class ____ implements FetchSubPhase {
private static final List<FieldAndFormat> DEFAULT_METADATA_FIELDS = List.of(
new FieldAndFormat(IgnoredFieldMapper.NAME, null),
new FieldAndFormat(RoutingFieldMapper.NAME, null),
// will only be fetched when mapped (older archived indices)
new FieldAndFormat(LegacyTypeFieldMapper.NAME, null)
);
@Override
public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) {
final FetchFieldsContext fetchFieldsContext = fetchContext.fetchFieldsContext();
final StoredFieldsContext storedFieldsContext = fetchContext.storedFieldsContext();
boolean fetchStoredFields = storedFieldsContext != null && storedFieldsContext.fetchFields();
if (fetchFieldsContext == null && fetchStoredFields == false) {
return null;
}
// NOTE: FieldFetcher for non-metadata fields, as well as `_id` and `_source`.
// We need to retain `_id` and `_source` here to correctly populate the `StoredFieldSpecs` created by the
// `FieldFetcher` constructor.
final SearchExecutionContext searchExecutionContext = fetchContext.getSearchExecutionContext();
final FieldFetcher fieldFetcher = (fetchFieldsContext == null
|| fetchFieldsContext.fields() == null
|| fetchFieldsContext.fields().isEmpty())
? null
: FieldFetcher.create(
searchExecutionContext,
fetchFieldsContext.fields()
.stream()
.filter(
fieldAndFormat -> (searchExecutionContext.isMetadataField(fieldAndFormat.field) == false
|| searchExecutionContext.getFieldType(fieldAndFormat.field).isStored() == false
|| IdFieldMapper.NAME.equals(fieldAndFormat.field)
|| SourceFieldMapper.NAME.equals(fieldAndFormat.field))
)
.toList()
);
// NOTE: Collect stored metadata fields requested via `fields` (in FetchFieldsContext) like for instance the _ignored source field
final Set<FieldAndFormat> fetchContextMetadataFields = new HashSet<>();
if (fetchFieldsContext != null && fetchFieldsContext.fields() != null && fetchFieldsContext.fields().isEmpty() == false) {
for (final FieldAndFormat fieldAndFormat : fetchFieldsContext.fields()) {
// NOTE: _id and _source are always retrieved anyway, no need to do it explicitly. See FieldsVisitor.
if (SourceFieldMapper.NAME.equals(fieldAndFormat.field) || IdFieldMapper.NAME.equals(fieldAndFormat.field)) {
continue;
}
if (searchExecutionContext.isMetadataField(fieldAndFormat.field)
&& searchExecutionContext.getFieldType(fieldAndFormat.field).isStored()) {
fetchContextMetadataFields.add(fieldAndFormat);
}
}
}
final FieldFetcher metadataFieldFetcher;
if (storedFieldsContext != null
&& storedFieldsContext.fieldNames() != null
&& storedFieldsContext.fieldNames().isEmpty() == false) {
final Set<FieldAndFormat> metadataFields = new HashSet<>(DEFAULT_METADATA_FIELDS);
for (final String storedField : storedFieldsContext.fieldNames()) {
final Set<String> matchingFieldNames = searchExecutionContext.getMatchingFieldNames(storedField);
for (final String matchingFieldName : matchingFieldNames) {
if (SourceFieldMapper.NAME.equals(matchingFieldName) || IdFieldMapper.NAME.equals(matchingFieldName)) {
continue;
}
final MappedFieldType fieldType = searchExecutionContext.getFieldType(matchingFieldName);
// NOTE: Exclude _ignored_source when requested via wildcard '*'
if (matchingFieldName.equals(IgnoredSourceFieldMapper.NAME) && Regex.isSimpleMatchPattern(storedField)) {
continue;
}
// NOTE: checking if the field is stored is required for backward compatibility reasons and to make
// sure we also handle here stored fields requested via `stored_fields`, which was previously a
// responsibility of StoredFieldsPhase.
if (searchExecutionContext.isMetadataField(matchingFieldName) && fieldType.isStored()) {
metadataFields.add(new FieldAndFormat(matchingFieldName, null));
}
}
}
// NOTE: Include also metadata stored fields requested via `fields`
metadataFields.addAll(fetchContextMetadataFields);
metadataFieldFetcher = FieldFetcher.create(searchExecutionContext, metadataFields);
} else {
// NOTE: Include also metadata stored fields requested via `fields`
final Set<FieldAndFormat> allMetadataFields = new HashSet<>(DEFAULT_METADATA_FIELDS);
allMetadataFields.addAll(fetchContextMetadataFields);
metadataFieldFetcher = FieldFetcher.create(searchExecutionContext, allMetadataFields);
}
return new FetchSubPhaseProcessor() {
@Override
public void setNextReader(LeafReaderContext readerContext) {
if (fieldFetcher != null) {
fieldFetcher.setNextReader(readerContext);
}
metadataFieldFetcher.setNextReader(readerContext);
}
@Override
public StoredFieldsSpec storedFieldsSpec() {
if (fieldFetcher != null) {
return metadataFieldFetcher.storedFieldsSpec().merge(fieldFetcher.storedFieldsSpec());
}
return metadataFieldFetcher.storedFieldsSpec();
}
@Override
public void process(HitContext hitContext) throws IOException {
final Map<String, DocumentField> fields = fieldFetcher != null
? fieldFetcher.fetch(hitContext.source(), hitContext.docId())
: Collections.emptyMap();
final Map<String, DocumentField> metadataFields = metadataFieldFetcher.fetch(hitContext.source(), hitContext.docId());
hitContext.hit().addDocumentFields(fields, metadataFields);
}
};
}
}
| FetchFieldsPhase |
java | apache__camel | components/camel-jsonpath/src/test/java/org/apache/camel/jsonpath/easypredicate/EasyJsonPathWithRootSimpleCBRTest.java | {
"start": 1071,
"end": 3161
} | class ____ extends CamelTestSupport {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.choice()
.when().jsonpath("price < ${header.cheap}")
.to("mock:cheap")
.when().jsonpath("price < ${header.average}")
.to("mock:average")
.otherwise()
.to("mock:expensive");
}
};
}
@Test
public void testCheap() throws Exception {
getMockEndpoint("mock:cheap").expectedMessageCount(1);
getMockEndpoint("mock:average").expectedMessageCount(0);
getMockEndpoint("mock:expensive").expectedMessageCount(0);
fluentTemplate.withHeader("cheap", 10).withHeader("average", 30).withBody(new File("src/test/resources/cheap.json"))
.to("direct:start").send();
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testAverage() throws Exception {
getMockEndpoint("mock:cheap").expectedMessageCount(0);
getMockEndpoint("mock:average").expectedMessageCount(1);
getMockEndpoint("mock:expensive").expectedMessageCount(0);
fluentTemplate.withHeader("cheap", 10).withHeader("average", 30).withBody(new File("src/test/resources/average.json"))
.to("direct:start").send();
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testExpensive() throws Exception {
getMockEndpoint("mock:cheap").expectedMessageCount(0);
getMockEndpoint("mock:average").expectedMessageCount(0);
getMockEndpoint("mock:expensive").expectedMessageCount(1);
fluentTemplate.withHeader("cheap", 10).withHeader("average", 30).withBody(new File("src/test/resources/expensive.json"))
.to("direct:start").send();
MockEndpoint.assertIsSatisfied(context);
}
}
| EasyJsonPathWithRootSimpleCBRTest |
java | google__dagger | javatests/artifacts/dagger/build-tests/src/test/java/buildtests/TransitiveSubcomponentQualifierTest.java | {
"start": 4673,
"end": 4858
} | class ____ {",
" @MyQualifier",
" public abstract int getQualifiedInt();",
"",
" public abstract static | MyBaseSubcomponent |
java | google__gson | gson/src/test/java/com/google/gson/internal/bind/JsonTreeWriterTest.java | {
"start": 1098,
"end": 9707
} | class ____ {
@Test
public void testArray() throws IOException {
JsonTreeWriter writer = new JsonTreeWriter();
writer.beginArray();
writer.value(1);
writer.value(2);
writer.value(3);
writer.endArray();
assertThat(writer.get().toString()).isEqualTo("[1,2,3]");
}
@Test
public void testNestedArray() throws IOException {
JsonTreeWriter writer = new JsonTreeWriter();
writer.beginArray();
writer.beginArray();
writer.endArray();
writer.beginArray();
writer.beginArray();
writer.endArray();
writer.endArray();
writer.endArray();
assertThat(writer.get().toString()).isEqualTo("[[],[[]]]");
}
@Test
public void testObject() throws IOException {
JsonTreeWriter writer = new JsonTreeWriter();
writer.beginObject();
writer.name("A").value(1);
writer.name("B").value(2);
writer.endObject();
assertThat(writer.get().toString()).isEqualTo("{\"A\":1,\"B\":2}");
}
@Test
public void testNestedObject() throws IOException {
JsonTreeWriter writer = new JsonTreeWriter();
writer.beginObject();
writer.name("A");
writer.beginObject();
writer.name("B");
writer.beginObject();
writer.endObject();
writer.endObject();
writer.name("C");
writer.beginObject();
writer.endObject();
writer.endObject();
assertThat(writer.get().toString()).isEqualTo("{\"A\":{\"B\":{}},\"C\":{}}");
}
@Test
public void testWriteAfterClose() throws Exception {
JsonTreeWriter writer = new JsonTreeWriter();
writer.setStrictness(Strictness.LENIENT);
writer.beginArray();
writer.value("A");
writer.endArray();
writer.close();
assertThrows(IllegalStateException.class, () -> writer.beginArray());
}
@Test
public void testPrematureClose() throws Exception {
JsonTreeWriter writer = new JsonTreeWriter();
writer.setStrictness(Strictness.LENIENT);
writer.beginArray();
var e = assertThrows(IOException.class, () -> writer.close());
assertThat(e).hasMessageThat().isEqualTo("Incomplete document");
}
@Test
public void testNameAsTopLevelValue() throws IOException {
JsonTreeWriter writer = new JsonTreeWriter();
IllegalStateException e = assertThrows(IllegalStateException.class, () -> writer.name("hello"));
assertThat(e).hasMessageThat().isEqualTo("Did not expect a name");
writer.value(12);
writer.close();
e = assertThrows(IllegalStateException.class, () -> writer.name("hello"));
assertThat(e).hasMessageThat().isEqualTo("Please begin an object before writing a name.");
}
@Test
public void testNameInArray() throws IOException {
JsonTreeWriter writer = new JsonTreeWriter();
writer.beginArray();
IllegalStateException e = assertThrows(IllegalStateException.class, () -> writer.name("hello"));
assertThat(e).hasMessageThat().isEqualTo("Please begin an object before writing a name.");
writer.value(12);
e = assertThrows(IllegalStateException.class, () -> writer.name("hello"));
assertThat(e).hasMessageThat().isEqualTo("Please begin an object before writing a name.");
writer.endArray();
assertThat(writer.get().toString()).isEqualTo("[12]");
}
@Test
public void testTwoNames() throws IOException {
JsonTreeWriter writer = new JsonTreeWriter();
writer.beginObject();
writer.name("a");
IllegalStateException e = assertThrows(IllegalStateException.class, () -> writer.name("a"));
assertThat(e).hasMessageThat().isEqualTo("Did not expect a name");
}
@Test
public void testSerializeNullsFalse() throws IOException {
JsonTreeWriter writer = new JsonTreeWriter();
writer.setSerializeNulls(false);
writer.beginObject();
writer.name("A");
writer.nullValue();
writer.endObject();
assertThat(writer.get().toString()).isEqualTo("{}");
}
@Test
public void testSerializeNullsTrue() throws IOException {
JsonTreeWriter writer = new JsonTreeWriter();
writer.setSerializeNulls(true);
writer.beginObject();
writer.name("A");
writer.nullValue();
writer.endObject();
assertThat(writer.get().toString()).isEqualTo("{\"A\":null}");
}
@Test
public void testEmptyWriter() {
JsonTreeWriter writer = new JsonTreeWriter();
assertThat(writer.get()).isEqualTo(JsonNull.INSTANCE);
}
@Test
public void testBeginArray() throws Exception {
JsonTreeWriter writer = new JsonTreeWriter();
assertThat(writer.beginArray()).isEqualTo(writer);
}
@Test
public void testBeginObject() throws Exception {
JsonTreeWriter writer = new JsonTreeWriter();
assertThat(writer.beginObject()).isEqualTo(writer);
}
@Test
public void testValueString() throws Exception {
JsonTreeWriter writer = new JsonTreeWriter();
String n = "as";
assertThat(writer.value(n)).isEqualTo(writer);
}
@Test
public void testBoolValue() throws Exception {
JsonTreeWriter writer = new JsonTreeWriter();
boolean bool = true;
assertThat(writer.value(bool)).isEqualTo(writer);
}
@Test
public void testBoolMaisValue() throws Exception {
JsonTreeWriter writer = new JsonTreeWriter();
Boolean bool = true;
assertThat(writer.value(bool)).isEqualTo(writer);
}
@Test
public void testLenientNansAndInfinities() throws IOException {
JsonTreeWriter writer = new JsonTreeWriter();
writer.setStrictness(Strictness.LENIENT);
writer.beginArray();
writer.value(Float.NaN);
writer.value(Float.NEGATIVE_INFINITY);
writer.value(Float.POSITIVE_INFINITY);
writer.value(Double.NaN);
writer.value(Double.NEGATIVE_INFINITY);
writer.value(Double.POSITIVE_INFINITY);
writer.endArray();
assertThat(writer.get().toString())
.isEqualTo("[NaN,-Infinity,Infinity,NaN,-Infinity,Infinity]");
}
@Test
public void testStrictNansAndInfinities() throws IOException {
JsonTreeWriter writer = new JsonTreeWriter();
writer.setStrictness(Strictness.LEGACY_STRICT);
writer.beginArray();
assertThrows(IllegalArgumentException.class, () -> writer.value(Float.NaN));
assertThrows(IllegalArgumentException.class, () -> writer.value(Float.NEGATIVE_INFINITY));
assertThrows(IllegalArgumentException.class, () -> writer.value(Float.POSITIVE_INFINITY));
assertThrows(IllegalArgumentException.class, () -> writer.value(Double.NaN));
assertThrows(IllegalArgumentException.class, () -> writer.value(Double.NEGATIVE_INFINITY));
assertThrows(IllegalArgumentException.class, () -> writer.value(Double.POSITIVE_INFINITY));
}
@Test
public void testStrictBoxedNansAndInfinities() throws IOException {
JsonTreeWriter writer = new JsonTreeWriter();
writer.setStrictness(Strictness.LEGACY_STRICT);
writer.beginArray();
assertThrows(IllegalArgumentException.class, () -> writer.value(Float.valueOf(Float.NaN)));
assertThrows(
IllegalArgumentException.class, () -> writer.value(Float.valueOf(Float.NEGATIVE_INFINITY)));
assertThrows(
IllegalArgumentException.class, () -> writer.value(Float.valueOf(Float.POSITIVE_INFINITY)));
assertThrows(IllegalArgumentException.class, () -> writer.value(Double.valueOf(Double.NaN)));
assertThrows(
IllegalArgumentException.class,
() -> writer.value(Double.valueOf(Double.NEGATIVE_INFINITY)));
assertThrows(
IllegalArgumentException.class,
() -> writer.value(Double.valueOf(Double.POSITIVE_INFINITY)));
}
@Test
public void testJsonValue() throws IOException {
JsonTreeWriter writer = new JsonTreeWriter();
writer.beginArray();
assertThrows(UnsupportedOperationException.class, () -> writer.jsonValue("test"));
}
/**
* {@link JsonTreeWriter} effectively replaces the complete writing logic of {@link JsonWriter} to
* create a {@link JsonElement} tree instead of writing to a {@link Writer}. Therefore all
* relevant methods of {@code JsonWriter} must be overridden.
*/
@Test
public void testOverrides() {
List<String> ignoredMethods =
Arrays.asList(
"setLenient(boolean)",
"isLenient()",
"setStrictness(com.google.gson.Strictness)",
"getStrictness()",
"setIndent(java.lang.String)",
"setHtmlSafe(boolean)",
"isHtmlSafe()",
"setFormattingStyle(com.google.gson.FormattingStyle)",
"getFormattingStyle()",
"setSerializeNulls(boolean)",
"getSerializeNulls()");
MoreAsserts.assertOverridesMethods(JsonWriter.class, JsonTreeWriter.class, ignoredMethods);
}
}
| JsonTreeWriterTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java | {
"start": 1604,
"end": 7185
} | class ____ {
private static final String INCLUDE_CONTEXT_UUID = "include_context_uuid";
static InternalScrollSearchRequest internalScrollSearchRequest(ShardSearchContextId id, SearchScrollRequest request) {
return new InternalScrollSearchRequest(request, id);
}
static String buildScrollId(AtomicArray<? extends SearchPhaseResult> searchPhaseResults) {
final BytesReference bytesReference;
try (var encodedStreamOutput = new BytesStreamOutput()) {
try (var out = new OutputStreamStreamOutput(Base64.getUrlEncoder().wrap(encodedStreamOutput))) {
out.writeString(INCLUDE_CONTEXT_UUID);
out.writeString(
searchPhaseResults.length() == 1 ? ParsedScrollId.QUERY_AND_FETCH_TYPE : ParsedScrollId.QUERY_THEN_FETCH_TYPE
);
out.writeCollection(searchPhaseResults.asList(), (o, searchPhaseResult) -> {
o.writeString(searchPhaseResult.getContextId().getSessionId());
o.writeLong(searchPhaseResult.getContextId().getId());
SearchShardTarget searchShardTarget = searchPhaseResult.getSearchShardTarget();
if (searchShardTarget.getClusterAlias() != null) {
o.writeString(
RemoteClusterAware.buildRemoteIndexName(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId())
);
} else {
o.writeString(searchShardTarget.getNodeId());
}
});
}
bytesReference = encodedStreamOutput.bytes();
} catch (IOException e) {
assert false : e;
throw new UncheckedIOException(e);
}
final BytesRef bytesRef = bytesReference.toBytesRef();
return new String(bytesRef.bytes, bytesRef.offset, bytesRef.length, StandardCharsets.ISO_8859_1);
}
static ParsedScrollId parseScrollId(String scrollId) {
try (
var decodedInputStream = Base64.getUrlDecoder().wrap(new ByteArrayInputStream(scrollId.getBytes(StandardCharsets.ISO_8859_1)));
var in = new InputStreamStreamInput(decodedInputStream)
) {
final boolean includeContextUUID;
final String type;
final String firstChunk = in.readString();
if (INCLUDE_CONTEXT_UUID.equals(firstChunk)) {
includeContextUUID = true;
type = in.readString();
} else {
includeContextUUID = false;
type = firstChunk;
}
final SearchContextIdForNode[] context = in.readArray(
includeContextUUID
? TransportSearchHelper::readSearchContextIdForNodeIncludingContextUUID
: TransportSearchHelper::readSearchContextIdForNodeExcludingContextUUID,
SearchContextIdForNode[]::new
);
if (in.available() > 0) {
throw new IllegalArgumentException("Not all bytes were read");
}
return new ParsedScrollId(type, context);
} catch (Exception e) {
throw new IllegalArgumentException("Cannot parse scroll id", e);
}
}
private static SearchContextIdForNode readSearchContextIdForNodeIncludingContextUUID(StreamInput in) throws IOException {
return innerReadSearchContextIdForNode(in.readString(), in);
}
private static SearchContextIdForNode readSearchContextIdForNodeExcludingContextUUID(StreamInput in) throws IOException {
return innerReadSearchContextIdForNode("", in);
}
private static SearchContextIdForNode innerReadSearchContextIdForNode(String contextUUID, StreamInput in) throws IOException {
long id = in.readLong();
String[] split = RemoteClusterAware.splitIndexName(in.readString());
String clusterAlias = split[0];
String target = split[1];
return new SearchContextIdForNode(clusterAlias, target, new ShardSearchContextId(contextUUID, id));
}
/**
* Using the 'search.check_ccs_compatibility' setting, clients can ask for an early
* check that inspects the incoming request and tries to verify that it can be handled by
* a CCS compliant earlier version, e.g. currently a N-1 version where N is the current minor.
*
* Checking the compatibility involved serializing the request to a stream output that acts like
* it was on the previous minor version. This should e.g. trigger errors for {@link Writeable} parts of
* the requests that were not available in those versions.
*/
public static void checkCCSVersionCompatibility(Writeable writeableRequest) {
try {
writeableRequest.writeTo(new VersionCheckingStreamOutput(TransportVersion.minimumCCSVersion()));
} catch (Exception e) {
// if we cannot serialize, raise this as an error to indicate to the caller that CCS has problems with this request
throw new IllegalArgumentException(
"["
+ writeableRequest.getClass()
+ "] is not compatible with version "
+ TransportVersion.minimumCCSVersion().toReleaseVersion()
+ " and the '"
+ SearchService.CCS_VERSION_CHECK_SETTING.getKey()
+ "' setting is enabled.",
e
);
}
}
private TransportSearchHelper() {}
}
| TransportSearchHelper |
java | spring-projects__spring-framework | spring-orm/src/test/java/org/springframework/orm/jpa/persistenceunit/PersistenceXmlParsingTests.java | {
"start": 1763,
"end": 15543
} | class ____ {
@Test
void testMetaInfCase() throws Exception {
PersistenceUnitReader reader = new PersistenceUnitReader(
new PathMatchingResourcePatternResolver(), new JndiDataSourceLookup());
String resource = "/org/springframework/orm/jpa/META-INF/persistence.xml";
SpringPersistenceUnitInfo[] info = reader.readPersistenceUnitInfos(resource);
assertThat(info).isNotNull();
assertThat(info).hasSize(1);
assertThat(info[0].getPersistenceUnitName()).isEqualTo("OrderManagement");
assertThat(info[0].getJarFileUrls()).containsExactly(
new ClassPathResource("order.jar").getURL(),
new ClassPathResource("order-supplemental.jar").getURL());
assertThat(info[0].excludeUnlistedClasses()).as("Exclude unlisted should default false in 1.0.").isFalse();
}
@Test
void testExample1() {
PersistenceUnitReader reader = new PersistenceUnitReader(
new PathMatchingResourcePatternResolver(), new JndiDataSourceLookup());
String resource = "/org/springframework/orm/jpa/persistence-example1.xml";
SpringPersistenceUnitInfo[] info = reader.readPersistenceUnitInfos(resource);
assertThat(info).isNotNull();
assertThat(info).hasSize(1);
assertThat(info[0].getPersistenceUnitName()).isEqualTo("OrderManagement");
assertThat(info[0].excludeUnlistedClasses()).as("Exclude unlisted should default false in 1.0.").isFalse();
}
@Test
void testExample2() {
PersistenceUnitReader reader = new PersistenceUnitReader(
new PathMatchingResourcePatternResolver(), new JndiDataSourceLookup());
String resource = "/org/springframework/orm/jpa/persistence-example2.xml";
SpringPersistenceUnitInfo[] info = reader.readPersistenceUnitInfos(resource);
assertThat(info).isNotNull();
assertThat(info).hasSize(1);
assertThat(info[0].getPersistenceUnitName()).isEqualTo("OrderManagement2");
assertThat(info[0].getMappingFileNames()).containsExactly("mappings.xml");
assertThat(info[0].getProperties()).isEmpty();
assertThat(info[0].excludeUnlistedClasses()).as("Exclude unlisted should default false in 1.0.").isFalse();
}
@Test
void testExample3() throws Exception {
PersistenceUnitReader reader = new PersistenceUnitReader(
new PathMatchingResourcePatternResolver(), new JndiDataSourceLookup());
String resource = "/org/springframework/orm/jpa/persistence-example3.xml";
SpringPersistenceUnitInfo[] info = reader.readPersistenceUnitInfos(resource);
assertThat(info).isNotNull();
assertThat(info).hasSize(1);
assertThat(info[0].getPersistenceUnitName()).isEqualTo("OrderManagement3");
assertThat(info[0].getJarFileUrls()).containsExactly(
new ClassPathResource("order.jar").getURL(),
new ClassPathResource("order-supplemental.jar").getURL());
assertThat(info[0].getProperties()).isEmpty();
assertThat(info[0].getJtaDataSource()).isNull();
assertThat(info[0].getNonJtaDataSource()).isNull();
assertThat(info[0].excludeUnlistedClasses()).as("Exclude unlisted should default false in 1.0.").isFalse();
}
@Test
void testExample4() throws Exception {
SimpleNamingContextBuilder builder = SimpleNamingContextBuilder.emptyActivatedContextBuilder();
DataSource ds = new DriverManagerDataSource();
builder.bind("java:comp/env/jdbc/MyDB", ds);
PersistenceUnitReader reader = new PersistenceUnitReader(
new PathMatchingResourcePatternResolver(), new JndiDataSourceLookup());
String resource = "/org/springframework/orm/jpa/persistence-example4.xml";
SpringPersistenceUnitInfo[] info = reader.readPersistenceUnitInfos(resource);
assertThat(info).isNotNull();
assertThat(info).hasSize(1);
assertThat(info[0].getPersistenceUnitName()).isEqualTo("OrderManagement4");
assertThat(info[0].getMappingFileNames()).containsExactly("order-mappings.xml");
assertThat(info[0].getManagedClassNames()).containsExactly(
"com.acme.Order", "com.acme.Customer", "com.acme.Item");
assertThat(info[0].excludeUnlistedClasses()).as("Exclude unlisted should be true when no value.").isTrue();
assertThat(info[0].getTransactionType()).isSameAs(PersistenceUnitTransactionType.RESOURCE_LOCAL);
assertThat(info[0].getProperties()).isEmpty();
builder.clear();
}
@Test
void testExample5() throws Exception {
PersistenceUnitReader reader = new PersistenceUnitReader(
new PathMatchingResourcePatternResolver(), new JndiDataSourceLookup());
String resource = "/org/springframework/orm/jpa/persistence-example5.xml";
SpringPersistenceUnitInfo[] info = reader.readPersistenceUnitInfos(resource);
assertThat(info).isNotNull();
assertThat(info).hasSize(1);
assertThat(info[0].getPersistenceUnitName()).isEqualTo("OrderManagement5");
assertThat(info[0].getMappingFileNames()).containsExactly("order1.xml", "order2.xml");
assertThat(info[0].getJarFileUrls()).containsExactly(
new ClassPathResource("order.jar").getURL(),
new ClassPathResource("order-supplemental.jar").getURL());
assertThat(info[0].getPersistenceProviderClassName()).isEqualTo("com.acme.AcmePersistence");
assertThat(info[0].getProperties()).isEmpty();
assertThat(info[0].excludeUnlistedClasses()).as("Exclude unlisted should default false in 1.0.").isFalse();
}
@Test
void testExampleComplex() throws Exception {
DataSource ds = new DriverManagerDataSource();
String resource = "/org/springframework/orm/jpa/persistence-complex.xml";
MapDataSourceLookup dataSourceLookup = new MapDataSourceLookup();
Map<String, DataSource> dataSources = new HashMap<>();
dataSources.put("jdbc/MyPartDB", ds);
dataSources.put("jdbc/MyDB", ds);
dataSourceLookup.setDataSources(dataSources);
PersistenceUnitReader reader = new PersistenceUnitReader(
new PathMatchingResourcePatternResolver(), dataSourceLookup);
SpringPersistenceUnitInfo[] info = reader.readPersistenceUnitInfos(resource);
assertThat(info).hasSize(2);
SpringPersistenceUnitInfo pu1 = info[0];
assertThat(pu1.getPersistenceUnitName()).isEqualTo("pu1");
assertThat(pu1.getPersistenceProviderClassName()).isEqualTo("com.acme.AcmePersistence");
assertThat(pu1.getMappingFileNames()).containsExactly("ormap2.xml");
assertThat(pu1.getJarFileUrls()).containsExactly(new ClassPathResource("order.jar").getURL());
assertThat(pu1.excludeUnlistedClasses()).isFalse();
assertThat(pu1.getTransactionType()).isSameAs(PersistenceUnitTransactionType.RESOURCE_LOCAL);
assertThat(pu1.getProperties()).containsOnly(
entry("com.acme.persistence.sql-logging", "on"), entry("foo", "bar"));
assertThat(pu1.getNonJtaDataSource()).isNull();
assertThat(pu1.getJtaDataSource()).isSameAs(ds);
assertThat(pu1.excludeUnlistedClasses()).as("Exclude unlisted should default false in 1.0.").isFalse();
SpringPersistenceUnitInfo pu2 = info[1];
assertThat(pu2.getTransactionType()).isSameAs(PersistenceUnitTransactionType.JTA);
assertThat(pu2.getPersistenceProviderClassName()).isEqualTo("com.acme.AcmePersistence");
assertThat(pu2.getMappingFileNames()).containsExactly("order2.xml");
// the following assertions fail only during coverage runs
// assertEquals(1, pu2.getJarFileUrls().size());
// assertEquals(new ClassPathResource("order-supplemental.jar").getURL(), pu2.getJarFileUrls().get(0));
assertThat(pu2.excludeUnlistedClasses()).isTrue();
assertThat(pu2.getJtaDataSource()).isNull();
assertThat(pu2.getNonJtaDataSource()).isEqualTo(ds);
assertThat(pu2.excludeUnlistedClasses()).as("Exclude unlisted should be true when no value.").isTrue();
}
@Test
void testExample6() {
PersistenceUnitReader reader = new PersistenceUnitReader(
new PathMatchingResourcePatternResolver(), new JndiDataSourceLookup());
String resource = "/org/springframework/orm/jpa/persistence-example6.xml";
SpringPersistenceUnitInfo[] info = reader.readPersistenceUnitInfos(resource);
assertThat(info).hasSize(1);
assertThat(info[0].getPersistenceUnitName()).isEqualTo("pu");
assertThat(info[0].getProperties()).isEmpty();
assertThat(info[0].excludeUnlistedClasses()).as("Exclude unlisted should default false in 1.0.").isFalse();
}
@Disabled("not doing schema parsing anymore for JPA 2.0 compatibility")
@Test
void testInvalidPersistence() {
PersistenceUnitReader reader = new PersistenceUnitReader(
new PathMatchingResourcePatternResolver(), new JndiDataSourceLookup());
String resource = "/org/springframework/orm/jpa/persistence-invalid.xml";
assertThatRuntimeException().isThrownBy(() -> reader.readPersistenceUnitInfos(resource));
}
@Disabled("not doing schema parsing anymore for JPA 2.0 compatibility")
@Test
void testNoSchemaPersistence() {
PersistenceUnitReader reader = new PersistenceUnitReader(
new PathMatchingResourcePatternResolver(), new JndiDataSourceLookup());
String resource = "/org/springframework/orm/jpa/persistence-no-schema.xml";
assertThatRuntimeException().isThrownBy(() -> reader.readPersistenceUnitInfos(resource));
}
@Test
void testPersistenceUnitRootUrl() throws Exception {
URL url = PersistenceUnitReader.determinePersistenceUnitRootUrl(new ClassPathResource("/org/springframework/orm/jpa/persistence-no-schema.xml"));
assertThat(url).isNull();
url = PersistenceUnitReader.determinePersistenceUnitRootUrl(new ClassPathResource("/org/springframework/orm/jpa/META-INF/persistence.xml"));
assertThat(url.toString()).as("the containing folder should have been returned")
.endsWith("/org/springframework/orm/jpa");
}
@Test
void testPersistenceUnitRootUrlWithJar() throws Exception {
ClassPathResource archive = new ClassPathResource("/org/springframework/orm/jpa/jpa-archive.jar");
String newRoot = "jar:" + archive.getURL().toExternalForm() + "!/META-INF/persist.xml";
Resource insideArchive = new UrlResource(newRoot);
// make sure the location actually exists
assertThat(insideArchive.exists()).isTrue();
URL url = PersistenceUnitReader.determinePersistenceUnitRootUrl(insideArchive);
assertThat(archive.getURL().sameFile(url)).as("the archive location should have been returned").isTrue();
}
@Test
void testJpa1ExcludeUnlisted() {
PersistenceUnitReader reader = new PersistenceUnitReader(
new PathMatchingResourcePatternResolver(), new JndiDataSourceLookup());
String resource = "/org/springframework/orm/jpa/persistence-exclude-1.0.xml";
SpringPersistenceUnitInfo[] info = reader.readPersistenceUnitInfos(resource);
assertThat(info).isNotNull();
assertThat(info.length).as("The number of persistence units is incorrect.").isEqualTo(4);
SpringPersistenceUnitInfo noExclude = info[0];
assertThat(noExclude).as("noExclude should not be null.").isNotNull();
assertThat(noExclude.getPersistenceUnitName()).as("noExclude name is not correct.").isEqualTo("NoExcludeElement");
assertThat(noExclude.excludeUnlistedClasses()).as("Exclude unlisted should default false in 1.0.").isFalse();
SpringPersistenceUnitInfo emptyExclude = info[1];
assertThat(emptyExclude).as("emptyExclude should not be null.").isNotNull();
assertThat(emptyExclude.getPersistenceUnitName()).as("emptyExclude name is not correct.").isEqualTo("EmptyExcludeElement");
assertThat(emptyExclude.excludeUnlistedClasses()).as("emptyExclude should be true.").isTrue();
SpringPersistenceUnitInfo trueExclude = info[2];
assertThat(trueExclude).as("trueExclude should not be null.").isNotNull();
assertThat(trueExclude.getPersistenceUnitName()).as("trueExclude name is not correct.").isEqualTo("TrueExcludeElement");
assertThat(trueExclude.excludeUnlistedClasses()).as("trueExclude should be true.").isTrue();
SpringPersistenceUnitInfo falseExclude = info[3];
assertThat(falseExclude).as("falseExclude should not be null.").isNotNull();
assertThat(falseExclude.getPersistenceUnitName()).as("falseExclude name is not correct.").isEqualTo("FalseExcludeElement");
assertThat(falseExclude.excludeUnlistedClasses()).as("falseExclude should be false.").isFalse();
}
@Test
void testJpa2ExcludeUnlisted() {
PersistenceUnitReader reader = new PersistenceUnitReader(
new PathMatchingResourcePatternResolver(), new JndiDataSourceLookup());
String resource = "/org/springframework/orm/jpa/persistence-exclude-2.0.xml";
SpringPersistenceUnitInfo[] info = reader.readPersistenceUnitInfos(resource);
assertThat(info).isNotNull();
assertThat(info.length).as("The number of persistence units is incorrect.").isEqualTo(4);
SpringPersistenceUnitInfo noExclude = info[0];
assertThat(noExclude).as("noExclude should not be null.").isNotNull();
assertThat(noExclude.getPersistenceUnitName()).as("noExclude name is not correct.").isEqualTo("NoExcludeElement");
assertThat(noExclude.excludeUnlistedClasses()).as("Exclude unlisted still defaults to false in 2.0.").isFalse();
SpringPersistenceUnitInfo emptyExclude = info[1];
assertThat(emptyExclude).as("emptyExclude should not be null.").isNotNull();
assertThat(emptyExclude.getPersistenceUnitName()).as("emptyExclude name is not correct.").isEqualTo("EmptyExcludeElement");
assertThat(emptyExclude.excludeUnlistedClasses()).as("emptyExclude should be true.").isTrue();
SpringPersistenceUnitInfo trueExclude = info[2];
assertThat(trueExclude).as("trueExclude should not be null.").isNotNull();
assertThat(trueExclude.getPersistenceUnitName()).as("trueExclude name is not correct.").isEqualTo("TrueExcludeElement");
assertThat(trueExclude.excludeUnlistedClasses()).as("trueExclude should be true.").isTrue();
SpringPersistenceUnitInfo falseExclude = info[3];
assertThat(falseExclude).as("falseExclude should not be null.").isNotNull();
assertThat(falseExclude.getPersistenceUnitName()).as("falseExclude name is not correct.").isEqualTo("FalseExcludeElement");
assertThat(falseExclude.excludeUnlistedClasses()).as("falseExclude should be false.").isFalse();
}
}
| PersistenceXmlParsingTests |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/endpoint/invoke/OperationInvokerAdvisor.java | {
"start": 971,
"end": 1471
} | interface ____ {
/**
* Apply additional functionality to the given invoker.
* @param endpointId the endpoint ID
* @param operationType the operation type
* @param parameters the operation parameters
* @param invoker the invoker to advise
* @return a potentially new operation invoker with support for additional features
*/
OperationInvoker apply(EndpointId endpointId, OperationType operationType, OperationParameters parameters,
OperationInvoker invoker);
}
| OperationInvokerAdvisor |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/hive/HiveSelectTest_distribute.java | {
"start": 909,
"end": 2653
} | class ____
extends TestCase {
public void test_0() throws Exception {
String sql = "select * from LD_aly.fct_pay_ord_cn_di t1 distribute by buyer_id sort by seller_id";
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.HIVE);
SQLStatement stmt = statementList.get(0);
assertEquals("SELECT *\n" +
"FROM LD_aly.fct_pay_ord_cn_di t1\n" +
"DISTRIBUTE BY buyer_id\n" +
"SORT BY seller_id", stmt.toString());
assertEquals("SELECT *\n" +
"FROM LD_aly.fct_pay_ord_cn_di t1\n" +
"DISTRIBUTE BY buyer_id\n" +
"SORT BY seller_id", SQLUtils.toSQLString(stmt));
assertEquals(1, statementList.size());
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(JdbcConstants.HIVE);
stmt.accept(visitor);
// System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("orderBy : " + visitor.getOrderByColumns());
System.out.println("groupBy : " + visitor.getGroupByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(3, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
assertEquals(0, visitor.getGroupByColumns().size());
assertTrue(visitor.containsColumn("LD_aly.fct_pay_ord_cn_di", "*"));
assertTrue(visitor.containsColumn("LD_aly.fct_pay_ord_cn_di", "buyer_id"));
assertTrue(visitor.containsColumn("LD_aly.fct_pay_ord_cn_di", "seller_id"));
}
}
| HiveSelectTest_distribute |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/booleanarrays/BooleanArrays_assertHasSizeLessThanOrEqualTo_Test.java | {
"start": 1059,
"end": 2160
} | class ____ extends BooleanArraysBaseTest {
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertHasSizeLessThanOrEqualTo(someInfo(), null, 6))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_size_of_actual_is_not_less_than_or_equal_to_boundary() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertHasSizeLessThanOrEqualTo(someInfo(), actual, 1))
.withMessage(shouldHaveSizeLessThanOrEqualTo(actual, actual.length,
1).create());
}
@Test
void should_pass_if_size_of_actual_is_less_than_boundary() {
arrays.assertHasSizeLessThanOrEqualTo(someInfo(), actual, 3);
}
@Test
void should_pass_if_size_of_actual_is_equal_to_boundary() {
arrays.assertHasSizeLessThanOrEqualTo(someInfo(), actual, actual.length);
}
}
| BooleanArrays_assertHasSizeLessThanOrEqualTo_Test |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/config/MvcNamespaceTests.java | {
"start": 8975,
"end": 49088
} | class ____ {
public static final String VIEWCONTROLLER_BEAN_NAME =
"org.springframework.web.servlet.config.viewControllerHandlerMapping";
private XmlWebApplicationContext appContext;
private TestController handler;
private HandlerMethod handlerMethod;
@BeforeEach
void setup() throws Exception {
TestMockServletContext servletContext = new TestMockServletContext();
appContext = new XmlWebApplicationContext();
appContext.setServletContext(servletContext);
LocaleContextHolder.setLocale(Locale.US);
String attributeName = WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE;
appContext.getServletContext().setAttribute(attributeName, appContext);
handler = new TestController();
handlerMethod = new InvocableHandlerMethod(handler, TestController.class.getMethod("testBind",
Date.class, Double.class, TestBean.class, BindingResult.class));
}
@Test
@SuppressWarnings("removal")
void testDefaultConfig() throws Exception {
loadBeanDefinitions("mvc-config.xml");
RequestMappingHandlerMapping mapping = appContext.getBean(RequestMappingHandlerMapping.class);
assertThat(mapping).isNotNull();
assertThat(mapping.getOrder()).isEqualTo(0);
assertThat(mapping.getUrlPathHelper().shouldRemoveSemicolonContent()).isTrue();
assertThat(mapping.getPathMatcher()).isEqualTo(appContext.getBean("mvcPathMatcher"));
assertThat(mapping.getPatternParser()).isNotNull();
mapping.setDefaultHandler(handlerMethod);
MockHttpServletRequest request = new MockHttpServletRequest("GET", "/foo.json");
NativeWebRequest webRequest = new ServletWebRequest(request);
ContentNegotiationManager manager = mapping.getContentNegotiationManager();
assertThat(manager.resolveMediaTypes(webRequest))
.as("Should not resolve file extensions by default")
.containsExactly(MediaType.ALL);
RequestMappingHandlerAdapter adapter = appContext.getBean(RequestMappingHandlerAdapter.class);
assertThat(adapter).isNotNull();
List<HttpMessageConverter<?>> converters = adapter.getMessageConverters();
assertThat(converters.size()).isGreaterThan(0);
for (HttpMessageConverter<?> converter : converters) {
if (converter instanceof AbstractJackson2HttpMessageConverter) {
ObjectMapper objectMapper = ((AbstractJackson2HttpMessageConverter) converter).getObjectMapper();
assertThat(objectMapper.getDeserializationConfig().isEnabled(MapperFeature.DEFAULT_VIEW_INCLUSION)).isFalse();
assertThat(objectMapper.getSerializationConfig().isEnabled(MapperFeature.DEFAULT_VIEW_INCLUSION)).isFalse();
assertThat(objectMapper.getDeserializationConfig().isEnabled(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)).isFalse();
if (converter instanceof MappingJackson2XmlHttpMessageConverter) {
assertThat(objectMapper.getClass()).isEqualTo(XmlMapper.class);
}
}
}
assertThat(appContext.getBean(FormattingConversionServiceFactoryBean.class)).isNotNull();
assertThat(appContext.getBean(ConversionService.class)).isNotNull();
assertThat(appContext.getBean(LocalValidatorFactoryBean.class)).isNotNull();
assertThat(appContext.getBean(Validator.class)).isNotNull();
assertThat(appContext.getBean("localeResolver", LocaleResolver.class)).isNotNull();
assertThat(appContext.getBean("viewNameTranslator", RequestToViewNameTranslator.class)).isNotNull();
assertThat(appContext.getBean("flashMapManager", FlashMapManager.class)).isNotNull();
// default web binding initializer behavior test
request = new MockHttpServletRequest("GET", "/");
request.addParameter("date", "2009-10-31");
request.addParameter("percent", "99.99%");
MockHttpServletResponse response = new MockHttpServletResponse();
HandlerExecutionChain chain = mapping.getHandler(request);
assertThat(chain.getInterceptorList()).hasSize(1);
assertThat(chain.getInterceptorList()).element(0).isInstanceOf(ConversionServiceExposingInterceptor.class);
ConversionServiceExposingInterceptor interceptor = (ConversionServiceExposingInterceptor) chain.getInterceptorList().get(0);
interceptor.preHandle(request, response, handlerMethod);
assertThat(request.getAttribute(ConversionService.class.getName())).isSameAs(appContext.getBean(ConversionService.class));
adapter.handle(request, response, handlerMethod);
assertThat(handler.recordedValidationError).isTrue();
assertThat(handler.date).isInSameDayAs("2009-10-31T00:00:00+00:00");
assertThat(handler.percent).isEqualTo(Double.valueOf(0.9999));
CompositeUriComponentsContributor uriComponentsContributor = this.appContext.getBean(
MvcUriComponentsBuilder.MVC_URI_COMPONENTS_CONTRIBUTOR_BEAN_NAME,
CompositeUriComponentsContributor.class);
assertThat(uriComponentsContributor).isNotNull();
String name = "mvcHandlerMappingIntrospector";
HandlerMappingIntrospector introspector = this.appContext.getBean(name, HandlerMappingIntrospector.class);
assertThat(introspector).isNotNull();
assertThat(introspector.getHandlerMappings()).hasSize(2);
assertThat(introspector.getHandlerMappings()).element(0).isSameAs(mapping);
assertThat(introspector.getHandlerMappings().get(1).getClass()).isEqualTo(BeanNameUrlHandlerMapping.class);
}
@Test // gh-25290
void testDefaultConfigWithBeansInParentContext() {
	// Pre-register the three infrastructure beans in a parent context so the MVC
	// namespace must reuse them instead of creating its own defaults in the child.
	StaticApplicationContext parentContext = new StaticApplicationContext();
	parentContext.registerSingleton("localeResolver", CookieLocaleResolver.class);
	parentContext.registerSingleton("viewNameTranslator", DefaultRequestToViewNameTranslator.class);
	parentContext.registerSingleton("flashMapManager", SessionFlashMapManager.class);
	parentContext.refresh();

	appContext.setParent(parentContext);
	loadBeanDefinitions("mvc-config.xml");

	// Each lookup must resolve to the parent's singleton, not a child-context duplicate.
	for (String beanName : new String[] {"localeResolver", "viewNameTranslator", "flashMapManager"}) {
		assertThat(appContext.getBean(beanName)).isSameAs(parentContext.getBean(beanName));
	}
}
@Test
void testCustomConversionService() throws Exception {
	loadBeanDefinitions("mvc-config-custom-conversion-service.xml");

	RequestMappingHandlerMapping handlerMapping = appContext.getBean(RequestMappingHandlerMapping.class);
	assertThat(handlerMapping).isNotNull();
	handlerMapping.setDefaultHandler(handlerMethod);

	// Request carrying a date parameter to be bound via the custom conversion service.
	MockHttpServletRequest servletRequest = new MockHttpServletRequest("GET", "/");
	servletRequest.setRequestURI("/accounts/12345");
	servletRequest.addParameter("date", "2009-10-31");
	MockHttpServletResponse servletResponse = new MockHttpServletResponse();

	HandlerExecutionChain executionChain = handlerMapping.getHandler(servletRequest);
	assertThat(executionChain.getInterceptorList()).hasSize(1);
	assertThat(executionChain.getInterceptorList()).element(0).isInstanceOf(ConversionServiceExposingInterceptor.class);

	// The exposing interceptor must publish the custom "conversionService" bean
	// under the ConversionService class name as a request attribute.
	ConversionServiceExposingInterceptor exposingInterceptor =
			(ConversionServiceExposingInterceptor) executionChain.getInterceptorList().get(0);
	exposingInterceptor.preHandle(servletRequest, servletResponse, handler);
	assertThat(servletRequest.getAttribute(ConversionService.class.getName()))
			.isSameAs(appContext.getBean("conversionService"));

	RequestMappingHandlerAdapter handlerAdapter = appContext.getBean(RequestMappingHandlerAdapter.class);
	assertThat(handlerAdapter).isNotNull();
	// Binding the date is expected to fail under the custom conversion service.
	assertThatExceptionOfType(TypeMismatchException.class).isThrownBy(() ->
			handlerAdapter.handle(servletRequest, servletResponse, handlerMethod));
}
@Test
void testCustomValidator() throws Exception {
	// Delegates to the shared routine with the fixture that declares a custom validator.
	doTestCustomValidator("mvc-config-custom-validator.xml");
}
@SuppressWarnings("removal")
private void doTestCustomValidator(String xml) throws Exception {
	// Shared routine: loads the given fixture and verifies the custom validator
	// is invoked in place of the default one during argument binding.
	loadBeanDefinitions(xml);

	RequestMappingHandlerMapping handlerMapping = appContext.getBean(RequestMappingHandlerMapping.class);
	assertThat(handlerMapping).isNotNull();
	assertThat(handlerMapping.getUrlPathHelper().shouldRemoveSemicolonContent()).isFalse();

	RequestMappingHandlerAdapter handlerAdapter = appContext.getBean(RequestMappingHandlerAdapter.class);
	assertThat(handlerAdapter).isNotNull();

	// Bind a date parameter; the custom validator must see the invocation, and the
	// handler must not record a validation error.
	MockHttpServletRequest servletRequest = new MockHttpServletRequest();
	servletRequest.addParameter("date", "2009-10-31");
	handlerAdapter.handle(servletRequest, new MockHttpServletResponse(), handlerMethod);

	assertThat(appContext.getBean(TestValidator.class).validatorInvoked).isTrue();
	assertThat(handler.recordedValidationError).isFalse();
}
@Test
void testInterceptors() throws Exception {
	// Verifies per-path interceptor mapping declared in mvc-config-interceptors.xml.
	loadBeanDefinitions("mvc-config-interceptors.xml");

	RequestMappingHandlerMapping mapping = appContext.getBean(RequestMappingHandlerMapping.class);
	assertThat(mapping).isNotNull();
	mapping.setDefaultHandler(handlerMethod);

	MockHttpServletRequest request = new MockHttpServletRequest("GET", "/");
	request.setRequestURI("/accounts/12345");
	request.addParameter("locale", "en");

	// This URI matches all three interceptors: the conversion-service exposer
	// plus the locale-change and user-role interceptors from the fixture.
	HandlerExecutionChain chain = mapping.getHandler(request);
	assertThat(chain.getInterceptorList()).hasSize(3);
	assertThat(chain.getInterceptorList()).element(0).isInstanceOf(ConversionServiceExposingInterceptor.class);
	assertThat(chain.getInterceptorList()).element(1).isInstanceOf(LocaleChangeInterceptor.class);
	assertThat(chain.getInterceptorList()).element(2).isInstanceOf(UserRoleAuthorizationInterceptor.class);

	// Each of the remaining URIs matches only two of the mapped interceptors.
	// NOTE: the same request object is mutated between lookups, so order matters.
	request.setRequestURI("/admin/users");
	chain = mapping.getHandler(request);
	assertThat(chain.getInterceptorList()).hasSize(2);

	request.setRequestURI("/logged/accounts/12345");
	chain = mapping.getHandler(request);
	assertThat(chain.getInterceptorList()).hasSize(2);

	request.setRequestURI("/foo/logged");
	chain = mapping.getHandler(request);
	assertThat(chain.getInterceptorList()).hasSize(2);
}
@Test
@SuppressWarnings("removal")
void testResources() throws Exception {
	// Verifies the resource-handling setup: mapping order, registered interceptors,
	// and the failure path for a resource that cannot be resolved.
	loadBeanDefinitions("mvc-config-resources.xml");

	HttpRequestHandlerAdapter adapter = appContext.getBean(HttpRequestHandlerAdapter.class);
	assertThat(adapter).isNotNull();

	SimpleUrlHandlerMapping resourceMapping = appContext.getBean(SimpleUrlHandlerMapping.class);
	assertThat(resourceMapping).isNotNull();
	// Resource mapping sorts just ahead of the lowest precedence.
	assertThat(resourceMapping.getOrder()).isEqualTo(Ordered.LOWEST_PRECEDENCE - 1);
	assertThat(resourceMapping.getPathMatcher()).isNotNull();
	assertThat(resourceMapping.getPatternParser()).isNotNull();

	BeanNameUrlHandlerMapping beanNameMapping = appContext.getBean(BeanNameUrlHandlerMapping.class);
	assertThat(beanNameMapping).isNotNull();
	assertThat(beanNameMapping.getOrder()).isEqualTo(2);

	ResourceUrlProvider urlProvider = appContext.getBean(ResourceUrlProvider.class);
	assertThat(urlProvider).isNotNull();

	// Both framework interceptors must be registered as MappedInterceptor beans.
	Map<String, MappedInterceptor> beans = appContext.getBeansOfType(MappedInterceptor.class);
	List<Class<?>> interceptors = beans.values().stream()
			.map(mappedInterceptor -> mappedInterceptor.getInterceptor().getClass())
			.collect(Collectors.toList());
	assertThat(interceptors).contains(ConversionServiceExposingInterceptor.class,
			ResourceUrlProviderExposingInterceptor.class);

	MockHttpServletRequest request = new MockHttpServletRequest();
	request.setRequestURI("/resources/foo.css");
	request.setMethod("GET");

	HandlerExecutionChain chain = resourceMapping.getHandler(request);
	assertThat(chain).isNotNull();
	assertThat(chain.getHandler()).isInstanceOf(ResourceHttpRequestHandler.class);

	MockHttpServletResponse response = new MockHttpServletResponse();
	for (HandlerInterceptor interceptor : chain.getInterceptorList()) {
		interceptor.preHandle(request, response, chain.getHandler());
	}
	// The request is expected not to resolve to an actual resource, so handling
	// must raise NoResourceFoundException.
	assertThatThrownBy(() -> adapter.handle(request, response, chain.getHandler()))
			.isInstanceOf(NoResourceFoundException.class);
}
@Test
@SuppressWarnings("removal")
void testUseDeprecatedPathMatcher() throws Exception {
	loadBeanDefinitions("mvc-config-deprecated-path-matcher.xml");

	AntPathMatcher customPathMatcher = appContext.getBean("pathMatcher", AntPathMatcher.class);
	Map<String, AbstractHandlerMapping> mappingsByName = appContext.getBeansOfType(AbstractHandlerMapping.class);
	assertThat(mappingsByName).hasSize(4);

	// Configuring the deprecated AntPathMatcher applies it to every handler mapping
	// and leaves the pattern parser unset on each of them.
	mappingsByName.forEach((beanName, handlerMapping) -> {
		assertThat(handlerMapping.getPathMatcher()).as("path matcher for %s", beanName).isEqualTo(customPathMatcher);
		assertThat(handlerMapping.getPatternParser()).as("pattern parser for %s", beanName).isNull();
	});
}
@Test
@SuppressWarnings("removal")
void testUsePathPatternParser() throws Exception {
	loadBeanDefinitions("mvc-config-custom-pattern-parser.xml");

	PathPatternParser customParser = appContext.getBean("patternParser", PathPatternParser.class);
	Map<String, AbstractHandlerMapping> mappingsByName = appContext.getBeansOfType(AbstractHandlerMapping.class);
	assertThat(mappingsByName).hasSize(4);

	// Every handler mapping must use the configured parser while still having a
	// (non-null) path matcher available.
	mappingsByName.forEach((beanName, handlerMapping) -> {
		assertThat(handlerMapping.getPathMatcher()).as("path matcher for %s", beanName).isNotNull();
		assertThat(handlerMapping.getPatternParser()).as("pattern parser for %s", beanName).isEqualTo(customParser);
	});
}
@Test
void testResourcesWithOptionalAttributes() {
	loadBeanDefinitions("mvc-config-resources-optional-attrs.xml");

	SimpleUrlHandlerMapping urlMapping = appContext.getBean(SimpleUrlHandlerMapping.class);
	assertThat(urlMapping).isNotNull();
	assertThat(urlMapping.getOrder()).isEqualTo(5);

	// The URL map stores the resource handler's bean name keyed by pattern.
	Object handlerBeanName = urlMapping.getUrlMap().get("/resources/**");
	assertThat(handlerBeanName).isNotNull();

	ResourceHttpRequestHandler resourceHandler =
			appContext.getBean((String) handlerBeanName, ResourceHttpRequestHandler.class);
	assertThat(resourceHandler).isNotNull();
	// The cache-period attribute from the fixture must be applied.
	assertThat(resourceHandler.getCacheSeconds()).isEqualTo(3600);
}
@Test
@SuppressWarnings("removal")
void testResourcesWithResolversTransformers() {
	// Verifies the full auto-configured resource chain: resolver order, caching,
	// versioning strategies, and the transformer list.
	loadBeanDefinitions("mvc-config-resources-chain.xml");

	SimpleUrlHandlerMapping mapping = appContext.getBean(SimpleUrlHandlerMapping.class);
	assertThat(mapping).isNotNull();
	assertThat(mapping.getUrlMap().get("/resources/**")).isNotNull();
	String beanName = (String) mapping.getUrlMap().get("/resources/**");
	ResourceHttpRequestHandler handler = appContext.getBean(beanName, ResourceHttpRequestHandler.class);
	assertThat(handler).isNotNull();
	assertThat(handler.getUrlPathHelper()).isNotNull();

	// Resolver chain order is significant: caching -> version -> webjars -> path.
	List<ResourceResolver> resolvers = handler.getResourceResolvers();
	assertThat(resolvers).hasSize(4);
	assertThat(resolvers).element(0).isInstanceOf(CachingResourceResolver.class);
	assertThat(resolvers).element(1).isInstanceOf(VersionResourceResolver.class);
	assertThat(resolvers).element(2).isInstanceOf(LiteWebJarsResourceResolver.class);
	assertThat(resolvers).element(3).isInstanceOf(PathResourceResolver.class);

	CachingResourceResolver cachingResolver = (CachingResourceResolver) resolvers.get(0);
	assertThat(cachingResolver.getCache()).isInstanceOf(ConcurrentMapCache.class);
	assertThat(cachingResolver.getCache().getName()).isEqualTo("test-resource-cache");

	// JS files use a fixed version; everything else uses content-based versioning.
	VersionResourceResolver versionResolver = (VersionResourceResolver) resolvers.get(1);
	assertThat(versionResolver.getStrategyMap().get("/**/*.js"))
			.isInstanceOf(FixedVersionStrategy.class);
	assertThat(versionResolver.getStrategyMap().get("/**"))
			.isInstanceOf(ContentVersionStrategy.class);

	// One location carries an explicit charset configured in the fixture.
	PathResourceResolver pathResolver = (PathResourceResolver) resolvers.get(3);
	Map<Resource, Charset> locationCharsets = pathResolver.getLocationCharsets();
	assertThat(locationCharsets).hasSize(1);
	assertThat(locationCharsets.values()).element(0).isEqualTo(StandardCharsets.ISO_8859_1);

	// Transformer order: caching wrapper first, then the CSS link rewriter.
	List<ResourceTransformer> transformers = handler.getResourceTransformers();
	assertThat(transformers).hasSize(2);
	assertThat(transformers).element(0).isInstanceOf(CachingResourceTransformer.class);
	assertThat(transformers).element(1).isInstanceOf(CssLinkResourceTransformer.class);

	CachingResourceTransformer cachingTransformer = (CachingResourceTransformer) transformers.get(0);
	assertThat(cachingTransformer.getCache()).isInstanceOf(ConcurrentMapCache.class);
	assertThat(cachingTransformer.getCache().getName()).isEqualTo("test-resource-cache");
}
@Test
void testResourcesWithResolversTransformersCustom() {
	// Same chain setup but with auto-registration disabled: only the explicitly
	// declared resolvers/transformers (plus the trailing PathResourceResolver) appear.
	loadBeanDefinitions("mvc-config-resources-chain-no-auto.xml");

	SimpleUrlHandlerMapping mapping = appContext.getBean(SimpleUrlHandlerMapping.class);
	assertThat(mapping).isNotNull();
	assertThat(mapping.getUrlMap().get("/resources/**")).isNotNull();
	ResourceHttpRequestHandler handler = appContext.getBean((String) mapping.getUrlMap().get("/resources/**"),
			ResourceHttpRequestHandler.class);
	assertThat(handler).isNotNull();

	// Cache-Control from the fixture: max-age=1h, s-maxage=30m, public.
	assertThat(handler.getCacheControl().getHeaderValue())
			.isEqualTo(CacheControl.maxAge(1, TimeUnit.HOURS)
					.sMaxAge(30, TimeUnit.MINUTES).cachePublic().getHeaderValue());

	List<ResourceResolver> resolvers = handler.getResourceResolvers();
	assertThat(resolvers).hasSize(3);
	assertThat(resolvers).element(0).isInstanceOf(VersionResourceResolver.class);
	assertThat(resolvers).element(1).isInstanceOf(EncodedResourceResolver.class);
	assertThat(resolvers).element(2).isInstanceOf(PathResourceResolver.class);

	// JS files use a fixed version; everything else uses content-based versioning.
	VersionResourceResolver versionResolver = (VersionResourceResolver) resolvers.get(0);
	assertThat(versionResolver.getStrategyMap().get("/**/*.js"))
			.isInstanceOf(FixedVersionStrategy.class);
	assertThat(versionResolver.getStrategyMap().get("/**"))
			.isInstanceOf(ContentVersionStrategy.class);

	List<ResourceTransformer> transformers = handler.getResourceTransformers();
	assertThat(transformers).hasSize(1);
	assertThat(transformers).element(0).isInstanceOf(CachingResourceTransformer.class);
}
@Test
void testDefaultServletHandler() throws Exception {
	loadBeanDefinitions("mvc-config-default-servlet.xml");

	HttpRequestHandlerAdapter requestAdapter = appContext.getBean(HttpRequestHandlerAdapter.class);
	assertThat(requestAdapter).isNotNull();
	DefaultServletHttpRequestHandler requestHandler = appContext.getBean(DefaultServletHttpRequestHandler.class);
	assertThat(requestHandler).isNotNull();

	// The default-servlet mapping must run last so it only sees unhandled requests.
	SimpleUrlHandlerMapping urlMapping = appContext.getBean(SimpleUrlHandlerMapping.class);
	assertThat(urlMapping).isNotNull();
	assertThat(urlMapping.getOrder()).isEqualTo(Ordered.LOWEST_PRECEDENCE);

	MockHttpServletRequest servletRequest = new MockHttpServletRequest();
	servletRequest.setRequestURI("/foo.css");
	servletRequest.setMethod("GET");

	HandlerExecutionChain executionChain = urlMapping.getHandler(servletRequest);
	assertThat(executionChain.getHandler()).isInstanceOf(DefaultServletHttpRequestHandler.class);

	// Forwarding to the default servlet produces no ModelAndView.
	ModelAndView modelAndView =
			requestAdapter.handle(servletRequest, new MockHttpServletResponse(), executionChain.getHandler());
	assertThat(modelAndView).isNull();
}
@Test
void testDefaultServletHandlerWithOptionalAttributes() throws Exception {
	// Same behavior as the minimal default-servlet test, but with the fixture that
	// sets the optional attributes on the element.
	loadBeanDefinitions("mvc-config-default-servlet-optional-attrs.xml");

	HttpRequestHandlerAdapter requestAdapter = appContext.getBean(HttpRequestHandlerAdapter.class);
	assertThat(requestAdapter).isNotNull();
	DefaultServletHttpRequestHandler requestHandler = appContext.getBean(DefaultServletHttpRequestHandler.class);
	assertThat(requestHandler).isNotNull();

	SimpleUrlHandlerMapping urlMapping = appContext.getBean(SimpleUrlHandlerMapping.class);
	assertThat(urlMapping).isNotNull();
	assertThat(urlMapping.getOrder()).isEqualTo(Ordered.LOWEST_PRECEDENCE);

	MockHttpServletRequest servletRequest = new MockHttpServletRequest();
	servletRequest.setRequestURI("/foo.css");
	servletRequest.setMethod("GET");

	HandlerExecutionChain executionChain = urlMapping.getHandler(servletRequest);
	assertThat(executionChain.getHandler()).isInstanceOf(DefaultServletHttpRequestHandler.class);

	// Forwarding to the default servlet produces no ModelAndView.
	ModelAndView modelAndView =
			requestAdapter.handle(servletRequest, new MockHttpServletResponse(), executionChain.getHandler());
	assertThat(modelAndView).isNull();
}
@Test
void testBeanDecoration() throws Exception {
	loadBeanDefinitions("mvc-config-bean-decoration.xml");

	RequestMappingHandlerMapping handlerMapping = appContext.getBean(RequestMappingHandlerMapping.class);
	assertThat(handlerMapping).isNotNull();
	handlerMapping.setDefaultHandler(handlerMethod);

	HandlerExecutionChain executionChain = handlerMapping.getHandler(new MockHttpServletRequest("GET", "/"));
	assertThat(executionChain.getInterceptorList()).hasSize(2);
	assertThat(executionChain.getInterceptorList()).element(0).isInstanceOf(ConversionServiceExposingInterceptor.class);
	assertThat(executionChain.getInterceptorList()).element(1).isInstanceOf(LocaleChangeInterceptor.class);

	// The decorated interceptor must carry the locale param name customized in the XML.
	LocaleChangeInterceptor localeInterceptor = (LocaleChangeInterceptor) executionChain.getInterceptorList().get(1);
	assertThat(localeInterceptor.getParamName()).isEqualTo("lang");
}
@Test
@SuppressWarnings("removal")
void testViewControllers() throws Exception {
	// End-to-end checks for view controllers: plain view, forwarding root, redirect
	// with status and query preservation, and a bare status-code controller.
	loadBeanDefinitions("mvc-config-view-controllers.xml");

	RequestMappingHandlerMapping mapping = appContext.getBean(RequestMappingHandlerMapping.class);
	assertThat(mapping).isNotNull();
	mapping.setDefaultHandler(handlerMethod);

	BeanNameUrlHandlerMapping beanNameMapping = appContext.getBean(BeanNameUrlHandlerMapping.class);
	assertThat(beanNameMapping).isNotNull();
	assertThat(beanNameMapping.getOrder()).isEqualTo(2);
	assertThat(beanNameMapping.getPathMatcher()).isNotNull();
	assertThat(beanNameMapping.getPatternParser()).isNotNull();

	MockHttpServletRequest request = new MockHttpServletRequest();
	request.setMethod("GET");
	HandlerExecutionChain chain = mapping.getHandler(request);
	assertThat(chain.getInterceptorList()).hasSize(2);
	assertThat(chain.getInterceptorList()).element(0).isInstanceOf(ConversionServiceExposingInterceptor.class);
	assertThat(chain.getInterceptorList()).element(1).isInstanceOf(LocaleChangeInterceptor.class);

	SimpleUrlHandlerMapping mapping2 = appContext.getBean(SimpleUrlHandlerMapping.class);
	assertThat(mapping2).isNotNull();

	SimpleControllerHandlerAdapter adapter = appContext.getBean(SimpleControllerHandlerAdapter.class);
	assertThat(adapter).isNotNull();

	// /foo: view controller without an explicit view name.
	request = new MockHttpServletRequest("GET", "/foo");
	chain = mapping2.getHandler(request);
	assertThat(chain.getInterceptorList()).hasSize(3);
	assertThat(chain.getInterceptorList()).element(1).isInstanceOf(ConversionServiceExposingInterceptor.class);
	assertThat(chain.getInterceptorList()).element(2).isInstanceOf(LocaleChangeInterceptor.class);
	ModelAndView mv = adapter.handle(request, new MockHttpServletResponse(), chain.getHandler());
	assertThat(mv.getViewName()).isNull();

	// /bar (inside context "/myapp", servlet "/app"): mapped to view "baz".
	request = new MockHttpServletRequest("GET", "/myapp/app/bar");
	request.setContextPath("/myapp");
	request.setServletPath("/app");
	chain = mapping2.getHandler(request);
	assertThat(chain.getInterceptorList()).hasSize(3);
	assertThat(chain.getInterceptorList()).element(1).isInstanceOf(ConversionServiceExposingInterceptor.class);
	assertThat(chain.getInterceptorList()).element(2).isInstanceOf(LocaleChangeInterceptor.class);
	mv = adapter.handle(request, new MockHttpServletResponse(), chain.getHandler());
	assertThat(mv.getViewName()).isEqualTo("baz");

	// Servlet-root path: mapped to view "root".
	request = new MockHttpServletRequest("GET", "/myapp/app/");
	request.setContextPath("/myapp");
	request.setServletPath("/app");
	chain = mapping2.getHandler(request);
	assertThat(chain.getInterceptorList()).hasSize(3);
	assertThat(chain.getInterceptorList()).element(1).isInstanceOf(ConversionServiceExposingInterceptor.class);
	assertThat(chain.getInterceptorList()).element(2).isInstanceOf(LocaleChangeInterceptor.class);
	mv = adapter.handle(request, new MockHttpServletResponse(), chain.getHandler());
	assertThat(mv.getViewName()).isEqualTo("root");

	// /old: redirect controller. The query string must be preserved and the
	// redirect issued with status 308 to "/new".
	request = new MockHttpServletRequest("GET", "/myapp/app/old");
	request.setContextPath("/myapp");
	request.setServletPath("/app");
	request.setQueryString("a=b");
	chain = mapping2.getHandler(request);
	mv = adapter.handle(request, new MockHttpServletResponse(), chain.getHandler());
	assertThat(mv.getView()).isNotNull();
	assertThat(mv.getView().getClass()).isEqualTo(RedirectView.class);
	RedirectView redirectView = (RedirectView) mv.getView();
	MockHttpServletResponse response = new MockHttpServletResponse();
	redirectView.render(Collections.emptyMap(), request, response);
	assertThat(response.getRedirectedUrl()).isEqualTo("/new?a=b");
	assertThat(response.getStatus()).isEqualTo(308);

	// /bad: status-only controller — no view, just a 404 on the response.
	request = new MockHttpServletRequest("GET", "/bad");
	chain = mapping2.getHandler(request);
	response = new MockHttpServletResponse();
	mv = adapter.handle(request, response, chain.getHandler());
	assertThat(mv).isNull();
	assertThat(response.getStatus()).isEqualTo(404);
}
/** WebSphere gives trailing servlet path slashes by default!! */
@Test
void testViewControllersOnWebSphere() throws Exception {
	// Same view-controller mappings as above, but with the trailing-slash servlet
	// paths and the non-decoded-URI request attribute that WebSphere produces.
	loadBeanDefinitions("mvc-config-view-controllers.xml");

	SimpleUrlHandlerMapping mapping2 = appContext.getBean(SimpleUrlHandlerMapping.class);
	SimpleControllerHandlerAdapter adapter = appContext.getBean(SimpleControllerHandlerAdapter.class);

	MockHttpServletRequest request = new MockHttpServletRequest();
	request.setMethod("GET");
	request.setRequestURI("/myapp/app/bar");
	request.setContextPath("/myapp");
	request.setServletPath("/app/");
	// WebSphere-specific attribute carrying the raw (non-decoded) URI.
	request.setAttribute("com.ibm.websphere.servlet.uri_non_decoded", "/myapp/app/bar");
	HandlerExecutionChain chain = mapping2.getHandler(request);
	assertThat(chain.getInterceptorList()).hasSize(3);
	assertThat(chain.getInterceptorList()).element(1).isInstanceOf(ConversionServiceExposingInterceptor.class);
	assertThat(chain.getInterceptorList()).element(2).isInstanceOf(LocaleChangeInterceptor.class);
	ModelAndView mv2 = adapter.handle(request, new MockHttpServletResponse(), chain.getHandler());
	assertThat(mv2.getViewName()).isEqualTo("baz");

	// Servlet root with trailing slash, using an empty-path servlet mapping.
	// NOTE: the same request object is mutated between lookups.
	request.setRequestURI("/myapp/app/");
	request.setContextPath("/myapp");
	request.setServletPath("/app/");
	request.setHttpServletMapping(new MockHttpServletMapping("", "", "", MappingMatch.PATH));
	chain = mapping2.getHandler(request);
	assertThat(chain.getInterceptorList()).hasSize(3);
	assertThat(chain.getInterceptorList()).element(1).isInstanceOf(ConversionServiceExposingInterceptor.class);
	assertThat(chain.getInterceptorList()).element(2).isInstanceOf(LocaleChangeInterceptor.class);
	ModelAndView mv3 = adapter.handle(request, new MockHttpServletResponse(), chain.getHandler());
	assertThat(mv3.getViewName()).isEqualTo("root");

	// Context root with "/" servlet path resolves to the same "root" view.
	request.setRequestURI("/myapp/");
	request.setContextPath("/myapp");
	request.setServletPath("/");
	chain = mapping2.getHandler(request);
	assertThat(chain.getInterceptorList()).hasSize(3);
	assertThat(chain.getInterceptorList()).element(1).isInstanceOf(ConversionServiceExposingInterceptor.class);
	assertThat(chain.getInterceptorList()).element(2).isInstanceOf(LocaleChangeInterceptor.class);
	mv3 = adapter.handle(request, new MockHttpServletResponse(), chain.getHandler());
	assertThat(mv3.getViewName()).isEqualTo("root");
}
@Test
void testViewControllersDefaultConfig() {
	loadBeanDefinitions("mvc-config-view-controllers-minimal.xml");

	SimpleUrlHandlerMapping urlMapping = this.appContext.getBean(SimpleUrlHandlerMapping.class);
	assertThat(urlMapping).isNotNull();
	Map<String, ?> urlMap = urlMapping.getUrlMap();

	// "/path" is a plain view controller rendering "home".
	ParameterizableViewController homeController = (ParameterizableViewController) urlMap.get("/path");
	assertThat(homeController).isNotNull();
	assertThat(homeController.getViewName()).isEqualTo("home");

	// "/old" is configured with a redirect view.
	ParameterizableViewController redirectController = (ParameterizableViewController) urlMap.get("/old");
	assertThat(redirectController).isNotNull();
	assertThat(redirectController.getView()).isInstanceOf(RedirectView.class);

	// "/bad" answers with a bare 404 status code.
	ParameterizableViewController statusController = (ParameterizableViewController) urlMap.get("/bad");
	assertThat(statusController).isNotNull();
	assertThat(statusController.getStatusCode().value()).isEqualTo(404);

	BeanNameUrlHandlerMapping beanNameMapping = this.appContext.getBean(BeanNameUrlHandlerMapping.class);
	assertThat(beanNameMapping).isNotNull();
	assertThat(beanNameMapping.getOrder()).isEqualTo(2);
}
@Test
void testContentNegotiationManager() throws Exception {
	// Verifies the custom ContentNegotiationManager is used for both request
	// media-type resolution and the content-negotiating view resolver.
	loadBeanDefinitions("mvc-config-content-negotiation-manager.xml");

	RequestMappingHandlerMapping mapping = appContext.getBean(RequestMappingHandlerMapping.class);
	ContentNegotiationManager manager = mapping.getContentNegotiationManager();

	// The "format" request parameter must drive media-type resolution.
	MockHttpServletRequest request = new MockHttpServletRequest("GET", "/foo");
	request.setParameter("format", "xml");
	NativeWebRequest webRequest = new ServletWebRequest(request);
	assertThat(manager.resolveMediaTypes(webRequest))
			.containsExactly(MediaType.valueOf("application/rss+xml"));

	ViewResolverComposite compositeResolver = this.appContext.getBean(ViewResolverComposite.class);
	assertThat(compositeResolver).isNotNull();
	assertThat(compositeResolver.getViewResolvers().size())
			.as("Actual: " + compositeResolver.getViewResolvers())
			.isEqualTo(1);

	// The single delegate is the content-negotiating resolver, wired to the
	// very same manager instance used by the handler mapping.
	ViewResolver resolver = compositeResolver.getViewResolvers().get(0);
	assertThat(resolver.getClass()).isEqualTo(ContentNegotiatingViewResolver.class);
	ContentNegotiatingViewResolver cnvr = (ContentNegotiatingViewResolver) resolver;
	assertThat(cnvr.getContentNegotiationManager()).isSameAs(manager);
}
@Test
void testAsyncSupportOptions() {
	loadBeanDefinitions("mvc-config-async-support.xml");

	RequestMappingHandlerAdapter handlerAdapter = appContext.getBean(RequestMappingHandlerAdapter.class);
	assertThat(handlerAdapter).isNotNull();

	// The async settings are not exposed through getters here, so inspect fields directly.
	DirectFieldAccessor adapterFields = new DirectFieldAccessor(handlerAdapter);
	assertThat(adapterFields.getPropertyValue("taskExecutor").getClass()).isEqualTo(ConcurrentTaskExecutor.class);
	assertThat(adapterFields.getPropertyValue("asyncRequestTimeout")).isEqualTo(2500L);

	CallableProcessingInterceptor[] callableInterceptors =
			(CallableProcessingInterceptor[]) adapterFields.getPropertyValue("callableInterceptors");
	assertThat(callableInterceptors).hasSize(1);

	DeferredResultProcessingInterceptor[] deferredResultInterceptors =
			(DeferredResultProcessingInterceptor[]) adapterFields.getPropertyValue("deferredResultInterceptors");
	assertThat(deferredResultInterceptors).hasSize(1);
}
@Test
void testViewResolution() {
	// Verifies the composed view-resolver chain (order and per-resolver settings)
	// plus the FreeMarker/Groovy/ScriptTemplate configurer beans.
	loadBeanDefinitions("mvc-config-view-resolution.xml");

	ViewResolverComposite compositeResolver = this.appContext.getBean(ViewResolverComposite.class);
	assertThat(compositeResolver).isNotNull();
	assertThat(compositeResolver.getViewResolvers().size()).as("Actual: " + compositeResolver.getViewResolvers()).isEqualTo(7);
	assertThat(compositeResolver.getOrder()).isEqualTo(Ordered.LOWEST_PRECEDENCE);

	// Resolver order is significant; each index is checked below.
	List<ViewResolver> resolvers = compositeResolver.getViewResolvers();
	assertThat(resolvers.get(0).getClass()).isEqualTo(BeanNameViewResolver.class);

	ViewResolver resolver = resolvers.get(1);
	assertThat(resolver.getClass()).isEqualTo(InternalResourceViewResolver.class);
	DirectFieldAccessor accessor = new DirectFieldAccessor(resolver);
	assertThat(accessor.getPropertyValue("viewClass")).isEqualTo(InternalResourceView.class);

	// FreeMarker resolver with prefix/suffix, view-name patterns, and cache limit.
	resolver = resolvers.get(2);
	assertThat(resolver).isInstanceOf(FreeMarkerViewResolver.class);
	accessor = new DirectFieldAccessor(resolver);
	assertThat(accessor.getPropertyValue("prefix")).isEqualTo("freemarker-");
	assertThat(accessor.getPropertyValue("suffix")).isEqualTo(".freemarker");
	assertThat((String[]) accessor.getPropertyValue("viewNames")).isEqualTo(new String[] {"my*", "*Report"});
	assertThat(accessor.getPropertyValue("cacheLimit")).isEqualTo(1024);

	resolver = resolvers.get(3);
	assertThat(resolver).isInstanceOf(GroovyMarkupViewResolver.class);
	accessor = new DirectFieldAccessor(resolver);
	assertThat(accessor.getPropertyValue("prefix")).isEqualTo("");
	assertThat(accessor.getPropertyValue("suffix")).isEqualTo(".tpl");
	assertThat(accessor.getPropertyValue("cacheLimit")).isEqualTo(1024);

	resolver = resolvers.get(4);
	assertThat(resolver).isInstanceOf(ScriptTemplateViewResolver.class);
	accessor = new DirectFieldAccessor(resolver);
	assertThat(accessor.getPropertyValue("prefix")).isEqualTo("");
	assertThat(accessor.getPropertyValue("suffix")).isEqualTo("");
	assertThat(accessor.getPropertyValue("cacheLimit")).isEqualTo(1024);

	assertThat(resolvers.get(5).getClass()).isEqualTo(InternalResourceViewResolver.class);
	assertThat(resolvers.get(6).getClass()).isEqualTo(InternalResourceViewResolver.class);

	FreeMarkerConfigurer freeMarkerConfigurer = appContext.getBean(FreeMarkerConfigurer.class);
	assertThat(freeMarkerConfigurer).isNotNull();
	accessor = new DirectFieldAccessor(freeMarkerConfigurer);
	assertThat((String[]) accessor.getPropertyValue("templateLoaderPaths")).isEqualTo(new String[] {"/", "/test"});

	GroovyMarkupConfigurer groovyMarkupConfigurer = appContext.getBean(GroovyMarkupConfigurer.class);
	assertThat(groovyMarkupConfigurer).isNotNull();
	assertThat(groovyMarkupConfigurer.getResourceLoaderPath()).isEqualTo("/test");
	assertThat(groovyMarkupConfigurer.isAutoIndent()).isTrue();
	assertThat(groovyMarkupConfigurer.isCacheTemplates()).isFalse();

	ScriptTemplateConfigurer scriptTemplateConfigurer = appContext.getBean(ScriptTemplateConfigurer.class);
	assertThat(scriptTemplateConfigurer).isNotNull();
	assertThat(scriptTemplateConfigurer.getRenderFunction()).isEqualTo("render");
	assertThat(scriptTemplateConfigurer.getContentType()).isEqualTo(MediaType.TEXT_PLAIN_VALUE);
	assertThat(scriptTemplateConfigurer.getCharset()).isEqualTo(StandardCharsets.ISO_8859_1);
	assertThat(scriptTemplateConfigurer.getResourceLoaderPath()).isEqualTo("classpath:");
	assertThat(scriptTemplateConfigurer.isSharedEngine()).isFalse();
	// The scripts array has no getter; read it via field access.
	String[] scripts = { "org/springframework/web/servlet/view/script/nashorn/render.js" };
	accessor = new DirectFieldAccessor(scriptTemplateConfigurer);
	assertThat((String[]) accessor.getPropertyValue("scripts")).isEqualTo(scripts);
}
@Test
void testViewResolutionWithContentNegotiation() {
	loadBeanDefinitions("mvc-config-view-resolution-content-negotiation.xml");

	ViewResolverComposite compositeResolver = this.appContext.getBean(ViewResolverComposite.class);
	assertThat(compositeResolver).isNotNull();
	assertThat(compositeResolver.getViewResolvers()).hasSize(1);
	assertThat(compositeResolver.getOrder()).isEqualTo(Ordered.HIGHEST_PRECEDENCE);

	// The single delegate must be the content-negotiating resolver wrapping the others.
	List<ViewResolver> delegates = compositeResolver.getViewResolvers();
	assertThat(delegates.get(0).getClass()).isEqualTo(ContentNegotiatingViewResolver.class);
	ContentNegotiatingViewResolver cnvr = (ContentNegotiatingViewResolver) delegates.get(0);
	assertThat(cnvr.getViewResolvers()).hasSize(5);
	assertThat(cnvr.getDefaultViews()).hasSize(1);
	assertThat(cnvr.isUseNotAcceptableStatusCode()).isTrue();

	// Read the resolver's manager via field access and verify the very same
	// instance is registered in the context under both lookups.
	DirectFieldAccessor resolverFields = new DirectFieldAccessor(cnvr);
	ContentNegotiationManager manager =
			(ContentNegotiationManager) resolverFields.getPropertyValue("contentNegotiationManager");
	assertThat(manager).isNotNull();
	assertThat(this.appContext.getBean(ContentNegotiationManager.class)).isSameAs(manager);
	assertThat(this.appContext.getBean("mvcContentNegotiationManager")).isSameAs(manager);
}
@Test
void testViewResolutionWithOrderSet() {
	loadBeanDefinitions("mvc-config-view-resolution-custom-order.xml");

	ViewResolverComposite compositeResolver = this.appContext.getBean(ViewResolverComposite.class);
	assertThat(compositeResolver).isNotNull();
	// Exactly one delegate resolver, and the custom order from the XML is honored.
	assertThat(compositeResolver.getViewResolvers())
			.as("Actual: " + compositeResolver.getViewResolvers())
			.hasSize(1);
	assertThat(compositeResolver.getOrder()).isEqualTo(123);
}
@Test
@SuppressWarnings("removal")
void testPathMatchingHandlerMappings() {
	loadBeanDefinitions("mvc-config-path-matching-mappings.xml");

	// The custom UrlPathHelper / PathMatcher must be applied to the annotation mapping...
	RequestMappingHandlerMapping requestMapping = appContext.getBean(RequestMappingHandlerMapping.class);
	assertThat(requestMapping).isNotNull();
	assertThat(requestMapping.getUrlPathHelper().getClass()).isEqualTo(TestPathHelper.class);
	assertThat(requestMapping.getPathMatcher().getClass()).isEqualTo(TestPathMatcher.class);

	// ...to the view-controller mapping...
	SimpleUrlHandlerMapping viewControllerMapping =
			appContext.getBean(VIEWCONTROLLER_BEAN_NAME, SimpleUrlHandlerMapping.class);
	assertThat(viewControllerMapping).isNotNull();
	assertThat(viewControllerMapping.getUrlPathHelper().getClass()).isEqualTo(TestPathHelper.class);
	assertThat(viewControllerMapping.getPathMatcher().getClass()).isEqualTo(TestPathMatcher.class);

	// ...and to every SimpleUrlHandlerMapping in the context.
	appContext.getBeansOfType(SimpleUrlHandlerMapping.class).forEach((beanName, urlMapping) -> {
		assertThat(urlMapping).isNotNull();
		assertThat(urlMapping.getUrlPathHelper().getClass()).as("path helper for %s", beanName).isEqualTo(TestPathHelper.class);
		assertThat(urlMapping.getPathMatcher().getClass()).as("path matcher for %s", beanName).isEqualTo(TestPathMatcher.class);
	});
}
@Test
void testCorsMinimal() {
	loadBeanDefinitions("mvc-config-cors-minimal.xml");

	String[] mappingBeanNames = appContext.getBeanNamesForType(AbstractHandlerMapping.class);
	assertThat(mappingBeanNames).hasSize(2);

	for (String mappingBeanName : mappingBeanNames) {
		AbstractHandlerMapping handlerMapping = (AbstractHandlerMapping) appContext.getBean(mappingBeanName);
		assertThat(handlerMapping).isNotNull();

		// The CORS source is not exposed via a getter here; read the field directly.
		DirectFieldAccessor mappingFields = new DirectFieldAccessor(handlerMapping);
		Map<String, CorsConfiguration> corsConfigs = ((UrlBasedCorsConfigurationSource) mappingFields
				.getPropertyValue("corsConfigurationSource")).getCorsConfigurations();
		assertThat(corsConfigs).isNotNull();
		assertThat(corsConfigs).hasSize(1);

		// The minimal configuration yields the default CORS settings for "/**".
		CorsConfiguration corsConfig = corsConfigs.get("/**");
		assertThat(corsConfig).isNotNull();
		assertThat(corsConfig.getAllowedOrigins().toArray()).isEqualTo(new String[] {"*"});
		assertThat(corsConfig.getAllowedMethods().toArray()).isEqualTo(new String[] {"GET", "HEAD", "POST"});
		assertThat(corsConfig.getAllowedHeaders().toArray()).isEqualTo(new String[] {"*"});
		assertThat(corsConfig.getExposedHeaders()).isNull();
		assertThat(corsConfig.getAllowCredentials()).isNull();
		assertThat(corsConfig.getMaxAge()).isEqualTo(Long.valueOf(1800));
	}
}
@Test
void testCors() {
	// Verifies that the explicit CORS configuration (two path patterns with distinct
	// settings) is applied identically to both handler mappings in the context.
	loadBeanDefinitions("mvc-config-cors.xml");

	String[] beanNames = appContext.getBeanNamesForType(AbstractHandlerMapping.class);
	assertThat(beanNames).hasSize(2);

	for (String beanName : beanNames) {
		AbstractHandlerMapping handlerMapping = (AbstractHandlerMapping) appContext.getBean(beanName);
		assertThat(handlerMapping).isNotNull();
		// The CORS source is not exposed via a getter here; read the field directly.
		DirectFieldAccessor accessor = new DirectFieldAccessor(handlerMapping);
		Map<String, CorsConfiguration> configs = ((UrlBasedCorsConfigurationSource) accessor
				.getPropertyValue("corsConfigurationSource")).getCorsConfigurations();
		assertThat(configs).isNotNull();
		assertThat(configs).hasSize(2);

		// "/api/**": fully customized settings from the fixture.
		CorsConfiguration config = configs.get("/api/**");
		assertThat(config).isNotNull();
		assertThat(config.getAllowedOrigins().toArray()).isEqualTo(new String[]{"https://domain1.com", "https://domain2.com"});
		assertThat(config.getAllowedOriginPatterns().toArray()).isEqualTo(new String[]{"http://*.domain.com"});
		assertThat(config.getAllowedMethods().toArray()).isEqualTo(new String[]{"GET", "PUT"});
		assertThat(config.getAllowedHeaders().toArray()).isEqualTo(new String[]{"header1", "header2", "header3"});
		assertThat(config.getExposedHeaders().toArray()).isEqualTo(new String[]{"header1", "header2"});
		assertThat(config.getAllowCredentials()).isFalse();
		assertThat(config.getMaxAge()).isEqualTo(Long.valueOf(123));

		// "/resources/**": only the origin is customized; the rest are defaults.
		config = configs.get("/resources/**");
		assertThat(config.getAllowedOrigins().toArray()).isEqualTo(new String[]{"https://domain1.com"});
		assertThat(config.getAllowedMethods().toArray()).isEqualTo(new String[]{"GET", "HEAD", "POST"});
		assertThat(config.getAllowedHeaders().toArray()).isEqualTo(new String[]{"*"});
		assertThat(config.getExposedHeaders()).isNull();
		assertThat(config.getAllowCredentials()).isNull();
		assertThat(config.getMaxAge()).isEqualTo(1800L);
	}
}
private void loadBeanDefinitions(String fileName) {
this.appContext.setConfigLocation("classpath:org/springframework/web/servlet/config/" + fileName);
this.appContext.refresh();
}
@DateTimeFormat(iso = ISO.DATE)
@Target(ElementType.PARAMETER)
@Retention(RetentionPolicy.RUNTIME)
public @ | MvcNamespaceTests |
java | spring-projects__spring-boot | module/spring-boot-data-mongodb-test/src/dockerTest/java/org/springframework/boot/data/mongodb/test/autoconfigure/TransactionalDataMongoTestIntegrationTests.java | {
"start": 2398,
"end": 2644
} | class ____ {
@Bean
MongoTransactionManager mongoTransactionManager(MongoDatabaseFactory dbFactory) {
return new MongoTransactionManager(dbFactory);
}
}
@TestConfiguration(proxyBeanMethods = false)
static | TransactionManagerConfiguration |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/samples/standalone/MultipartControllerTests.java | {
"start": 10052,
"end": 13014
} | class ____ {
@PostMapping("/multipartfile")
public String processMultipartFile(@RequestParam(required = false) MultipartFile file,
@RequestPart(required = false) Map<String, String> json) {
return "redirect:/index";
}
@PutMapping("/multipartfile-via-put")
public String processMultipartFileViaHttpPut(@RequestParam(required = false) MultipartFile file,
@RequestPart(required = false) Map<String, String> json) {
return processMultipartFile(file, json);
}
@PostMapping("/multipartfilearray")
public String processMultipartFileArray(@RequestParam(required = false) MultipartFile[] file,
@RequestPart(required = false) Map<String, String> json) throws IOException {
if (file != null && file.length > 0) {
byte[] content = file[0].getBytes();
assertThat(file[1].getBytes()).isEqualTo(content);
}
return "redirect:/index";
}
@PostMapping("/multipartfilelist")
public String processMultipartFileList(@RequestParam(required = false) List<MultipartFile> file,
@RequestPart(required = false) Map<String, String> json) throws IOException {
if (!CollectionUtils.isEmpty(file)) {
byte[] content = file.get(0).getBytes();
assertThat(file.get(1).getBytes()).isEqualTo(content);
}
return "redirect:/index";
}
@PostMapping("/optionalfile")
public String processOptionalFile(
@RequestParam Optional<MultipartFile> file, @RequestPart Map<String, String> json) {
return "redirect:/index";
}
@PostMapping("/optionalfilearray")
public String processOptionalFileArray(
@RequestParam Optional<MultipartFile[]> file, @RequestPart Map<String, String> json)
throws IOException {
if (file.isPresent()) {
byte[] content = file.get()[0].getBytes();
assertThat(file.get()[1].getBytes()).isEqualTo(content);
}
return "redirect:/index";
}
@PostMapping("/optionalfilelist")
public String processOptionalFileList(
@RequestParam Optional<List<MultipartFile>> file, @RequestPart Map<String, String> json)
throws IOException {
if (file.isPresent()) {
byte[] content = file.get().get(0).getBytes();
assertThat(file.get().get(1).getBytes()).isEqualTo(content);
}
return "redirect:/index";
}
@PostMapping("/part")
public String processPart(@RequestPart Part part, @RequestPart Map<String, String> json) {
return "redirect:/index";
}
@PostMapping("/json")
public String processMultipart(@RequestPart Map<String, String> json) {
return "redirect:/index";
}
@PostMapping("/multipartfilebinding")
public String processMultipartFileBean(
MultipartFileBean multipartFileBean, RedirectAttributes model, BindingResult bindingResult)
throws IOException {
if (!bindingResult.hasErrors()) {
MultipartFile file = multipartFileBean.getFile();
if (file != null) {
model.addAttribute("fileContent", file.getBytes());
}
}
return "redirect:/index";
}
}
private static | MultipartController |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/entitlement/bootstrap/TestEntitlementBootstrap.java | {
"start": 7855,
"end": 8095
} | class ____")
private static InputStream getStream(URL resource) throws IOException {
return resource.openStream();
}
private record TestPluginData(String pluginName, boolean isModular, boolean isExternalPlugin) {}
}
| loader |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableSkipLast.java | {
"start": 865,
"end": 1255
} | class ____<T> extends AbstractObservableWithUpstream<T, T> {
final int skip;
public ObservableSkipLast(ObservableSource<T> source, int skip) {
super(source);
this.skip = skip;
}
@Override
public void subscribeActual(Observer<? super T> observer) {
source.subscribe(new SkipLastObserver<>(observer, skip));
}
static final | ObservableSkipLast |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/functions/BroadcastVariableInitializer.java | {
"start": 1913,
"end": 3019
} | class ____ extends RichMapFunction<Long, String> {
*
* private Map<Long, String> map;
*
* public void open(OpenContext ctx) throws Exception {
* getRuntimeContext().getBroadcastVariableWithInitializer("mapvar",
* new BroadcastVariableInitializer<Tuple2<Long, String>, Map<Long, String>>() {
*
* public Map<Long, String> initializeBroadcastVariable(Iterable<Tuple2<Long, String>> data) {
* Map<Long, String> map = new HashMap<>();
*
* for (Tuple2<Long, String> t : data) {
* map.put(t.f0, t.f1);
* }
*
* return map;
* }
* });
* }
*
* public String map(Long value) {
* // replace the long by the String, based on the map
* return map.get(value);
* }
* }
*
* }</pre>
*
* @param <T> The type of the elements in the list of the original untransformed broadcast variable.
* @param <O> The type of the transformed broadcast variable.
*/
@Public
@FunctionalInterface
public | MyFunction |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/SetPropertyProcessor.java | {
"start": 1221,
"end": 3077
} | class ____ extends BaseProcessorSupport implements Traceable, IdAware, RouteIdAware {
private String id;
private String routeId;
private final Expression propertyName;
private final Expression expression;
public SetPropertyProcessor(Expression propertyName, Expression expression) {
this.propertyName = propertyName;
this.expression = expression;
ObjectHelper.notNull(propertyName, "propertyName");
ObjectHelper.notNull(expression, "expression");
}
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
try {
Object newProperty = expression.evaluate(exchange, Object.class);
if (exchange.getException() != null) {
// the expression threw an exception so we should break-out
callback.done(true);
return true;
}
String key = propertyName.evaluate(exchange, String.class);
exchange.setProperty(key, newProperty);
} catch (Exception e) {
exchange.setException(e);
}
callback.done(true);
return true;
}
@Override
public String toString() {
return id;
}
@Override
public String getTraceLabel() {
return "setProperty[" + propertyName + ", " + expression + "]";
}
@Override
public String getId() {
return id;
}
@Override
public void setId(String id) {
this.id = id;
}
@Override
public String getRouteId() {
return routeId;
}
@Override
public void setRouteId(String routeId) {
this.routeId = routeId;
}
public String getPropertyName() {
return propertyName.toString();
}
public Expression getExpression() {
return expression;
}
}
| SetPropertyProcessor |
java | bumptech__glide | library/test/src/test/java/com/bumptech/glide/load/engine/prefill/BitmapPreFillerTest.java | {
"start": 1280,
"end": 11957
} | class ____ {
private static final int DEFAULT_BITMAP_WIDTH = 100;
private static final int DEFAULT_BITMAP_HEIGHT = 50;
private static final int BITMAPS_IN_POOL = 10;
private static final int BITMAPS_IN_CACHE = 10;
private final Bitmap.Config defaultBitmapConfig = PreFillType.DEFAULT_CONFIG;
private final Bitmap defaultBitmap =
Bitmap.createBitmap(DEFAULT_BITMAP_WIDTH, DEFAULT_BITMAP_HEIGHT, defaultBitmapConfig);
private final long defaultBitmapSize = Util.getBitmapByteSize(defaultBitmap);
private final long poolSize = BITMAPS_IN_CACHE * defaultBitmapSize;
private final long cacheSize = BITMAPS_IN_POOL * defaultBitmapSize;
@Mock private BitmapPool pool;
@Mock private MemoryCache cache;
private BitmapPreFiller bitmapPreFiller;
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
when(pool.getMaxSize()).thenReturn(poolSize);
when(pool.getDirty(anyInt(), anyInt(), any(Bitmap.Config.class)))
.thenAnswer(new CreateBitmap());
when(cache.getMaxSize()).thenReturn(cacheSize);
bitmapPreFiller = new BitmapPreFiller(cache, pool, DecodeFormat.DEFAULT);
}
@Test
public void testAllocationOrderContainsEnoughSizesToFillPoolAndMemoryCache() {
PreFillQueue allocationOrder =
bitmapPreFiller.generateAllocationOrder(
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH, DEFAULT_BITMAP_HEIGHT)
.setConfig(defaultBitmapConfig)
.build());
assertEquals(BITMAPS_IN_POOL + BITMAPS_IN_CACHE, allocationOrder.getSize());
}
@Test
public void testAllocationOrderThatDoesNotFitExactlyIntoGivenSizeRoundsDown() {
PreFillType[] sizes =
new PreFillType[] {
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH, DEFAULT_BITMAP_HEIGHT)
.setConfig(defaultBitmapConfig)
.build(),
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH / 2, DEFAULT_BITMAP_HEIGHT)
.setConfig(defaultBitmapConfig)
.build(),
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH, DEFAULT_BITMAP_HEIGHT / 2)
.setConfig(defaultBitmapConfig)
.build(),
};
PreFillQueue allocationOrder = bitmapPreFiller.generateAllocationOrder(sizes);
int byteSize = 0;
while (!allocationOrder.isEmpty()) {
PreFillType current = allocationOrder.remove();
byteSize +=
Util.getBitmapByteSize(current.getWidth(), current.getHeight(), current.getConfig());
}
int expectedSize = 0;
long maxSize = poolSize + cacheSize;
for (PreFillType current : sizes) {
int currentSize =
Util.getBitmapByteSize(current.getWidth(), current.getHeight(), current.getConfig());
// See https://errorprone.info/bugpattern/NarrowingCompoundAssignment.
expectedSize = (int) (expectedSize + (currentSize * (maxSize / (3 * currentSize))));
}
assertEquals(expectedSize, byteSize);
}
@Test
public void testAllocationOrderDoesNotOverFillWithMultipleSizes() {
PreFillQueue allocationOrder =
bitmapPreFiller.generateAllocationOrder(
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH, DEFAULT_BITMAP_HEIGHT)
.setConfig(defaultBitmapConfig)
.build(),
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH / 2, DEFAULT_BITMAP_HEIGHT)
.setConfig(defaultBitmapConfig)
.build(),
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH, DEFAULT_BITMAP_HEIGHT / 2)
.setConfig(defaultBitmapConfig)
.build());
long byteSize = 0;
while (!allocationOrder.isEmpty()) {
PreFillType current = allocationOrder.remove();
byteSize +=
Util.getBitmapByteSize(current.getWidth(), current.getHeight(), current.getConfig());
}
assertThat(byteSize).isIn(Range.atMost(poolSize + cacheSize));
}
@Test
public void testAllocationOrderDoesNotOverFillWithMultipleSizesAndWeights() {
PreFillQueue allocationOrder =
bitmapPreFiller.generateAllocationOrder(
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH, DEFAULT_BITMAP_HEIGHT)
.setConfig(defaultBitmapConfig)
.setWeight(4)
.build(),
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH / 2, DEFAULT_BITMAP_HEIGHT)
.setConfig(defaultBitmapConfig)
.build(),
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH, DEFAULT_BITMAP_HEIGHT / 3)
.setConfig(defaultBitmapConfig)
.setWeight(3)
.build());
long byteSize = 0;
while (!allocationOrder.isEmpty()) {
PreFillType current = allocationOrder.remove();
byteSize +=
Util.getBitmapByteSize(current.getWidth(), current.getHeight(), current.getConfig());
}
assertThat(byteSize).isIn(Range.atMost(poolSize + cacheSize));
}
@Test
public void testAllocationOrderContainsSingleSizeIfSingleSizeIsProvided() {
PreFillQueue allocationOrder =
bitmapPreFiller.generateAllocationOrder(
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH, DEFAULT_BITMAP_HEIGHT)
.setConfig(defaultBitmapConfig)
.build());
while (!allocationOrder.isEmpty()) {
PreFillType size = allocationOrder.remove();
assertEquals(DEFAULT_BITMAP_WIDTH, size.getWidth());
assertEquals(DEFAULT_BITMAP_HEIGHT, size.getHeight());
assertEquals(defaultBitmapConfig, size.getConfig());
}
}
@Test
public void testAllocationOrderSplitsEvenlyBetweenEqualSizesWithEqualWeights() {
PreFillType smallWidth =
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH / 2, DEFAULT_BITMAP_HEIGHT)
.setConfig(defaultBitmapConfig)
.build();
PreFillType smallHeight =
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH, DEFAULT_BITMAP_HEIGHT / 2)
.setConfig(defaultBitmapConfig)
.build();
PreFillQueue allocationOrder = bitmapPreFiller.generateAllocationOrder(smallWidth, smallHeight);
int numSmallWidth = 0;
int numSmallHeight = 0;
while (!allocationOrder.isEmpty()) {
PreFillType current = allocationOrder.remove();
if (smallWidth.equals(current)) {
numSmallWidth++;
} else if (smallHeight.equals(current)) {
numSmallHeight++;
} else {
fail("Unexpected size, size: " + current);
}
}
assertEquals(numSmallWidth, numSmallHeight);
}
@Test
public void testAllocationOrderSplitsByteSizeEvenlyBetweenUnEqualSizesWithEqualWeights() {
PreFillType smallWidth =
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH / 2, DEFAULT_BITMAP_HEIGHT)
.setConfig(defaultBitmapConfig)
.build();
PreFillType normal =
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH, DEFAULT_BITMAP_HEIGHT)
.setConfig(defaultBitmapConfig)
.build();
PreFillQueue allocationOrder = bitmapPreFiller.generateAllocationOrder(smallWidth, normal);
int numSmallWidth = 0;
int numNormal = 0;
while (!allocationOrder.isEmpty()) {
PreFillType current = allocationOrder.remove();
if (smallWidth.equals(current)) {
numSmallWidth++;
} else if (normal.equals(current)) {
numNormal++;
} else {
fail("Unexpected size, size: " + current);
}
}
assertEquals(2 * numNormal, numSmallWidth);
}
@Test
public void testAllocationOrderSplitsByteSizeUnevenlyBetweenEqualSizesWithUnequalWeights() {
PreFillType doubleWeight =
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH / 2, DEFAULT_BITMAP_HEIGHT)
.setConfig(defaultBitmapConfig)
.setWeight(2)
.build();
PreFillType normal =
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH, DEFAULT_BITMAP_HEIGHT / 2)
.setConfig(defaultBitmapConfig)
.build();
PreFillQueue allocationOrder = bitmapPreFiller.generateAllocationOrder(doubleWeight, normal);
int numDoubleWeight = 0;
int numNormal = 0;
while (!allocationOrder.isEmpty()) {
PreFillType current = allocationOrder.remove();
if (doubleWeight.equals(current)) {
numDoubleWeight++;
} else if (normal.equals(current)) {
numNormal++;
} else {
fail("Unexpected size, size: " + current);
}
}
assertEquals(2 * numNormal, numDoubleWeight);
}
@Test
public void testAllocationOrderRoundRobinsDifferentSizes() {
when(pool.getMaxSize()).thenReturn(defaultBitmapSize);
when(cache.getMaxSize()).thenReturn(defaultBitmapSize);
PreFillType smallWidth =
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH / 2, DEFAULT_BITMAP_HEIGHT)
.setConfig(defaultBitmapConfig)
.build();
PreFillType smallHeight =
new PreFillType.Builder(DEFAULT_BITMAP_WIDTH, DEFAULT_BITMAP_HEIGHT / 2)
.setConfig(defaultBitmapConfig)
.build();
PreFillQueue allocationOrder = bitmapPreFiller.generateAllocationOrder(smallWidth, smallHeight);
List<PreFillType> attributes = new ArrayList<>();
while (!allocationOrder.isEmpty()) {
attributes.add(allocationOrder.remove());
}
// Either width, height, width, height or height, width, height, width.
try {
assertThat(attributes)
.containsExactly(smallWidth, smallHeight, smallWidth, smallHeight)
.inOrder();
} catch (AssertionError e) {
assertThat(attributes)
.containsExactly(smallHeight, smallWidth, smallHeight, smallWidth)
.inOrder();
}
}
@Test
@SuppressWarnings("deprecation")
public void testSetsConfigOnBuildersToDefaultIfNotSet() {
PreFillType.Builder builder = mock(PreFillType.Builder.class);
when(builder.build())
.thenReturn(new PreFillType.Builder(100).setConfig(Bitmap.Config.RGB_565).build());
bitmapPreFiller.preFill(builder);
InOrder order = inOrder(builder);
order
.verify(builder)
.setConfig(
DecodeFormat.DEFAULT == DecodeFormat.PREFER_ARGB_8888
? Bitmap.Config.ARGB_8888
: Bitmap.Config.RGB_565);
order.verify(builder).build();
}
@Test
public void testDoesNotSetConfigOnBuildersIfConfigIsAlreadySet() {
PreFillType.Builder builder = mock(PreFillType.Builder.class);
when(builder.getConfig()).thenReturn(Bitmap.Config.ARGB_4444);
when(builder.build())
.thenReturn(new PreFillType.Builder(100).setConfig(Bitmap.Config.ARGB_4444).build());
bitmapPreFiller.preFill(builder);
verify(builder, never()).setConfig(any(Bitmap.Config.class));
}
}
| BitmapPreFillerTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/records/RecordBasicsTest.java | {
"start": 653,
"end": 11577
} | class ____ extends DatabindTestUtil
{
record EmptyRecord() { }
record SimpleRecord(int id, String name) { }
record RecordOfRecord(SimpleRecord record) { }
record RecordWithRename(int id, @JsonProperty("rename")String name) { }
record RecordWithHeaderInject(int id, @JacksonInject String name) { }
record RecordWithConstructorInject(int id, String name) {
RecordWithConstructorInject(int id, @JacksonInject String name) {
this.id = id;
this.name = name;
}
}
// [databind#2992]
@JsonNaming(PropertyNamingStrategies.SnakeCaseStrategy.class)
record SnakeRecord(String myId, String myValue){}
record RecordWithJsonDeserialize(int id, @JsonDeserialize(converter = StringTrimmer.class) String name) { }
record RecordSingleWriteOnly(@JsonProperty(access = JsonProperty.Access.WRITE_ONLY) int id) { }
record RecordSomeWriteOnly(
@JsonProperty(access = JsonProperty.Access.WRITE_ONLY) int id,
@JsonProperty(access = JsonProperty.Access.WRITE_ONLY) String name,
String email) {
}
record RecordAllWriteOnly(
@JsonProperty(access = JsonProperty.Access.WRITE_ONLY) int id,
@JsonProperty(access = JsonProperty.Access.WRITE_ONLY) String name,
@JsonProperty(access = JsonProperty.Access.WRITE_ONLY) String email) {
}
private final ObjectMapper MAPPER = newJsonMapper();
/*
/**********************************************************************
/* Test methods, Record type introspection
/**********************************************************************
*/
@Test
public void testClassUtil() {
assertFalse(ClassUtil.isRecordType(getClass()));
assertTrue(ClassUtil.isRecordType(SimpleRecord.class));
assertTrue(ClassUtil.isRecordType(RecordOfRecord.class));
assertTrue(ClassUtil.isRecordType(RecordWithRename.class));
}
@Test
public void testRecordJavaType() {
assertFalse(MAPPER.constructType(getClass()).isRecordType());
assertTrue(MAPPER.constructType(SimpleRecord.class).isRecordType());
assertTrue(MAPPER.constructType(RecordOfRecord.class).isRecordType());
assertTrue(MAPPER.constructType(RecordWithRename.class).isRecordType());
}
/*
/**********************************************************************
/* Test methods, default reading/writing Record values
/**********************************************************************
*/
@Test
public void testSerializeSimpleRecord() throws Exception {
String json = MAPPER.writeValueAsString(new SimpleRecord(123, "Bob"));
final Object EXP = map("id", Integer.valueOf(123), "name", "Bob");
assertEquals(EXP, MAPPER.readValue(json, Object.class));
}
@Test
public void testDeserializeSimpleRecord() throws Exception {
assertEquals(new SimpleRecord(123, "Bob"),
MAPPER.readValue("{\"id\":123,\"name\":\"Bob\"}", SimpleRecord.class));
}
@Test
public void testSerializeEmptyRecord() throws Exception {
assertEquals("{}", MAPPER.writeValueAsString(new EmptyRecord()));
}
@Test
public void testDeserializeEmptyRecord() throws Exception {
assertEquals(new EmptyRecord(),
MAPPER.readValue("{}", EmptyRecord.class));
}
@Test
public void testSerializeRecordOfRecord() throws Exception {
RecordOfRecord record = new RecordOfRecord(new SimpleRecord(123, "Bob"));
String json = MAPPER.writeValueAsString(record);
final Object EXP = Collections.singletonMap("record",
map("id", Integer.valueOf(123), "name", "Bob"));
assertEquals(EXP, MAPPER.readValue(json, Object.class));
}
@Test
public void testDeserializeRecordOfRecord() throws Exception {
assertEquals(new RecordOfRecord(new SimpleRecord(123, "Bob")),
MAPPER.readValue("{\"record\":{\"id\":123,\"name\":\"Bob\"}}",
RecordOfRecord.class));
}
/*
/**********************************************************************
/* Test methods, reading/writing Record values with different config
/**********************************************************************
*/
@Test
public void testSerializeSimpleRecord_DisableAnnotationIntrospector() throws Exception {
SimpleRecord record = new SimpleRecord(123, "Bob");
JsonMapper mapper = JsonMapper.builder()
.configure(MapperFeature.USE_ANNOTATIONS, false)
.build();
String json = mapper.writeValueAsString(record);
assertEquals("{\"id\":123,\"name\":\"Bob\"}", json);
}
@Test
public void testDeserializeSimpleRecord_DisableAnnotationIntrospector() throws Exception {
JsonMapper mapper = JsonMapper.builder()
.configure(MapperFeature.USE_ANNOTATIONS, false)
.build();
SimpleRecord value = mapper.readValue("{\"id\":123,\"name\":\"Bob\"}", SimpleRecord.class);
assertEquals(new SimpleRecord(123, "Bob"), value);
}
/*
/**********************************************************************
/* Test methods, renames, injects
/**********************************************************************
*/
@Test
public void testSerializeJsonRename() throws Exception {
String json = MAPPER.writeValueAsString(new RecordWithRename(123, "Bob"));
final Object EXP = map("id", Integer.valueOf(123), "rename", "Bob");
assertEquals(EXP, MAPPER.readValue(json, Object.class));
}
@Test
public void testDeserializeJsonRename() throws Exception {
RecordWithRename value = MAPPER.readValue("{\"id\":123,\"rename\":\"Bob\"}",
RecordWithRename.class);
assertEquals(new RecordWithRename(123, "Bob"), value);
}
// Confirmation of fix of [databind#4218]
@Test
public void testDeserializeHeaderInjectRecord4218() throws Exception {
ObjectReader reader = MAPPER.readerFor(RecordWithHeaderInject.class)
.with(new InjectableValues.Std().addValue(String.class, "Bob"));
assertNotNull(reader.readValue("{\"id\":123}"));
}
@Test
public void testDeserializeConstructorInjectRecord4218() throws Exception {
ObjectReader reader = MAPPER.readerFor(RecordWithConstructorInject.class)
.with(new InjectableValues.Std().addValue(String.class, "Bob"));
RecordWithConstructorInject value = reader.readValue("{\"id\":123}");
assertEquals(new RecordWithConstructorInject(123, "Bob"), value);
}
/*
/**********************************************************************
/* Test methods, naming strategy
/**********************************************************************
*/
// [databind#2992]
@Test
public void testNamingStrategy() throws Exception
{
SnakeRecord input = new SnakeRecord("123", "value");
String json = MAPPER.writeValueAsString(input);
assertEquals("{\"my_id\":\"123\",\"my_value\":\"value\"}", json);
SnakeRecord output = MAPPER.readValue(json, SnakeRecord.class);
assertEquals(input, output);
}
/*
/**********************************************************************
/* Test methods, JsonDeserialize
/**********************************************************************
*/
@Test
public void testDeserializeJsonDeserializeRecord() throws Exception {
RecordWithJsonDeserialize value = MAPPER.readValue("{\"id\":123,\"name\":\" Bob \"}", RecordWithJsonDeserialize.class);
assertEquals(new RecordWithJsonDeserialize(123, "Bob"), value);
}
/*
/**********************************************************************
/* Test methods, JsonProperty(access=WRITE_ONLY)
/**********************************************************************
*/
@Test
public void testSerialize_SingleWriteOnlyParameter() throws Exception {
String json = MAPPER.writeValueAsString(new RecordSingleWriteOnly(123));
assertEquals("{}", json);
}
// [databind#3897]
@Test
public void testDeserialize_SingleWriteOnlyParameter() throws Exception {
RecordSingleWriteOnly value = MAPPER.readValue("{\"id\":123}", RecordSingleWriteOnly.class);
assertEquals(new RecordSingleWriteOnly(123), value);
}
@Test
public void testSerialize_SomeWriteOnlyParameter() throws Exception {
String json = MAPPER.writeValueAsString(new RecordSomeWriteOnly(123, "Bob", "bob@example.com"));
assertEquals("{\"email\":\"bob@example.com\"}", json);
}
@Test
public void testDeserialize_SomeWriteOnlyParameter() throws Exception {
RecordSomeWriteOnly value = MAPPER.readValue(
"{\"id\":123,\"name\":\"Bob\",\"email\":\"bob@example.com\"}",
RecordSomeWriteOnly.class);
assertEquals(new RecordSomeWriteOnly(123, "Bob", "bob@example.com"), value);
}
@Test
public void testSerialize_AllWriteOnlyParameter() throws Exception {
String json = MAPPER.writeValueAsString(new RecordAllWriteOnly(123, "Bob", "bob@example.com"));
assertEquals("{}", json);
}
@Test
public void testDeserialize_AllWriteOnlyParameter() throws Exception {
RecordAllWriteOnly value = MAPPER.readValue(
"{\"id\":123,\"name\":\"Bob\",\"email\":\"bob@example.com\"}",
RecordAllWriteOnly.class);
assertEquals(new RecordAllWriteOnly(123, "Bob", "bob@example.com"), value);
}
/*
/**********************************************************************
/* Test method(s), MapperFeature.REQUIRE_SETTERS_FOR_GETTERS
/**********************************************************************
*/
// [databind#4678]
@Test
public void testSerializeWithSettersForGetters() throws Exception {
ObjectMapper mapperWithSetters = JsonMapper.builder()
.configure(MapperFeature.REQUIRE_SETTERS_FOR_GETTERS, true)
.build();
var input = new SimpleRecord(123, "Bob");
assertEquals(MAPPER.writeValueAsString(input),
mapperWithSetters.writeValueAsString(input));
}
/*
/**********************************************************************
/* Internal helper methods
/**********************************************************************
*/
private Map<String,Object> map(String key1, Object value1,
String key2, Object value2) {
final Map<String, Object> result = new LinkedHashMap<>();
result.put(key1, value1);
result.put(key2, value2);
return result;
}
public static | RecordBasicsTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/fetch/batch/SimpleBatchFetchBaselineTests.java | {
"start": 1331,
"end": 3552
} | class ____ {
@Test
public void baselineTest(SessionFactoryScope scope) {
final SQLStatementInspector statementInspector = scope.getCollectingStatementInspector();
statementInspector.clear();
scope.inTransaction( (session) -> {
final EmployeeGroup group1 = session.getReference( EmployeeGroup.class, 1 );
final EmployeeGroup group2 = session.getReference( EmployeeGroup.class, 2 );
assertThat( Hibernate.isInitialized( group1 ) ).isFalse();
assertThat( Hibernate.isInitialized( group2 ) ).isFalse();
assertThat( statementInspector.getSqlQueries() ).hasSize( 0 );
Hibernate.initialize( group1 );
assertThat( Hibernate.isInitialized( group1 ) ).isTrue();
assertThat( Hibernate.isInitialized( group2 ) ).isFalse();
assertThat( statementInspector.getSqlQueries() ).hasSize( 1 );
assertThat( Hibernate.isInitialized( group1.getEmployees() ) ).isFalse();
Hibernate.initialize( group1.getEmployees() );
assertThat( Hibernate.isInitialized( group1 ) ).isTrue();
assertThat( Hibernate.isInitialized( group2 ) ).isFalse();
assertThat( statementInspector.getSqlQueries() ).hasSize( 2 );
assertThat( Hibernate.isInitialized( group1.getEmployees() ) ).isTrue();
assertThat( ( (PersistentCollection) group1.getEmployees() ).getOwner() ).isNotInstanceOf( HibernateProxy.class );
} );
}
@BeforeEach
public void prepareTestData(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final EmployeeGroup group1 = new EmployeeGroup(1, "QA");
final Employee employee1 = new Employee(100, "Jane");
final Employee employee2 = new Employee(101, "Jeff");
group1.addEmployee(employee1);
group1.addEmployee(employee2);
EmployeeGroup group2 = new EmployeeGroup(2, "R&D");
Employee employee3 = new Employee(200, "Joan");
Employee employee4 = new Employee(201, "John");
group2.addEmployee(employee3);
group2.addEmployee( employee4 );
session.persist( group1 );
session.persist( group2 );
} );
}
@AfterEach
public void dropTestData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Entity( name="EmployeeGroup")
@Table(name = "EmployeeGroup")
public static | SimpleBatchFetchBaselineTests |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/HttpSecurityHeadersTests.java | {
"start": 2290,
"end": 3484
} | class ____ {
@Autowired
WebApplicationContext wac;
@Autowired
Filter springSecurityFilterChain;
MockMvc mockMvc;
@BeforeEach
public void setup() {
this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).addFilters(this.springSecurityFilterChain).build();
}
// gh-2953
// gh-3975
@Test
public void headerWhenSpringMvcResourceThenCacheRelatedHeadersReset() throws Exception {
// @formatter:off
this.mockMvc.perform(get("/resources/file.js"))
.andExpect(status().isOk())
.andExpect(header().string(HttpHeaders.CACHE_CONTROL, "max-age=12345"))
.andExpect(header().doesNotExist(HttpHeaders.PRAGMA))
.andExpect(header().doesNotExist(HttpHeaders.EXPIRES));
// @formatter:on
}
@Test
public void headerWhenNotSpringResourceThenCacheRelatedHeadersSet() throws Exception {
// @formatter:off
this.mockMvc.perform(get("/notresource"))
.andExpect(header().string(HttpHeaders.CACHE_CONTROL, "no-cache, no-store, max-age=0, must-revalidate"))
.andExpect(header().string(HttpHeaders.PRAGMA, "no-cache"))
.andExpect(header().string(HttpHeaders.EXPIRES, "0"));
// @formatter:on
}
@Configuration
@EnableWebSecurity
static | HttpSecurityHeadersTests |
java | greenrobot__EventBus | EventBus/src/org/greenrobot/eventbus/BackgroundPoster.java | {
"start": 779,
"end": 2418
} | class ____ implements Runnable, Poster {
private final PendingPostQueue queue;
private final EventBus eventBus;
private volatile boolean executorRunning;
BackgroundPoster(EventBus eventBus) {
this.eventBus = eventBus;
queue = new PendingPostQueue();
}
public void enqueue(Subscription subscription, Object event) {
PendingPost pendingPost = PendingPost.obtainPendingPost(subscription, event);
synchronized (this) {
queue.enqueue(pendingPost);
if (!executorRunning) {
executorRunning = true;
eventBus.getExecutorService().execute(this);
}
}
}
@Override
public void run() {
try {
try {
while (true) {
PendingPost pendingPost = queue.poll(1000);
if (pendingPost == null) {
synchronized (this) {
// Check again, this time in synchronized
pendingPost = queue.poll();
if (pendingPost == null) {
executorRunning = false;
return;
}
}
}
eventBus.invokeSubscriber(pendingPost);
}
} catch (InterruptedException e) {
eventBus.getLogger().log(Level.WARNING, Thread.currentThread().getName() + " was interrupted", e);
}
} finally {
executorRunning = false;
}
}
}
| BackgroundPoster |
java | resilience4j__resilience4j | resilience4j-commons-configuration/src/main/java/io/github/resilience4j/commons/configuration/util/ClassParseUtil.java | {
"start": 1871,
"end": 2643
} | class ____ assignable to the target type.
* @param className - String representation of class
* @param targetClassType - Target Class type
* @return - Converted Class type
*/
public static <T> Class<? extends T> convertStringToClassType(String className, Class<? extends T> targetClassType) {
try {
Class<?> clazz = Class.forName(className);
if (!targetClassType.isAssignableFrom(clazz)){
throw new ConfigParseException("Class " + className + " is not a subclass of " + targetClassType.getName());
}
return (Class<? extends T>) clazz;
} catch (ClassNotFoundException e) {
throw new ConfigParseException("Class not found: " + className, e);
}
}
}
| is |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/observers/ifexists/DependentReceptionIfExistsTest.java | {
"start": 1060,
"end": 1195
} | class ____ {
void observeString(@Observes(notifyObserver = Reception.IF_EXISTS) String value) {
}
}
}
| DependentObserver |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java | {
"start": 460,
"end": 779
} | class ____ extends ActionType<ClearRolesCacheResponse> {
public static final ClearRolesCacheAction INSTANCE = new ClearRolesCacheAction();
public static final String NAME = "cluster:admin/xpack/security/roles/cache/clear";
protected ClearRolesCacheAction() {
super(NAME);
}
}
| ClearRolesCacheAction |
java | apache__kafka | clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/consumer/PlaintextConsumerCommitTest.java | {
"start": 3630,
"end": 26159
} | class ____ {
public static final int BROKER_COUNT = 3;
public static final String OFFSETS_TOPIC_PARTITIONS = "1";
public static final String OFFSETS_TOPIC_REPLICATION = "3";
private final ClusterInstance cluster;
private final String topic = "topic";
private final TopicPartition tp = new TopicPartition(topic, 0);
private final TopicPartition tp1 = new TopicPartition(topic, 1);
public PlaintextConsumerCommitTest(ClusterInstance clusterInstance) {
this.cluster = clusterInstance;
}
@BeforeEach
public void setup() throws InterruptedException {
cluster.createTopic(topic, 2, (short) BROKER_COUNT);
}
@ClusterTest
public void testClassicConsumerAutoCommitOnClose() throws InterruptedException {
testAutoCommitOnClose(GroupProtocol.CLASSIC);
}
@ClusterTest
public void testAsyncConsumerAutoCommitOnClose() throws InterruptedException {
testAutoCommitOnClose(GroupProtocol.CONSUMER);
}
private void testAutoCommitOnClose(GroupProtocol groupProtocol) throws InterruptedException {
try (var consumer = createConsumer(groupProtocol, true)) {
sendRecords(cluster, tp, 1000);
consumer.subscribe(List.of(topic));
awaitAssignment(consumer, Set.of(tp, tp1));
// should auto-commit sought positions before closing
consumer.seek(tp, 300);
consumer.seek(tp1, 500);
}
// now we should see the committed positions from another consumer
try (var anotherConsumer = createConsumer(groupProtocol, true)) {
assertEquals(300, anotherConsumer.committed(Set.of(tp)).get(tp).offset());
assertEquals(500, anotherConsumer.committed(Set.of(tp1)).get(tp1).offset());
}
}
@ClusterTest
public void testClassicConsumerAutoCommitOnCloseAfterWakeup() throws InterruptedException {
testAutoCommitOnCloseAfterWakeup(GroupProtocol.CLASSIC);
}
@ClusterTest
public void testAsyncConsumerAutoCommitOnCloseAfterWakeup() throws InterruptedException {
testAutoCommitOnCloseAfterWakeup(GroupProtocol.CONSUMER);
}
private void testAutoCommitOnCloseAfterWakeup(GroupProtocol groupProtocol) throws InterruptedException {
try (var consumer = createConsumer(groupProtocol, true)) {
sendRecords(cluster, tp, 1000);
consumer.subscribe(List.of(topic));
awaitAssignment(consumer, Set.of(tp, tp1));
// should auto-commit sought positions before closing
consumer.seek(tp, 300);
consumer.seek(tp1, 500);
// wakeup the consumer before closing to simulate trying to break a poll
// loop from another thread
consumer.wakeup();
}
// now we should see the committed positions from another consumer
try (var anotherConsumer = createConsumer(groupProtocol, true)) {
assertEquals(300, anotherConsumer.committed(Set.of(tp)).get(tp).offset());
assertEquals(500, anotherConsumer.committed(Set.of(tp1)).get(tp1).offset());
}
}
@ClusterTest
public void testClassicConsumerCommitMetadata() throws InterruptedException {
testCommitMetadata(GroupProtocol.CLASSIC);
}
@ClusterTest
public void testAsyncConsumerCommitMetadata() throws InterruptedException {
testCommitMetadata(GroupProtocol.CONSUMER);
}
private void testCommitMetadata(GroupProtocol groupProtocol) throws InterruptedException {
try (var consumer = createConsumer(groupProtocol, true)) {
consumer.assign(List.of(tp));
// sync commit
var syncMetadata = new OffsetAndMetadata(5, Optional.of(15), "foo");
consumer.commitSync(Map.of(tp, syncMetadata));
assertEquals(syncMetadata, consumer.committed(Set.of(tp)).get(tp));
// async commit
var asyncMetadata = new OffsetAndMetadata(10, "bar");
sendAndAwaitAsyncCommit(consumer, Map.of(tp, asyncMetadata));
assertEquals(asyncMetadata, consumer.committed(Set.of(tp)).get(tp));
// handle null metadata
var nullMetadata = new OffsetAndMetadata(5, null);
consumer.commitSync(Map.of(tp, nullMetadata));
assertEquals(nullMetadata, consumer.committed(Set.of(tp)).get(tp));
}
}
@ClusterTest
public void testClassicConsumerAsyncCommit() throws InterruptedException {
testAsyncCommit(GroupProtocol.CLASSIC);
}
@ClusterTest
public void testAsyncConsumerAsyncCommit() throws InterruptedException {
testAsyncCommit(GroupProtocol.CONSUMER);
}
private void testAsyncCommit(GroupProtocol groupProtocol) throws InterruptedException {
// Ensure the __consumer_offsets topic is created to prevent transient issues,
// such as RetriableCommitFailedException during async offset commits.
cluster.createTopic(
Topic.GROUP_METADATA_TOPIC_NAME,
Integer.parseInt(OFFSETS_TOPIC_PARTITIONS),
Short.parseShort(OFFSETS_TOPIC_REPLICATION)
);
try (var consumer = createConsumer(groupProtocol, false)) {
consumer.assign(List.of(tp));
var callback = new CountConsumerCommitCallback();
var count = 5;
for (var i = 1; i <= count; i++)
consumer.commitAsync(Map.of(tp, new OffsetAndMetadata(i)), callback);
ClientsTestUtils.pollUntilTrue(
consumer,
() -> callback.successCount >= count || callback.lastError.isPresent(),
"Failed to observe commit callback before timeout"
);
assertEquals(Optional.empty(), callback.lastError);
assertEquals(count, callback.successCount);
assertEquals(new OffsetAndMetadata(count), consumer.committed(Set.of(tp)).get(tp));
}
}
@ClusterTest
public void testClassicConsumerAutoCommitIntercept() throws InterruptedException {
testAutoCommitIntercept(GroupProtocol.CLASSIC);
}
@ClusterTest
public void testAsyncConsumerAutoCommitIntercept() throws InterruptedException {
testAutoCommitIntercept(GroupProtocol.CONSUMER);
}
private void testAutoCommitIntercept(GroupProtocol groupProtocol) throws InterruptedException {
var topic2 = "topic2";
cluster.createTopic(topic2, 2, (short) 3);
var numRecords = 100;
try (var producer = cluster.producer();
// create consumer with interceptor
Consumer<byte[], byte[]> consumer = cluster.consumer(Map.of(
GROUP_PROTOCOL_CONFIG, groupProtocol.name().toLowerCase(Locale.ROOT),
ENABLE_AUTO_COMMIT_CONFIG, "true",
INTERCEPTOR_CLASSES_CONFIG, "org.apache.kafka.test.MockConsumerInterceptor"
))
) {
// produce records
for (var i = 0; i < numRecords; i++) {
producer.send(new ProducerRecord<>(tp.topic(), tp.partition(), ("key " + i).getBytes(), ("value " + i).getBytes()));
}
var rebalanceListener = new ConsumerRebalanceListener() {
@Override
public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
// keep partitions paused in this test so that we can verify the commits based on specific seeks
consumer.pause(partitions);
}
@Override
public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
// No-op
}
};
changeConsumerSubscriptionAndValidateAssignment(
consumer,
List.of(topic),
Set.of(tp, tp1),
rebalanceListener
);
consumer.seek(tp, 10);
consumer.seek(tp1, 20);
// change subscription to trigger rebalance
var commitCountBeforeRebalance = MockConsumerInterceptor.ON_COMMIT_COUNT.intValue();
var expectedAssignment = Set.of(tp, tp1, new TopicPartition(topic2, 0), new TopicPartition(topic2, 1));
changeConsumerSubscriptionAndValidateAssignment(
consumer,
List.of(topic, topic2),
expectedAssignment,
rebalanceListener
);
// after rebalancing, we should have reset to the committed positions
var committed1 = consumer.committed(Set.of(tp));
assertEquals(10, committed1.get(tp).offset());
var committed2 = consumer.committed(Set.of(tp1));
assertEquals(20, committed2.get(tp1).offset());
// In both CLASSIC and CONSUMER protocols, interceptors are executed in poll and close.
// However, in the CONSUMER protocol, the assignment may be changed outside a poll, so
// we need to poll once to ensure the interceptor is called.
if (groupProtocol == GroupProtocol.CONSUMER) {
consumer.poll(Duration.ZERO);
}
assertTrue(MockConsumerInterceptor.ON_COMMIT_COUNT.intValue() > commitCountBeforeRebalance);
// verify commits are intercepted on close
var commitCountBeforeClose = MockConsumerInterceptor.ON_COMMIT_COUNT.intValue();
consumer.close();
assertTrue(MockConsumerInterceptor.ON_COMMIT_COUNT.intValue() > commitCountBeforeClose);
producer.close();
// cleanup
MockConsumerInterceptor.resetCounters();
}
}
@ClusterTest
public void testClassicConsumerCommitSpecifiedOffsets() throws InterruptedException {
testCommitSpecifiedOffsets(GroupProtocol.CLASSIC);
}
@ClusterTest
public void testAsyncConsumerCommitSpecifiedOffsets() throws InterruptedException {
testCommitSpecifiedOffsets(GroupProtocol.CONSUMER);
}
private void testCommitSpecifiedOffsets(GroupProtocol groupProtocol) throws InterruptedException {
try (Producer<byte[], byte[]> producer = cluster.producer();
var consumer = createConsumer(groupProtocol, false)
) {
sendRecords(producer, tp, 5, System.currentTimeMillis());
sendRecords(producer, tp1, 7, System.currentTimeMillis());
consumer.assign(List.of(tp, tp1));
var pos1 = consumer.position(tp);
var pos2 = consumer.position(tp1);
consumer.commitSync(Map.of(tp, new OffsetAndMetadata(3L)));
assertEquals(3, consumer.committed(Set.of(tp)).get(tp).offset());
assertNull(consumer.committed(Collections.singleton(tp1)).get(tp1));
// Positions should not change
assertEquals(pos1, consumer.position(tp));
assertEquals(pos2, consumer.position(tp1));
consumer.commitSync(Map.of(tp1, new OffsetAndMetadata(5L)));
assertEquals(3, consumer.committed(Set.of(tp)).get(tp).offset());
assertEquals(5, consumer.committed(Set.of(tp1)).get(tp1).offset());
// Using async should pick up the committed changes after commit completes
sendAndAwaitAsyncCommit(consumer, Map.of(tp1, new OffsetAndMetadata(7L)));
assertEquals(7, consumer.committed(Collections.singleton(tp1)).get(tp1).offset());
}
}
@ClusterTest
public void testClassicConsumerAutoCommitOnRebalance() throws InterruptedException {
testAutoCommitOnRebalance(GroupProtocol.CLASSIC);
}
@ClusterTest
public void testAsyncConsumerAutoCommitOnRebalance() throws InterruptedException {
testAutoCommitOnRebalance(GroupProtocol.CONSUMER);
}
private void testAutoCommitOnRebalance(GroupProtocol groupProtocol) throws InterruptedException {
var topic2 = "topic2";
cluster.createTopic(topic2, 2, (short) BROKER_COUNT);
try (var consumer = createConsumer(groupProtocol, true)) {
sendRecords(cluster, tp, 1000);
var rebalanceListener = new ConsumerRebalanceListener() {
@Override
public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
// keep partitions paused in this test so that we can verify the commits based on specific seeks
consumer.pause(partitions);
}
@Override
public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
}
};
consumer.subscribe(List.of(topic), rebalanceListener);
awaitAssignment(consumer, Set.of(tp, tp1));
consumer.seek(tp, 300);
consumer.seek(tp1, 500);
// change subscription to trigger rebalance
consumer.subscribe(List.of(topic, topic2), rebalanceListener);
var newAssignment = Set.of(tp, tp1, new TopicPartition(topic2, 0), new TopicPartition(topic2, 1));
awaitAssignment(consumer, newAssignment);
// after rebalancing, we should have reset to the committed positions
assertEquals(300, consumer.committed(Set.of(tp)).get(tp).offset());
assertEquals(500, consumer.committed(Set.of(tp1)).get(tp1).offset());
}
}
@ClusterTest
public void testClassicConsumerSubscribeAndCommitSync() throws InterruptedException {
testSubscribeAndCommitSync(GroupProtocol.CLASSIC);
}
@ClusterTest
public void testAsyncConsumerSubscribeAndCommitSync() throws InterruptedException {
testSubscribeAndCommitSync(GroupProtocol.CONSUMER);
}
private void testSubscribeAndCommitSync(GroupProtocol groupProtocol) throws InterruptedException {
// This test ensure that the member ID is propagated from the group coordinator when the
// assignment is received into a subsequent offset commit
try (var consumer = createConsumer(groupProtocol, false)) {
assertEquals(0, consumer.assignment().size());
consumer.subscribe(List.of(topic));
awaitAssignment(consumer, Set.of(tp, tp1));
consumer.seek(tp, 0);
consumer.commitSync();
}
}
@ClusterTest
public void testClassicConsumerPositionAndCommit() throws InterruptedException {
testPositionAndCommit(GroupProtocol.CLASSIC);
}
@ClusterTest
public void testAsyncConsumerPositionAndCommit() throws InterruptedException {
testPositionAndCommit(GroupProtocol.CONSUMER);
}
private void testPositionAndCommit(GroupProtocol groupProtocol) throws InterruptedException {
try (Producer<byte[], byte[]> producer = cluster.producer();
var consumer = createConsumer(groupProtocol, false);
var otherConsumer = createConsumer(groupProtocol, false)
) {
var startingTimestamp = System.currentTimeMillis();
sendRecords(producer, tp, 5, startingTimestamp);
var topicPartition = new TopicPartition(topic, 15);
assertNull(consumer.committed(Collections.singleton(topicPartition)).get(topicPartition));
// position() on a partition that we aren't subscribed to throws an exception
assertThrows(IllegalStateException.class, () -> consumer.position(topicPartition));
consumer.assign(List.of(tp));
assertEquals(0L, consumer.position(tp), "position() on a partition that we are subscribed to should reset the offset");
consumer.commitSync();
assertEquals(0L, consumer.committed(Set.of(tp)).get(tp).offset());
consumeAndVerifyRecords(consumer, tp, 5, 0, 0, startingTimestamp);
assertEquals(5L, consumer.position(tp), "After consuming 5 records, position should be 5");
consumer.commitSync();
assertEquals(5L, consumer.committed(Set.of(tp)).get(tp).offset(), "Committed offset should be returned");
startingTimestamp = System.currentTimeMillis();
sendRecords(producer, tp, 1, startingTimestamp);
// another consumer in the same group should get the same position
otherConsumer.assign(List.of(tp));
consumeAndVerifyRecords(otherConsumer, tp, 1, 5, 0, startingTimestamp);
}
}
/**
* This is testing when closing the consumer but commit request has already been sent.
* During the closing, the consumer won't find the coordinator anymore.
*/
@ClusterTest
public void testCommitAsyncFailsWhenCoordinatorUnavailableDuringClose() throws InterruptedException {
try (Producer<byte[], byte[]> producer = cluster.producer();
var consumer = createConsumer(GroupProtocol.CONSUMER, false)
) {
sendRecords(producer, tp, 3, System.currentTimeMillis());
consumer.assign(List.of(tp));
var callback = new CountConsumerCommitCallback();
// Close the coordinator before committing because otherwise the commit will fail to find the coordinator.
cluster.brokerIds().forEach(cluster::shutdownBroker);
TestUtils.waitForCondition(() -> cluster.aliveBrokers().isEmpty(), "All brokers should be shut down");
consumer.poll(Duration.ofMillis(500));
consumer.commitAsync(Map.of(tp, new OffsetAndMetadata(1L)), callback);
long startTime = System.currentTimeMillis();
consumer.close(CloseOptions.timeout(Duration.ofMillis(500)));
long closeDuration = System.currentTimeMillis() - startTime;
assertTrue(closeDuration < 1000, "The closing process for the consumer was too long: " + closeDuration + " ms");
assertTrue(callback.lastError.isPresent());
assertEquals(CommitFailedException.class, callback.lastError.get().getClass());
assertEquals("Failed to commit offsets: Coordinator unknown and consumer is closing", callback.lastError.get().getMessage());
assertEquals(1, callback.exceptionCount);
}
}
// TODO: This only works in the new consumer, but should be fixed for the old consumer as well
@ClusterTest
public void testCommitAsyncCompletedBeforeConsumerCloses() throws InterruptedException {
// This is testing the contract that asynchronous offset commit are completed before the consumer
// is closed, even when no commit sync is performed as part of the close (due to auto-commit
// disabled, or simply because there are no consumed offsets).
// Create offsets topic to ensure coordinator is available during close
cluster.createTopic(Topic.GROUP_METADATA_TOPIC_NAME, Integer.parseInt(OFFSETS_TOPIC_PARTITIONS), Short.parseShort(OFFSETS_TOPIC_REPLICATION));
try (Producer<byte[], byte[]> producer = cluster.producer(Map.of(ProducerConfig.ACKS_CONFIG, "all"));
var consumer = createConsumer(GroupProtocol.CONSUMER, false)
) {
sendRecords(producer, tp, 3, System.currentTimeMillis());
sendRecords(producer, tp1, 3, System.currentTimeMillis());
consumer.assign(List.of(tp, tp1));
// Try without looking up the coordinator first
var cb = new CountConsumerCommitCallback();
consumer.commitAsync(Map.of(tp, new OffsetAndMetadata(1L)), cb);
consumer.commitAsync(Map.of(tp1, new OffsetAndMetadata(1L)), cb);
consumer.close();
assertEquals(2, cb.successCount);
}
}
// TODO: This only works in the new consumer, but should be fixed for the old consumer as well
@ClusterTest
public void testCommitAsyncCompletedBeforeCommitSyncReturns() {
// This is testing the contract that asynchronous offset commits sent previously with the
// `commitAsync` are guaranteed to have their callbacks invoked prior to completion of
// `commitSync` (given that it does not time out).
try (Producer<byte[], byte[]> producer = cluster.producer();
var consumer = createConsumer(GroupProtocol.CONSUMER, false)
) {
sendRecords(producer, tp, 3, System.currentTimeMillis());
sendRecords(producer, tp1, 3, System.currentTimeMillis());
consumer.assign(List.of(tp, tp1));
// Try without looking up the coordinator first
var cb = new CountConsumerCommitCallback();
consumer.commitAsync(Map.of(tp, new OffsetAndMetadata(1L)), cb);
consumer.commitSync(Map.of());
assertEquals(1, consumer.committed(Set.of(tp)).get(tp).offset());
assertEquals(1, cb.successCount);
// Try with coordinator known
consumer.commitAsync(Map.of(tp, new OffsetAndMetadata(2L)), cb);
consumer.commitSync(Map.of(tp1, new OffsetAndMetadata(2L)));
assertEquals(2, consumer.committed(Set.of(tp)).get(tp).offset());
assertEquals(2, consumer.committed(Set.of(tp1)).get(tp1).offset());
assertEquals(2, cb.successCount);
// Try with empty sync commit
consumer.commitAsync(Map.of(tp, new OffsetAndMetadata(3L)), cb);
consumer.commitSync(Map.of());
assertEquals(3, consumer.committed(Set.of(tp)).get(tp).offset());
assertEquals(2, consumer.committed(Set.of(tp1)).get(tp1).offset());
assertEquals(3, cb.successCount);
}
}
private Consumer<byte[], byte[]> createConsumer(GroupProtocol protocol, boolean enableAutoCommit) {
return cluster.consumer(Map.of(
GROUP_ID_CONFIG, "test-group",
GROUP_PROTOCOL_CONFIG, protocol.name().toLowerCase(Locale.ROOT),
ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit
));
}
private void sendAndAwaitAsyncCommit(
Consumer<byte[], byte[]> consumer,
Map<TopicPartition, OffsetAndMetadata> offsetsOpt
) throws InterruptedException {
var commitCallback = new RetryCommitCallback(consumer, offsetsOpt);
commitCallback.sendAsyncCommit();
ClientsTestUtils.pollUntilTrue(
consumer,
() -> commitCallback.isComplete,
"Failed to observe commit callback before timeout"
);
assertEquals(Optional.empty(), commitCallback.error);
}
private static | PlaintextConsumerCommitTest |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/filter/GenericHttpFilter.java | {
"start": 759,
"end": 945
} | interface ____ exposed, so you
* can pass around instances of these filters, the different implementations are internal only.
*
* @author Jonas Konrad
* @since 4.0.0
*/
public sealed | is |
java | playframework__playframework | core/play/src/main/java/play/inject/DelegateInjector.java | {
"start": 295,
"end": 785
} | class ____ implements Injector {
public final play.api.inject.Injector injector;
@Inject
public DelegateInjector(play.api.inject.Injector injector) {
this.injector = injector;
}
@Override
public <T> T instanceOf(Class<T> clazz) {
return injector.instanceOf(clazz);
}
@Override
public <T> T instanceOf(BindingKey<T> key) {
return injector.instanceOf(key);
}
@Override
public play.api.inject.Injector asScala() {
return injector;
}
}
| DelegateInjector |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/file/FileProducerFileExistAppendTest.java | {
"start": 1089,
"end": 3358
} | class ____ extends ContextTestSupport {
private static final String TEST_FILE_NAME = "hello" + UUID.randomUUID() + ".txt";
private static final String TEST_FILE_NAME_1 = "test1" + UUID.randomUUID() + ".txt";
private static final String TEST_FILE_NAME_2 = "test2" + UUID.randomUUID() + ".txt";
private static final String TEST_FILE_NAME_OUT = "out" + UUID.randomUUID() + ".txt";
@Test
public void testAppend() throws Exception {
template.sendBodyAndHeader(fileUri(), "Hello World\n", Exchange.FILE_NAME, TEST_FILE_NAME);
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Hello World\nBye World");
mock.expectedFileExists(testFile(TEST_FILE_NAME), "Hello World\nBye World");
template.sendBodyAndHeader(fileUri("?fileExist=Append"), "Bye World", Exchange.FILE_NAME, TEST_FILE_NAME);
context.getRouteController().startAllRoutes();
assertMockEndpointsSatisfied();
}
@Test
public void testAppendFileByFile() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
// Create some test files
template.sendBodyAndHeader(fileUri(), "Row 1\n", Exchange.FILE_NAME, TEST_FILE_NAME_1);
template.sendBodyAndHeader(fileUri(), "Row 2\n", Exchange.FILE_NAME, TEST_FILE_NAME_2);
// Append test files to the target one
template.sendBodyAndHeader(fileUri("?fileExist=Append"), testFile(TEST_FILE_NAME_1).toFile(),
Exchange.FILE_NAME, TEST_FILE_NAME_OUT);
template.sendBodyAndHeader(fileUri("?fileExist=Append"), testFile(TEST_FILE_NAME_2).toFile(),
Exchange.FILE_NAME, TEST_FILE_NAME_OUT);
mock.expectedFileExists(testFile(TEST_FILE_NAME_OUT), "Row 1\nRow 2\n");
context.getRouteController().startAllRoutes();
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(fileUri("?noop=true&initialDelay=0&delay=10")).autoStartup(false).convertBodyTo(String.class)
.to("mock:result");
}
};
}
}
| FileProducerFileExistAppendTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/AggregationExecutionException.java | {
"start": 756,
"end": 1136
} | class ____ extends ElasticsearchException {
public AggregationExecutionException(String msg) {
super(msg);
}
public AggregationExecutionException(String msg, Throwable cause) {
super(msg, cause);
}
public AggregationExecutionException(StreamInput in) throws IOException {
super(in);
}
public static | AggregationExecutionException |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java | {
"start": 4556,
"end": 5570
} | class ____<T extends ClusterStateTaskListener> implements ClusterStateTaskExecutor.TaskContext<T> {
private final T task;
private TestTaskContext(T task) {
this.task = task;
}
@Override
public T getTask() {
return task;
}
@Override
public void success(Runnable onPublicationSuccess) {
onPublicationSuccess.run();
}
@Override
public void success(Consumer<ClusterState> publishedStateConsumer) {}
@Override
public void success(Runnable onPublicationSuccess, ClusterStateAckListener clusterStateAckListener) {}
@Override
public void success(Consumer<ClusterState> publishedStateConsumer, ClusterStateAckListener clusterStateAckListener) {}
@Override
public void onFailure(Exception failure) {}
@Override
public Releasable captureResponseHeaders() {
return null;
}
}
private static | TestTaskContext |
java | google__guava | android/guava/src/com/google/common/collect/HashMultiset.java | {
"start": 1017,
"end": 2556
} | class ____<E extends @Nullable Object> extends AbstractMapBasedMultiset<E> {
/** Creates a new, empty {@code HashMultiset} using the default initial capacity. */
public static <E extends @Nullable Object> HashMultiset<E> create() {
return create(ObjectCountHashMap.DEFAULT_SIZE);
}
/**
* Creates a new, empty {@code HashMultiset} with the specified expected number of distinct
* elements.
*
* @param distinctElements the expected number of distinct elements
* @throws IllegalArgumentException if {@code distinctElements} is negative
*/
public static <E extends @Nullable Object> HashMultiset<E> create(int distinctElements) {
return new HashMultiset<>(distinctElements);
}
/**
* Creates a new {@code HashMultiset} containing the specified elements.
*
* <p>This implementation is highly efficient when {@code elements} is itself a {@link Multiset}.
*
* @param elements the elements that the multiset should contain
*/
public static <E extends @Nullable Object> HashMultiset<E> create(
Iterable<? extends E> elements) {
HashMultiset<E> multiset = create(Multisets.inferDistinctElements(elements));
Iterables.addAll(multiset, elements);
return multiset;
}
HashMultiset(int distinctElements) {
super(distinctElements);
}
@Override
ObjectCountHashMap<E> newBackingMap(int distinctElements) {
return new ObjectCountHashMap<>(distinctElements);
}
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
}
| HashMultiset |
java | apache__camel | components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/NettyHttpSSLTest.java | {
"start": 1407,
"end": 4124
} | class ____ extends BaseNettyTest {
private static final String NULL_VALUE_MARKER = CamelTestSupport.class.getCanonicalName();
protected final Properties originalValues = new Properties();
@Override
public void doPreSetup() throws Exception {
// ensure jsse clients can validate the self signed dummy localhost cert,
// use the server keystore as the trust store for these tests
URL trustStoreUrl = this.getClass().getClassLoader().getResource("jsse/localhost.p12");
setSystemProp("javax.net.ssl.trustStore", trustStoreUrl.toURI().getPath());
setSystemProp("javax.net.ssl.trustStorePassword", "changeit");
}
@Override
public void doPostTearDown() {
restoreSystemProperties();
}
protected void setSystemProp(String key, String value) {
String originalValue = System.setProperty(key, value);
originalValues.put(key, originalValue != null ? originalValue : NULL_VALUE_MARKER);
}
protected void restoreSystemProperties() {
for (Map.Entry<Object, Object> entry : originalValues.entrySet()) {
Object key = entry.getKey();
Object value = entry.getValue();
if (NULL_VALUE_MARKER.equals(value)) {
System.clearProperty((String) key);
} else {
System.setProperty((String) key, (String) value);
}
}
}
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testSSLInOutWithNettyConsumer() throws Exception {
getMockEndpoint("mock:input").expectedBodiesReceived("Hello World");
context.addRoutes(new RouteBuilder() {
public void configure() {
from("netty-http:https://localhost:{{port}}?ssl=true&passphrase=changeit&keyStoreResource=jsse/localhost.p12&trustStoreResource=jsse/localhost.p12")
.to("mock:input")
.process(exchange -> {
SSLSession session = exchange.getIn().getHeader(NettyConstants.NETTY_SSL_SESSION, SSLSession.class);
if (session != null) {
exchange.getMessage().setBody("Bye World");
} else {
exchange.getMessage().setBody("Cannot start conversion without SSLSession");
}
});
}
});
context.start();
String out = template.requestBody("https://localhost:{{port}}", "Hello World", String.class);
assertEquals("Bye World", out);
MockEndpoint.assertIsSatisfied(context);
}
}
| NettyHttpSSLTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/event/internal/DefaultReplicateEventListener.java | {
"start": 1277,
"end": 6898
} | class ____
extends AbstractSaveEventListener<ReplicationMode>
implements ReplicateEventListener {
/**
* Handle the given replicate event.
*
* @param event The replicate event to be handled.
*
* @throws TransientObjectException An invalid attempt to replicate a transient entity.
*/
@Override
public void onReplicate(ReplicateEvent event) {
final var source = event.getSession();
final var persistenceContext = source.getPersistenceContextInternal();
if ( persistenceContext.reassociateIfUninitializedProxy( event.getObject() ) ) {
EVENT_LISTENER_LOGGER.uninitializedProxyPassedToReplicate();
}
else {
final Object entity = persistenceContext.unproxyAndReassociate( event.getObject() );
if ( persistenceContext.isEntryFor( entity ) ) {
EVENT_LISTENER_LOGGER.ignoringPersistentInstancePassedToReplicate();
//hum ... should we cascade anyway? throw an exception? fine like it is?
}
else {
doReplicate( event, source, entity );
}
}
}
private void doReplicate(ReplicateEvent event, EventSource source, Object entity) {
final var persister = source.getEntityPersister( event.getEntityName(), entity);
final var replicationMode = event.getReplicationMode();
// get the id from the object - we accept almost anything at all,
// except null (that is, even ids which look like they're unsaved)
final Object id = persister.getIdentifier( entity, source );
if ( id == null ) {
throw new TransientObjectException( "Cannot replicate instance of entity '" + persister.getEntityName()
+ "' because it has a null identifier" );
}
final Object oldVersion = replicationMode == ReplicationMode.EXCEPTION
? null // always do an INSERT, and let it fail by constraint violation
: persister.getCurrentVersion( id, source); // what is the version on the database?
if ( oldVersion != null ) {
if ( EVENT_LISTENER_LOGGER.isTraceEnabled() ) {
EVENT_LISTENER_LOGGER.foundExistingRowFor(
infoString( persister, id, event.getFactory() ) );
}
// If the entity has no version, getCurrentVersion() just returns
// a meaningless value to indicate that the row exists (HHH-2378)
final Object realOldVersion = persister.isVersioned() ? oldVersion : null;
if ( shouldOverwrite( replicationMode,
persister.getVersion( entity ), realOldVersion,
persister.getVersionType() ) ) {
// execute a SQL UPDATE
performReplication( entity, id, realOldVersion, persister, replicationMode, source );
}
else if ( EVENT_LISTENER_LOGGER.isTraceEnabled() ) {
// do nothing (don't even reassociate entity!)
EVENT_LISTENER_LOGGER.noNeedToReplicate();
}
//TODO: would it be better to do a refresh from db?
}
else {
// no existing row - execute a SQL INSERT
if ( EVENT_LISTENER_LOGGER.isTraceEnabled() ) {
EVENT_LISTENER_LOGGER.noExistingRowReplicatingNewInstance(
infoString( persister, id, event.getFactory() ) );
}
final boolean regenerate = persister.isIdentifierAssignedByInsert(); // prefer re-generation of identity!
final var key = regenerate ? null : source.generateEntityKey( id, persister );
performSaveOrReplicate( entity, key, persister, regenerate, replicationMode, source, false );
}
}
private static <T> boolean shouldOverwrite(
ReplicationMode replicationMode, Object entityVersion, Object realOldVersion, BasicType<T> versionType) {
return replicationMode.shouldOverwriteCurrentVersion( (T) realOldVersion, (T) entityVersion, versionType );
}
@Override
protected boolean visitCollectionsBeforeSave(
Object entity,
Object id,
Object[] values,
Type[] types,
EventSource source) {
//TODO: we use two visitors here, inefficient!
final var visitor = new OnReplicateVisitor( source, id, entity, false );
visitor.processEntityPropertyValues( values, types );
return super.visitCollectionsBeforeSave( entity, id, values, types, source );
}
@Override
protected boolean substituteValuesIfNecessary(
Object entity,
Object id,
Object[] values,
EntityPersister persister,
SessionImplementor source) {
return false;
}
@Override
protected boolean isVersionIncrementDisabled() {
return true;
}
private void performReplication(
Object entity,
Object id,
Object version,
EntityPersister persister,
ReplicationMode replicationMode,
EventSource source) throws HibernateException {
if ( EVENT_LISTENER_LOGGER.isTraceEnabled() ) {
EVENT_LISTENER_LOGGER.replicatingChangesTo(
infoString( persister, id, source.getFactory() ) );
}
new OnReplicateVisitor( source, id, entity, true ).process( entity, persister );
source.getPersistenceContextInternal().addEntity(
entity,
persister.isMutable() ? Status.MANAGED : Status.READ_ONLY,
null,
source.generateEntityKey( id, persister ),
version,
LockMode.NONE,
true,
persister,
true
);
cascadeAfterReplicate( entity, persister, replicationMode, source );
}
private void cascadeAfterReplicate(
Object entity,
EntityPersister persister,
ReplicationMode replicationMode,
EventSource source) {
final var persistenceContext = source.getPersistenceContextInternal();
persistenceContext.incrementCascadeLevel();
try {
Cascade.cascade(
CascadingActions.REPLICATE,
CascadePoint.AFTER_UPDATE,
source,
persister,
entity,
replicationMode
);
}
finally {
persistenceContext.decrementCascadeLevel();
}
}
@Override
protected CascadingAction<ReplicationMode> getCascadeAction() {
return CascadingActions.REPLICATE;
}
}
| DefaultReplicateEventListener |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/alterTable/MySqlAlterTableAddIndex_0.java | {
"start": 1053,
"end": 3465
} | class ____ extends TestCase {
public void test_alter_first() throws Exception {
String sql = "ALTER TABLE `test`.`tb1` ADD INDEX `ix` (`f2` ASC) ;";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
assertEquals("ALTER TABLE `test`.`tb1`" +
"\n\tADD INDEX `ix` (`f2` ASC);", SQLUtils.toMySqlString(stmt));
assertEquals("alter table `test`.`tb1`" +
"\n\tadd index `ix` (`f2` asc);", SQLUtils.toMySqlString(stmt, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION));
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(JdbcConstants.MYSQL);
stmt.accept(visitor);
TableStat tableStat = visitor.getTableStat("test.tb1");
assertNotNull(tableStat);
assertEquals(1, tableStat.getCreateIndexCount());
}
public void test_alter_fulltext() throws Exception {
String sql = "ALTER TABLE `test`.`tb1` alter INDEX `ix` set fulltext analyzer='sfewfw' ;";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
assertEquals("ALTER TABLE `test`.`tb1`\n"
+ "\t ALTER INDEX `ix` FULLTEXT ANALYZER = 'sfewfw';", SQLUtils.toMySqlString(stmt));
}
public void test_alter_fulltext2() throws Exception {
String sql = "ALTER TABLE `test`.`tb1` alter INDEX `ix` set fulltext index analyzer='sfewfw' ;";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
assertEquals("ALTER TABLE `test`.`tb1`\n"
+ "\t ALTER INDEX `ix` FULLTEXT INDEX ANALYZER = 'sfewfw';", SQLUtils.toMySqlString(stmt));
}
public void test_alter_fulltext3() throws Exception {
String sql = "ALTER TABLE `test`.`tb1` alter INDEX `ix` set fulltext QUERY analyzer='sfewfw' ;";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
assertEquals("ALTER TABLE `test`.`tb1`\n"
+ "\t ALTER INDEX `ix` FULLTEXT QUERY ANALYZER = 'sfewfw';", SQLUtils.toMySqlString(stmt));
}
}
| MySqlAlterTableAddIndex_0 |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/dynamic/support/ResolvableType.java | {
"start": 34815,
"end": 35455
} | class ____ not be null");
LettuceAssert.notNull(generics, "Generics must not be null");
ResolvableType[] resolvableGenerics = new ResolvableType[generics.length];
for (int i = 0; i < generics.length; i++) {
resolvableGenerics[i] = forClass(generics[i]);
}
return forClassWithGenerics(sourceClass, resolvableGenerics);
}
/**
* Return a {@link ResolvableType} for the specified {@link Class} with pre-declared generics.
*
* @param sourceClass the source class
* @param generics the generics of the class
* @return a {@link ResolvableType} for the specific | must |
java | apache__kafka | tools/src/main/java/org/apache/kafka/tools/ClientCompatibilityTest.java | {
"start": 15785,
"end": 15991
} | class ____ {
Map<TopicPartition, OffsetAndTimestamp> result;
@Override
public String toString() {
return result.toString();
}
}
public static | OffsetsForTime |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/path/JSONPath_field_wildcard_filter_float.java | {
"start": 328,
"end": 1590
} | class ____ extends TestCase {
public void test_list_map_0() throws Exception {
Map<String, Value> jsonObject = JSON.parseObject(text, new TypeReference<Map<String, Value>>(){}, Feature.OrderedField);
Collection array = (Collection) JSONPath.eval(jsonObject, "$.*[score>0]");
assertEquals("[{\"score\":0.89513224},{\"score\":0.7237897},{\"score\":0.34671742}]", JSON.toJSONString(array));
}
public void test_list_map_1() throws Exception {
Map<String, Value> jsonObject = JSON.parseObject(text, new TypeReference<Map<String, Value>>(){}, Feature.OrderedField);
Collection array = (Collection) JSONPath.eval(jsonObject, "$.*[score<0]");
assertEquals("[{\"score\":-0.3453004}]", JSON.toJSONString(array));
}
public void test_list_map_2() throws Exception {
Map<String, Value> jsonObject = JSON.parseObject(text, new TypeReference<Map<String, Value>>(){}, Feature.OrderedField);
Collection array = (Collection) JSONPath.eval(jsonObject, "$.*[score=0]");
assertEquals("[{\"score\":0.0},{\"score\":0.0},{\"score\":0.0},{\"score\":0.0},{\"score\":0.0},{\"score\":0.0},{\"score\":0.0}]", JSON.toJSONString(array));
}
public static | JSONPath_field_wildcard_filter_float |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/dos/DeepArrayWrappingForDeser3582Test.java | {
"start": 454,
"end": 1831
} | class ____
{
// 23-Aug-2022, tatu: Before fix, failed with 5000
private final static int TOO_DEEP_NESTING = 9999;
private final ObjectMapper MAPPER = jsonMapperBuilder()
.enable(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS)
.build();
@Test
public void testArrayWrapping() throws Exception
{
final String doc = _nestedDoc(TOO_DEEP_NESTING, "[ ", "] ", "{}");
try {
MAPPER.readValue(doc, DatabindTestUtil.Point.class);
fail("Should not pass");
} catch (MismatchedInputException e) {
verifyException(e, "Cannot deserialize");
verifyException(e, "nested Array");
verifyException(e, "only single");
}
}
private String _nestedDoc(int nesting, String open, String close, String content) {
StringBuilder sb = new StringBuilder(nesting * (open.length() + close.length()));
for (int i = 0; i < nesting; ++i) {
sb.append(open);
if ((i & 31) == 0) {
sb.append("\n");
}
}
sb.append("\n").append(content).append("\n");
for (int i = 0; i < nesting; ++i) {
sb.append(close);
if ((i & 31) == 0) {
sb.append("\n");
}
}
return sb.toString();
}
}
| DeepArrayWrappingForDeser3582Test |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorFilteringTests.java | {
"start": 1233,
"end": 14026
} | class ____ extends ESTestCase {
private NamedWriteableRegistry namedWriteableRegistry;
@Before
public void registerNamedObjects() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, emptyList());
List<NamedWriteableRegistry.Entry> namedWriteables = searchModule.getNamedWriteables();
namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables);
}
public final void testRandomSerialization() throws IOException {
for (int runs = 0; runs < 10; runs++) {
ConnectorFiltering testInstance = ConnectorTestUtils.getRandomConnectorFiltering();
assertTransportSerialization(testInstance);
}
}
public void testToXContent() throws IOException {
String content = XContentHelper.stripWhitespace("""
{
"active": {
"advanced_snippet": {
"created_at": "2023-11-09T15:13:08.231Z",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": {}
},
"rules": [
{
"created_at": "2023-11-09T15:13:08.231Z",
"field": "_",
"id": "DEFAULT",
"order": 0,
"policy": "include",
"rule": "regex",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": ".*"
}
],
"validation": {
"errors": [],
"state": "valid"
}
},
"domain": "DEFAULT",
"draft": {
"advanced_snippet": {
"created_at": "2023-11-09T15:13:08.231Z",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": {}
},
"rules": [
{
"created_at": "2023-11-09T15:13:08.231Z",
"field": "_",
"id": "DEFAULT",
"order": 0,
"policy": "include",
"rule": "regex",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": ".*"
}
],
"validation": {
"errors": [],
"state": "valid"
}
}
}
""");
ConnectorFiltering filtering = ConnectorFiltering.fromXContentBytes(new BytesArray(content), XContentType.JSON);
boolean humanReadable = true;
BytesReference originalBytes = toShuffledXContent(filtering, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable);
ConnectorFiltering parsed;
try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) {
parsed = ConnectorFiltering.fromXContent(parser);
}
assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON);
}
public void testToXContent_WithAdvancedSnippetPopulatedWithAValueArray() throws IOException {
String content = XContentHelper.stripWhitespace("""
{
"active": {
"advanced_snippet": {
"created_at": "2023-11-09T15:13:08.231Z",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": [
{"service": "Incident", "query": "user_nameSTARTSWITHa"},
{"service": "Incident", "query": "user_nameSTARTSWITHj"}
]
},
"rules": [
{
"created_at": "2023-11-09T15:13:08.231Z",
"field": "_",
"id": "DEFAULT",
"order": 0,
"policy": "include",
"rule": "regex",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": ".*"
}
],
"validation": {
"errors": [],
"state": "valid"
}
},
"domain": "DEFAULT",
"draft": {
"advanced_snippet": {
"created_at": "2023-11-09T15:13:08.231Z",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": {}
},
"rules": [
{
"created_at": "2023-11-09T15:13:08.231Z",
"field": "_",
"id": "DEFAULT",
"order": 0,
"policy": "include",
"rule": "regex",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": ".*"
}
],
"validation": {
"errors": [],
"state": "valid"
}
}
}
""");
ConnectorFiltering filtering = ConnectorFiltering.fromXContentBytes(new BytesArray(content), XContentType.JSON);
boolean humanReadable = true;
BytesReference originalBytes = toShuffledXContent(filtering, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable);
ConnectorFiltering parsed;
try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) {
parsed = ConnectorFiltering.fromXContent(parser);
}
assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON);
}
public void testToXContent_WithAdvancedSnippetPopulatedWithAValueObject() throws IOException {
String content = XContentHelper.stripWhitespace("""
{
"active": {
"advanced_snippet": {
"created_at": "2023-11-09T15:13:08.231Z",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": {
"service": "Incident",
"query": "user_nameSTARTSWITHa"
}
},
"rules": [
{
"created_at": "2023-11-09T15:13:08.231Z",
"field": "_",
"id": "DEFAULT",
"order": 0,
"policy": "include",
"rule": "regex",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": ".*"
}
],
"validation": {
"errors": [{"ids": ["1"], "messages": ["some messages"]}],
"state": "invalid"
}
},
"domain": "DEFAULT",
"draft": {
"advanced_snippet": {
"created_at": "2023-11-09T15:13:08.231Z",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": {}
},
"rules": [
{
"created_at": "2023-11-09T15:13:08.231Z",
"field": "_",
"id": "DEFAULT",
"order": 0,
"policy": "include",
"rule": "regex",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": ".*"
}
],
"validation": {
"errors": [],
"state": "valid"
}
}
}
""");
ConnectorFiltering filtering = ConnectorFiltering.fromXContentBytes(new BytesArray(content), XContentType.JSON);
boolean humanReadable = true;
BytesReference originalBytes = toShuffledXContent(filtering, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable);
ConnectorFiltering parsed;
try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) {
parsed = ConnectorFiltering.fromXContent(parser);
}
assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON);
}
public void testToXContent_WithAdvancedSnippetPopulatedWithAValueLiteral_ExpectParseException() throws IOException {
String content = XContentHelper.stripWhitespace("""
{
"active": {
"advanced_snippet": {
"created_at": "2023-11-09T15:13:08.231Z",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": "string literal"
},
"rules": [
{
"created_at": "2023-11-09T15:13:08.231Z",
"field": "_",
"id": "DEFAULT",
"order": 0,
"policy": "include",
"rule": "regex",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": ".*"
}
],
"validation": {
"errors": [],
"state": "valid"
}
},
"domain": "DEFAULT",
"draft": {
"advanced_snippet": {
"created_at": "2023-11-09T15:13:08.231Z",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": {}
},
"rules": [
{
"created_at": "2023-11-09T15:13:08.231Z",
"field": "_",
"id": "DEFAULT",
"order": 0,
"policy": "include",
"rule": "regex",
"updated_at": "2023-11-09T15:13:08.231Z",
"value": ".*"
}
],
"validation": {
"errors": [],
"state": "valid"
}
}
}
""");
assertThrows(XContentParseException.class, () -> ConnectorFiltering.fromXContentBytes(new BytesArray(content), XContentType.JSON));
}
private void assertTransportSerialization(ConnectorFiltering testInstance) throws IOException {
ConnectorFiltering deserializedInstance = copyInstance(testInstance);
assertNotSame(testInstance, deserializedInstance);
assertThat(testInstance, equalTo(deserializedInstance));
}
private ConnectorFiltering copyInstance(ConnectorFiltering instance) throws IOException {
return copyWriteable(instance, namedWriteableRegistry, ConnectorFiltering::new);
}
}
| ConnectorFilteringTests |
java | apache__spark | launcher/src/main/java/org/apache/spark/launcher/NamedThreadFactory.java | {
"start": 929,
"end": 1358
} | class ____ implements ThreadFactory {
private final String nameFormat;
private final AtomicLong threadIds;
NamedThreadFactory(String nameFormat) {
this.nameFormat = nameFormat;
this.threadIds = new AtomicLong();
}
@Override
public Thread newThread(Runnable r) {
Thread t = new Thread(r, String.format(nameFormat, threadIds.incrementAndGet()));
t.setDaemon(true);
return t;
}
}
| NamedThreadFactory |
java | quarkusio__quarkus | test-framework/common/src/main/java/io/quarkus/test/util/annotations/AnnotationUtils.java | {
"start": 613,
"end": 932
} | class ____ {
private AnnotationUtils() {
}
/**
* Find the first annotation of {@code annotationType} that is either
* <em>directly present</em>, <em>meta-present</em>, or <em>indirectly
* present</em> on the supplied {@code element}.
*
* <p>
* If the element is a | AnnotationUtils |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/TempDirectoryCleanupTests.java | {
"start": 15008,
"end": 16380
} | class ____ {
@Test
void deletesBrokenJunctions(@TempDir Path dir) throws Exception {
var test = Files.createDirectory(dir.resolve("test"));
createWindowsJunction(dir.resolve("link"), test);
// The error might also occur without the source folder being deleted
// but it depends on the order that the TempDir cleanup does its work,
// so the following line forces the error to occur always
Files.delete(test);
}
@Test
void doesNotFollowJunctions(@TempDir Path tempDir, @TrackLogRecords LogRecordListener listener)
throws IOException {
var outsideDir = Files.createDirectory(tempDir.resolve("outside"));
var testFile = Files.writeString(outsideDir.resolve("test.txt"), "test");
JunctionTestCase.target = outsideDir;
try {
executeTestsForClass(JunctionTestCase.class).testEvents() //
.assertStatistics(stats -> stats.started(1).succeeded(1));
}
finally {
JunctionTestCase.target = null;
}
assertThat(outsideDir).exists();
assertThat(testFile).exists();
assertThat(listener.stream(Level.WARNING)) //
.map(LogRecord::getMessage) //
.anyMatch(m -> m.matches(
"Deleting link from location inside of temp dir \\(.+\\) to location outside of temp dir \\(.+\\) but not the target file/directory"));
}
@SuppressWarnings("JUnitMalformedDeclaration")
@NullUnmarked
static | WindowsTests |
java | google__auto | value/src/it/functional/src/test/java/com/google/auto/value/AutoValueTest.java | {
"start": 29526,
"end": 29628
} | interface ____<O> {
NodeType<O> getType();
}
@AutoValue
abstract static | NodeExpressionInterface |
java | spring-projects__spring-boot | loader/spring-boot-loader/src/test/java/org/springframework/boot/loader/zip/DataBlockTests.java | {
"start": 1365,
"end": 2738
} | class ____ {
@Test
void readFullyReadsAllBytesByCallingReadMultipleTimes() throws IOException {
DataBlock dataBlock = mock(DataBlock.class, withSettings().defaultAnswer(CALLS_REAL_METHODS));
given(dataBlock.read(any(), anyLong()))
.will(putBytes(new byte[] { 0, 1 }, new byte[] { 2 }, new byte[] { 3, 4, 5 }));
ByteBuffer dst = ByteBuffer.allocate(6);
dataBlock.readFully(dst, 0);
assertThat(dst.array()).containsExactly(0, 1, 2, 3, 4, 5);
}
private Answer<?> putBytes(byte[]... bytes) {
AtomicInteger count = new AtomicInteger();
return (invocation) -> {
int index = count.getAndIncrement();
invocation.getArgument(0, ByteBuffer.class).put(bytes[index]);
return bytes.length;
};
}
@Test
void readFullyWhenReadReturnsNegativeResultThrowsException() throws Exception {
DataBlock dataBlock = mock(DataBlock.class, withSettings().defaultAnswer(CALLS_REAL_METHODS));
given(dataBlock.read(any(), anyLong())).willReturn(-1);
ByteBuffer dst = ByteBuffer.allocate(8);
assertThatExceptionOfType(EOFException.class).isThrownBy(() -> dataBlock.readFully(dst, 0));
}
@Test
void asInputStreamReturnsDataBlockInputStream() throws Exception {
DataBlock dataBlock = mock(DataBlock.class, withSettings().defaultAnswer(CALLS_REAL_METHODS));
assertThat(dataBlock.asInputStream()).isInstanceOf(DataBlockInputStream.class);
}
}
| DataBlockTests |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/batch/BatchExecDynamicFilteringDataCollector.java | {
"start": 2510,
"end": 5320
} | class ____ extends ExecNodeBase<Object>
implements BatchExecNode<Object> {
@Experimental
private static final ConfigOption<MemorySize> TABLE_EXEC_DYNAMIC_FILTERING_THRESHOLD =
key("table.exec.dynamic-filtering.threshold")
.memoryType()
.defaultValue(MemorySize.parse("8 mb"))
.withDescription(
"If the collector collects more data than the threshold (default is 8M), "
+ "an empty DynamicFilterEvent with a flag only will be sent to Coordinator, "
+ "which could avoid exceeding the pekko limit and out-of-memory (see "
+ RpcOptions.FRAMESIZE.key()
+ "). Otherwise a DynamicFilterEvent with all deduplicated records will be sent to Coordinator.");
private final List<Integer> dynamicFilteringFieldIndices;
public BatchExecDynamicFilteringDataCollector(
List<Integer> dynamicFilteringFieldIndices,
ReadableConfig tableConfig,
InputProperty inputProperty,
RowType outputType,
String description) {
super(
ExecNodeContext.newNodeId(),
ExecNodeContext.newContext(BatchExecTableSourceScan.class),
ExecNodeContext.newPersistedConfig(BatchExecTableSourceScan.class, tableConfig),
Collections.singletonList(inputProperty),
outputType,
description);
this.dynamicFilteringFieldIndices = dynamicFilteringFieldIndices;
checkArgument(outputType.getFieldCount() == dynamicFilteringFieldIndices.size());
}
@Override
@SuppressWarnings("unchecked")
protected Transformation<Object> translateToPlanInternal(
PlannerBase planner, ExecNodeConfig config) {
final ExecEdge inputEdge = getInputEdges().get(0);
final Transformation<RowData> inputTransform =
(Transformation<RowData>) inputEdge.translateToPlan(planner);
StreamOperatorFactory<Object> factory =
new DynamicFilteringDataCollectorOperatorFactory(
(RowType) getOutputType(),
dynamicFilteringFieldIndices,
config.get(TABLE_EXEC_DYNAMIC_FILTERING_THRESHOLD).getBytes());
return ExecNodeUtil.createOneInputTransformation(
inputTransform,
createTransformationName(config),
createTransformationDescription(config),
factory,
InternalTypeInfo.of(getOutputType()),
1,
true); // parallelism should always be 1
}
}
| BatchExecDynamicFilteringDataCollector |
java | apache__camel | components/camel-kamelet/src/main/java/org/apache/camel/component/kamelet/KameletComponent.java | {
"start": 17305,
"end": 21471
} | class ____ extends LifecycleStrategySupport {
record Tuple(KameletEndpoint endpoint, String parentRouteId, String parentProcessorId) {
}
private final List<Tuple> endpoints;
private final AtomicBoolean initialized;
public LifecycleHandler() {
this.endpoints = new ArrayList<>();
this.initialized = new AtomicBoolean();
}
public void createRouteForEndpoint(KameletEndpoint endpoint, String parentRouteId, String parentProcessorId)
throws Exception {
// creating dynamic routes from kamelets should not happen concurrently so we use locking
lock.lock();
try {
doCreateRouteForEndpoint(endpoint, parentRouteId, parentProcessorId);
} finally {
lock.unlock();
}
}
protected void doCreateRouteForEndpoint(KameletEndpoint endpoint, String parentRouteId, String parentProcessorId)
throws Exception {
final ModelCamelContext context = (ModelCamelContext) getCamelContext();
final String templateId = endpoint.getTemplateId();
final String routeId = endpoint.getRouteId();
final String loc = endpoint.getLocation() != null ? endpoint.getLocation() : getLocation();
final String uuid = (String) endpoint.getKameletProperties().get(PARAM_UUID);
if (context.getRouteTemplateDefinition(templateId) == null && loc != null) {
LOG.debug("Loading route template={} from {}", templateId, loc);
RouteTemplateHelper.loadRouteTemplateFromLocation(getCamelContext(), routeTemplateLoaderListener, templateId,
loc);
}
LOG.debug("Creating route from template={} and id={}", templateId, routeId);
try {
String id = context.addRouteFromKamelet(routeId, templateId, uuid, parentRouteId, parentProcessorId,
endpoint.getKameletProperties());
RouteDefinition def = context.getRouteDefinition(id);
// start the route if not already started
ServiceStatus status = context.getRouteController().getRouteStatus(id);
boolean started = status != null && status.isStarted();
if (!started) {
context.startRouteDefinitions(Collections.singletonList(def));
}
LOG.debug("Route with id={} created from template={}", id, templateId);
} catch (Exception e) {
throw new FailedToCreateKameletException(templateId, loc, e);
}
}
@Override
public void onContextInitialized(CamelContext context) throws VetoCamelContextStartException {
if (this.initialized.compareAndSet(false, true)) {
for (Tuple tuple : endpoints) {
try {
createRouteForEndpoint(tuple.endpoint, tuple.parentRouteId, tuple.parentProcessorId);
} catch (Exception e) {
throw new VetoCamelContextStartException(
"Failure creating route from template: " + tuple.endpoint.getTemplateId(), e, context);
}
}
endpoints.clear();
}
}
public void setInitialized(boolean initialized) {
this.initialized.set(initialized);
}
public void track(KameletEndpoint endpoint, String parentRouteId, String parentProcessorId) {
if (this.initialized.get()) {
try {
createRouteForEndpoint(endpoint, parentRouteId, parentProcessorId);
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeException(e);
}
} else {
LOG.debug("Tracking route template={} and id={}", endpoint.getTemplateId(), endpoint.getRouteId());
this.endpoints.add(new Tuple(endpoint, parentRouteId, parentProcessorId));
}
}
}
}
| LifecycleHandler |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/flogger/FloggerArgumentToString.java | {
"start": 5099,
"end": 5887
} | class ____ {
final Supplier<String> source;
final Type type;
final @Nullable Character placeholder;
private Parameter(ExpressionTree expression, char placeholder) {
this(s -> s.getSourceForNode(expression), getType(expression), placeholder);
}
private Parameter(Supplier<String> source, Type type, char placeholder) {
this.source = source;
this.type = type;
this.placeholder = placeholder;
}
private static Parameter receiver(MethodInvocationTree invocation, char placeholder) {
ExpressionTree receiver = getReceiver(invocation);
if (receiver != null) {
return new Parameter(getReceiver(invocation), placeholder);
}
return new Parameter(s -> "this", null, placeholder);
}
}
private | Parameter |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/support/IdTimestampMessageHeaderInitializer.java | {
"start": 1055,
"end": 2905
} | class ____ implements MessageHeaderInitializer {
private static final IdGenerator ID_VALUE_NONE_GENERATOR = () -> MessageHeaders.ID_VALUE_NONE;
private @Nullable IdGenerator idGenerator;
private boolean enableTimestamp;
/**
* Configure the IdGenerator strategy to initialize {@code MessageHeaderAccessor}
* instances with.
* <p>By default this property is set to {@code null} in which case the default
* IdGenerator of {@link org.springframework.messaging.MessageHeaders} is used.
* <p>To have no ids generated at all, see {@link #setDisableIdGeneration()}.
*/
public void setIdGenerator(@Nullable IdGenerator idGenerator) {
this.idGenerator = idGenerator;
}
/**
* Return the configured {@code IdGenerator}, if any.
*/
public @Nullable IdGenerator getIdGenerator() {
return this.idGenerator;
}
/**
* A shortcut for calling {@link #setIdGenerator} with an id generation strategy
* to disable id generation completely.
*/
public void setDisableIdGeneration() {
this.idGenerator = ID_VALUE_NONE_GENERATOR;
}
/**
* Whether to enable the automatic addition of the
* {@link org.springframework.messaging.MessageHeaders#TIMESTAMP} header on
* {@code MessageHeaderAccessor} instances being initialized.
* <p>By default this property is set to false.
*/
public void setEnableTimestamp(boolean enableTimestamp) {
this.enableTimestamp = enableTimestamp;
}
/**
* Return whether the timestamp header is enabled or not.
*/
public boolean isEnableTimestamp() {
return this.enableTimestamp;
}
@Override
public void initHeaders(MessageHeaderAccessor headerAccessor) {
IdGenerator idGenerator = getIdGenerator();
if (idGenerator != null) {
headerAccessor.setIdGenerator(idGenerator);
}
headerAccessor.setEnableTimestamp(isEnableTimestamp());
}
}
| IdTimestampMessageHeaderInitializer |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/BeanWrapperTests.java | {
"start": 12603,
"end": 12800
} | interface ____ extends BaseProperty {
default void setAliasedName(String name) {
setName(name);
}
void setName(String name);
}
@SuppressWarnings("unused")
private static | AliasedProperty |
java | spring-projects__spring-framework | spring-r2dbc/src/main/java/org/springframework/r2dbc/core/ConnectionAccessor.java | {
"start": 1391,
"end": 1503
} | interface ____ {@link DatabaseClient}.
*
* @author Mark Paluch
* @since 5.3
* @see DatabaseClient
*/
public | for |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/token/OAuth2TokenGenerator.java | {
"start": 982,
"end": 1331
} | interface ____ responsible for generating an {@link OAuth2Token}
* using the attributes contained in the {@link OAuth2TokenContext}.
*
* @param <T> the type of the OAuth 2.0 Token
* @author Joe Grandja
* @since 7.0
* @see OAuth2Token
* @see OAuth2TokenContext
* @see OAuth2TokenClaimsSet
* @see ClaimAccessor
*/
@FunctionalInterface
public | are |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/co/CoStreamMap.java | {
"start": 1332,
"end": 2007
} | class ____<IN1, IN2, OUT>
extends AbstractUdfStreamOperator<OUT, CoMapFunction<IN1, IN2, OUT>>
implements TwoInputStreamOperator<IN1, IN2, OUT> {
private static final long serialVersionUID = 1L;
public CoStreamMap(CoMapFunction<IN1, IN2, OUT> mapper) {
super(mapper);
}
@Override
public void processElement1(StreamRecord<IN1> element) throws Exception {
output.collect(element.replace(userFunction.map1(element.getValue())));
}
@Override
public void processElement2(StreamRecord<IN2> element) throws Exception {
output.collect(element.replace(userFunction.map2(element.getValue())));
}
}
| CoStreamMap |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/model/User.java | {
"start": 982,
"end": 2050
} | class ____ implements Serializable {
private int age;
private String name;
public User(int age, String name) {
this.age = age;
this.name = name;
}
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@Override
public String toString() {
return String.format("User name(%s) age(%d) ", name, age);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
User user = (User) o;
if (name == null) {
if (user.name != null) {
return false;
}
} else if (!name.equals(user.name)) {
return false;
}
return Objects.equals(age, user.age);
}
@Override
public int hashCode() {
return Objects.hash(age, name);
}
}
| User |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/javadoc/InvalidLinkTest.java | {
"start": 3822,
"end": 3971
} | interface ____ {}
""")
.addOutputLines(
"Test.java",
"""
/** {@link #frobnicate} */
| A |
java | quarkusio__quarkus | extensions/spring-cloud-config-client/runtime/src/main/java/io/quarkus/spring/cloud/config/client/runtime/eureka/EurekaInstanceSelector.java | {
"start": 154,
"end": 242
} | interface ____ {
JsonObject select(List<JsonObject> instances);
}
| EurekaInstanceSelector |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/rest/ClosedRestService.java | {
"start": 1026,
"end": 1282
} | enum ____ implements RestService {
INSTANCE;
@Override
public CompletableFuture<Void> closeAsync() {
return FutureUtils.completedVoidFuture();
}
@Override
public int getRestPort() {
return -1;
}
}
| ClosedRestService |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-common/runtime/src/main/java/io/quarkus/resteasy/reactive/common/runtime/ArcBeanFactory.java | {
"start": 160,
"end": 1399
} | class ____<T> implements BeanFactory<T> {
private final BeanContainer.Factory<T> factory;
// for toString
private final String targetClassName;
public ArcBeanFactory(Class<T> target, BeanContainer beanContainer) {
targetClassName = target.getName();
factory = beanContainer.beanInstanceFactory(target);
}
@Override
public String toString() {
return "ArcBeanFactory[" + targetClassName + "]";
}
@Override
public BeanInstance<T> createInstance() {
BeanContainer.Instance<T> instance;
try {
instance = factory.create();
return new BeanInstance<T>() {
@Override
public T getInstance() {
return instance.get();
}
@Override
public void close() {
instance.close();
}
};
} catch (Exception e) {
if (factory.getClass().getName().contains("DefaultInstanceFactory")) {
throw new IllegalArgumentException(
"Unable to create class '" + targetClassName
+ "'. To fix the problem, make sure this | ArcBeanFactory |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/DefaultStreamCachingTest.java | {
"start": 1163,
"end": 1730
} | class ____ {
@Test
public void testStreamCaching() throws Exception {
AbstractApplicationContext appContext
= new ClassPathXmlApplicationContext(new String[] { "org/apache/camel/spring/streamCaching.xml" });
CamelContext camelContext = appContext.getBean("camelContext", CamelContext.class);
assertTrue(camelContext.isStreamCaching(), "StreamCaching should be enabled by default");
// we're done so let's properly close the application context
IOHelper.close(appContext);
}
}
| DefaultStreamCachingTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java | {
"start": 1514,
"end": 1612
} | class ____ implements Progressable {
public void progress() { }
}
private static | NullProgress |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/TupleSerializerSnapshot.java | {
"start": 2254,
"end": 2742
} | class ____ not be NULL");
}
@Override
protected int getCurrentOuterSnapshotVersion() {
return VERSION;
}
@Override
protected TypeSerializer<?>[] getNestedSerializers(TupleSerializer<T> outerSerializer) {
return outerSerializer.getFieldSerializers();
}
@Override
protected TupleSerializer<T> createOuterSerializerWithNestedSerializers(
TypeSerializer<?>[] nestedSerializers) {
checkState(tupleClass != null, "tuple | can |
java | apache__rocketmq | proxy/src/main/java/org/apache/rocketmq/proxy/service/transaction/TransactionData.java | {
"start": 1000,
"end": 3940
} | class ____ implements Comparable<TransactionData> {
private final String brokerName;
private final String topic;
private final long tranStateTableOffset;
private final long commitLogOffset;
private final String transactionId;
private final long checkTimestamp;
private final long expireMs;
public TransactionData(String brokerName, String topic, long tranStateTableOffset, long commitLogOffset, String transactionId,
long checkTimestamp, long expireMs) {
this.brokerName = brokerName;
this.topic = topic;
this.tranStateTableOffset = tranStateTableOffset;
this.commitLogOffset = commitLogOffset;
this.transactionId = transactionId;
this.checkTimestamp = checkTimestamp;
this.expireMs = expireMs;
}
public String getBrokerName() {
return brokerName;
}
public String getTopic() {
return topic;
}
public long getTranStateTableOffset() {
return tranStateTableOffset;
}
public long getCommitLogOffset() {
return commitLogOffset;
}
public String getTransactionId() {
return transactionId;
}
public long getCheckTimestamp() {
return checkTimestamp;
}
public long getExpireMs() {
return expireMs;
}
public long getExpireTime() {
return checkTimestamp + expireMs;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
TransactionData data = (TransactionData) o;
return tranStateTableOffset == data.tranStateTableOffset && commitLogOffset == data.commitLogOffset &&
getExpireTime() == data.getExpireTime() && Objects.equal(brokerName, data.brokerName) &&
Objects.equal(transactionId, data.transactionId);
}
@Override
public int hashCode() {
return Objects.hashCode(brokerName, transactionId, tranStateTableOffset, commitLogOffset, getExpireTime());
}
@Override
public int compareTo(TransactionData o) {
return ComparisonChain.start()
.compare(getExpireTime(), o.getExpireTime())
.compare(brokerName, o.brokerName)
.compare(commitLogOffset, o.commitLogOffset)
.compare(tranStateTableOffset, o.tranStateTableOffset)
.compare(transactionId, o.transactionId)
.result();
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("brokerName", brokerName)
.add("tranStateTableOffset", tranStateTableOffset)
.add("commitLogOffset", commitLogOffset)
.add("transactionId", transactionId)
.add("checkTimestamp", checkTimestamp)
.add("expireMs", expireMs)
.toString();
}
}
| TransactionData |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/codec/perfield/XPerFieldDocValuesFormat.java | {
"start": 3110,
"end": 9881
} | class ____ extends DocValuesConsumer {
private final Map<DocValuesFormat, ConsumerAndSuffix> formats = new HashMap<>();
private final Map<String, Integer> suffixes = new HashMap<>();
private final SegmentWriteState segmentWriteState;
FieldsWriter(SegmentWriteState state) {
segmentWriteState = state;
}
@Override
public void addNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException {
getInstance(field).addNumericField(field, valuesProducer);
}
@Override
public void addBinaryField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException {
getInstance(field).addBinaryField(field, valuesProducer);
}
@Override
public void addSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException {
getInstance(field).addSortedField(field, valuesProducer);
}
@Override
public void addSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException {
getInstance(field).addSortedNumericField(field, valuesProducer);
}
@Override
public void addSortedSetField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException {
getInstance(field).addSortedSetField(field, valuesProducer);
}
@Override
public void merge(MergeState mergeState) throws IOException {
Map<DocValuesConsumer, Collection<String>> consumersToField = new IdentityHashMap<>();
// Group each consumer by the fields it handles
for (FieldInfo fi : mergeState.mergeFieldInfos) {
if (fi.getDocValuesType() == DocValuesType.NONE) {
continue;
}
// merge should ignore current format for the fields being merged
DocValuesConsumer consumer = getInstance(fi, true);
Collection<String> fieldsForConsumer = consumersToField.get(consumer);
if (fieldsForConsumer == null) {
fieldsForConsumer = new ArrayList<>();
consumersToField.put(consumer, fieldsForConsumer);
}
fieldsForConsumer.add(fi.name);
}
// Delegate the merge to the appropriate consumer
for (Map.Entry<DocValuesConsumer, Collection<String>> e : consumersToField.entrySet()) {
e.getKey().merge(XPerFieldMergeState.restrictFields(mergeState, e.getValue()));
}
}
private DocValuesConsumer getInstance(FieldInfo field) throws IOException {
return getInstance(field, false);
}
/**
* DocValuesConsumer for the given field.
*
* @param field - FieldInfo object.
* @param ignoreCurrentFormat - ignore the existing format attributes.
* @return DocValuesConsumer for the field.
* @throws IOException if there is a low-level IO error
*/
private DocValuesConsumer getInstance(FieldInfo field, boolean ignoreCurrentFormat) throws IOException {
DocValuesFormat format = null;
if (field.getDocValuesGen() != -1) {
String formatName = null;
if (ignoreCurrentFormat == false) {
formatName = field.getAttribute(PER_FIELD_FORMAT_KEY);
}
// this means the field never existed in that segment, yet is applied updates
if (formatName != null) {
format = DocValuesFormat.forName(formatName);
}
}
if (format == null) {
format = getDocValuesFormatForField(field.name);
}
if (format == null) {
throw new IllegalStateException("invalid null DocValuesFormat for field=\"" + field.name + "\"");
}
final String formatName = format.getName();
field.putAttribute(PER_FIELD_FORMAT_KEY, formatName);
Integer suffix = null;
ConsumerAndSuffix consumer = formats.get(format);
if (consumer == null) {
// First time we are seeing this format; create a new instance
if (field.getDocValuesGen() != -1) {
String suffixAtt = null;
if (ignoreCurrentFormat == false) {
suffixAtt = field.getAttribute(PER_FIELD_SUFFIX_KEY);
}
// even when dvGen is != -1, it can still be a new field, that never
// existed in the segment, and therefore doesn't have the recorded
// attributes yet.
if (suffixAtt != null) {
suffix = Integer.valueOf(suffixAtt);
}
}
if (suffix == null) {
// bump the suffix
suffix = suffixes.get(formatName);
if (suffix == null) {
suffix = 0;
} else {
suffix = suffix + 1;
}
}
suffixes.put(formatName, suffix);
final String segmentSuffix = getFullSegmentSuffix(
segmentWriteState.segmentSuffix,
getSuffix(formatName, Integer.toString(suffix))
);
consumer = new ConsumerAndSuffix(format.fieldsConsumer(new SegmentWriteState(segmentWriteState, segmentSuffix)), suffix);
formats.put(format, consumer);
} else {
// we've already seen this format, so just grab its suffix
assert suffixes.containsKey(formatName);
suffix = consumer.suffix;
}
field.putAttribute(PER_FIELD_SUFFIX_KEY, Integer.toString(suffix));
// TODO: we should only provide the "slice" of FIS
// that this DVF actually sees ...
return consumer.consumer;
}
@Override
public void close() throws IOException {
// Close all subs
IOUtils.close(formats.values());
}
}
static String getSuffix(String formatName, String suffix) {
return formatName + "_" + suffix;
}
static String getFullSegmentSuffix(String outerSegmentSuffix, String segmentSuffix) {
if (outerSegmentSuffix.length() == 0) {
return segmentSuffix;
} else {
return outerSegmentSuffix + "_" + segmentSuffix;
}
}
@SuppressForbidden(reason = "forked from Lucene")
public static | FieldsWriter |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/webmonitor/threadinfo/VertexThreadInfoTracker.java | {
"start": 19372,
"end": 22050
} | class ____
extends ThreadInfoSampleCompletionCallback {
private final JobVertexKey jobVertexKey;
JobVertexThreadInfoSampleCompletionCallback(JobVertexKey jobVertexKey, String sampleName) {
super(sampleName);
this.jobVertexKey = jobVertexKey;
}
@Override
protected void handleResult(VertexThreadInfoStats threadInfoStats) {
jobVertexStatsCache.put(jobVertexKey, threadInfoStats);
for (Map.Entry<ExecutionAttemptID, Collection<ThreadInfoSample>> entry :
threadInfoStats.getSamplesBySubtask().entrySet()) {
ExecutionAttemptID executionAttemptID = entry.getKey();
ExecutionVertexKey executionVertexKey =
jobVertexKey.toExecutionVertexKey(executionAttemptID.getSubtaskIndex());
VertexThreadInfoStats oldStats =
executionVertexStatsCache.getIfPresent(executionVertexKey);
if (oldStats == null || oldStats.getRequestId() < threadInfoStats.getRequestId()) {
executionVertexStatsCache.put(
executionVertexKey,
generateExecutionVertexStats(
threadInfoStats, executionAttemptID, entry.getValue()));
continue;
}
if (oldStats.getRequestId() == threadInfoStats.getRequestId()) {
// When the same ExecutionVertex has multiple attempts.
Map<ExecutionAttemptID, Collection<ThreadInfoSample>> samples =
oldStats.getSamplesBySubtask();
samples.put(executionAttemptID, entry.getValue());
}
}
}
private VertexThreadInfoStats generateExecutionVertexStats(
VertexThreadInfoStats threadInfoStats,
ExecutionAttemptID executionAttemptID,
Collection<ThreadInfoSample> sample) {
HashMap<ExecutionAttemptID, Collection<ThreadInfoSample>> samples = new HashMap<>();
samples.put(executionAttemptID, sample);
return new VertexThreadInfoStats(
threadInfoStats.getRequestId(),
threadInfoStats.getStartTime(),
threadInfoStats.getEndTime(),
samples);
}
@Override
protected void doFinally() {
pendingJobVertexStats.remove(jobVertexKey);
}
}
/** Callback on completed thread info sample for execution vertex. */
private | JobVertexThreadInfoSampleCompletionCallback |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/collections/semantics/UniqueListWrapper.java | {
"start": 604,
"end": 3853
} | class ____<E> extends PersistentList<E> {
public UniqueListWrapper(SharedSessionContractImplementor session) {
super( session );
}
public UniqueListWrapper(SharedSessionContractImplementor session, List<E> list) {
super( session, list );
}
// ...
//end::collections-custom-semantics-ex[]
@Override
public boolean add(E element) {
if ( element == null ) {
// per java.util.List requirements
throw new NullPointerException( "Passed collection cannot be null" );
}
final Boolean exists = isOperationQueueEnabled() ? readElementExistence( element ) : null;
if ( exists == null ) {
initialize( true );
if ( getRawList().contains( element ) ) {
// per java.util.List requirements
throw new IllegalArgumentException( "Cannot add given element to unique List as it already existed" );
}
getRawList().add( element );
dirty();
return true;
}
else if ( exists ) {
return false;
}
else {
queueOperation( new SimpleAdd( element ) );
return true;
}
}
@Override
public boolean addAll(Collection<? extends E> values) {
if ( values == null ) {
// per java.util.List requirements
throw new NullPointerException( "Passed collection cannot be null" );
}
boolean changed = false;
for ( E value : values ) {
final boolean added = add( value );
if ( ! added ) {
// per java.util.List requirements
throw new IllegalArgumentException( "Cannot add given element to unique List as it already existed" );
}
changed = changed || added;
}
return changed;
}
@Override
public boolean addAll(int index, Collection<? extends E> values) {
if ( values == null ) {
// per java.util.List requirements
throw new NullPointerException( "Passed collection cannot be null" );
}
return addAllStartingAt( index, values );
}
private boolean addAllStartingAt(int index, Collection<? extends E> values) {
if ( values == null ) {
// per java.util.List requirements
throw new NullPointerException( "Passed collection cannot be null" );
}
boolean changed = false;
int position = index;
for ( E value : values ) {
final boolean added = addAt( position++, value );
changed = changed || added;
}
return changed;
}
private boolean addAt(int index, E value) {
if ( value == null ) {
// per java.util.List requirements
throw new NullPointerException( "Passed collection cannot be null" );
}
final Boolean exists = isOperationQueueEnabled() ? readElementExistence( value ) : null;
if ( exists == null ) {
initialize( true );
if ( getRawList().contains( value ) ) {
return false;
}
getRawList().add( index, value );
dirty();
return true;
}
else if ( exists ) {
return false;
}
else {
queueOperation( new SimpleAdd( value ) );
return true;
}
}
@Override
public E set(int index, E value) {
initialize( true );
return getRawList().set( index, value );
}
@Override
public void add(int index, E value) {
final boolean added = addAt( index, value );
if ( !added ) {
throw new IllegalArgumentException( "Cannot add given element to unique List as it already existed" );
}
}
//tag::collections-custom-semantics-ex[]
}
//end::collections-custom-semantics-ex[]
| UniqueListWrapper |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/QualifierOrScopeOnInjectMethodTest.java | {
"start": 1530,
"end": 2013
} | class ____ {
@Inject
// BUG: Diagnostic contains: remove
@Named("bar")
void someMethod() {}
}
""")
.doTest();
}
@Test
public void positiveCase_injectConstructor() {
refactoringHelper
.addInputLines(
"in/Foo.java",
"""
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
| Foo |
java | google__guava | android/guava/src/com/google/common/collect/Iterators.java | {
"start": 49229,
"end": 52599
} | class ____<T extends @Nullable Object>
implements Iterator<T> {
/* The last iterator to return an element. Calls to remove() go to this iterator. */
private @Nullable Iterator<? extends T> toRemove;
/* The iterator currently returning elements. */
private Iterator<? extends T> iterator;
/*
* We track the "meta iterators," the iterators-of-iterators, below. Usually, topMetaIterator
* is the only one in use, but if we encounter nested concatenations, we start a deque of
* meta-iterators rather than letting the nesting get arbitrarily deep. This keeps each
* operation O(1).
*/
private @Nullable Iterator<? extends Iterator<? extends T>> topMetaIterator;
// Only becomes nonnull if we encounter nested concatenations.
private @Nullable Deque<Iterator<? extends Iterator<? extends T>>> metaIterators;
ConcatenatedIterator(Iterator<? extends Iterator<? extends T>> metaIterator) {
iterator = emptyIterator();
topMetaIterator = checkNotNull(metaIterator);
}
// Returns a nonempty meta-iterator or, if all meta-iterators are empty, null.
private @Nullable Iterator<? extends Iterator<? extends T>> getTopMetaIterator() {
while (topMetaIterator == null || !topMetaIterator.hasNext()) {
if (metaIterators != null && !metaIterators.isEmpty()) {
topMetaIterator = metaIterators.removeFirst();
} else {
return null;
}
}
return topMetaIterator;
}
@Override
public boolean hasNext() {
while (!checkNotNull(iterator).hasNext()) {
// this weird checkNotNull positioning appears required by our tests, which expect
// both hasNext and next to throw NPE if an input iterator is null.
topMetaIterator = getTopMetaIterator();
if (topMetaIterator == null) {
return false;
}
iterator = topMetaIterator.next();
if (iterator instanceof ConcatenatedIterator) {
// Instead of taking linear time in the number of nested concatenations, unpack
// them into the queue
@SuppressWarnings("unchecked")
ConcatenatedIterator<T> topConcat = (ConcatenatedIterator<T>) iterator;
iterator = topConcat.iterator;
// topConcat.topMetaIterator, then topConcat.metaIterators, then this.topMetaIterator,
// then this.metaIterators
if (this.metaIterators == null) {
this.metaIterators = new ArrayDeque<>();
}
this.metaIterators.addFirst(this.topMetaIterator);
if (topConcat.metaIterators != null) {
while (!topConcat.metaIterators.isEmpty()) {
this.metaIterators.addFirst(topConcat.metaIterators.removeLast());
}
}
this.topMetaIterator = topConcat.topMetaIterator;
}
}
return true;
}
@Override
@ParametricNullness
public T next() {
if (hasNext()) {
toRemove = iterator;
return iterator.next();
} else {
throw new NoSuchElementException();
}
}
@Override
public void remove() {
if (toRemove == null) {
throw new IllegalStateException("no calls to next() since the last call to remove()");
}
toRemove.remove();
toRemove = null;
}
}
}
| ConcatenatedIterator |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/error/ShouldNotBeIn.java | {
"start": 1016,
"end": 2257
} | class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link ShouldNotBeIn}</code>.
* @param actual the actual value in the failed assertion.
* @param values the group of values where {@code actual} is expected to be in.
* @param comparisonStrategy the {@link ComparisonStrategy} used to evaluate assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldNotBeIn(Object actual, Object values, ComparisonStrategy comparisonStrategy) {
return new ShouldNotBeIn(actual, values, comparisonStrategy);
}
/**
* Creates a new <code>{@link ShouldNotBeIn}</code>.
* @param actual the actual value in the failed assertion.
* @param values the group of values where {@code actual} is expected to be in.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldNotBeIn(Object actual, Object values) {
return new ShouldNotBeIn(actual, values, StandardComparisonStrategy.instance());
}
private ShouldNotBeIn(Object actual, Object values, ComparisonStrategy comparisonStrategy) {
super("%nExpecting actual:%n %s%nnot to be in:%n %s%n%s", actual, values, comparisonStrategy);
}
}
| ShouldNotBeIn |
java | apache__camel | components/camel-kubernetes/src/main/java/org/apache/camel/component/kubernetes/cluster/lock/KubernetesClusterEvent.java | {
"start": 927,
"end": 1016
} | interface ____ events produced by the Kubernetes cluster.
*/
@FunctionalInterface
public | for |
java | playframework__playframework | documentation/manual/working/javaGuide/main/i18n/code/javaguide/i18n/JavaI18N.java | {
"start": 6395,
"end": 8614
} | class ____ extends MockJavaAction {
AcceptedLanguageController(JavaHandlerComponents javaHandlerComponents) {
super(javaHandlerComponents);
}
// #accepted-languages
public Result index(Http.Request request) {
List<Lang> langs = request.acceptLanguages();
String codes = langs.stream().map(Lang::code).collect(joining(","));
return ok(codes);
}
// #accepted-languages
}
@Test
public void testSingleApostrophe() {
assertThat(singleApostrophe()).isTrue();
}
private Boolean singleApostrophe() {
MessagesApi messagesApi = app.injector().instanceOf(MessagesApi.class);
Collection<Lang> candidates = Collections.singletonList(new Lang(Locale.US));
Messages messages = messagesApi.preferred(candidates);
// #single-apostrophe
String errorMessage = messages.at("info.error");
Boolean areEqual = errorMessage.equals("You aren't logged in!");
// #single-apostrophe
return areEqual;
}
@Test
public void testEscapedParameters() {
assertThat(escapedParameters()).isTrue();
}
private Boolean escapedParameters() {
MessagesApi messagesApi = app.injector().instanceOf(MessagesApi.class);
Collection<Lang> candidates = Collections.singletonList(new Lang(Locale.US));
Messages messages = messagesApi.preferred(candidates);
// #parameter-escaping
String errorMessage = messages.at("example.formatting");
Boolean areEqual =
errorMessage.equals(
"When using MessageFormat, '{0}' is replaced with the first parameter.");
// #parameter-escaping
return areEqual;
}
// #explicit-messages-api
private MessagesApi explicitMessagesApi() {
return new play.i18n.MessagesApi(
new play.api.i18n.DefaultMessagesApi(
Collections.singletonMap(
Lang.defaultLang().code(), Collections.singletonMap("foo", "bar")),
new play.api.i18n.DefaultLangs().asJava()));
}
// #explicit-messages-api
@Test
public void testExplicitMessagesApi() {
MessagesApi messagesApi = explicitMessagesApi();
String message = messagesApi.get(Lang.defaultLang(), "foo");
assertThat(message).isEqualTo("bar");
}
}
| AcceptedLanguageController |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_3601/Issue3601Mapper.java | {
"start": 409,
"end": 839
} | interface ____ {
Issue3601Mapper INSTANCE = Mappers.getMapper( Issue3601Mapper.class );
@Mapping(target = "currentId", source = "source.uuid")
@Mapping(target = "targetIds", source = "sourceIds")
Target map(Source source, List<String> sourceIds);
@SourceParameterCondition
default boolean isNotEmpty(List<String> elements) {
return elements != null && !elements.isEmpty();
}
| Issue3601Mapper |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/AbstractITCommitProtocol.java | {
"start": 25442,
"end": 25821
} | class ____.
* @throws Exception failure.
*/
private void validateStorageClass(Path dir, String expectedStorageClass) throws Exception {
Path expectedFile = getPart0000(dir);
String actualStorageClass = getS3AInternals().getObjectMetadata(expectedFile)
.storageClassAsString();
Assertions.assertThat(actualStorageClass)
.describedAs("Storage | value |
java | apache__dubbo | dubbo-plugin/dubbo-qos/src/test/java/org/apache/dubbo/qos/legacy/service/DemoServiceImpl.java | {
"start": 999,
"end": 3459
} | class ____ implements DemoService {
private static final Logger logger = LoggerFactory.getLogger(DemoServiceImpl.class);
public DemoServiceImpl() {
super();
}
public void sayHello(String name) {
logger.info("hello {}", name);
}
public String echo(String text) {
return text;
}
public Map echo(Map map) {
return map;
}
public long timestamp() {
return System.currentTimeMillis();
}
public String getThreadName() {
return Thread.currentThread().getName();
}
public int getSize(String[] strs) {
if (strs == null) return -1;
return strs.length;
}
public int getSize(Object[] os) {
if (os == null) return -1;
return os.length;
}
public Object invoke(String service, String method) throws Exception {
logger.info(
"RpcContext.getServerAttachment().getRemoteHost()={}",
RpcContext.getServiceContext().getRemoteHost());
return service + ":" + method;
}
public Type enumlength(Type... types) {
if (types.length == 0) return Type.Lower;
return types[0];
}
public Type getType(Type type) {
return type;
}
public int stringLength(String str) {
return str.length();
}
public String get(CustomArgument arg1) {
return arg1.toString();
}
public byte getbyte(byte arg) {
return arg;
}
public Person gerPerson(Person person) {
return person;
}
public Set<String> keys(Map<String, String> map) {
return map == null ? null : map.keySet();
}
public void nonSerializedParameter(NonSerialized ns) {}
public NonSerialized returnNonSerialized() {
return new NonSerialized();
}
public long add(int a, long b) {
return a + b;
}
@Override
public int getPerson(Person person) {
return person.getAge();
}
@Override
public int getPerson(Person person1, Person person2) {
return person1.getAge() + person2.getAge();
}
@Override
public String getPerson(Man man) {
return man.getName();
}
@Override
public String getRemoteApplicationName() {
return RpcContext.getServiceContext().getRemoteApplicationName();
}
@Override
public Map<Integer, Object> getMap(Map<Integer, Object> map) {
return map;
}
}
| DemoServiceImpl |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java | {
"start": 65027,
"end": 66663
} | class ____<K,V>
extends org.apache.hadoop.mapreduce.RecordWriter<K,V> {
OutputCollector<K,V> output;
OutputConverter(OutputCollector<K,V> output) {
this.output = output;
}
@Override
public void close(org.apache.hadoop.mapreduce.TaskAttemptContext context){
}
@Override
public void write(K key, V value
) throws IOException, InterruptedException {
output.collect(key,value);
}
}
@SuppressWarnings("unchecked")
@Override
public void combine(RawKeyValueIterator iterator,
OutputCollector<K,V> collector
) throws IOException, InterruptedException,
ClassNotFoundException {
// make a reducer
org.apache.hadoop.mapreduce.Reducer<K,V,K,V> reducer =
(org.apache.hadoop.mapreduce.Reducer<K,V,K,V>)
ReflectionUtils.newInstance(reducerClass, job);
org.apache.hadoop.mapreduce.Reducer.Context
reducerContext = createReduceContext(reducer, job, taskId,
iterator, null, inputCounter,
new OutputConverter(collector),
committer,
reporter, comparator, keyClass,
valueClass);
reducer.run(reducerContext);
}
}
BytesWritable getExtraData() {
return extraData;
}
void setExtraData(BytesWritable extraData) {
this.extraData = extraData;
}
}
| OutputConverter |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/json/AbstractJsonParser.java | {
"start": 891,
"end": 1038
} | class ____ parsers wrapped or implemented in this package.
*
* @author Anton Telechev
* @author Phillip Webb
* @since 2.0.1
*/
public abstract | for |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatDoubleAggregatorFunctionSupplier.java | {
"start": 653,
"end": 1830
} | class ____ implements AggregatorFunctionSupplier {
private final int limit;
private final boolean ascending;
public TopFloatDoubleAggregatorFunctionSupplier(int limit, boolean ascending) {
this.limit = limit;
this.ascending = ascending;
}
@Override
public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
return TopFloatDoubleAggregatorFunction.intermediateStateDesc();
}
@Override
public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
return TopFloatDoubleGroupingAggregatorFunction.intermediateStateDesc();
}
@Override
public TopFloatDoubleAggregatorFunction aggregator(DriverContext driverContext,
List<Integer> channels) {
return TopFloatDoubleAggregatorFunction.create(driverContext, channels, limit, ascending);
}
@Override
public TopFloatDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
List<Integer> channels) {
return TopFloatDoubleGroupingAggregatorFunction.create(channels, driverContext, limit, ascending);
}
@Override
public String describe() {
return "top_float of doubles";
}
}
| TopFloatDoubleAggregatorFunctionSupplier |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/async/utils/TestAsyncUtil.java | {
"start": 1681,
"end": 1828
} | class ____. It utilizes the JUnit testing
* framework to verify that asynchronous operations are performed as
* expected.
*
* <p>
* This | AsyncUtil |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/idclass/mappedsuperclass/BaseSummary.java | {
"start": 396,
"end": 1190
} | class ____ implements Serializable {
@Id
private Integer year;
@Id
private Integer month;
private BigDecimal value;
public Integer getYear() {
return year;
}
public void setYear(Integer year) {
this.year = year;
}
public Integer getMonth() {
return month;
}
public void setMonth(Integer month) {
this.month = month;
}
public BigDecimal getValue() {
return value;
}
public void setValue(BigDecimal value) {
this.value = value;
}
@Override
public boolean equals(Object o) {
if ( o == null || getClass() != o.getClass() ) {
return false;
}
BaseSummary that = (BaseSummary) o;
return Objects.equals( year, that.year ) && Objects.equals( month, that.month );
}
@Override
public int hashCode() {
return Objects.hash( year, month );
}
}
| BaseSummary |
java | netty__netty | codec-dns/src/main/java/io/netty/handler/codec/dns/DefaultDnsResponse.java | {
"start": 802,
"end": 4819
} | class ____ extends AbstractDnsMessage implements DnsResponse {
private boolean authoritativeAnswer;
private boolean truncated;
private boolean recursionAvailable;
private DnsResponseCode code;
/**
* Creates a new instance with the {@link DnsOpCode#QUERY} {@code opCode} and
* the {@link DnsResponseCode#NOERROR} {@code RCODE}.
*
* @param id the {@code ID} of the DNS response
*/
public DefaultDnsResponse(int id) {
this(id, DnsOpCode.QUERY, DnsResponseCode.NOERROR);
}
/**
* Creates a new instance with the {@link DnsResponseCode#NOERROR} {@code RCODE}.
*
* @param id the {@code ID} of the DNS response
* @param opCode the {@code opCode} of the DNS response
*/
public DefaultDnsResponse(int id, DnsOpCode opCode) {
this(id, opCode, DnsResponseCode.NOERROR);
}
/**
* Creates a new instance.
*
* @param id the {@code ID} of the DNS response
* @param opCode the {@code opCode} of the DNS response
* @param code the {@code RCODE} of the DNS response
*/
public DefaultDnsResponse(int id, DnsOpCode opCode, DnsResponseCode code) {
super(id, opCode);
setCode(code);
}
@Override
public boolean isAuthoritativeAnswer() {
return authoritativeAnswer;
}
@Override
public DnsResponse setAuthoritativeAnswer(boolean authoritativeAnswer) {
this.authoritativeAnswer = authoritativeAnswer;
return this;
}
@Override
public boolean isTruncated() {
return truncated;
}
@Override
public DnsResponse setTruncated(boolean truncated) {
this.truncated = truncated;
return this;
}
@Override
public boolean isRecursionAvailable() {
return recursionAvailable;
}
@Override
public DnsResponse setRecursionAvailable(boolean recursionAvailable) {
this.recursionAvailable = recursionAvailable;
return this;
}
@Override
public DnsResponseCode code() {
return code;
}
@Override
public DnsResponse setCode(DnsResponseCode code) {
this.code = checkNotNull(code, "code");
return this;
}
@Override
public DnsResponse setId(int id) {
return (DnsResponse) super.setId(id);
}
@Override
public DnsResponse setOpCode(DnsOpCode opCode) {
return (DnsResponse) super.setOpCode(opCode);
}
@Override
public DnsResponse setRecursionDesired(boolean recursionDesired) {
return (DnsResponse) super.setRecursionDesired(recursionDesired);
}
@Override
public DnsResponse setZ(int z) {
return (DnsResponse) super.setZ(z);
}
@Override
public DnsResponse setRecord(DnsSection section, DnsRecord record) {
return (DnsResponse) super.setRecord(section, record);
}
@Override
public DnsResponse addRecord(DnsSection section, DnsRecord record) {
return (DnsResponse) super.addRecord(section, record);
}
@Override
public DnsResponse addRecord(DnsSection section, int index, DnsRecord record) {
return (DnsResponse) super.addRecord(section, index, record);
}
@Override
public DnsResponse clear(DnsSection section) {
return (DnsResponse) super.clear(section);
}
@Override
public DnsResponse clear() {
return (DnsResponse) super.clear();
}
@Override
public DnsResponse touch() {
return (DnsResponse) super.touch();
}
@Override
public DnsResponse touch(Object hint) {
return (DnsResponse) super.touch(hint);
}
@Override
public DnsResponse retain() {
return (DnsResponse) super.retain();
}
@Override
public DnsResponse retain(int increment) {
return (DnsResponse) super.retain(increment);
}
@Override
public String toString() {
return DnsMessageUtil.appendResponse(new StringBuilder(128), this).toString();
}
}
| DefaultDnsResponse |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/plugins/spi/NamedXContentProvider.java | {
"start": 642,
"end": 848
} | interface ____ {
/**
* @return a list of {@link NamedXContentRegistry.Entry} that this plugin provides.
*/
List<NamedXContentRegistry.Entry> getNamedXContentParsers();
}
| NamedXContentProvider |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/ProtoTimestampGetSecondsGetNanoTest.java | {
"start": 6018,
"end": 6450
} | class ____ {
static {
long seconds = Timestamp.getDefaultInstance().getSeconds();
}
}
""")
.doTest();
}
@Test
public void getNanoOnlyInStaticBlock() {
compilationHelper
.addSourceLines(
"test/TestCase.java",
"""
package test;
import com.google.protobuf.Timestamp;
public | TestCase |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy-common/spi/src/main/java/io/quarkus/resteasy/common/spi/EndpointValidationPredicatesBuildItem.java | {
"start": 756,
"end": 1086
} | class ____ extends MultiBuildItem {
private final Predicate<ClassInfo> predicate;
public EndpointValidationPredicatesBuildItem(Predicate<ClassInfo> predicate) {
this.predicate = predicate;
}
public Predicate<ClassInfo> getPredicate() {
return predicate;
}
}
| EndpointValidationPredicatesBuildItem |
java | apache__camel | components/camel-velocity/src/test/java/org/apache/camel/component/velocity/VelocityConcurrentTest.java | {
"start": 1230,
"end": 2566
} | class ____ extends CamelTestSupport {
@Test
public void testNoConcurrentProducers() throws Exception {
doSendMessages(1, 1);
}
@Test
public void testConcurrentProducers() throws Exception {
doSendMessages(10, 5);
}
private void doSendMessages(int files, int poolSize) throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(files);
getMockEndpoint("mock:result").assertNoDuplicates(body());
getMockEndpoint("mock:result").message(0).body().contains("Bye");
ExecutorService executor = Executors.newFixedThreadPool(poolSize);
for (int i = 0; i < files; i++) {
final int index = i;
executor.submit(new Callable<Object>() {
public Object call() {
template.sendBody("direct:start", "Hello " + index);
return null;
}
});
}
MockEndpoint.assertIsSatisfied(context);
executor.shutdownNow();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:start").to("velocity:org/apache/camel/component/velocity/Concurrent.vm").to("mock:result");
}
};
}
}
| VelocityConcurrentTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/associations/UnidirectionalManyToManyRemoveTest.java | {
"start": 1910,
"end": 2335
} | class ____ {
@Id
@GeneratedValue
private Long id;
private String street;
@Column(name = "`number`")
private String number;
public Address() {
}
public Address(String street, String number) {
this.street = street;
this.number = number;
}
public Long getId() {
return id;
}
public String getStreet() {
return street;
}
public String getNumber() {
return number;
}
}
}
| Address |
java | quarkusio__quarkus | independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/InnerInvocationContext.java | {
"start": 413,
"end": 579
} | class ____ its superclasses. It doesn't proceed to other interceptors in the "outer" invocation
* chain (interceptor methods declared in other classes).
*/
abstract | and |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/RangeSetAssert_enclosesAnyRangesOf_with_RangeSet_Test.java | {
"start": 1374,
"end": 4064
} | class ____ {
@Test
void should_fail_if_actual_is_null() {
// GIVEN
RangeSet<Integer> actual = null;
RangeSet<Integer> rangeSet = ImmutableRangeSet.of(closed(0, 1));
// WHEN
var error = expectAssertionError(() -> assertThat(actual).enclosesAnyRangesOf(rangeSet));
// THEN
then(error).hasMessage(actualIsNull());
}
@Test
void should_fail_if_rangeSet_is_null() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of();
RangeSet<Integer> rangeSet = null;
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(actual).enclosesAnyRangesOf(rangeSet));
// THEN
then(thrown).isInstanceOf(NullPointerException.class)
.hasMessage(shouldNotBeNull("rangeSet").create());
}
@Test
void should_fail_if_rangeSet_is_empty() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of(closed(0, 1));
RangeSet<Integer> rangeSet = ImmutableRangeSet.of();
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(actual).enclosesAnyRangesOf(rangeSet));
// THEN
then(thrown).isInstanceOf(IllegalArgumentException.class)
.hasMessage("Expecting rangeSet not to be empty");
}
@Test
void should_fail_if_actual_does_not_enclose_rangeSet() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of(open(0, 100));
RangeSet<Integer> rangeSet = ImmutableRangeSet.<Integer> builder()
.add(closed(0, 10))
.add(open(90, 110))
.build();
// WHEN
var error = expectAssertionError(() -> assertThat(actual).enclosesAnyRangesOf(rangeSet));
// THEN
then(error).hasMessage(shouldEncloseAnyOf(actual, rangeSet).create());
}
@Test
void should_pass_if_both_actual_and_rangeSet_are_empty() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of();
RangeSet<Integer> rangeSet = ImmutableRangeSet.of();
// WHEN/THEN
assertThat(actual).enclosesAnyRangesOf(rangeSet);
}
@Test
void should_pass_if_actual_encloses_rangeSet() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of(closed(0, 100));
RangeSet<Integer> expected = ImmutableRangeSet.<Integer> builder()
.add(open(0, 10))
.add(open(50, 60))
.add(open(90, 110))
.build();
// WHEN/THEN
assertThat(actual).enclosesAnyRangesOf(expected);
}
}
| RangeSetAssert_enclosesAnyRangesOf_with_RangeSet_Test |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/config/AspectJAutoProxyBeanDefinitionParser.java | {
"start": 1392,
"end": 2797
} | class ____ implements BeanDefinitionParser {
@Override
public @Nullable BeanDefinition parse(Element element, ParserContext parserContext) {
AopNamespaceUtils.registerAspectJAnnotationAutoProxyCreatorIfNecessary(parserContext, element);
extendBeanDefinition(element, parserContext);
return null;
}
private void extendBeanDefinition(Element element, ParserContext parserContext) {
BeanDefinition beanDef =
parserContext.getRegistry().getBeanDefinition(AopConfigUtils.AUTO_PROXY_CREATOR_BEAN_NAME);
if (element.hasChildNodes()) {
addIncludePatterns(element, parserContext, beanDef);
}
}
private void addIncludePatterns(Element element, ParserContext parserContext, BeanDefinition beanDef) {
ManagedList<TypedStringValue> includePatterns = new ManagedList<>();
NodeList childNodes = element.getChildNodes();
for (int i = 0; i < childNodes.getLength(); i++) {
Node node = childNodes.item(i);
if (node instanceof Element includeElement) {
TypedStringValue valueHolder = new TypedStringValue(includeElement.getAttribute("name"));
valueHolder.setSource(parserContext.extractSource(includeElement));
includePatterns.add(valueHolder);
}
}
if (!includePatterns.isEmpty()) {
includePatterns.setSource(parserContext.extractSource(element));
beanDef.getPropertyValues().add("includePatterns", includePatterns);
}
}
}
| AspectJAutoProxyBeanDefinitionParser |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-http3/src/main/java/org/apache/dubbo/remoting/http3/netty4/Http3ChannelAddressAccessor.java | {
"start": 1233,
"end": 2203
} | class ____ implements ChannelAddressAccessor {
@Override
public String getProtocol() {
return "UDP";
}
@Override
public InetSocketAddress getRemoteAddress(Channel channel) {
if (channel instanceof QuicStreamChannel) {
return (InetSocketAddress) ((QuicStreamChannel) channel).parent().remoteSocketAddress();
}
if (channel instanceof QuicChannel) {
return (InetSocketAddress) ((QuicChannel) channel).remoteSocketAddress();
}
return null;
}
@Override
public InetSocketAddress getLocalAddress(Channel channel) {
if (channel instanceof QuicStreamChannel) {
return (InetSocketAddress) ((QuicStreamChannel) channel).parent().localSocketAddress();
}
if (channel instanceof QuicChannel) {
return (InetSocketAddress) ((QuicChannel) channel).localSocketAddress();
}
return null;
}
}
| Http3ChannelAddressAccessor |
java | mapstruct__mapstruct | core/src/main/java/org/mapstruct/EnumMapping.java | {
"start": 4422,
"end": 5149
} | enum ____ doing name based mapping.
*
* @return the configuration to use
*/
String configuration() default "";
/**
* Exception that should be thrown by the generated code if no mapping matches.
* If no exception is configured, the exception given via {@link MapperConfig#unexpectedValueMappingException()} or
* {@link Mapper#unexpectedValueMappingException()} will be used, using {@link IllegalArgumentException} by default.
*
* <p>
* Note:
* <ul>
* <li>
* The defined exception should at least have a constructor with a {@link String} parameter.
* </li>
* <li>
* If the defined exception is a checked exception then the | when |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/converter/MappingJackson2MessageConverter.java | {
"start": 7203,
"end": 11773
} | class ____ Jackson tested for (de-)serializability
* @param cause the Jackson-thrown exception to evaluate
* (typically a {@link JsonMappingException})
* @since 4.3
*/
protected void logWarningIfNecessary(Type type, @Nullable Throwable cause) {
if (cause == null) {
return;
}
// Do not log warning for serializer not found (note: different message wording on Jackson 2.9)
boolean debugLevel = (cause instanceof JsonMappingException && cause.getMessage() != null &&
cause.getMessage().startsWith("Cannot find"));
if (debugLevel ? logger.isDebugEnabled() : logger.isWarnEnabled()) {
String msg = "Failed to evaluate Jackson " + (type instanceof JavaType ? "de" : "") +
"serialization for type [" + type + "]";
if (debugLevel) {
logger.debug(msg, cause);
}
else if (logger.isDebugEnabled()) {
logger.warn(msg, cause);
}
else {
logger.warn(msg + ": " + cause);
}
}
}
@Override
protected boolean supports(Class<?> clazz) {
// should not be called, since we override canConvertFrom/canConvertTo instead
throw new UnsupportedOperationException();
}
@Override
protected @Nullable Object convertFromInternal(Message<?> message, Class<?> targetClass, @Nullable Object conversionHint) {
JavaType javaType = this.objectMapper.constructType(getResolvedType(targetClass, conversionHint));
Object payload = message.getPayload();
Class<?> view = getSerializationView(conversionHint);
try {
if (ClassUtils.isAssignableValue(targetClass, payload)) {
return payload;
}
else if (payload instanceof byte[] bytes) {
if (view != null) {
return this.objectMapper.readerWithView(view).forType(javaType).readValue(bytes);
}
else {
return this.objectMapper.readValue(bytes, javaType);
}
}
else {
// Assuming a text-based source payload
if (view != null) {
return this.objectMapper.readerWithView(view).forType(javaType).readValue(payload.toString());
}
else {
return this.objectMapper.readValue(payload.toString(), javaType);
}
}
}
catch (IOException ex) {
throw new MessageConversionException(message, "Could not read JSON: " + ex.getMessage(), ex);
}
}
@Override
protected @Nullable Object convertToInternal(Object payload, @Nullable MessageHeaders headers,
@Nullable Object conversionHint) {
try {
Class<?> view = getSerializationView(conversionHint);
if (byte[].class == getSerializedPayloadClass()) {
ByteArrayOutputStream out = new ByteArrayOutputStream(1024);
JsonEncoding encoding = getJsonEncoding(getMimeType(headers));
try (JsonGenerator generator = this.objectMapper.getFactory().createGenerator(out, encoding)) {
if (view != null) {
this.objectMapper.writerWithView(view).writeValue(generator, payload);
}
else {
this.objectMapper.writeValue(generator, payload);
}
payload = out.toByteArray();
}
}
else {
// Assuming a text-based target payload
Writer writer = new StringWriter(1024);
if (view != null) {
this.objectMapper.writerWithView(view).writeValue(writer, payload);
}
else {
this.objectMapper.writeValue(writer, payload);
}
payload = writer.toString();
}
}
catch (IOException ex) {
throw new MessageConversionException("Could not write JSON: " + ex.getMessage(), ex);
}
return payload;
}
/**
* Determine a Jackson serialization view based on the given conversion hint.
* @param conversionHint the conversion hint Object as passed into the
* converter for the current conversion attempt
* @return the serialization view class, or {@code null} if none
* @since 4.2
*/
protected @Nullable Class<?> getSerializationView(@Nullable Object conversionHint) {
if (conversionHint instanceof MethodParameter param) {
JsonView annotation = (param.getParameterIndex() >= 0 ?
param.getParameterAnnotation(JsonView.class) : param.getMethodAnnotation(JsonView.class));
if (annotation != null) {
return extractViewClass(annotation, conversionHint);
}
}
else if (conversionHint instanceof JsonView jsonView) {
return extractViewClass(jsonView, conversionHint);
}
else if (conversionHint instanceof Class<?> clazz) {
return clazz;
}
// No JSON view specified...
return null;
}
private Class<?> extractViewClass(JsonView annotation, Object conversionHint) {
Class<?>[] classes = annotation.value();
if (classes.length != 1) {
throw new IllegalArgumentException(
"@JsonView only supported for handler methods with exactly 1 | that |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/client/reactive/JettyResourceFactory.java | {
"start": 1739,
"end": 4789
} | class ____ implements InitializingBean, DisposableBean {
private @Nullable Executor executor;
private @Nullable ByteBufferPool byteBufferPool;
private @Nullable Scheduler scheduler;
private String threadPrefix = "jetty-http";
/**
* Configure the {@link Executor} to use.
* <p>By default, initialized with a {@link QueuedThreadPool}.
* @param executor the executor to use
*/
public void setExecutor(@Nullable Executor executor) {
this.executor = executor;
}
/**
* Configure the {@link ByteBufferPool} to use.
* <p>By default, initialized with a {@link ArrayByteBufferPool}.
* @param byteBufferPool the {@link ByteBuffer} pool to use
*/
public void setByteBufferPool(@Nullable ByteBufferPool byteBufferPool) {
this.byteBufferPool = byteBufferPool;
}
/**
* Configure the {@link Scheduler} to use.
* <p>By default, initialized with a {@link ScheduledExecutorScheduler}.
* @param scheduler the {@link Scheduler} to use
*/
public void setScheduler(@Nullable Scheduler scheduler) {
this.scheduler = scheduler;
}
/**
* Configure the thread prefix to initialize {@link QueuedThreadPool} executor with. This
* is used only when a {@link Executor} instance isn't
* {@link #setExecutor(Executor) provided}.
* <p>By default set to "jetty-http".
* @param threadPrefix the thread prefix to use
*/
public void setThreadPrefix(String threadPrefix) {
Assert.notNull(threadPrefix, "Thread prefix is required");
this.threadPrefix = threadPrefix;
}
/**
* Return the configured {@link Executor}.
*/
public @Nullable Executor getExecutor() {
return this.executor;
}
/**
* Return the configured {@link ByteBufferPool}.
*/
public @Nullable ByteBufferPool getByteBufferPool() {
return this.byteBufferPool;
}
/**
* Return the configured {@link Scheduler}.
*/
public @Nullable Scheduler getScheduler() {
return this.scheduler;
}
@Override
public void afterPropertiesSet() throws Exception {
String name = this.threadPrefix + "@" + Integer.toHexString(hashCode());
if (this.executor == null) {
QueuedThreadPool threadPool = new QueuedThreadPool();
threadPool.setName(name);
this.executor = threadPool;
}
if (this.byteBufferPool == null) {
this.byteBufferPool = new ArrayByteBufferPool(0, 2048, 65536, // from HttpClient:202
this.executor instanceof ThreadPool.SizedThreadPool sizedThreadPool ?
sizedThreadPool.getMaxThreads() / 2 :
ProcessorUtils.availableProcessors() * 2);
}
if (this.scheduler == null) {
this.scheduler = new ScheduledExecutorScheduler(name + "-scheduler", false);
}
if (this.executor instanceof LifeCycle lifeCycle) {
lifeCycle.start();
}
this.scheduler.start();
}
@Override
public void destroy() throws Exception {
try {
if (this.executor instanceof LifeCycle lifeCycle) {
lifeCycle.stop();
}
}
catch (Throwable ex) {
// ignore
}
try {
if (this.scheduler != null) {
this.scheduler.stop();
}
}
catch (Throwable ex) {
// ignore
}
}
}
| JettyResourceFactory |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.