language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__kafka
|
connect/runtime/src/main/java/org/apache/kafka/connect/runtime/distributed/DistributedHerder.java
|
{
"start": 147980,
"end": 153945
}
|
class ____ {
private final String connName;
private final int tasksToFence;
private final int tasksToRecord;
private final int taskGen;
private final FutureCallback<Void> fencingFollowup;
private KafkaFuture<Void> fencingFuture;
public ZombieFencing(String connName, int tasksToFence, int tasksToRecord, int taskGen) {
this.connName = connName;
this.tasksToFence = tasksToFence;
this.tasksToRecord = tasksToRecord;
this.taskGen = taskGen;
this.fencingFollowup = new FutureCallback<>();
}
/**
* Start sending requests to the Kafka cluster to fence zombies. In rare cases, may cause blocking calls to
* take place before returning, so care should be taken to ensure that this method is not invoked while holding
* any important locks (e.g., while synchronized on the surrounding DistributedHerder instance).
* This method must be invoked before any {@link #addCallback(Callback) callbacks can be added},
* and may only be invoked once.
* @throws IllegalStateException if invoked multiple times
*/
public void start() {
if (fencingFuture != null) {
throw new IllegalStateException("Cannot invoke start() multiple times");
}
String stageDescription = "initiating a round of zombie fencing for connector " + connName;
try (TickThreadStage stage = new TickThreadStage(stageDescription)) {
fencingFuture = worker.fenceZombies(connName, tasksToFence, configState.connectorConfig(connName)).thenApply(ignored -> {
// This callback will be called on the same thread that invokes KafkaFuture::thenApply if
// the future is already completed. Since that thread is the herder tick thread, we don't need
// to perform follow-up logic through an additional herder request (and if we tried, it would lead
// to deadlock)
runOnTickThread(
this::onZombieFencingSuccess,
fencingFollowup
);
awaitFollowup();
return null;
});
}
// Immediately after the fencing and necessary followup work (i.e., writing the task count record to the config topic)
// is complete, remove this from the list of active fencings
addCallback((ignored, error) -> {
synchronized (DistributedHerder.this) {
activeZombieFencings.remove(connName);
}
});
}
// Invoked after the worker has successfully fenced out the producers of old task generations using an admin client
// Note that work here will be performed on the herder's tick thread, so it should not block for very long
private Void onZombieFencingSuccess() {
if (!refreshConfigSnapshot(workerSyncTimeoutMs)) {
throw new ConnectException("Failed to read to end of config topic");
}
if (taskGen < configState.taskConfigGeneration(connName)) {
throw new ConnectRestException(
Response.Status.CONFLICT.getStatusCode(),
"Fencing failed because new task configurations were generated for the connector");
}
// If we've already been cancelled, skip the write to the config topic
if (fencingFollowup.isDone()) {
return null;
}
writeToConfigTopicAsLeader(
"writing a task count record for connector " + connName + " to the config topic",
() -> configBackingStore.putTaskCountRecord(connName, tasksToRecord)
);
return null;
}
private void awaitFollowup() {
try {
fencingFollowup.get();
} catch (InterruptedException e) {
throw new ConnectException("Interrupted while performing zombie fencing", e);
} catch (ExecutionException e) {
Throwable cause = e.getCause();
throw ConnectUtils.maybeWrap(cause, "Failed to perform round of zombie fencing");
}
}
/**
* Fail the fencing if it is still active, reporting the given exception as the cause of failure
* @param t the cause of failure to report for the failed fencing; may not be null
*/
public void completeExceptionally(Throwable t) {
Objects.requireNonNull(t);
fencingFollowup.onCompletion(t, null);
}
/**
* Add a callback to invoke after the fencing has succeeded and a record of it has been written to the config topic
* Note that this fencing must be {@link #start() started} before this method is invoked
* @param callback the callback to report the success or failure of the fencing to
* @throws IllegalStateException if this method is invoked before {@link #start()}
*/
public void addCallback(Callback<Void> callback) {
if (fencingFuture == null) {
throw new IllegalStateException("The start() method must be invoked before adding callbacks for this zombie fencing");
}
fencingFuture.whenComplete((ignored, error) -> {
if (error != null) {
callback.onCompletion(
ConnectUtils.maybeWrap(error, "Failed to perform zombie fencing"),
null
);
} else {
callback.onCompletion(null, null);
}
});
}
}
/**
* A collection of cluster rebalance related metrics.
*/
|
ZombieFencing
|
java
|
quarkusio__quarkus
|
independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/ClientProxyGenerator.java
|
{
"start": 3472,
"end": 4032
}
|
class ____
* @param bytecodeTransformerConsumer
* @param transformUnproxyableClasses whether or not unproxyable classes should be transformed
* @return a collection of resources
*/
Collection<Resource> generate(BeanInfo bean, String beanClassName,
Consumer<BytecodeTransformer> bytecodeTransformerConsumer, boolean transformUnproxyableClasses) {
// see `BeanGenerator` -- if this bean is unproxyable and that error is deferred to runtime,
// we don't need to (and cannot, in fact) generate the client proxy
|
name
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/rx/PublisherAdder.java
|
{
"start": 1175,
"end": 3229
}
|
class ____<V> {
public abstract RFuture<Boolean> add(Object o);
public Single<Boolean> addAll(Publisher<? extends V> c) {
final Flowable<? extends V> cc = Flowable.fromPublisher(c);
final ReplayProcessor<Boolean> p = ReplayProcessor.create();
return p.doOnRequest(new LongConsumer() {
@Override
public void accept(long t) throws Exception {
final AtomicBoolean completed = new AtomicBoolean();
final AtomicLong values = new AtomicLong();
final AtomicBoolean lastSize = new AtomicBoolean();
cc.subscribe(new Consumer<V>() {
@Override
public void accept(V t) throws Exception {
values.getAndIncrement();
add(t).whenComplete((res, e) -> {
if (e != null) {
p.onError(e);
return;
}
if (res) {
lastSize.set(true);
}
if (values.decrementAndGet() == 0 && completed.get()) {
p.onNext(lastSize.get());
p.onComplete();
}
});
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable t) throws Exception {
p.onError(t);
}
}, new Action() {
@Override
public void run() throws Exception {
completed.set(true);
if (values.get() == 0) {
p.onNext(lastSize.get());
p.onComplete();
}
}
});
}
}).singleOrError();
}
}
|
PublisherAdder
|
java
|
spring-projects__spring-framework
|
spring-context/src/testFixtures/java/org/springframework/context/testfixture/context/annotation/PrivateFieldResourceSample.java
|
{
"start": 739,
"end": 810
}
|
class ____ {
@Resource
private String one;
}
|
PrivateFieldResourceSample
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/jndi/JndiObjectFactoryBean.java
|
{
"start": 3114,
"end": 3666
}
|
class ____ extends JndiObjectLocator
implements FactoryBean<Object>, BeanFactoryAware, BeanClassLoaderAware {
private Class<?> @Nullable [] proxyInterfaces;
private boolean lookupOnStartup = true;
private boolean cache = true;
private boolean exposeAccessContext = false;
private @Nullable Object defaultObject;
private @Nullable ConfigurableBeanFactory beanFactory;
private @Nullable ClassLoader beanClassLoader = ClassUtils.getDefaultClassLoader();
private @Nullable Object jndiObject;
/**
* Specify the proxy
|
JndiObjectFactoryBean
|
java
|
square__retrofit
|
retrofit-adapters/rxjava2/src/test/java/retrofit2/adapter/rxjava2/SingleWithSchedulerTest.java
|
{
"start": 1165,
"end": 2684
}
|
interface ____ {
@GET("/")
Single<String> body();
@GET("/")
Single<Response<String>> response();
@GET("/")
Single<Result<String>> result();
}
private final TestScheduler scheduler = new TestScheduler();
private Service service;
@Before
public void setUp() {
Retrofit retrofit =
new Retrofit.Builder()
.baseUrl(server.url("/"))
.addConverterFactory(new StringConverterFactory())
.addCallAdapterFactory(RxJava2CallAdapterFactory.createWithScheduler(scheduler))
.build();
service = retrofit.create(Service.class);
}
@Test
public void bodyUsesScheduler() {
server.enqueue(new MockResponse());
RecordingSingleObserver<Object> observer = observerRule.create();
service.body().subscribe(observer);
observer.assertNoEvents();
scheduler.triggerActions();
observer.assertAnyValue();
}
@Test
public void responseUsesScheduler() {
server.enqueue(new MockResponse());
RecordingSingleObserver<Object> observer = observerRule.create();
service.response().subscribe(observer);
observer.assertNoEvents();
scheduler.triggerActions();
observer.assertAnyValue();
}
@Test
public void resultUsesScheduler() {
server.enqueue(new MockResponse());
RecordingSingleObserver<Object> observer = observerRule.create();
service.result().subscribe(observer);
observer.assertNoEvents();
scheduler.triggerActions();
observer.assertAnyValue();
}
}
|
Service
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/query/AssociationToManyJoinQueryTest.java
|
{
"start": 3783,
"end": 4113
}
|
class ____ {
@Id
private Long id;
private String name;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Entity(name = "EntityC")
@Audited
public static
|
EntityB
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/TreeWalk.java
|
{
"start": 1192,
"end": 1640
}
|
class ____ implements Iterable<TreePath> {
/**
* @param path path to the node being explored.
* @param id the id of the node.
* @param iterator the {@link TreeIterator} to use.
* @return paths representing the children of the current node.
*/
protected abstract Iterable<TreePath> getChildren(
TreePath path, long id, TreeWalk.TreeIterator iterator);
public abstract TreeIterator iterator();
/**
* Enumerator
|
TreeWalk
|
java
|
alibaba__nacos
|
console/src/test/java/com/alibaba/nacos/console/handler/impl/remote/core/ClusterRemoteHandlerTest.java
|
{
"start": 1159,
"end": 1860
}
|
class ____ extends AbstractRemoteHandlerTest {
ClusterRemoteHandler clusterRemoteHandler;
@BeforeEach
void setUp() {
super.setUpWithNaming();
clusterRemoteHandler = new ClusterRemoteHandler(clientHolder);
}
@Test
void getNodeList() throws NacosException {
Collection<NacosMember> mockList = new LinkedList<>();
mockList.add(new NacosMember());
when(namingMaintainerService.listClusterNodes(StringUtils.EMPTY, StringUtils.EMPTY)).thenReturn(mockList);
Collection<? extends NacosMember> actual = clusterRemoteHandler.getNodeList(StringUtils.EMPTY);
assertEquals(mockList, actual);
}
}
|
ClusterRemoteHandlerTest
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/proxy/jdbc/JdbcParameterImpl.java
|
{
"start": 696,
"end": 2019
}
|
class ____ implements JdbcParameter {
private final int sqlType;
private final Object value;
private final long length;
private final Calendar calendar;
private final int scaleOrLength;
public JdbcParameterImpl(int sqlType, Object value, long length, Calendar calendar, int scaleOrLength) {
this.sqlType = sqlType;
this.value = value;
this.length = length;
this.calendar = calendar;
this.scaleOrLength = scaleOrLength;
}
public JdbcParameterImpl(int sqlType, Object value, long length, Calendar calendar) {
this(sqlType, value, -1, null, -1);
}
public JdbcParameterImpl(int sqlType, Object value) {
this(sqlType, value, -1, null);
}
public JdbcParameterImpl(int sqlType, Object value, long length) {
this(sqlType, value, length, null);
}
public JdbcParameterImpl(int sqlType, Object value, Calendar calendar) {
this(sqlType, value, -1, calendar);
}
public int getScaleOrLength() {
return scaleOrLength;
}
public Object getValue() {
return value;
}
public long getLength() {
return length;
}
public Calendar getCalendar() {
return calendar;
}
public int getSqlType() {
return sqlType;
}
}
|
JdbcParameterImpl
|
java
|
grpc__grpc-java
|
netty/src/main/java/io/grpc/netty/FixedKeyManagerFactory.java
|
{
"start": 977,
"end": 1239
}
|
class ____ extends KeyManagerFactory {
public FixedKeyManagerFactory(List<KeyManager> keyManagers) {
super(new FixedKeyManagerFactorySpi(keyManagers), new UnhelpfulSecurityProvider(),
"FakeAlgorithm");
}
private static final
|
FixedKeyManagerFactory
|
java
|
google__auto
|
common/src/test/java/com/google/auto/common/AnnotationMirrorsTest.java
|
{
"start": 2396,
"end": 2444
}
|
class ____ {}
@Outer(FOO)
static
|
TestClassBlah2
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/long_/LongAssert_isEqualTo_long_Test.java
|
{
"start": 881,
"end": 1189
}
|
class ____ extends LongAssertBaseTest {
@Override
protected LongAssert invoke_api_method() {
return assertions.isEqualTo(8L);
}
@Override
protected void verify_internal_effects() {
verify(longs).assertEqual(getInfo(assertions), getActual(assertions), 8L);
}
}
|
LongAssert_isEqualTo_long_Test
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/PlanEdit.java
|
{
"start": 1169,
"end": 3356
}
|
interface ____ extends PlanContext, PlanView {
/**
* Add a new {@link ReservationAllocation} to the plan.
*
* @param reservation the {@link ReservationAllocation} to be added to the
* plan
* @param isRecovering flag to indicate if reservation is being added as part
* of failover or not
* @return true if addition is successful, false otherwise
* @throws PlanningException if addition is unsuccessful
*/
boolean addReservation(ReservationAllocation reservation,
boolean isRecovering) throws PlanningException;
/**
* Updates an existing {@link ReservationAllocation} in the plan. This is
* required for re-negotiation.
*
* @param reservation the {@link ReservationAllocation} to be updated the plan
* @return true if update is successful, false otherwise
* @throws PlanningException if update is unsuccessful
*/
boolean updateReservation(ReservationAllocation reservation)
throws PlanningException;
/**
* Delete an existing {@link ReservationAllocation} from the plan identified
* uniquely by its {@link ReservationId}. This will generally be used for
* garbage collection.
*
* @param reservationID the {@link ReservationAllocation} to be deleted from
* the plan identified uniquely by its {@link ReservationId}
* @return true if delete is successful, false otherwise
* @throws PlanningException if deletion is unsuccessful
*/
boolean deleteReservation(ReservationId reservationID)
throws PlanningException;
/**
* Method invoked to garbage collect old reservations. It cleans up expired
* reservations that have fallen out of the sliding archival window.
*
* @param tick the current time from which the archival window is computed
* @throws PlanningException if archival is unsuccessful
*/
void archiveCompletedReservations(long tick) throws PlanningException;
/**
* Sets the overall capacity in terms of {@link Resource} assigned to this
* plan.
*
* @param capacity the overall capacity in terms of {@link Resource} assigned
* to this plan
*/
void setTotalCapacity(Resource capacity);
}
|
PlanEdit
|
java
|
apache__dubbo
|
dubbo-registry/dubbo-registry-api/src/main/java/org/apache/dubbo/registry/client/migration/DefaultMigrationAddressComparator.java
|
{
"start": 4455,
"end": 5430
}
|
interface ____ size " + oldAddressSize + ", threshold " + threshold);
if (newAddressSize != 0 && oldAddressSize == 0) {
return true;
}
if (newAddressSize == 0 && oldAddressSize == 0) {
return false;
}
return ((float) newAddressSize / (float) oldAddressSize) >= threshold;
}
private <T> int getAddressSize(ClusterInvoker<T> invoker) {
if (invoker == null) {
return -1;
}
List<Invoker<T>> invokers = invoker.getDirectory().getAllInvokers();
return CollectionUtils.isNotEmpty(invokers) ? invokers.size() : 0;
}
@Override
public Map<String, Integer> getAddressSize(String displayServiceKey) {
return serviceMigrationData.get(displayServiceKey);
}
private String getInvokerType(ClusterInvoker<?> invoker) {
if (invoker.isServiceDiscovery()) {
return "instance";
}
return "interface";
}
}
|
address
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowResponse.java
|
{
"start": 1773,
"end": 1865
}
|
class ____ implements ChunkedRestResponseBodyPart, Releasable {
public static
|
ArrowResponse
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/oracle/ast/OracleDataTypeIntervalYear.java
|
{
"start": 847,
"end": 1519
}
|
class ____ extends SQLDataTypeImpl implements OracleSQLObject {
public OracleDataTypeIntervalYear() {
this.setName("INTERVAL YEAR");
}
@Override
protected void accept0(SQLASTVisitor visitor) {
this.accept0((OracleASTVisitor) visitor);
}
@Override
public void accept0(OracleASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, getArguments());
}
visitor.endVisit(this);
}
public OracleDataTypeIntervalYear clone() {
OracleDataTypeIntervalYear x = new OracleDataTypeIntervalYear();
super.cloneTo(x);
return x;
}
}
|
OracleDataTypeIntervalYear
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/deser/CollectingProblemHandler.java
|
{
"start": 1019,
"end": 3607
}
|
class ____ internal infrastructure, registered automatically by
* {@code ObjectReader.problemCollectingReader()}. Users should not instantiate or
* register this handler manually.
*
* <p><b>Design rationale - Context Attributes vs Handler State</b>:
*
* <p>Problem collection state is stored in {@link DeserializationContext} attributes
* rather than within this handler for several reasons:
*
* <ol>
* <li><b>Thread-safety</b>: The handler instance is shared across all calls to the
* same ObjectReader. Storing mutable state in the handler would require
* synchronization and complicate the implementation.</li>
*
* <li><b>Call isolation</b>: Each call to {@code readValueCollectingProblems()} needs
* its own problem bucket. Context attributes are perfect for this - they're created
* per-call and automatically cleaned up after deserialization.</li>
*
* <li><b>Immutability</b>: Jackson's config objects (including handlers) are designed
* to be immutable and reusable. Storing per-call state violates this principle.</li>
*
* <li><b>Configuration vs State</b>: The handler stores configuration (max problems
* limit) while attributes store runtime state (the actual problem list). This
* separation follows Jackson's design patterns.</li>
* </ol>
*
* <p>The handler itself is stateless - it's just a strategy for handling problems.
* The actual collection happens in a bucket passed through context attributes.
*
* <p><b>Recoverable errors handled</b>:
* <ul>
* <li>Unknown properties ({@link #handleUnknownProperty handleUnknownProperty}) - skips children</li>
* <li>Type coercion failures ({@link #handleWeirdStringValue handleWeirdStringValue},
* {@link #handleWeirdNumberValue handleWeirdNumberValue}) - returns defaults</li>
* <li>Map key coercion ({@link #handleWeirdKey handleWeirdKey}) - returns {@code NOT_HANDLED}</li>
* <li>Instantiation failures ({@link #handleInstantiationProblem handleInstantiationProblem}) -
* returns null when safe</li>
* </ul>
*
* <p><b>Default values</b>: Primitives receive zero/false defaults; reference types
* (including boxed primitives) receive {@code null} to avoid masking nullability issues.
*
* <p><b>DoS protection</b>: Collection stops when the configured limit (default 100)
* is reached, preventing memory/CPU exhaustion attacks.
*
* <p><b>JSON Pointer</b>: Paths are built from parser context following RFC 6901,
* with proper escaping of {@code ~} and {@code /} characters via jackson-core's
* {@link JsonPointer} class.
*
* @since 3.1
*/
public
|
is
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/CompileTimeConstantCheckerTest.java
|
{
"start": 19220,
"end": 19873
}
|
interface ____ {
void f(@CompileTimeConstant String x);
}
void f(String s) {
I i = x -> {};
// BUG: Diagnostic contains: Non-compile-time constant expression passed
i.f(s);
}
}
""")
.doTest();
}
@Test
public void reportsDiagnostic_whenConstantFieldDeclaredWithoutFinal() {
compilationHelper
.addSourceLines(
"test/CompileTimeConstantTestCase.java",
"""
package test;
import com.google.errorprone.annotations.CompileTimeConstant;
public
|
I
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/overloading/InconsistentOverloadsTest.java
|
{
"start": 9850,
"end": 10067
}
|
class ____ {
void someMethod(String foo, int bar) {}
// BUG: Diagnostic contains: someMethod(String foo, int bar, List<String> baz)
void someMethod(int bar, String foo, List<String> baz) {}
}
|
SuperClass
|
java
|
netty__netty
|
codec-http3/src/main/java/io/netty/handler/codec/http3/Http3ErrorCode.java
|
{
"start": 800,
"end": 3819
}
|
enum ____ {
/**
* No error. This is used when the connection or stream needs to be closed, but there is no error to signal.
*/
H3_NO_ERROR(0x100),
/**
* Peer violated protocol requirements in a way that does not match a more specific error code,
* or endpoint declines to use the more specific error code.
*/
H3_GENERAL_PROTOCOL_ERROR(0x101),
/**
* An internal error has occurred in the HTTP stack.
*/
H3_INTERNAL_ERROR(0x102),
/**
* The endpoint detected that its peer created a stream that it will not accept.
*/
H3_STREAM_CREATION_ERROR(0x103),
/**
* A stream required by the HTTP/3 connection was closed or reset.
*/
H3_CLOSED_CRITICAL_STREAM(0x104),
/**
* A frame was received that was not permitted in the current state or on the current stream.
*/
H3_FRAME_UNEXPECTED(0x105),
/**
* A frame that fails to satisfy layout requirements or with an invalid size was received.
*/
H3_FRAME_ERROR(0x106),
/**
* The endpoint detected that its peer is exhibiting a behavior that might be generating excessive load.
*/
H3_EXCESSIVE_LOAD(0x107),
/**
* A Stream ID or Push ID was used incorrectly, such as exceeding a limit, reducing a limit, or being reused.
*/
H3_ID_ERROR(0x108),
/**
* An endpoint detected an error in the payload of a SETTINGS frame.
*/
H3_SETTINGS_ERROR(0x109),
/**
* No SETTINGS frame was received at the beginning of the control stream.
*/
H3_MISSING_SETTINGS(0x10a),
/**
* A server rejected a request without performing any application processing.
*/
H3_REQUEST_REJECTED(0x10b),
/**
* The request or its response (including pushed response) is cancelled.
*/
H3_REQUEST_CANCELLED(0x10c),
/**
* The client's stream terminated without containing a fully-formed request.
*/
H3_REQUEST_INCOMPLETE(0x10d),
/**
* An HTTP message was malformed and cannot be processed.
*/
H3_MESSAGE_ERROR(0x10e),
/**
* The TCP connection established in response to a CONNECT request was reset or abnormally closed.
*/
H3_CONNECT_ERROR(0x10f),
/**
* The requested operation cannot be served over HTTP/3. The peer should retry over HTTP/1.1.
*/
H3_VERSION_FALLBACK(0x110),
/**
* The decoder failed to interpret an encoded field section and is not able to continue decoding that field section.
*/
QPACK_DECOMPRESSION_FAILED(0x200),
/**
* The decoder failed to interpret an encoder instruction received on the encoder stream.
*/
QPACK_ENCODER_STREAM_ERROR(0x201),
/**
* The encoder failed to interpret a decoder instruction received on the decoder stream.
*/
QPACK_DECODER_STREAM_ERROR(0x202);
final int code;
Http3ErrorCode(int code) {
this.code = code;
}
public int code() {
return code;
}
}
|
Http3ErrorCode
|
java
|
quarkusio__quarkus
|
extensions/oidc/deployment/src/test/java/io/quarkus/oidc/test/CodeFlowManagementInterfaceDevModeTest.java
|
{
"start": 935,
"end": 3219
}
|
class ____ {
@RegisterExtension
static final QuarkusDevModeTest test = new QuarkusDevModeTest()
.withApplicationRoot((jar) -> jar
.addClasses(CodeFlowManagementRoute.class)
.addAsResource(
new StringAsset("""
quarkus.management.enabled=true
quarkus.management.auth.enabled=true
quarkus.oidc.auth-server-url=${keycloak.url}/realms/quarkus
quarkus.oidc.client-id=quarkus-web-app
quarkus.oidc.credentials.secret=secret
quarkus.oidc.application-type=web-app
quarkus.management.auth.permission.code-flow.paths=/code-flow
quarkus.management.auth.permission.code-flow.policy=authenticated
quarkus.management.auth.permission.code-flow.auth-mechanism=code
quarkus.log.category."org.htmlunit".level=ERROR
quarkus.log.file.enabled=true
"""),
"application.properties"));
@Test
public void testAuthenticatedHttpPermission() throws IOException {
try (final WebClient webClient = createWebClient()) {
HtmlPage page = webClient.getPage("http://localhost:9000/code-flow");
assertEquals("Sign in to quarkus", page.getTitleText());
HtmlForm loginForm = page.getForms().get(0);
loginForm.getInputByName("username").setValueAttribute("alice");
loginForm.getInputByName("password").setValueAttribute("alice");
TextPage textPage = loginForm.getButtonByName("login").click();
assertEquals("alice", textPage.getContent());
webClient.getCookieManager().clearCookies();
}
}
private WebClient createWebClient() {
WebClient webClient = new WebClient();
webClient.setCssErrorHandler(new SilentCssErrorHandler());
return webClient;
}
@Singleton
public static
|
CodeFlowManagementInterfaceDevModeTest
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/vetoed/ParentOfVetoedBean.java
|
{
"start": 130,
"end": 281
}
|
class ____ extends VetoedSuperclassBean {
public ParentOfVetoedBean(BeanContext beanContext) {
super(beanContext);
}
}
|
ParentOfVetoedBean
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt5Evaluator.java
|
{
"start": 1085,
"end": 4295
}
|
class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToInt5Evaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator field;
private final int p0;
private final int p1;
private final int p2;
private final int p3;
private final int p4;
private final DriverContext driverContext;
private Warnings warnings;
public RoundToInt5Evaluator(Source source, EvalOperator.ExpressionEvaluator field, int p0, int p1,
int p2, int p3, int p4, DriverContext driverContext) {
this.source = source;
this.field = field;
this.p0 = p0;
this.p1 = p1;
this.p2 = p2;
this.p3 = p3;
this.p4 = p4;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (IntBlock fieldBlock = (IntBlock) field.eval(page)) {
IntVector fieldVector = fieldBlock.asVector();
if (fieldVector == null) {
return eval(page.getPositionCount(), fieldBlock);
}
return eval(page.getPositionCount(), fieldVector).asBlock();
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += field.baseRamBytesUsed();
return baseRamBytesUsed;
}
public IntBlock eval(int positionCount, IntBlock fieldBlock) {
try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
switch (fieldBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
int field = fieldBlock.getInt(fieldBlock.getFirstValueIndex(p));
result.appendInt(RoundToInt.process(field, this.p0, this.p1, this.p2, this.p3, this.p4));
}
return result.build();
}
}
public IntVector eval(int positionCount, IntVector fieldVector) {
try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
int field = fieldVector.getInt(p);
result.appendInt(p, RoundToInt.process(field, this.p0, this.p1, this.p2, this.p3, this.p4));
}
return result.build();
}
}
@Override
public String toString() {
return "RoundToInt5Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(field);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static
|
RoundToInt5Evaluator
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/TaskExecutorPartitionTracker.java
|
{
"start": 1121,
"end": 2936
}
|
interface ____
extends PartitionTracker<JobID, TaskExecutorPartitionInfo> {
/**
* Starts the tracking of the given partition for the given job.
*
* @param producingJobId ID of job by which the partition is produced
* @param partitionInfo information about the partition
*/
void startTrackingPartition(JobID producingJobId, TaskExecutorPartitionInfo partitionInfo);
/** Releases the given partitions and stop the tracking of partitions that were released. */
void stopTrackingAndReleaseJobPartitions(Collection<ResultPartitionID> resultPartitionIds);
/**
* Releases all partitions for the given job and stop the tracking of partitions that were
* released.
*/
void stopTrackingAndReleaseJobPartitionsFor(JobID producingJobId);
/**
* Get all partitions tracked for the given job.
*
* @param producingJobId the job id
* @return the tracked partitions
*/
Collection<TaskExecutorPartitionInfo> getTrackedPartitionsFor(JobID producingJobId);
/** Promotes the given partitions. */
void promoteJobPartitions(Collection<ResultPartitionID> partitionsToPromote);
/**
* Releases partitions associated with the given datasets and stops tracking of partitions that
* were released.
*
* @param dataSetsToRelease data sets to release
*/
void stopTrackingAndReleaseClusterPartitions(
Collection<IntermediateDataSetID> dataSetsToRelease);
/** Releases and stops tracking all partitions. */
void stopTrackingAndReleaseAllClusterPartitions();
/**
* Creates a {@link ClusterPartitionReport}, describing which cluster partitions are currently
* available.
*/
ClusterPartitionReport createClusterPartitionReport();
}
|
TaskExecutorPartitionTracker
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
|
{
"start": 25114,
"end": 36784
}
|
class ____ implements Runnable {
  // Periodically samples local scratch-directory usage and, when the task has
  // written more than the configured single-disk byte limit, records a failure
  // reason (or just warns) and stops checking.
  private LocalFileSystem localFS;
  private long fsLimit;
  private long checkInterval;
  private String[] localDirs;
  private boolean killOnLimitExceeded;

  public DiskLimitCheck(JobConf conf) throws IOException {
    localFS = FileSystem.getLocal(conf);
    fsLimit = conf.getLong(MRJobConfig.JOB_SINGLE_DISK_LIMIT_BYTES,
        MRJobConfig.DEFAULT_JOB_SINGLE_DISK_LIMIT_BYTES);
    localDirs = conf.getLocalDirs();
    checkInterval = conf.getLong(
        MRJobConfig.JOB_SINGLE_DISK_LIMIT_CHECK_INTERVAL_MS,
        MRJobConfig.DEFAULT_JOB_SINGLE_DISK_LIMIT_CHECK_INTERVAL_MS);
    killOnLimitExceeded = conf.getBoolean(
        MRJobConfig.JOB_SINGLE_DISK_LIMIT_KILL_LIMIT_EXCEED,
        MRJobConfig.DEFAULT_JOB_SINGLE_DISK_LIMIT_KILL_LIMIT_EXCEED);
  }

  @Override
  public void run() {
    while (!taskDone.get()) {
      try {
        // Find the largest local dir; the limit applies to the biggest one.
        long largestSize = 0L;
        String largestDir = null;
        for (String dir : localDirs) {
          long used = FileUtil.getDU(localFS.pathToFile(new Path(dir)));
          if (used > largestSize) {
            largestSize = used;
            largestDir = dir;
          }
        }
        if (largestSize > fsLimit) {
          String localStatus =
              "too much data in local scratch dir="
                  + largestDir
                  + ". current size is "
                  + largestSize
                  + " the limit is " + fsLimit;
          if (killOnLimitExceeded) {
            LOG.error(localStatus);
            // The communication thread picks this up and fails the task.
            diskLimitCheckStatus = localStatus;
          } else {
            LOG.warn(localStatus);
          }
          // Either way, one violation ends the monitoring loop.
          break;
        }
        Thread.sleep(checkInterval);
      } catch (Exception e) {
        LOG.error(e.getMessage(), e);
      }
    }
  }
}
/**
 * Check the task's counters against any configured limits and fail fast
 * when one has been exceeded.
 * @throws TaskLimitException if the local-filesystem write limit or the
 *         single-disk scratch-space limit has been exceeded
 */
protected void checkTaskLimits() throws TaskLimitException {
  // Enforce the cap on bytes written to the local file system, if any.
  long limit = conf.getLong(MRJobConfig.TASK_LOCAL_WRITE_LIMIT_BYTES,
      MRJobConfig.DEFAULT_TASK_LOCAL_WRITE_LIMIT_BYTES);
  if (limit >= 0) {
    Counters.Counter localWritesCounter = null;
    try {
      localWritesCounter = counters.findCounter(
          FileSystem.getLocal(conf).getScheme(),
          FileSystemCounter.BYTES_WRITTEN);
    } catch (IOException e) {
      LOG.warn("Could not get LocalFileSystem BYTES_WRITTEN counter");
    }
    if (localWritesCounter != null
        && localWritesCounter.getCounter() > limit) {
      throw new TaskLimitException("too much write to local file system." +
          " current value is " + localWritesCounter.getCounter() +
          " the limit is " + limit);
    }
  }
  // The disk-limit checker thread records a failure reason asynchronously;
  // surface it here on the next progress check.
  if (diskLimitCheckStatus != null) {
    throw new TaskLimitException(diskLimitCheckStatus);
  }
}
/**
 * The communication thread handles communication with the parent (Task
 * Tracker). It sends progress updates if progress has been made or if
 * the task needs to let the parent know that it's alive. It also pings
 * the parent to see if it's alive.
 */
public void run() {
  final int MAX_RETRIES = 3;
  int remainingRetries = MAX_RETRIES;
  // get current flag value and reset it as well
  boolean sendProgress = resetProgressFlag();
  long taskProgressInterval = MRJobConfUtil.
      getTaskProgressReportInterval(conf);
  while (!taskDone.get()) {
    // done=false marks an RPC possibly in flight; stopCommunicationThread()
    // waits for it to become true again (via resetDoneFlag) before interrupting.
    synchronized (lock) {
      done = false;
    }
    try {
      boolean taskFound = true; // whether TT knows about this task
      AMFeedback amFeedback = null;
      // sleep for a bit; stopCommunicationThread() may notify early.
      synchronized(lock) {
        if (taskDone.get()) {
          break;
        }
        lock.wait(taskProgressInterval);
      }
      if (taskDone.get()) {
        break;
      }
      if (sendProgress) {
        // we need to send progress update
        updateCounters();
        checkTaskLimits();
        taskStatus.statusUpdate(taskProgress.get(),
                                taskProgress.toString(),
                                counters);
        amFeedback = umbilical.statusUpdate(taskId, taskStatus);
        taskFound = amFeedback.getTaskFound();
        taskStatus.clearStatus();
      }
      else {
        // send ping: a null status is treated as a liveness probe only.
        amFeedback = umbilical.statusUpdate(taskId, null);
        taskFound = amFeedback.getTaskFound();
      }
      // if Task Tracker is not aware of our task ID (probably because it died and
      // came back up), kill ourselves
      if (!taskFound) {
        if (uberized) {
          taskDone.set(true);
          break;
        } else {
          LOG.warn("Parent died. Exiting "+taskId);
          resetDoneFlag();
          // exit code 66: parent lost the task; JVM must terminate.
          System.exit(66);
        }
      }
      // Set a flag that says we should preempt this is read by
      // ReduceTasks in places of the execution where it is
      // safe/easy to preempt
      boolean lastPreempt = mustPreempt.get();
      // Preemption is sticky: once requested it stays set (logical OR).
      mustPreempt.set(mustPreempt.get() || amFeedback.getPreemption());
      if (lastPreempt ^ mustPreempt.get()) {
        LOG.info("PREEMPTION TASK: setting mustPreempt to " +
            mustPreempt.get() + " given " + amFeedback.getPreemption() +
            " for "+ taskId + " task status: " +taskStatus.getPhase());
      }
      sendProgress = resetProgressFlag();
      // A successful round restores the full retry budget.
      remainingRetries = MAX_RETRIES;
    } catch (TaskLimitException e) {
      String errMsg = "Task exceeded the limits: " +
          StringUtils.stringifyException(e);
      LOG.error(errMsg);
      try {
        umbilical.fatalError(taskId, errMsg, true);
      } catch (IOException ioe) {
        LOG.error("Failed to update failure diagnosis", ioe);
      }
      LOG.error("Killing " + taskId);
      resetDoneFlag();
      // exit code 69: task killed for exceeding configured limits.
      ExitUtil.terminate(69);
    } catch (Throwable t) {
      LOG.info("Communication exception: " + StringUtils.stringifyException(t));
      remainingRetries -=1;
      if (remainingRetries == 0) {
        ReflectionUtils.logThreadInfo(LOG, "Communication exception", 0);
        LOG.warn("Last retry, killing "+taskId);
        resetDoneFlag();
        // exit code 65: repeated communication failures with the parent.
        System.exit(65);
      }
    }
  }
  //Notify that we are done with the work
  resetDoneFlag();
}
// Signals stopCommunicationThread() that the communication loop has reached a
// safe point (no umbilical RPC in flight): sets done under the lock and wakes
// any waiter. Must be called before the loop exits or the process terminates.
void resetDoneFlag() {
  synchronized (lock) {
    done = true;
    lock.notify();
  }
}
/** Lazily starts the daemon thread that reports status to the parent. */
public void startCommunicationThread() {
  if (pingThread == null) {
    SubjectInheritingThread t =
        new SubjectInheritingThread(this, "communication thread");
    t.setDaemon(true);
    pingThread = t;
    t.start();
  }
  // The disk monitor is started alongside, but only if a limit is configured.
  startDiskLimitCheckerThreadIfNeeded();
}
/**
 * Starts the disk-limit monitor thread, once, and only when a non-negative
 * single-disk byte limit is configured for the job.
 */
public void startDiskLimitCheckerThreadIfNeeded() {
  if (diskLimitCheckThread != null) {
    return;
  }
  long limitBytes = conf.getLong(
      MRJobConfig.JOB_SINGLE_DISK_LIMIT_BYTES,
      MRJobConfig.DEFAULT_JOB_SINGLE_DISK_LIMIT_BYTES);
  if (limitBytes < 0) {
    // Negative limit means the check is disabled.
    return;
  }
  try {
    diskLimitCheckThread = new SubjectInheritingThread(new DiskLimitCheck(conf),
        "disk limit check thread");
    diskLimitCheckThread.setDaemon(true);
    diskLimitCheckThread.start();
  } catch (IOException e) {
    LOG.error("Issues starting disk monitor thread: "
        + e.getMessage(), e);
  }
}
// Stops the communication thread without interrupting an in-flight umbilical
// RPC. Handshake: wake the loop, wait until it marks itself done (safe point),
// and only then interrupt and join.
public void stopCommunicationThread() throws InterruptedException {
  if (pingThread != null) {
    // Intent of the lock is to not send an interrupt in the middle of an
    // umbilical.ping or umbilical.statusUpdate
    synchronized(lock) {
      //Interrupt if sleeping. Otherwise wait for the RPC call to return.
      lock.notify();
    }
    // Wait for resetDoneFlag() to confirm the loop reached a safe point
    // before delivering the interrupt.
    synchronized (lock) {
      while (!done) {
        lock.wait();
      }
    }
    pingThread.interrupt();
    pingThread.join();
  }
}
}
/**
 * Reports the next executing record range to the TaskTracker.
 *
 * @param umbilical channel back to the parent process
 * @param nextRecIndex the record index which would be fed next
 * @throws IOException on RPC failure
 */
protected void reportNextRecordRange(final TaskUmbilicalProtocol umbilical,
    long nextRecIndex) throws IOException {
  // currentRecStartIndex is the first record the task has started but not yet
  // finished; the reported range spans from there up to nextRecIndex inclusive.
  SortedRanges.Range range = new SortedRanges.Range(
      currentRecStartIndex, nextRecIndex - currentRecStartIndex + 1);
  taskStatus.setNextRecordRange(range);
  if (LOG.isDebugEnabled()) {
    LOG.debug("sending reportNextRecordRange " + range);
  }
  umbilical.reportNextRecordRange(taskId, range);
}
/**
 * Creates a TaskReporter bound to this task's progress and starts its
 * communication thread.
 *
 * @param umbilical channel back to the parent process
 * @return the started reporter
 */
TaskReporter startReporter(final TaskUmbilicalProtocol umbilical) {
  final TaskReporter reporter = new TaskReporter(getProgress(), umbilical);
  reporter.startCommunicationThread();
  return reporter;
}
/**
 * Update resource information counters: heap usage plus, when a process-tree
 * resource calculator is available, CPU time and physical/virtual memory
 * (current values and per-task-type high-water marks).
 */
void updateResourceCounters() {
  // Update generic resource counters
  updateHeapUsageCounter();
  // Updating resources specified in ResourceCalculatorProcessTree
  if (pTree == null) {
    return;
  }
  pTree.updateProcessTree();
  long cpuTime = pTree.getCumulativeCpuTime();
  long pMem = pTree.getRssMemorySize();
  long vMem = pTree.getVirtualMemorySize();
  // Remove the CPU time consumed previously by JVM reuse
  if (cpuTime != ResourceCalculatorProcessTree.UNAVAILABLE &&
      initCpuCumulativeTime != ResourceCalculatorProcessTree.UNAVAILABLE) {
    cpuTime -= initCpuCumulativeTime;
  }
  if (cpuTime != ResourceCalculatorProcessTree.UNAVAILABLE) {
    counters.findCounter(TaskCounter.CPU_MILLISECONDS).setValue(cpuTime);
  }
  if (pMem != ResourceCalculatorProcessTree.UNAVAILABLE) {
    counters.findCounter(TaskCounter.PHYSICAL_MEMORY_BYTES).setValue(pMem);
  }
  if (vMem != ResourceCalculatorProcessTree.UNAVAILABLE) {
    counters.findCounter(TaskCounter.VIRTUAL_MEMORY_BYTES).setValue(vMem);
  }
  // Track memory high-water marks, kept in separate counters for map and
  // reduce tasks. (Shared helper removes the previously duplicated logic.)
  updateMaxCounterIfAvailable(pMem, isMapTask()
      ? TaskCounter.MAP_PHYSICAL_MEMORY_BYTES_MAX
      : TaskCounter.REDUCE_PHYSICAL_MEMORY_BYTES_MAX);
  updateMaxCounterIfAvailable(vMem, isMapTask()
      ? TaskCounter.MAP_VIRTUAL_MEMORY_BYTES_MAX
      : TaskCounter.REDUCE_VIRTUAL_MEMORY_BYTES_MAX);
}

/**
 * Raises {@code counter} to {@code value} if the value is available and
 * exceeds the counter's current maximum; otherwise leaves it unchanged.
 */
private void updateMaxCounterIfAvailable(long value, TaskCounter counter) {
  if (value != ResourceCalculatorProcessTree.UNAVAILABLE) {
    Counters.Counter maxCounter = counters.findCounter(counter);
    maxCounter.setValue(Math.max(maxCounter.getValue(), value));
  }
}
/**
* An updater that tracks the amount of time this task has spent in GC.
*/
|
DiskLimitCheck
|
java
|
quarkusio__quarkus
|
extensions/container-image/spi/src/main/java/io/quarkus/container/spi/FallbackContainerImageRegistryBuildItem.java
|
{
"start": 98,
"end": 385
}
|
class ____ extends SimpleBuildItem {

    /** Registry host to use when the image configuration does not specify one. */
    private final String registry;

    public FallbackContainerImageRegistryBuildItem(String registry) {
        this.registry = registry;
    }

    public String getRegistry() {
        return this.registry;
    }
}
|
FallbackContainerImageRegistryBuildItem
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/integration/AggregatedJmsRouteIT.java
|
{
"start": 5613,
"end": 5801
}
|
class ____ implements Processor {
    // Test helper processor: logs each exchange that flows through the route
    // and performs no transformation.
    @Override
    public void process(Exchange exchange) {
        LOG.info("get the exchange here {}", exchange);
    }
}
}
|
MyProcessor
|
java
|
quarkusio__quarkus
|
extensions/devui/deployment/src/main/java/io/quarkus/devui/deployment/menu/EndpointsProcessor.java
|
{
"start": 662,
"end": 2392
}
|
class ____ {

    private static final String NAMESPACE = "devui-endpoints";

    /**
     * Builds the Dev UI "Endpoints" internal page, exposing the application's
     * base path and, when the OpenAPI capability is present, the Swagger UI path.
     */
    @BuildStep(onlyIf = IsLocalDevelopment.class)
    InternalPageBuildItem createEndpointsPage(Capabilities capabilities,
            NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem) {
        String swaggerUiPath = "";
        if (capabilities.isPresent(Capability.SMALLRYE_OPENAPI)) {
            swaggerUiPath = nonApplicationRootPathBuildItem
                    .resolvePath(ConfigProvider.getConfig().getValue("quarkus.swagger-ui.path", String.class));
        }
        InternalPageBuildItem page = new InternalPageBuildItem("Endpoints", 25);
        page.addBuildTimeData("basepath", nonApplicationRootPathBuildItem.resolvePath(Constants.DEV_UI));
        page.addBuildTimeData("swaggerUiPath", swaggerUiPath);
        // Pages: the REST endpoints view and the routes view.
        page.addPage(Page.webComponentPageBuilder()
                .namespace(NAMESPACE)
                .title("Endpoints")
                .icon("font-awesome-solid:plug")
                .componentLink("qwc-endpoints.js"));
        page.addPage(Page.webComponentPageBuilder()
                .namespace(NAMESPACE)
                .title("Routes")
                .icon("font-awesome-solid:route")
                .componentLink("qwc-routes.js"));
        return page;
    }

    /** Registers the JSON-RPC provider backing the endpoints page. */
    @BuildStep(onlyIf = IsLocalDevelopment.class)
    JsonRPCProvidersBuildItem createJsonRPCService() {
        return new JsonRPCProvidersBuildItem(NAMESPACE, ResourceNotFoundData.class);
    }
}
|
EndpointsProcessor
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/all/ListAllTest.java
|
{
"start": 4250,
"end": 4723
}
|
class ____ implements Service {
    // Flipped by the @PreDestroy callback so tests can assert this bean was destroyed.
    static final AtomicBoolean DESTROYED = new AtomicBoolean();
    @Inject
    InjectionPoint injectionPoint;
    public String ping() {
        return "bravo";
    }
    @Override
    public Optional<InjectionPoint> getInjectionPoint() {
        // Exposes the injection point so tests can inspect where the bean was injected.
        return Optional.of(injectionPoint);
    }
    @PreDestroy
    void destroy() {
        DESTROYED.set(true);
    }
}
}
|
ServiceBravo
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/jpa/internal/util/PersistenceUtilHelper.java
|
{
"start": 12532,
"end": 13333
}
|
class ____ check.
* @param attributeName The attribute name.
*
* @return Returns the method with the specified name or <code>null</code> if it does not exist.
*/
private static Method getMethod(Class<?> clazz, String attributeName) {
	try {
		// Build the JavaBean accessor suffix by upper-casing the first letter,
		// then probe for a "get" accessor and fall back to an "is" accessor.
		final char[] chars = attributeName.toCharArray();
		chars[0] = Character.toUpperCase( chars[0] );
		final String capitalized = new String( chars );
		try {
			return clazz.getDeclaredMethod( "get" + capitalized );
		}
		catch ( NoSuchMethodException e ) {
			return clazz.getDeclaredMethod( "is" + capitalized );
		}
	}
	catch ( NoSuchMethodException e ) {
		// Neither accessor exists on this class (superclasses are not searched).
		return null;
	}
}
/**
* Cache hierarchy and member resolution, taking care to not leak
* references to Class instances.
*/
public static final
|
to
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/ValidateTest.java
|
{
"start": 62980,
"end": 64533
}
|
class ____ {

    @Test
    void shouldNotThrowExceptionForValidIndex() {
        Validate.validIndex(Collections.singleton("a"), 0, "MSG");
    }

    @Test
    void shouldReturnSameInstance() {
        final Set<String> collection = Collections.singleton("a");
        assertSame(collection, Validate.validIndex(collection, 0, "MSG"));
    }

    @Test
    void shouldThrowIndexOutOfBoundsExceptionWithGivenMessageForIndexOutOfBounds() {
        final IndexOutOfBoundsException thrown =
                assertIndexOutOfBoundsException(() -> Validate.validIndex(Collections.singleton("a"), 1, "MSG"));
        assertEquals("MSG", thrown.getMessage());
    }

    @Test
    void shouldThrowIndexOutOfBoundsExceptionWithGivenMessageForNegativeIndex() {
        final IndexOutOfBoundsException thrown = assertIndexOutOfBoundsException(
                () -> Validate.validIndex(Collections.singleton("a"), -1, "MSG"));
        assertEquals("MSG", thrown.getMessage());
    }

    @Test
    void shouldThrowNullPointerExceptionWithDefaultMessageForNullCollection() {
        final NullPointerException thrown =
                assertNullPointerException(() -> Validate.validIndex((Collection<?>) null, 1, "MSG"));
        assertEquals("collection", thrown.getMessage());
    }
}
@Nested
final
|
WithMessage
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArrays_assertHasSizeGreaterThan_Test.java
|
{
"start": 999,
"end": 1719
}
|
class ____ extends ObjectArraysBaseTest {

  @Test
  void should_fail_if_actual_is_null() {
    // WHEN
    var assertionError = expectAssertionError(() -> arrays.assertHasSizeGreaterThan(INFO, null, 6));
    // THEN
    then(assertionError).hasMessage(actualIsNull());
  }

  @Test
  void should_fail_if_size_of_actual_is_not_greater_than_boundary() {
    // WHEN
    var assertionError = expectAssertionError(() -> arrays.assertHasSizeGreaterThan(INFO, actual, 6));
    // THEN
    then(assertionError).hasMessage(shouldHaveSizeGreaterThan(actual, actual.length, 6).create());
  }

  @Test
  void should_pass_if_size_of_actual_is_greater_than_boundary() {
    // boundary strictly below the actual size -> no error expected
    arrays.assertHasSizeGreaterThan(INFO, actual, 1);
  }
}
|
ObjectArrays_assertHasSizeGreaterThan_Test
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/support/DistinguishedNameNormalizerTests.java
|
{
"start": 1209,
"end": 7319
}
|
class ____ extends ESTestCase {
    // Spied normalizer so tests can verify how many times doNormalize (the
    // expensive parse) actually runs versus being served from the cache.
    private UserRoleMapper.DistinguishedNameNormalizer dnNormalizer;
    @Before
    public void init() {
        dnNormalizer = getDnNormalizer();
    }
    public void testDnNormalizingIsCached() {
        // Parse same DN multiple times, only 1st time DN parsing is performed, 2nd time reads from the cache
        Mockito.clearInvocations(dnNormalizer);
        final String dn = randomDn();
        parseDnMultipleTimes(dn);
        verify(dnNormalizer, times(1)).doNormalize(dn);
        // The cache is keyed by the literal string form.
        // Therefore if the literal string changes, it needs to be parsed again even though it is still the same DN
        Mockito.clearInvocations(dnNormalizer);
        final String mutatedDn = mutateDn(dn);
        parseDnMultipleTimes(mutatedDn);
        verify(dnNormalizer, times(1)).doNormalize(mutatedDn);
        // Invalid DNs should also be cached
        Mockito.clearInvocations(dnNormalizer);
        final String invalidDn = randomFrom(
            "",
            randomAlphaOfLengthBetween(1, 8),
            randomAlphaOfLengthBetween(1, 8) + "*",
            randomAlphaOfLengthBetween(1, 8) + "=",
            "=" + randomAlphaOfLengthBetween(1, 8)
        );
        parseDnMultipleTimes(invalidDn);
        verify(dnNormalizer, times(1)).doNormalize(invalidDn);
    }
    public void testDnNormalizingIsCachedForDnPredicate() {
        // Constructing the predicate itself normalizes the DN once.
        final String dn = randomDn();
        final Predicate<FieldValue> predicate = new UserRoleMapper.DistinguishedNamePredicate(dn, dnNormalizer);
        verify(dnNormalizer, times(1)).doNormalize(dn);
        // Same DN, it's cached
        runPredicateMultipleTimes(predicate, dn);
        verify(dnNormalizer, times(1)).doNormalize(dn);
        // Predicate short-circuits for case differences
        Mockito.clearInvocations(dnNormalizer);
        final String casedDn = randomFrom(dn.toLowerCase(Locale.ENGLISH), dn.toUpperCase(Locale.ENGLISH));
        runPredicateMultipleTimes(predicate, casedDn);
        verify(dnNormalizer, never()).doNormalize(anyString());
        // Literal string form changes, it will be parsed again
        Mockito.clearInvocations(dnNormalizer);
        final String mutatedDn = randomFrom(dn.replace(" ", ""), dn.replace(",", " ,"));
        runPredicateMultipleTimes(predicate, mutatedDn);
        verify(dnNormalizer, times(1)).doNormalize(mutatedDn);
        // Subtree DN is also cached ("*,<dn>" matches any entry under <dn>)
        Mockito.clearInvocations(dnNormalizer);
        final String subtreeDn = "*," + randomDn();
        runPredicateMultipleTimes(predicate, subtreeDn);
        verify(dnNormalizer, times(1)).doNormalize(subtreeDn.substring(2));
        // Subtree DN is also keyed by the literal form, so they are space sensitive
        Mockito.clearInvocations(dnNormalizer);
        final String mutatedSubtreeDn = "*, " + subtreeDn.substring(2);
        runPredicateMultipleTimes(predicate, mutatedSubtreeDn);
        verify(dnNormalizer, times(1)).doNormalize(mutatedSubtreeDn.substring(2));
    }
    public void testUserDataUsesCachedDnNormalizer() {
        // Build a user with many groups to show the normalizer cache is shared
        // across the whole expression model, not per-group.
        final String userDn = "uid=foo," + randomDn();
        final List<String> groups = IntStream.range(0, randomIntBetween(50, 100))
            .mapToObj(i -> "gid=g" + i + "," + randomDn())
            .distinct()
            .toList();
        final RealmConfig realmConfig = mock(RealmConfig.class);
        when(realmConfig.name()).thenReturn(randomAlphaOfLengthBetween(3, 8));
        final UserRoleMapper.UserData userData = new UserRoleMapper.UserData(
            randomAlphaOfLengthBetween(5, 8),
            userDn,
            groups,
            Map.of(),
            realmConfig
        );
        UserRoleMapper.UserData spyUserdata = spy(userData);
        final UserRoleMapper.DistinguishedNameNormalizer spyDnNormalizer = spy(userData.getDnNormalizer());
        when(spyUserdata.getDnNormalizer()).thenReturn(spyDnNormalizer);
        final ExpressionModel expressionModel = spyUserdata.asModel();
        // All DNs to be tested should only be parsed once no matter how many groups the userData may have
        Mockito.clearInvocations(spyDnNormalizer);
        final List<String> dnList = randomList(100, 200, DistinguishedNameNormalizerTests::randomDn).stream().distinct().toList();
        final List<FieldValue> fieldValues = dnList.stream()
            .map(dn -> randomBoolean() ? new FieldValue(dn) : new FieldValue("*," + dn))
            .toList();
        expressionModel.test("groups", fieldValues);
        // Also does not matter how many times the model is tested
        expressionModel.test("groups", randomNonEmptySubsetOf(fieldValues));
        final ArgumentCaptor<String> argumentCaptor = ArgumentCaptor.forClass(String.class);
        verify(spyDnNormalizer, times(dnList.size())).doNormalize(argumentCaptor.capture());
        assertThat(argumentCaptor.getAllValues(), equalTo(dnList));
    }
    // Normalizes the same DN a random number (>=3) of times.
    private void parseDnMultipleTimes(String dn) {
        IntStream.range(0, randomIntBetween(3, 5)).forEach(i -> dnNormalizer.normalize(dn));
    }
    // Evaluates the predicate a random number (>=3) of times against one value.
    private void runPredicateMultipleTimes(Predicate<FieldValue> predicate, Object value) {
        IntStream.range(0, randomIntBetween(3, 5)).forEach(i -> predicate.test(new FieldValue(value)));
    }
    private UserRoleMapper.DistinguishedNameNormalizer getDnNormalizer() {
        return spy(new UserRoleMapper.DistinguishedNameNormalizer());
    }
    // Produces a syntactically valid 4-RDN DN; the ", O=" spacing is deliberate
    // so normalization has something to canonicalize.
    private static String randomDn() {
        return "CN="
            + randomAlphaOfLengthBetween(3, 12)
            + ",OU="
            + randomAlphaOfLength(4)
            + ", O="
            + randomAlphaOfLengthBetween(2, 6)
            + ",dc="
            + randomAlphaOfLength(3);
    }
    // Changes the literal string form of the DN without changing its meaning.
    private static String mutateDn(String dn) {
        return switch (randomIntBetween(1, 4)) {
            case 1 -> dn.toLowerCase(Locale.ENGLISH);
            case 2 -> dn.toUpperCase(Locale.ENGLISH);
            case 3 -> dn.replace(" ", "");
            default -> dn.replace(",", " ,");
        };
    }
}
|
DistinguishedNameNormalizerTests
|
java
|
spring-projects__spring-security
|
messaging/src/main/java/org/springframework/security/messaging/context/SecurityContextChannelInterceptor.java
|
{
"start": 1792,
"end": 5864
}
|
class ____ implements ExecutorChannelInterceptor, ChannelInterceptor {

	// Per-thread stack of contexts saved by setup() and restored by cleanup().
	// A stack (not a single slot) is needed because send/handle callbacks can nest.
	private static final ThreadLocal<Stack<SecurityContext>> originalContext = new ThreadLocal<>();

	private SecurityContextHolderStrategy securityContextHolderStrategy = SecurityContext	Holder
		.getContextHolderStrategy();

	// Cached empty context used by cleanup() to detect "nothing to restore".
	private SecurityContext empty = this.securityContextHolderStrategy.createEmptyContext();

	private final String authenticationHeaderName;

	// Fallback Authentication installed when the message carries no user header.
	private Authentication anonymous = new AnonymousAuthenticationToken("key", "anonymous",
			AuthorityUtils.createAuthorityList("ROLE_ANONYMOUS"));

	/**
	 * Creates a new instance using the header of the name
	 * {@link SimpMessageHeaderAccessor#USER_HEADER}.
	 */
	public SecurityContextChannelInterceptor() {
		this(SimpMessageHeaderAccessor.USER_HEADER);
	}

	/**
	 * Creates a new instance that uses the specified header to obtain the
	 * {@link Authentication}.
	 * @param authenticationHeaderName the header name to obtain the
	 * {@link Authentication}. Cannot be null.
	 */
	public SecurityContextChannelInterceptor(String authenticationHeaderName) {
		Assert.notNull(authenticationHeaderName, "authenticationHeaderName cannot be null");
		this.authenticationHeaderName = authenticationHeaderName;
	}

	/**
	 * Allows setting the Authentication used for anonymous authentication. Default is:
	 *
	 * <pre>
	 * new AnonymousAuthenticationToken(&quot;key&quot;, &quot;anonymous&quot;,
	 * 		AuthorityUtils.createAuthorityList(&quot;ROLE_ANONYMOUS&quot;));
	 * </pre>
	 * @param authentication the Authentication used for anonymous authentication. Cannot
	 * be null.
	 */
	public void setAnonymousAuthentication(Authentication authentication) {
		Assert.notNull(authentication, "authentication cannot be null");
		this.anonymous = authentication;
	}

	// Install the message's security context before the message is sent…
	@Override
	public Message<?> preSend(Message<?> message, MessageChannel channel) {
		setup(message);
		return message;
	}

	// …and restore the previous one when the send completes (success or failure).
	@Override
	public void afterSendCompletion(Message<?> message, MessageChannel channel, boolean sent, @Nullable Exception ex) {
		cleanup();
	}

	// Same pairing for handler-side interception.
	@Override
	public Message<?> beforeHandle(Message<?> message, MessageChannel channel, MessageHandler handler) {
		setup(message);
		return message;
	}

	@Override
	public void afterMessageHandled(Message<?> message, MessageChannel channel, MessageHandler handler,
			@Nullable Exception ex) {
		cleanup();
	}

	// Replaces the holder strategy; the cached empty context must be rebuilt
	// because equality in cleanup() compares against the strategy's own type.
	public void setSecurityContextHolderStrategy(SecurityContextHolderStrategy strategy) {
		this.securityContextHolderStrategy = strategy;
		this.empty = this.securityContextHolderStrategy.createEmptyContext();
	}

	// Saves the current context on the per-thread stack, then installs a fresh
	// context carrying the Authentication found in the configured header (or
	// the anonymous fallback).
	private void setup(Message<?> message) {
		SecurityContext currentContext = this.securityContextHolderStrategy.getContext();
		Stack<SecurityContext> contextStack = originalContext.get();
		if (contextStack == null) {
			contextStack = new Stack<>();
			originalContext.set(contextStack);
		}
		contextStack.push(currentContext);
		Object user = message.getHeaders().get(this.authenticationHeaderName);
		Authentication authentication = getAuthentication(user);
		SecurityContext context = this.securityContextHolderStrategy.createEmptyContext();
		context.setAuthentication(authentication);
		this.securityContextHolderStrategy.setContext(context);
	}

	private Authentication getAuthentication(@Nullable Object user) {
		if ((user instanceof Authentication)) {
			return (Authentication) user;
		}
		return this.anonymous;
	}

	// Pops the saved context. An empty/missing stack or a saved empty context
	// means there is nothing to restore, so the holder (and the ThreadLocal)
	// is cleared to avoid leaking state across pooled threads.
	private void cleanup() {
		Stack<SecurityContext> contextStack = originalContext.get();
		if (contextStack == null || contextStack.isEmpty()) {
			this.securityContextHolderStrategy.clearContext();
			originalContext.remove();
			return;
		}
		SecurityContext context = contextStack.pop();
		try {
			if (SecurityContextChannelInterceptor.this.empty.equals(context)) {
				this.securityContextHolderStrategy.clearContext();
				originalContext.remove();
			}
			else {
				this.securityContextHolderStrategy.setContext(context);
			}
		}
		catch (Throwable ex) {
			// Defensive: never propagate from cleanup; fail closed by clearing.
			this.securityContextHolderStrategy.clearContext();
		}
	}
}
|
SecurityContextChannelInterceptor
|
java
|
spring-projects__spring-security
|
core/src/test/java/org/springframework/security/core/annotation/UniqueSecurityAnnotationScannerTests.java
|
{
"start": 15642,
"end": 15746
}
|
interface ____ {

	// Test fixture: a third-party service whose parameter carries a custom
	// annotation value ("five") for the unique-annotation scanner tests.
	void delete(@CustomParameterAnnotation("five") String user);

}
|
ThirdPartyUserService
|
java
|
elastic__elasticsearch
|
modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java
|
{
"start": 1746,
"end": 5208
}
|
class ____ extends ESTestCase {
    @SuppressWarnings("unchecked")
    public void testDeleteBlobsIgnoringIfNotExistsThrowsIOException() throws Exception {
        // Verifies that a StorageException raised during a batched delete —
        // either from batch.submit() itself or from a per-blob result callback —
        // surfaces to the caller as an IOException with the StorageException cause.
        final List<String> blobs = Arrays.asList("blobA", "blobB");
        final StorageBatch batch = mock(StorageBatch.class);
        if (randomBoolean()) {
            // Failure mode 1: the whole batch submission throws.
            StorageBatchResult<Boolean> result = mock(StorageBatchResult.class);
            when(batch.delete(any(BlobId.class))).thenReturn(result);
            doThrow(new StorageException(new IOException("Batch submit throws a storage exception"))).when(batch).submit();
        } else {
            // Failure mode 2: blobA's result callback reports an error; blobB
            // randomly errors or succeeds. submit() itself completes normally.
            StorageBatchResult<Boolean> resultA = mock(StorageBatchResult.class);
            doReturn(resultA).when(batch).delete(eq(BlobId.of("bucket", "blobA")));
            doAnswer(invocation -> {
                StorageException storageException = new StorageException(new IOException("Batched delete throws a storage exception"));
                ((BatchResult.Callback) invocation.getArguments()[0]).error(storageException);
                return null;
            }).when(resultA).notify(any(StorageBatchResult.Callback.class));
            StorageBatchResult<Boolean> resultB = mock(StorageBatchResult.class);
            doReturn(resultB).when(batch).delete(eq(BlobId.of("bucket", "blobB")));
            doAnswer(invocation -> {
                if (randomBoolean()) {
                    StorageException storageException = new StorageException(new IOException("Batched delete throws a storage exception"));
                    ((BatchResult.Callback) invocation.getArguments()[0]).error(storageException);
                } else {
                    ((BatchResult.Callback) invocation.getArguments()[0]).success(randomBoolean());
                }
                return null;
            }).when(resultB).notify(any(StorageBatchResult.Callback.class));
            doNothing().when(batch).submit();
        }
        // Wire the mocked batch into a real GoogleCloudStorageBlobStore.
        final Storage storage = mock(Storage.class);
        when(storage.get("bucket")).thenReturn(mock(Bucket.class));
        when(storage.batch()).thenReturn(batch);
        final com.google.api.services.storage.Storage storageRpc = mock(com.google.api.services.storage.Storage.class);
        final MeteredStorage meteredStorage = new MeteredStorage(storage, storageRpc, new GcsRepositoryStatsCollector());
        final GoogleCloudStorageService storageService = mock(GoogleCloudStorageService.class);
        when(storageService.client(eq(ProjectId.DEFAULT), any(String.class), any(String.class), any(GcsRepositoryStatsCollector.class)))
            .thenReturn(meteredStorage);
        try (
            BlobStore store = new GoogleCloudStorageBlobStore(
                ProjectId.DEFAULT,
                "bucket",
                "test",
                "repo",
                storageService,
                BigArrays.NON_RECYCLING_INSTANCE,
                randomIntBetween(1, 8) * 1024,
                BackoffPolicy.noBackoff(),
                new GcsRepositoryStatsCollector()
            )
        ) {
            final BlobContainer container = store.blobContainer(BlobPath.EMPTY);
            // In both failure modes the delete must fail with an IOException
            // wrapping the underlying StorageException.
            IOException e = expectThrows(
                IOException.class,
                () -> container.deleteBlobsIgnoringIfNotExists(randomPurpose(), blobs.iterator())
            );
            assertThat(e.getCause(), instanceOf(StorageException.class));
        }
    }
}
|
GoogleCloudStorageBlobStoreContainerTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/fetch/batch/SimpleBatchFetchTests.java
|
{
"start": 3766,
"end": 4869
}
|
class ____ {
	@Id
	private Integer id;

	private String name;

	/** Eagerly-fetched group lead; all operations cascade to it. */
	@ManyToOne(fetch = FetchType.EAGER, cascade = CascadeType.ALL)
	private Employee lead;

	/** Group members, batch-fetched 100 at a time via a join table. */
	@OneToMany(cascade = CascadeType.ALL)
	@JoinTable(name="EmployeeGroup_employees")
	@BatchSize( size = 100 )
	private List<Employee> employees = new ArrayList<>();

	@SuppressWarnings("unused")
	protected EmployeeGroup() {
	}

	public EmployeeGroup(int id,String name) {
		this.id = id;
		this.name = name;
	}

	public Integer getId() {
		return id;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public Employee getLead() {
		return lead;
	}

	public void setLead(Employee lead) {
		this.lead = lead;
	}

	public boolean addEmployee(Employee employee) {
		return employees.add(employee);
	}

	public List<Employee> getEmployees() {
		return employees;
	}

	@Override
	public String toString() {
		// Use concatenation rather than id.toString() so an instance created via
		// the no-arg constructor (null id) can still be printed without an NPE.
		return "EmployeeGroup(" + id + " : " + name + ")";
	}
}
@Entity( name="Employee")
@Table(name = "Employee")
public static
|
EmployeeGroup
|
java
|
apache__kafka
|
connect/runtime/src/test/java/org/apache/kafka/connect/integration/RestExtensionIntegrationTest.java
|
{
"start": 2456,
"end": 7440
}
|
class ____ {
private static final long REST_EXTENSION_REGISTRATION_TIMEOUT_MS = TimeUnit.MINUTES.toMillis(1);
private static final long CONNECTOR_HEALTH_AND_CONFIG_TIMEOUT_MS = TimeUnit.MINUTES.toMillis(1);
private static final int NUM_WORKERS = 1;
private EmbeddedConnectCluster connect;
@Test
public void testRestExtensionApi() throws InterruptedException {
    // End-to-end check that a worker-registered REST extension can observe
    // connector health and configuration through ConnectClusterState.
    // setup Connect worker properties
    Map<String, String> workerProps = new HashMap<>();
    workerProps.put(REST_EXTENSION_CLASSES_CONFIG, IntegrationTestRestExtension.class.getName());
    // build a Connect cluster backed by a Kafka KRaft cluster
    connect = new EmbeddedConnectCluster.Builder()
        .name("connect-cluster")
        .numWorkers(NUM_WORKERS)
        .numBrokers(1)
        .workerProps(workerProps)
        .build();
    // start the clusters
    connect.start();
    WorkerHandle worker = connect.workers().stream()
        .findFirst()
        .orElseThrow(() -> new AssertionError("At least one worker handle should be available"));
    // The extension initializes asynchronously with worker startup.
    waitForCondition(
        this::extensionIsRegistered,
        REST_EXTENSION_REGISTRATION_TIMEOUT_MS,
        "REST extension was never registered"
    );
    ConnectorHandle connectorHandle = RuntimeHandles.get().connectorHandle("test-conn");
    try {
        // setup up props for the connector
        Map<String, String> connectorProps = new HashMap<>();
        connectorProps.put(CONNECTOR_CLASS_CONFIG, TestableSinkConnector.class.getSimpleName());
        connectorProps.put(TASKS_MAX_CONFIG, String.valueOf(1));
        connectorProps.put(TOPICS_CONFIG, "test-topic");
        // start a connector; register the task handle before configuring so the
        // start latch can observe the task's startup.
        connectorHandle.taskHandle(connectorHandle.name() + "-0");
        StartAndStopLatch connectorStartLatch = connectorHandle.expectedStarts(1);
        connect.configureConnector(connectorHandle.name(), connectorProps);
        connect.assertions().assertConnectorAndAtLeastNumTasksAreRunning(connectorHandle.name(), 1,
            "Connector tasks did not start in time.");
        connectorStartLatch.await(CONNECTOR_HEALTH_AND_CONFIG_TIMEOUT_MS, TimeUnit.MILLISECONDS);
        String workerId = String.format("%s:%d", worker.url().getHost(), worker.url().getPort());
        // Expected health: one RUNNING connector with one RUNNING task, both on
        // the single worker of this cluster.
        ConnectorHealth expectedHealth = new ConnectorHealth(
            connectorHandle.name(),
            new ConnectorState(
                "RUNNING",
                workerId,
                null
            ),
            Map.of(
                0,
                new TaskState(0, "RUNNING", workerId, null)
            ),
            ConnectorType.SINK
        );
        // The worker adds the connector name to the stored config; mirror that here.
        connectorProps.put(NAME_CONFIG, connectorHandle.name());
        // Test the REST extension API; specifically, that the connector's health and configuration
        // are available to the REST extension we registered and that they contain expected values
        waitForCondition(
            () -> verifyConnectorHealthAndConfig(connectorHandle.name(), expectedHealth, connectorProps),
            CONNECTOR_HEALTH_AND_CONFIG_TIMEOUT_MS,
            "Connector health and/or config was never accessible by the REST extension"
        );
    } finally {
        RuntimeHandles.get().deleteConnector(connectorHandle.name());
    }
}
@AfterEach
public void close() {
    // stop the Connect cluster and its backing Kafka cluster.
    connect.stop();
    // Clear the static handle so the next test registers a fresh extension instance.
    IntegrationTestRestExtension.instance = null;
}
/**
 * Returns true once the test REST extension's "registered" resource responds
 * with a non-error status; false while the worker has not exposed it yet.
 */
private boolean extensionIsRegistered() {
    try {
        String extensionUrl = connect.endpointForResource("integration-test-rest-extension/registered");
        return connect.requestGet(extensionUrl).getStatus() < BAD_REQUEST.getStatusCode();
    } catch (ConnectException e) {
        // Endpoint not resolvable yet; keep polling.
        return false;
    }
}
// Reads the connector's health and config through the REST extension's
// ConnectClusterState. Returns false (retry) while task status is still
// propagating; asserts exact equality once it is available.
private boolean verifyConnectorHealthAndConfig(
    String connectorName,
    ConnectorHealth expectedHealth,
    Map<String, String> expectedConfig
) {
    ConnectClusterState clusterState =
        IntegrationTestRestExtension.instance.restPluginContext.clusterState();
    ConnectorHealth actualHealth = clusterState.connectorHealth(connectorName);
    if (actualHealth.tasksState().isEmpty()) {
        // Happens if the task has been started but its status has not yet been picked up from
        // the status topic by the worker.
        return false;
    }
    Map<String, String> actualConfig = clusterState.connectorConfig(connectorName);
    assertEquals(expectedConfig, actualConfig);
    assertEquals(expectedHealth, actualHealth);
    return true;
}
public static
|
RestExtensionIntegrationTest
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/dataview/StateMapView.java
|
{
"start": 12249,
"end": 13138
}
|
class ____<N, EK, EV>
extends StateMapViewWithKeysNullable<N, EK, EV> {
private final MapState<EK, EV> mapState;
private final ValueState<EV> nullState;
public KeyedStateMapViewWithKeysNullable(
MapState<EK, EV> mapState, ValueState<EV> nullState) {
this.mapState = mapState;
this.nullState = nullState;
}
@Override
public void setCurrentNamespace(N namespace) {
throw new UnsupportedOperationException();
}
@Override
protected MapState<EK, EV> getMapState() {
return mapState;
}
@Override
protected ValueState<EV> getNullState() {
return nullState;
}
}
/** A state {@link MapView} which supports nullable keys and namespace. */
public static final
|
KeyedStateMapViewWithKeysNullable
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/intarrays/IntArrays_assertNotEmpty_Test.java
|
{
"start": 1532,
"end": 2248
}
|
/**
 * Tests for {@code IntArrays#assertNotEmpty(AssertionInfo, int[])}.
 *
 * <p>Fix: the class name placeholder {@code ____} is not a valid Java identifier
 * usage; the name is restored from the file path
 * ({@code IntArrays_assertNotEmpty_Test}).
 */
class IntArrays_assertNotEmpty_Test extends IntArraysBaseTest {

    @Test
    void should_fail_if_actual_is_null() {
        // A null array must be rejected with the standard "actual is null" message.
        assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertNotEmpty(someInfo(), null))
                                                       .withMessage(actualIsNull());
    }

    @Test
    void should_fail_if_actual_is_empty() {
        AssertionInfo info = someInfo();
        Throwable error = catchThrowable(() -> arrays.assertNotEmpty(info, emptyArray()));
        // The failure is reported through the Failures collaborator; verify the interaction.
        assertThat(error).isInstanceOf(AssertionError.class);
        verify(failures).failure(info, shouldNotBeEmpty());
    }

    @Test
    void should_pass_if_actual_is_not_empty() {
        // A single-element array is sufficient to satisfy the assertion.
        arrays.assertNotEmpty(someInfo(), arrayOf(8));
    }
}
|
IntArrays_assertNotEmpty_Test
|
java
|
quarkusio__quarkus
|
extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/MessageBundleBuildItem.java
|
{
"start": 250,
"end": 1879
}
|
class ____ extends MultiBuildItem {
private final String name;
private final ClassInfo defaultBundleInterface;
private final Map<String, ClassInfo> localizedInterfaces;
private final Map<String, List<MessageFile>> localizedFiles;
private final Map<String, List<MessageFile>> mergeCandidates;
private final String defaultLocale;
public MessageBundleBuildItem(String name, ClassInfo defaultBundleInterface,
Map<String, ClassInfo> localizedInterfaces, Map<String, List<MessageFile>> localizedFiles,
Map<String, List<MessageFile>> mergeCandidates, String defaultLocale) {
this.name = name;
this.defaultBundleInterface = defaultBundleInterface;
this.localizedInterfaces = localizedInterfaces;
this.localizedFiles = localizedFiles;
this.mergeCandidates = mergeCandidates;
this.defaultLocale = defaultLocale;
}
public String getName() {
return name;
}
public ClassInfo getDefaultBundleInterface() {
return defaultBundleInterface;
}
public Map<String, ClassInfo> getLocalizedInterfaces() {
return localizedInterfaces;
}
public Map<String, List<MessageFile>> getLocalizedFiles() {
return localizedFiles;
}
/**
* Merge candidates are localized files used as a supplementary source of message templates
* not specified by localized interfaces.
*/
public Map<String, List<MessageFile>> getMergeCandidates() {
return mergeCandidates;
}
public String getDefaultLocale() {
return defaultLocale;
}
}
|
MessageBundleBuildItem
|
java
|
elastic__elasticsearch
|
modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorFactoryTests.java
|
{
"start": 518,
"end": 736
}
|
/**
 * Runs the shared string-processor factory test suite against
 * {@code UppercaseProcessor.Factory}.
 *
 * <p>Fix: the class name placeholder {@code ____} is restored from the file path
 * ({@code UppercaseProcessorFactoryTests}) so the file compiles.
 */
class UppercaseProcessorFactoryTests extends AbstractStringProcessorFactoryTestCase {
    @Override
    protected AbstractStringProcessor.Factory newFactory() {
        return new UppercaseProcessor.Factory();
    }
}
|
UppercaseProcessorFactoryTests
|
java
|
elastic__elasticsearch
|
modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java
|
{
"start": 9727,
"end": 11400
}
|
class ____ implements ClusterStateApplier {
private static final String AZURE_SETTING_PREFIX = "azure.";
private final Settings nodeAzureSettings;
private volatile Map<String, AzureStorageSettings> clusterStorageSettings;
private final Map<ProjectId, Map<String, AzureStorageSettings>> perProjectStorageSettings;
/**
 * Builds the clients manager from node-level settings.
 *
 * @param nodeSettings             node settings; azure.* client settings are loaded eagerly
 * @param supportsMultipleProjects whether per-project storage settings should be tracked;
 *                                 when {@code false} the per-project map is left {@code null}
 */
AzureStorageClientsManager(Settings nodeSettings, boolean supportsMultipleProjects) {
    // eagerly load client settings so that secure settings are read
    final Map<String, AzureStorageSettings> clientsSettings = AzureStorageSettings.load(nodeSettings);
    refreshClusterClientSettings(clientsSettings);
    // Keep only the azure.* subset of node settings, re-normalized under the same prefix.
    this.nodeAzureSettings = Settings.builder()
        .put(nodeSettings.getByPrefix(AZURE_SETTING_PREFIX), false) // not rely on any cluster scoped secrets
        .normalizePrefix(AZURE_SETTING_PREFIX)
        .build();
    if (supportsMultipleProjects) {
        // Concurrent map: updated from cluster-state application, read by client lookups.
        this.perProjectStorageSettings = ConcurrentCollections.newConcurrentMap();
    } else {
        // null is the sentinel for "multi-project tracking disabled" (asserted elsewhere).
        this.perProjectStorageSettings = null;
    }
}
@Override
public void applyClusterState(ClusterChangedEvent event) {
assert perProjectStorageSettings != null;
final Map<ProjectId, ProjectMetadata> currentProjects = event.state().metadata().projects();
for (var project : currentProjects.values()) {
// Skip the default project, it is tracked separately with clusterStorageSettings and
// updated differently with the ReloadablePlugin
|
AzureStorageClientsManager
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/filter/factory/cache/LocalResponseCacheGatewayFilterFactoryTests.java
|
{
"start": 23464,
"end": 23929
}
|
/**
 * Spring configuration for the local-response-cache tests: defines a route that
 * strips the test prefix, rewrites to the httpbin backend, and applies the
 * local-response-cache filter with default size/TTL ({@code null, null}).
 *
 * <p>Fix: the class name placeholder {@code ____} is restored
 * ({@code TestConfig}) so the file compiles.
 */
class TestConfig {

    // Target backend URI, injected from the test environment.
    @Value("${test.uri}")
    String uri;

    @Bean
    public RouteLocator testRouteLocator(RouteLocatorBuilder builder) {
        return builder.routes()
            .route("local_response_cache_java_test",
                    r -> r.path("/{namespace}/cache/**")
                        .and()
                        .host("{sub}.localresponsecache.org")
                        .filters(f -> f.stripPrefix(2).prefixPath("/httpbin").localResponseCache(null, null))
                        .uri(uri))
            .build();
    }
}
}
}
|
TestConfig
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/cache/LocalCachedMapUpdate.java
|
{
"start": 961,
"end": 2689
}
|
class ____ {
private final byte[] key;
private final byte[] value;
public Entry(byte[] key, byte[] value) {
this.key = key;
this.value = value;
}
public Entry(ByteBuf keyBuf, ByteBuf valueBuf) {
key = new byte[keyBuf.readableBytes()];
keyBuf.getBytes(keyBuf.readerIndex(), key);
value = new byte[valueBuf.readableBytes()];
valueBuf.getBytes(valueBuf.readerIndex(), value);
}
public byte[] getKey() {
return key;
}
public byte[] getValue() {
return value;
}
}
// All entries carried by this update message.
private List<Entry> entries = new ArrayList<Entry>();
// Id of the client instance that should NOT apply this update — presumably the
// originator of the change; may be null. TODO confirm against the publisher.
private byte[] excludedId;

// No-arg constructor required for serialization/decoding.
public LocalCachedMapUpdate() {
}

public LocalCachedMapUpdate(byte[] excludedId, List<Entry> entries) {
    super();
    this.excludedId = excludedId;
    this.entries = entries;
}

// Convenience constructor: copies the readable bytes of both buffers (without
// advancing their reader indexes) into a single-entry update.
public LocalCachedMapUpdate(byte[] excludedId, ByteBuf keyBuf, ByteBuf valueBuf) {
    this.excludedId = excludedId;
    byte[] key = new byte[keyBuf.readableBytes()];
    keyBuf.getBytes(keyBuf.readerIndex(), key);
    byte[] value = new byte[valueBuf.readableBytes()];
    valueBuf.getBytes(valueBuf.readerIndex(), value);
    entries = Collections.singletonList(new Entry(key, value));
}

// Single-entry update with no excluded client.
public LocalCachedMapUpdate(byte[] key, byte[] value) {
    entries = Collections.singletonList(new Entry(key, value));
}

// NOTE(review): returns the internal (possibly mutable) list directly; callers
// are expected not to modify it.
public Collection<Entry> getEntries() {
    return entries;
}

public byte[] getExcludedId() {
    return excludedId;
}
}
|
Entry
|
java
|
junit-team__junit5
|
junit-platform-engine/src/main/java/org/junit/platform/engine/TestSource.java
|
{
"start": 939,
"end": 985
}
|
/**
 * Representation of the source of a test or a container (e.g. a class, method,
 * file, or classpath resource).
 *
 * <p>Extends {@link Serializable} so that sources can be transferred between
 * JVMs, for example from a forked build-tool process back to an IDE.
 *
 * <p>Fix: the interface name placeholder {@code ____} is restored
 * ({@code TestSource}) so the file compiles.
 */
interface TestSource extends Serializable {
}
|
TestSource
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/SpringStaticRecipientListTest.java
|
{
"start": 1044,
"end": 1311
}
|
/**
 * Spring variant of {@code StaticRecipientListTest}: runs the same scenarios
 * against a CamelContext whose routes are defined in XML.
 *
 * <p>Fix: the class name placeholder {@code ____} is restored from the file path
 * ({@code SpringStaticRecipientListTest}) so the file compiles.
 */
class SpringStaticRecipientListTest extends StaticRecipientListTest {
    @Override
    protected CamelContext createCamelContext() throws Exception {
        // Build the context from the Spring XML route definition instead of Java DSL.
        return createSpringCamelContext(this, "org/apache/camel/spring/processor/staticRecipientList.xml");
    }
}
|
SpringStaticRecipientListTest
|
java
|
spring-projects__spring-boot
|
build-plugin/spring-boot-maven-plugin/src/intTest/projects/aot-jdk-proxy/src/main/java/org/test/SampleApplication.java
|
{
"start": 1344,
"end": 1654
}
|
/**
 * Registers AOT runtime hints for this sample: forces creation of at least one
 * JDK proxy (for {@code Service}) so it is available at native-image runtime.
 *
 * <p>Fix: the class name placeholder {@code ____} is restored
 * ({@code SampleApplicationRuntimeHints}) so the file compiles.
 */
class SampleApplicationRuntimeHints implements RuntimeHintsRegistrar {
    @Override
    public void registerHints(RuntimeHints hints, ClassLoader classLoader) {
        // Force creation of at least one JDK proxy
        hints.proxies().registerJdkProxy(AopProxyUtils.completeJdkProxyInterfaces(Service.class));
    }
}
}
|
SampleApplicationRuntimeHints
|
java
|
FasterXML__jackson-core
|
src/main/java/tools/jackson/core/json/WriterBasedJsonGenerator.java
|
{
"start": 445,
"end": 74731
}
|
class ____
extends JsonGeneratorBase
{
protected final static int SHORT_WRITE = 32;
protected final static char[] HEX_CHARS_UPPER = CharTypes.copyHexChars(true);
protected final static char[] HEX_CHARS_LOWER = CharTypes.copyHexChars(false);
private char[] getHexChars() {
return _cfgWriteHexUppercase ? HEX_CHARS_UPPER : HEX_CHARS_LOWER;
}
/*
/**********************************************************************
/* Configuration
/**********************************************************************
*/
protected final Writer _writer;
/**
* Character used for quoting JSON Object property names
* and String values.
*/
protected final char _quoteChar;
/*
/**********************************************************************
/* Output buffering
/**********************************************************************
*/
/**
* Intermediate buffer in which contents are buffered before
* being written using {@link #_writer}.
*/
protected char[] _outputBuffer;
/**
* Pointer to the first buffered character to output
*/
protected int _outputHead;
/**
* Pointer to the position right beyond the last character to output
* (end marker; may point to position right beyond the end of the buffer)
*/
protected int _outputTail;
/**
* End marker of the output buffer; one past the last valid position
* within the buffer.
*/
protected int _outputEnd;
/**
* Short (14 char) temporary buffer allocated if needed, for constructing
* escape sequences
*/
protected char[] _entityBuffer;
/**
* When custom escapes are used, this member variable is used
* internally to hold a reference to currently used escape
*/
protected SerializableString _currentEscape;
/**
* Intermediate buffer in which characters of a String are copied
* before being encoded.
*/
protected char[] _copyBuffer;
/*
/**********************************************************************
/* Life-cycle
/**********************************************************************
*/
public WriterBasedJsonGenerator(ObjectWriteContext writeCtxt, IOContext ioCtxt,
int streamWriteFeatures, int formatWriteFeatures, Writer w,
SerializableString rootValueSep, PrettyPrinter pp,
CharacterEscapes charEsc, int maxNonEscaped, char quoteChar)
{
super(writeCtxt, ioCtxt, streamWriteFeatures, formatWriteFeatures, rootValueSep, pp,
charEsc, maxNonEscaped);
_writer = w;
_outputBuffer = ioCtxt.allocConcatBuffer();
_outputEnd = _outputBuffer.length;
_quoteChar = quoteChar;
setCharacterEscapes(charEsc);
}
@Override
public JsonGenerator setCharacterEscapes(CharacterEscapes esc)
{
_characterEscapes = esc;
if (esc == null) {
_outputEscapes = CharTypes.get7BitOutputEscapes(_quoteChar,
JsonWriteFeature.ESCAPE_FORWARD_SLASHES.enabledIn(_formatWriteFeatures));
} else {
_outputEscapes = esc.getEscapeCodesForAscii();
}
return this;
}
/*
/**********************************************************************
/* Overridden configuration, introspection methods
/**********************************************************************
*/
@Override
public Object streamWriteOutputTarget() {
return _writer;
}
@Override
public int streamWriteOutputBuffered() {
// Assuming tail and head are kept but... trust and verify:
int len = _outputTail - _outputHead;
return Math.max(0, len);
}
/*
/**********************************************************************
/* Overridden methods
/**********************************************************************
*/
@Override
public JsonGenerator writeName(String name) throws JacksonException
{
int status = _streamWriteContext.writeName(name);
if (status == JsonWriteContext.STATUS_EXPECT_VALUE) {
_reportError("Cannot write a property name, expecting a value");
}
_writeName(name, (status == JsonWriteContext.STATUS_OK_AFTER_COMMA));
return this;
}
@Override
public JsonGenerator writeName(SerializableString name) throws JacksonException
{
// Object is a value, need to verify it's allowed
int status = _streamWriteContext.writeName(name.getValue());
if (status == JsonWriteContext.STATUS_EXPECT_VALUE) {
_reportError("Cannot write a property name, expecting a value");
}
_writeName(name, (status == JsonWriteContext.STATUS_OK_AFTER_COMMA));
return this;
}
protected final void _writeName(String name, boolean commaBefore) throws JacksonException
{
if (_prettyPrinter != null) {
_writePPName(name, commaBefore);
return;
}
// for fast+std case, need to output up to 2 chars, comma, dquote
if ((_outputTail + 1) >= _outputEnd) {
_flushBuffer();
}
if (commaBefore) {
_outputBuffer[_outputTail++] = ',';
}
// Alternate mode, in which quoting of property names disabled?
if (_cfgUnqNames) {
_writeString(name);
return;
}
// we know there's room for at least one more char
_outputBuffer[_outputTail++] = _quoteChar;
// The beef:
_writeString(name);
// and closing quotes; need room for one more char:
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
}
protected final void _writeName(SerializableString name, boolean commaBefore) throws JacksonException
{
if (_prettyPrinter != null) {
_writePPName(name, commaBefore);
return;
}
// for fast+std case, need to output up to 2 chars, comma, dquote
if ((_outputTail + 1) >= _outputEnd) {
_flushBuffer();
}
if (commaBefore) {
_outputBuffer[_outputTail++] = ',';
}
// Alternate mode, in which quoting of property names disabled?
if (_cfgUnqNames) {
final char[] ch = name.asQuotedChars();
writeRaw(ch, 0, ch.length);
return;
}
// we know there's room for at least one more char
_outputBuffer[_outputTail++] = _quoteChar;
// The beef:
int len = name.appendQuoted(_outputBuffer, _outputTail);
if (len < 0) {
_writeNameTail(name);
return;
}
_outputTail += len;
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
}
private final void _writeNameTail(SerializableString name) throws JacksonException
{
final char[] quoted = name.asQuotedChars();
writeRaw(quoted, 0, quoted.length);
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
}
/*
/**********************************************************************
/* Output method implementations, structural
/**********************************************************************
*/
@Override
public JsonGenerator writeStartArray() throws JacksonException
{
_verifyValueWrite("start an array");
_streamWriteContext = _streamWriteContext.createChildArrayContext(null);
streamWriteConstraints().validateNestingDepth(_streamWriteContext.getNestingDepth());
if (_prettyPrinter != null) {
_prettyPrinter.writeStartArray(this);
} else {
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = '[';
}
return this;
}
@Override
public JsonGenerator writeStartArray(Object forValue) throws JacksonException
{
_verifyValueWrite("start an array");
_streamWriteContext = _streamWriteContext.createChildArrayContext(forValue);
streamWriteConstraints().validateNestingDepth(_streamWriteContext.getNestingDepth());
if (_prettyPrinter != null) {
_prettyPrinter.writeStartArray(this);
} else {
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = '[';
}
return this;
}
@Override
public JsonGenerator writeStartArray(Object forValue, int len) throws JacksonException
{
_verifyValueWrite("start an array");
_streamWriteContext = _streamWriteContext.createChildArrayContext(forValue);
streamWriteConstraints().validateNestingDepth(_streamWriteContext.getNestingDepth());
if (_prettyPrinter != null) {
_prettyPrinter.writeStartArray(this);
} else {
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = '[';
}
return this;
}
@Override
public JsonGenerator writeEndArray() throws JacksonException
{
if (!_streamWriteContext.inArray()) {
_reportError("Current context not Array but "+_streamWriteContext.typeDesc());
}
if (_prettyPrinter != null) {
_prettyPrinter.writeEndArray(this, _streamWriteContext.getEntryCount());
} else {
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = ']';
}
_streamWriteContext = _streamWriteContext.clearAndGetParent();
return this;
}
@Override
public JsonGenerator writeStartObject() throws JacksonException
{
_verifyValueWrite("start an object");
_streamWriteContext = _streamWriteContext.createChildObjectContext(null);
streamWriteConstraints().validateNestingDepth(_streamWriteContext.getNestingDepth());
if (_prettyPrinter != null) {
_prettyPrinter.writeStartObject(this);
} else {
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = '{';
}
return this;
}
@Override
public JsonGenerator writeStartObject(Object forValue) throws JacksonException
{
_verifyValueWrite("start an object");
JsonWriteContext ctxt = _streamWriteContext.createChildObjectContext(forValue);
streamWriteConstraints().validateNestingDepth(ctxt.getNestingDepth());
_streamWriteContext = ctxt;
if (_prettyPrinter != null) {
_prettyPrinter.writeStartObject(this);
} else {
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = '{';
}
return this;
}
@Override
public JsonGenerator writeStartObject(Object forValue, int size) throws JacksonException
{
_verifyValueWrite("start an object");
JsonWriteContext ctxt = _streamWriteContext.createChildObjectContext(forValue);
streamWriteConstraints().validateNestingDepth(ctxt.getNestingDepth());
_streamWriteContext = ctxt;
if (_prettyPrinter != null) {
_prettyPrinter.writeStartObject(this);
} else {
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = '{';
}
return this;
}
@Override
public JsonGenerator writeEndObject() throws JacksonException
{
if (!_streamWriteContext.inObject()) {
_reportError("Current context not Object but "+_streamWriteContext.typeDesc());
}
if (_prettyPrinter != null) {
_prettyPrinter.writeEndObject(this, _streamWriteContext.getEntryCount());
} else {
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = '}';
}
_streamWriteContext = _streamWriteContext.clearAndGetParent();
return this;
}
// Specialized version of <code>_writeName</code>, off-lined
// to keep the "fast path" as simple (and hopefully fast) as possible.
protected final void _writePPName(String name, boolean commaBefore) throws JacksonException
{
if (commaBefore) {
_prettyPrinter.writeObjectEntrySeparator(this);
} else {
_prettyPrinter.beforeObjectEntries(this);
}
if (_cfgUnqNames) {// non-standard, omit quotes
_writeString(name);
} else {
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
_writeString(name);
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
}
}
protected final void _writePPName(SerializableString name, boolean commaBefore) throws JacksonException
{
if (commaBefore) {
_prettyPrinter.writeObjectEntrySeparator(this);
} else {
_prettyPrinter.beforeObjectEntries(this);
}
final char[] quoted = name.asQuotedChars();
if (_cfgUnqNames) {// non-standard, omit quotes
writeRaw(quoted, 0, quoted.length);
} else {
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
writeRaw(quoted, 0, quoted.length);
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
}
}
/*
/**********************************************************************
/* Output method implementations, textual
/**********************************************************************
*/
@Override
public JsonGenerator writeString(String text) throws JacksonException
{
_verifyValueWrite(WRITE_STRING);
if (text == null) {
_writeNull();
return this;
}
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
_writeString(text);
// And finally, closing quotes
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
return this;
}
@Override
public JsonGenerator writeString(Reader reader, int len) throws JacksonException
{
_verifyValueWrite(WRITE_STRING);
if (reader == null) {
return _reportError("null reader");
}
int toRead = (len >= 0) ? len : Integer.MAX_VALUE;
// Add leading quote
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
final char[] buf = _allocateCopyBuffer();
while (toRead > 0) {
int toReadNow = Math.min(toRead, buf.length);
int numRead;
try {
numRead = reader.read(buf, 0, toReadNow);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
if (numRead <= 0) {
break;
}
_writeString(buf, 0, numRead);
toRead -= numRead;
}
// Add trailing quote
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
if (toRead > 0 && len >= 0) {
_reportError("Didn't read enough from reader");
}
return this;
}
@Override
public JsonGenerator writeString(char[] text, int offset, int len) throws JacksonException
{
_verifyValueWrite(WRITE_STRING);
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
_writeString(text, offset, len);
// And finally, closing quotes
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
return this;
}
@Override
public JsonGenerator writeString(SerializableString sstr) throws JacksonException
{
_verifyValueWrite(WRITE_STRING);
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
int len = sstr.appendQuoted(_outputBuffer, _outputTail);
if (len < 0) {
_writeString2(sstr);
return this;
}
_outputTail += len;
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
return this;
}
private void _writeString2(SerializableString sstr) throws JacksonException
{
// Note: copied from writeRaw:
char[] text = sstr.asQuotedChars();
final int len = text.length;
if (len < SHORT_WRITE) {
int room = _outputEnd - _outputTail;
if (len > room) {
_flushBuffer();
}
System.arraycopy(text, 0, _outputBuffer, _outputTail, len);
_outputTail += len;
} else {
_flushBuffer();
try {
_writer.write(text, 0, len);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
}
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
}
@Override
public JsonGenerator writeRawUTF8String(byte[] text, int offset, int length) throws JacksonException {
// could add support for buffering if we really want it...
return _reportUnsupportedOperation();
}
@Override
public JsonGenerator writeUTF8String(byte[] text, int offset, int length) throws JacksonException {
// could add support for buffering if we really want it...
return _reportUnsupportedOperation();
}
/*
/**********************************************************************
/* Output method implementations, unprocessed ("raw")
/**********************************************************************
*/
@Override
public JsonGenerator writeRaw(String text) throws JacksonException
{
// Nothing to check, can just output as is
int len = text.length();
int room = _outputEnd - _outputTail;
if (room == 0) {
_flushBuffer();
room = _outputEnd - _outputTail;
}
// But would it nicely fit in? If yes, it's easy
if (room >= len) {
text.getChars(0, len, _outputBuffer, _outputTail);
_outputTail += len;
} else {
writeRawLong(text);
}
return this;
}
@Override
public JsonGenerator writeRaw(String text, int offset, int len) throws JacksonException
{
_checkRangeBoundsForString(text, offset, len);
// Nothing to check, can just output as is
int room = _outputEnd - _outputTail;
if (room < len) {
_flushBuffer();
room = _outputEnd - _outputTail;
}
// But would it nicely fit in? If yes, it's easy
if (room >= len) {
text.getChars(offset, offset+len, _outputBuffer, _outputTail);
_outputTail += len;
} else {
writeRawLong(text.substring(offset, offset+len));
}
return this;
}
@Override
public JsonGenerator writeRaw(SerializableString text) throws JacksonException {
int len = text.appendUnquoted(_outputBuffer, _outputTail);
if (len < 0) {
writeRaw(text.getValue());
return this;
}
_outputTail += len;
return this;
}
@Override
public JsonGenerator writeRaw(char[] cbuf, int offset, int len) throws JacksonException
{
_checkRangeBoundsForCharArray(cbuf, offset, len);
// Only worth buffering if it's a short write?
if (len < SHORT_WRITE) {
int room = _outputEnd - _outputTail;
if (len > room) {
_flushBuffer();
}
System.arraycopy(cbuf, offset, _outputBuffer, _outputTail, len);
_outputTail += len;
return this;
}
// Otherwise, better just pass through:
_flushBuffer();
try {
_writer.write(cbuf, offset, len);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
return this;
}
@Override
public JsonGenerator writeRaw(char c) throws JacksonException
{
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = c;
return this;
}
private void writeRawLong(String text) throws JacksonException
{
int room = _outputEnd - _outputTail;
// If not, need to do it by looping
text.getChars(0, room, _outputBuffer, _outputTail);
_outputTail += room;
_flushBuffer();
int offset = room;
int len = text.length() - room;
while (len > _outputEnd) {
int amount = _outputEnd;
text.getChars(offset, offset+amount, _outputBuffer, 0);
_outputHead = 0;
_outputTail = amount;
_flushBuffer();
offset += amount;
len -= amount;
}
// And last piece (at most length of buffer)
text.getChars(offset, offset+len, _outputBuffer, 0);
_outputHead = 0;
_outputTail = len;
}
/*
/**********************************************************************
/* Output method implementations, base64-encoded binary
/**********************************************************************
*/
@Override
public JsonGenerator writeBinary(Base64Variant b64variant, byte[] data, int offset, int len)
throws JacksonException
{
_checkRangeBoundsForByteArray(data, offset, len);
_verifyValueWrite(WRITE_BINARY);
// Starting quotes
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
_writeBinary(b64variant, data, offset, offset+len);
// and closing quotes
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
return this;
}
@Override
public int writeBinary(Base64Variant b64variant,
InputStream data, int dataLength)
throws JacksonException
{
_verifyValueWrite(WRITE_BINARY);
// Starting quotes
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
byte[] encodingBuffer = _ioContext.allocBase64Buffer();
int bytes;
try {
if (dataLength < 0) { // length unknown
bytes = _writeBinary(b64variant, data, encodingBuffer);
} else {
int missing = _writeBinary(b64variant, data, encodingBuffer, dataLength);
if (missing > 0) {
_reportError("Too few bytes available: missing "+missing+" bytes (out of "+dataLength+")");
}
bytes = dataLength;
}
} finally {
_ioContext.releaseBase64Buffer(encodingBuffer);
}
// and closing quotes
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
return bytes;
}
/*
/**********************************************************************
/* Output method implementations, primitive
/**********************************************************************
*/
@Override
public JsonGenerator writeNumber(short s) throws JacksonException
{
_verifyValueWrite(WRITE_NUMBER);
if (_cfgNumbersAsStrings) {
_writeQuotedShort(s);
return this;
}
// up to 5 digits and possible minus sign
if ((_outputTail + 6) >= _outputEnd) {
_flushBuffer();
}
_outputTail = NumberOutput.outputInt(s, _outputBuffer, _outputTail);
return this;
}
private void _writeQuotedShort(short s) throws JacksonException {
if ((_outputTail + 8) >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
_outputTail = NumberOutput.outputInt(s, _outputBuffer, _outputTail);
_outputBuffer[_outputTail++] = _quoteChar;
}
@Override
public JsonGenerator writeNumber(int i) throws JacksonException
{
_verifyValueWrite(WRITE_NUMBER);
if (_cfgNumbersAsStrings) {
_writeQuotedInt(i);
return this;
}
// up to 10 digits and possible minus sign
if ((_outputTail + 11) >= _outputEnd) {
_flushBuffer();
}
_outputTail = NumberOutput.outputInt(i, _outputBuffer, _outputTail);
return this;
}
private void _writeQuotedInt(int i) throws JacksonException {
if ((_outputTail + 13) >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
_outputTail = NumberOutput.outputInt(i, _outputBuffer, _outputTail);
_outputBuffer[_outputTail++] = _quoteChar;
}
@Override
public JsonGenerator writeNumber(long l) throws JacksonException
{
_verifyValueWrite(WRITE_NUMBER);
if (_cfgNumbersAsStrings) {
_writeQuotedLong(l);
return this;
}
if ((_outputTail + 21) >= _outputEnd) {
// up to 20 digits, minus sign
_flushBuffer();
}
_outputTail = NumberOutput.outputLong(l, _outputBuffer, _outputTail);
return this;
}
private void _writeQuotedLong(long l) throws JacksonException {
if ((_outputTail + 23) >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
_outputTail = NumberOutput.outputLong(l, _outputBuffer, _outputTail);
_outputBuffer[_outputTail++] = _quoteChar;
}
// !!! 05-Aug-2008, tatus: Any ways to optimize these?
@Override
public JsonGenerator writeNumber(BigInteger value) throws JacksonException
{
_verifyValueWrite(WRITE_NUMBER);
if (value == null) {
_writeNull();
} else if (_cfgNumbersAsStrings) {
_writeQuotedRaw(value.toString());
} else {
writeRaw(value.toString());
}
return this;
}
@Override
public JsonGenerator writeNumber(double d) throws JacksonException
{
final boolean useFast = isEnabled(StreamWriteFeature.USE_FAST_DOUBLE_WRITER);
if (_cfgNumbersAsStrings ||
(NumberOutput.notFinite(d) && JsonWriteFeature.WRITE_NAN_AS_STRINGS.enabledIn(_formatWriteFeatures))) {
writeString(NumberOutput.toString(d, useFast));
return this;
}
// What is the max length for doubles? 40 chars?
_verifyValueWrite(WRITE_NUMBER);
writeRaw(NumberOutput.toString(d, useFast));
return this;
}
@Override
public JsonGenerator writeNumber(float f) throws JacksonException
{
final boolean useFast = isEnabled(StreamWriteFeature.USE_FAST_DOUBLE_WRITER);
if (_cfgNumbersAsStrings ||
(NumberOutput.notFinite(f) && JsonWriteFeature.WRITE_NAN_AS_STRINGS.enabledIn(_formatWriteFeatures))) {
writeString(NumberOutput.toString(f, useFast));
return this;
}
// What is the max length for floats?
_verifyValueWrite(WRITE_NUMBER);
writeRaw(NumberOutput.toString(f, useFast));
return this;
}
@Override
public JsonGenerator writeNumber(BigDecimal value) throws JacksonException
{
// Don't really know max length for big decimal, no point checking
_verifyValueWrite(WRITE_NUMBER);
if (value == null) {
_writeNull();
} else if (_cfgNumbersAsStrings) {
_writeQuotedRaw(_asString(value));
} else {
writeRaw(_asString(value));
}
return this;
}
@Override
public JsonGenerator writeNumber(String encodedValue) throws JacksonException
{
_verifyValueWrite(WRITE_NUMBER);
if (encodedValue == null) {
_writeNull();
} else if (_cfgNumbersAsStrings) {
_writeQuotedRaw(encodedValue);
} else {
writeRaw(encodedValue);
}
return this;
}
@Override
public JsonGenerator writeNumber(char[] encodedValueBuffer, int offset, int length) throws JacksonException {
_verifyValueWrite(WRITE_NUMBER);
if (_cfgNumbersAsStrings) {
_writeQuotedRaw(encodedValueBuffer, offset, length);
} else {
writeRaw(encodedValueBuffer, offset, length);
}
return this;
}
private void _writeQuotedRaw(String value) throws JacksonException
{
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
writeRaw(value);
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = _quoteChar;
}
// Buffer variant of the quoted-raw write: quote char, raw segment, quote char.
private void _writeQuotedRaw(char[] text, int offset, int length) throws JacksonException
{
    // Opening quote
    if (_outputTail >= _outputEnd) {
        _flushBuffer();
    }
    _outputBuffer[_outputTail++] = _quoteChar;
    // Raw payload (no escaping applied)
    writeRaw(text, offset, length);
    // Closing quote
    if (_outputTail >= _outputEnd) {
        _flushBuffer();
    }
    _outputBuffer[_outputTail++] = _quoteChar;
}
@Override
public JsonGenerator writeBoolean(boolean state) throws JacksonException
{
    _verifyValueWrite(WRITE_BOOLEAN);
    // "false" is the longer literal (5 chars); ensure it fits contiguously
    if ((_outputTail + 5) >= _outputEnd) {
        _flushBuffer();
    }
    final String token = state ? "true" : "false";
    final int len = token.length();
    token.getChars(0, len, _outputBuffer, _outputTail);
    _outputTail += len;
    return this;
}
@Override
public JsonGenerator writeNull() throws JacksonException {
    // Handles separators/pretty-printing, then emits the "null" literal
    _verifyValueWrite(WRITE_NULL);
    _writeNull();
    return this;
}
/*
/**********************************************************************
/* Implementations for other methods
/**********************************************************************
*/
/**
 * Verifies that a value may be written in the current output context and
 * emits whatever separator that context requires (comma between array
 * entries, colon after a property name, or the configured root-value
 * separator). Delegates to the pretty printer when one is installed.
 *
 * @param typeMsg short description of the value type, used in error messages
 * @throws JacksonException if a value is not allowed here (e.g. a property
 *   name is expected instead)
 */
@Override
protected final void _verifyValueWrite(String typeMsg) throws JacksonException
{
final int status = _streamWriteContext.writeValue();
if (_prettyPrinter != null) {
// Otherwise, pretty printer knows what to do...
_verifyPrettyValueWrite(typeMsg, status);
return;
}
char c;
switch (status) {
case JsonWriteContext.STATUS_OK_AS_IS:
default:
return;
case JsonWriteContext.STATUS_OK_AFTER_COMMA:
c = ',';
break;
case JsonWriteContext.STATUS_OK_AFTER_COLON:
c = ':';
break;
case JsonWriteContext.STATUS_OK_AFTER_SPACE: // root-value separator
if (_rootValueSeparator != null) {
writeRaw(_rootValueSeparator.getValue());
}
return;
case JsonWriteContext.STATUS_EXPECT_NAME:
_reportCantWriteValueExpectName(typeMsg);
return;
}
// Single separator char (',' or ':'); flush if buffer is full
if (_outputTail >= _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = c;
}
/*
/**********************************************************************
/* Low-level output handling
/**********************************************************************
*/
@Override
public void flush() throws JacksonException
{
    // Drain our own char buffer first, then optionally flush the
    // underlying Writer (only if the feature permits passing flushes on)
    _flushBuffer();
    if ((_writer != null) && isEnabled(StreamWriteFeature.FLUSH_PASSED_TO_STREAM)) {
        try {
            _writer.flush();
        } catch (IOException e) {
            throw _wrapIOFailure(e);
        }
    }
}
/**
 * Closes output handling: optionally auto-closes any still-open JSON
 * Array/Object scopes, flushes buffered content, and then closes (or at
 * least flushes) the underlying Writer depending on ownership and feature
 * settings. A failure during the flush phase is remembered and re-thrown
 * only after the Writer has been dealt with, so the stream is not leaked
 * (see core#764 note below).
 */
@Override
protected void _closeInput() throws JacksonException
{
RuntimeException flushFail = null;
try {
if ((_outputBuffer != null)
&& isEnabled(StreamWriteFeature.AUTO_CLOSE_CONTENT)) {
// Close any open scopes, innermost first, until at root level
while (true) {
TokenStreamContext ctxt = streamWriteContext();
if (ctxt.inArray()) {
writeEndArray();
} else if (ctxt.inObject()) {
writeEndObject();
} else {
break;
}
}
}
_flushBuffer();
} catch (RuntimeException e) {
// 10-Jun-2022, tatu: [core#764] Need to avoid failing here; may
// still need to close the underlying output stream
flushFail = e;
}
// Buffer is logically empty now regardless of flush outcome
_outputHead = 0;
_outputTail = 0;
/* We are not to call close() on the underlying Reader, unless we "own" it,
* or auto-closing feature is enabled.
* One downside: when using UTF8Writer, underlying buffer(s)
* may not be properly recycled if we don't close the writer.
*/
if (_writer != null) {
try {
if (_ioContext.isResourceManaged() || isEnabled(StreamWriteFeature.AUTO_CLOSE_TARGET)) {
_writer.close();
} else if (isEnabled(StreamWriteFeature.FLUSH_PASSED_TO_STREAM)) {
// If we can't close it, we should at least flush
_writer.flush();
}
} catch (IOException e) {
// Close/flush failure wins; earlier flush failure is attached as suppressed
JacksonException je = _wrapIOFailure(e);
if (flushFail != null) {
je.addSuppressed(flushFail);
}
throw je;
}
}
// No Writer failure: surface the deferred flush failure, if any
if (flushFail != null) {
throw flushFail;
}
}
@Override
protected void _releaseBuffers()
{
    // Return pooled buffers to the IOContext. Fields are nulled before
    // release so a repeated call cannot double-release the same buffer.
    final char[] concat = _outputBuffer;
    if (concat != null) {
        _outputBuffer = null;
        _ioContext.releaseConcatBuffer(concat);
    }
    final char[] nameCopy = _copyBuffer;
    if (nameCopy != null) {
        _copyBuffer = null;
        _ioContext.releaseNameCopyBuffer(nameCopy);
    }
}
/*
/**********************************************************************
/* Internal methods, low-level writing; text, default
/**********************************************************************
*/
// Writes String content with escaping. Strings longer than the whole
// output buffer are handled by segmented writes instead of growing the
// buffer (avoids huge allocations for e.g. multi-megabyte base64 payloads).
private void _writeString(String text) throws JacksonException
{
    final int len = text.length();
    if (len > _outputEnd) {
        _writeLongString(text);
        return;
    }
    // Fits as a whole; flush first if the tail is too close to the end
    if ((_outputTail + len) > _outputEnd) {
        _flushBuffer();
    }
    text.getChars(0, len, _outputBuffer, _outputTail);
    // Dispatch on escaping mode: custom escapes, forced-ASCII, or standard
    if (_characterEscapes != null) {
        _writeStringCustom(len);
    } else if (_maximumNonEscapedChar != 0) {
        _writeStringASCII(len, _maximumNonEscapedChar);
    } else {
        _writeString2(len);
    }
}
/**
 * Escapes and flushes {@code len} chars of content that has already been
 * copied into the output buffer starting at {@code _outputTail}. Scans for
 * characters that need standard escaping; clean spans are written through,
 * escapes are prepended via {@link #_prependOrWriteCharacterEscape}.
 * Note: manipulates {@code _outputHead}/{@code _outputTail} directly, so
 * statement order here is significant.
 */
private void _writeString2(final int len) throws JacksonException
{
// And then we'll need to verify need for escaping etc:
final int end = _outputTail + len;
final int[] escCodes = _outputEscapes;
final int escLen = escCodes.length;
output_loop:
while (_outputTail < end) {
// Fast loop for chars not needing escaping
escape_loop:
while (true) {
char c = _outputBuffer[_outputTail];
if (c < escLen && escCodes[c] != 0) {
break escape_loop;
}
if (++_outputTail >= end) {
break output_loop;
}
}
// Ok, bumped into something that needs escaping.
/* First things first: need to flush the buffer.
* Inlined, as we don't want to lose tail pointer
*/
int flushLen = (_outputTail - _outputHead);
if (flushLen > 0) {
try {
_writer.write(_outputBuffer, _outputHead, flushLen);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
}
/* In any case, tail will be the new start, so hopefully
* we have room now.
*/
char c = _outputBuffer[_outputTail++];
_prependOrWriteCharacterEscape(c, escCodes[c]);
}
}
/**
 * Writes a String whose length exceeds the output buffer capacity by
 * copying and escaping it one buffer-sized segment at a time.
 */
private void _writeLongString(String text) throws JacksonException
{
    // Start from an empty buffer so each segment gets full capacity
    _flushBuffer();
    final int textLen = text.length();
    int offset = 0;
    while (offset < textLen) {
        // Segment is at most one full buffer's worth of chars
        final int segmentLen = Math.min(_outputEnd, textLen - offset);
        text.getChars(offset, offset + segmentLen, _outputBuffer, 0);
        // Same escaping-mode dispatch as the short-String path
        if (_characterEscapes != null) {
            _writeSegmentCustom(segmentLen);
        } else if (_maximumNonEscapedChar != 0) {
            _writeSegmentASCII(segmentLen, _maximumNonEscapedChar);
        } else {
            _writeSegment(segmentLen);
        }
        offset += segmentLen;
    }
}
/**
 * Method called to output textual context which has been copied
 * to the output buffer prior to call. If any escaping is needed,
 * it will also be handled by the method.
 *<p>
 * Note: when called, textual content to write is within output
 * buffer, right after buffered content (if any). That's why only
 * length of that text is passed, as buffer and offset are implied.
 *
 * @param end number of chars (from buffer offset 0) to write
 */
private void _writeSegment(int end) throws JacksonException
{
final int[] escCodes = _outputEscapes;
final int escLen = escCodes.length;
int ptr = 0;
int start = ptr;
output_loop:
while (ptr < end) {
// Fast loop for chars not needing escaping
char c;
while (true) {
c = _outputBuffer[ptr];
if (c < escLen && escCodes[c] != 0) {
break;
}
if (++ptr >= end) {
break;
}
}
// Ok, bumped into something that needs escaping.
/* First things first: need to flush the buffer.
* Inlined, as we don't want to lose tail pointer
*/
int flushLen = (ptr - start);
if (flushLen > 0) {
try {
_writer.write(_outputBuffer, start, flushLen);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
if (ptr >= end) {
break output_loop;
}
}
++ptr;
// So; either try to prepend (most likely), or write directly:
start = _prependOrWriteCharacterEscape(_outputBuffer, ptr, end, c, escCodes[c]);
}
}
/**
 * This method called when the string content is already in
 * a char buffer, and need not be copied for processing.
 *<p>
 * Strategy: find the longest spans of chars that need no escaping; short
 * spans (under {@code SHORT_WRITE}) are copied into the output buffer,
 * longer ones are written straight through to the Writer.
 */
private void _writeString(char[] text, int offset, int len) throws JacksonException
{
// Custom escapes and forced-ASCII modes have their own variants
if (_characterEscapes != null) {
_writeStringCustom(text, offset, len);
return;
}
if (_maximumNonEscapedChar != 0) {
_writeStringASCII(text, offset, len, _maximumNonEscapedChar);
return;
}
// Let's just find longest spans of non-escapable content, and for
// each see if it makes sense to copy them, or write through
len += offset; // -> len marks the end from now on
final int[] escCodes = _outputEscapes;
final int escLen = escCodes.length;
while (offset < len) {
int start = offset;
while (true) {
char c = text[offset];
if (c < escLen && escCodes[c] != 0) {
break;
}
if (++offset >= len) {
break;
}
}
// Short span? Better just copy it to buffer first:
int newAmount = offset - start;
if (newAmount < SHORT_WRITE) {
// Note: let's reserve room for escaped char (up to 6 chars)
if ((_outputTail + newAmount) > _outputEnd) {
_flushBuffer();
}
if (newAmount > 0) {
System.arraycopy(text, start, _outputBuffer, _outputTail, newAmount);
_outputTail += newAmount;
}
} else { // Nope: better just write through
_flushBuffer();
try {
_writer.write(text, start, newAmount);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
}
// Was this the end?
if (offset >= len) { // yup
break;
}
// Nope, need to escape the char.
char c = text[offset++];
_appendCharacterEscape(c, escCodes[c]);
}
}
/*
/**********************************************************************
/* Internal methods, low-level writing, text segment
/* with additional escaping (ASCII or such)
/**********************************************************************
*/
/* Same as "_writeString2()", except needs additional escaping
* for subset of characters: any char above `maxNonEscaped` is
* forced into a standard \\uXXXX escape (ASCII-only output mode).
*/
private void _writeStringASCII(final int len, final int maxNonEscaped)
throws JacksonException
{
// And then we'll need to verify need for escaping etc:
int end = _outputTail + len;
final int[] escCodes = _outputEscapes;
// Only chars below this limit can use the table; others compared to maxNonEscaped
final int escLimit = Math.min(escCodes.length, maxNonEscaped+1);
int escCode = 0;
output_loop:
while (_outputTail < end) {
char c;
// Fast loop for chars not needing escaping
escape_loop:
while (true) {
c = _outputBuffer[_outputTail];
if (c < escLimit) {
escCode = escCodes[c];
if (escCode != 0) {
break escape_loop;
}
} else if (c > maxNonEscaped) {
escCode = CharacterEscapes.ESCAPE_STANDARD;
break escape_loop;
}
if (++_outputTail >= end) {
break output_loop;
}
}
// Flush clean span collected so far (inlined to keep tail pointer)
int flushLen = (_outputTail - _outputHead);
if (flushLen > 0) {
try {
_writer.write(_outputBuffer, _outputHead, flushLen);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
}
++_outputTail;
_prependOrWriteCharacterEscape(c, escCode);
}
}
/**
 * Segment variant of {@link #_writeStringASCII(int, int)}: escapes and
 * writes {@code end} chars already placed at the start of the output
 * buffer, forcing standard escapes for chars above {@code maxNonEscaped}.
 */
private void _writeSegmentASCII(int end, final int maxNonEscaped)
throws JacksonException
{
final int[] escCodes = _outputEscapes;
final int escLimit = Math.min(escCodes.length, maxNonEscaped+1);
int ptr = 0;
int escCode = 0;
int start = ptr;
output_loop:
while (ptr < end) {
// Fast loop for chars not needing escaping
char c;
while (true) {
c = _outputBuffer[ptr];
if (c < escLimit) {
escCode = escCodes[c];
if (escCode != 0) {
break;
}
} else if (c > maxNonEscaped) {
escCode = CharacterEscapes.ESCAPE_STANDARD;
break;
}
if (++ptr >= end) {
break;
}
}
// Write out the clean span before the char that needs escaping
int flushLen = (ptr - start);
if (flushLen > 0) {
try {
_writer.write(_outputBuffer, start, flushLen);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
if (ptr >= end) {
break output_loop;
}
}
++ptr;
start = _prependOrWriteCharacterEscape(_outputBuffer, ptr, end, c, escCode);
}
}
/**
 * Caller-buffer variant of ASCII-limited String writing: content stays in
 * {@code text}; clean spans are either copied to the output buffer (short)
 * or written through (long), and chars above {@code maxNonEscaped} get
 * standard \\uXXXX escapes.
 */
private void _writeStringASCII(char[] text, int offset, int len,
final int maxNonEscaped)
throws JacksonException
{
len += offset; // -> len marks the end from now on
final int[] escCodes = _outputEscapes;
final int escLimit = Math.min(escCodes.length, maxNonEscaped+1);
int escCode = 0;
while (offset < len) {
int start = offset;
char c;
while (true) {
c = text[offset];
if (c < escLimit) {
escCode = escCodes[c];
if (escCode != 0) {
break;
}
} else if (c > maxNonEscaped) {
escCode = CharacterEscapes.ESCAPE_STANDARD;
break;
}
if (++offset >= len) {
break;
}
}
// Short span? Better just copy it to buffer first:
int newAmount = offset - start;
if (newAmount < SHORT_WRITE) {
// Note: let's reserve room for escaped char (up to 6 chars)
if ((_outputTail + newAmount) > _outputEnd) {
_flushBuffer();
}
if (newAmount > 0) {
System.arraycopy(text, start, _outputBuffer, _outputTail, newAmount);
_outputTail += newAmount;
}
} else { // Nope: better just write through
_flushBuffer();
try {
_writer.write(text, start, newAmount);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
}
// Was this the end?
if (offset >= len) { // yup
break;
}
// Nope, need to escape the char.
++offset;
_appendCharacterEscape(c, escCode);
}
}
/*
/**********************************************************************
/* Internal methods, low-level writing, text segment
/* with custom escaping (possibly coupling with ASCII limits)
/**********************************************************************
*/
/* Same as "_writeString2()", except needs additional escaping
* for subset of characters: consults the configured CharacterEscapes
* for chars the standard table passes, and also applies the
* ASCII-limit check when _maximumNonEscapedChar is set.
*/
private void _writeStringCustom(final int len)
throws JacksonException
{
// And then we'll need to verify need for escaping etc:
int end = _outputTail + len;
final int[] escCodes = _outputEscapes;
// 0 means "no limit": treat as max char value
final int maxNonEscaped = (_maximumNonEscapedChar < 1) ? 0xFFFF : _maximumNonEscapedChar;
final int escLimit = Math.min(escCodes.length, maxNonEscaped+1);
int escCode = 0;
final CharacterEscapes customEscapes = _characterEscapes;
output_loop:
while (_outputTail < end) {
char c;
// Fast loop for chars not needing escaping
escape_loop:
while (true) {
c = _outputBuffer[_outputTail];
if (c < escLimit) {
escCode = escCodes[c];
if (escCode != 0) {
break escape_loop;
}
} else if (c > maxNonEscaped) {
escCode = CharacterEscapes.ESCAPE_STANDARD;
break escape_loop;
} else {
// Stash the custom escape for _prependOrWriteCharacterEscape to pick up
if ((_currentEscape = customEscapes.getEscapeSequence(c)) != null) {
escCode = CharacterEscapes.ESCAPE_CUSTOM;
break escape_loop;
}
}
if (++_outputTail >= end) {
break output_loop;
}
}
int flushLen = (_outputTail - _outputHead);
if (flushLen > 0) {
try {
_writer.write(_outputBuffer, _outputHead, flushLen);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
}
++_outputTail;
_prependOrWriteCharacterEscape(c, escCode);
}
}
/**
 * Segment variant of {@link #_writeStringCustom(int)}: escapes and writes
 * {@code end} chars already placed at the start of the output buffer,
 * honoring both the custom CharacterEscapes and the ASCII limit.
 */
private void _writeSegmentCustom(int end)
throws JacksonException
{
final int[] escCodes = _outputEscapes;
// 0 means "no limit": treat as max char value
final int maxNonEscaped = (_maximumNonEscapedChar < 1) ? 0xFFFF : _maximumNonEscapedChar;
final int escLimit = Math.min(escCodes.length, maxNonEscaped+1);
final CharacterEscapes customEscapes = _characterEscapes;
int ptr = 0;
int escCode = 0;
int start = ptr;
output_loop:
while (ptr < end) {
// Fast loop for chars not needing escaping
char c;
while (true) {
c = _outputBuffer[ptr];
if (c < escLimit) {
escCode = escCodes[c];
if (escCode != 0) {
break;
}
} else if (c > maxNonEscaped) {
escCode = CharacterEscapes.ESCAPE_STANDARD;
break;
} else {
if ((_currentEscape = customEscapes.getEscapeSequence(c)) != null) {
escCode = CharacterEscapes.ESCAPE_CUSTOM;
break;
}
}
if (++ptr >= end) {
break;
}
}
int flushLen = (ptr - start);
if (flushLen > 0) {
try {
_writer.write(_outputBuffer, start, flushLen);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
if (ptr >= end) {
break output_loop;
}
}
++ptr;
start = _prependOrWriteCharacterEscape(_outputBuffer, ptr, end, c, escCode);
}
}
/**
 * Caller-buffer variant of custom-escaped String writing: content stays in
 * {@code text}; clean spans are copied (short) or written through (long);
 * escaping consults both the ASCII limit and the custom CharacterEscapes.
 */
private void _writeStringCustom(char[] text, int offset, int len)
throws JacksonException
{
len += offset; // -> len marks the end from now on
final int[] escCodes = _outputEscapes;
// 0 means "no limit": treat as max char value
final int maxNonEscaped = (_maximumNonEscapedChar < 1) ? 0xFFFF : _maximumNonEscapedChar;
final int escLimit = Math.min(escCodes.length, maxNonEscaped+1);
final CharacterEscapes customEscapes = _characterEscapes;
int escCode = 0;
while (offset < len) {
int start = offset;
char c;
while (true) {
c = text[offset];
if (c < escLimit) {
escCode = escCodes[c];
if (escCode != 0) {
break;
}
} else if (c > maxNonEscaped) {
escCode = CharacterEscapes.ESCAPE_STANDARD;
break;
} else {
if ((_currentEscape = customEscapes.getEscapeSequence(c)) != null) {
escCode = CharacterEscapes.ESCAPE_CUSTOM;
break;
}
}
if (++offset >= len) {
break;
}
}
// Short span? Better just copy it to buffer first:
int newAmount = offset - start;
if (newAmount < SHORT_WRITE) {
// Note: let's reserve room for escaped char (up to 6 chars)
if ((_outputTail + newAmount) > _outputEnd) {
_flushBuffer();
}
if (newAmount > 0) {
System.arraycopy(text, start, _outputBuffer, _outputTail, newAmount);
_outputTail += newAmount;
}
} else { // Nope: better just write through
_flushBuffer();
try {
_writer.write(text, start, newAmount);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
}
// Was this the end?
if (offset >= len) { // yup
break;
}
// Nope, need to escape the char.
++offset;
_appendCharacterEscape(c, escCode);
}
}
/*
/**********************************************************************
/* Internal methods, low-level writing; binary
/**********************************************************************
*/
/**
 * Base64-encodes {@code input[inputPtr..inputEnd)} into the output buffer,
 * inserting an escaped linefeed ({@code \n} as two chars, since we are
 * inside a quoted JSON String) whenever the variant's max line length is
 * reached.
 *
 * @param b64variant Base64 variant that supplies alphabet and line length
 */
protected final void _writeBinary(Base64Variant b64variant, byte[] input, int inputPtr, final int inputEnd)
throws JacksonException
{
// Encoding is by chunks of 3 input, 4 output chars, so:
int safeInputEnd = inputEnd - 3;
// Let's also reserve room for possible (and quoted) lf char each round
int safeOutputEnd = _outputEnd - 6;
int chunksBeforeLF = b64variant.getMaxLineLength() >> 2;
// Ok, first we loop through all full triplets of data:
while (inputPtr <= safeInputEnd) {
if (_outputTail > safeOutputEnd) { // need to flush
_flushBuffer();
}
// First, mash 3 bytes into lsb of 32-bit int
int b24 = (input[inputPtr++]) << 8;
b24 |= (input[inputPtr++]) & 0xFF;
b24 = (b24 << 8) | ((input[inputPtr++]) & 0xFF);
_outputTail = b64variant.encodeBase64Chunk(b24, _outputBuffer, _outputTail);
if (--chunksBeforeLF <= 0) {
// note: must quote in JSON value
_outputBuffer[_outputTail++] = '\\';
_outputBuffer[_outputTail++] = 'n';
chunksBeforeLF = b64variant.getMaxLineLength() >> 2;
}
}
// And then we may have 1 or 2 leftover bytes to encode
int inputLeft = inputEnd - inputPtr; // 0, 1 or 2
if (inputLeft > 0) { // yes, but do we have room for output?
if (_outputTail > safeOutputEnd) { // don't really need 6 bytes but...
_flushBuffer();
}
int b24 = (input[inputPtr++]) << 16;
if (inputLeft == 2) {
b24 |= ((input[inputPtr++]) & 0xFF) << 8;
}
_outputTail = b64variant.encodeBase64Partial(b24, inputLeft, _outputBuffer, _outputTail);
}
}
// write-method called when length is definitely known
/**
 * Base64-encodes up to {@code bytesLeft} bytes read from {@code data},
 * refilling {@code readBuffer} as needed.
 *
 * @return number of bytes still expected but NOT encoded (non-zero if the
 *   stream ended before {@code bytesLeft} bytes were available)
 */
protected final int _writeBinary(Base64Variant b64variant,
InputStream data, byte[] readBuffer, int bytesLeft)
throws JacksonException
{
int inputPtr = 0;
int inputEnd = 0;
int lastFullOffset = -3;
// Let's also reserve room for possible (and quoted) lf char each round
int safeOutputEnd = _outputEnd - 6;
int chunksBeforeLF = b64variant.getMaxLineLength() >> 2;
while (bytesLeft > 2) { // main loop for full triplets
if (inputPtr > lastFullOffset) {
inputEnd = _readMore(data, readBuffer, inputPtr, inputEnd, bytesLeft);
inputPtr = 0;
if (inputEnd < 3) { // required to try to read to have at least 3 bytes
break;
}
lastFullOffset = inputEnd-3;
}
if (_outputTail > safeOutputEnd) { // need to flush
_flushBuffer();
}
// Pack next 3 bytes into low 24 bits of an int
int b24 = (readBuffer[inputPtr++]) << 8;
b24 |= (readBuffer[inputPtr++]) & 0xFF;
b24 = (b24 << 8) | ((readBuffer[inputPtr++]) & 0xFF);
bytesLeft -= 3;
_outputTail = b64variant.encodeBase64Chunk(b24, _outputBuffer, _outputTail);
if (--chunksBeforeLF <= 0) {
_outputBuffer[_outputTail++] = '\\';
_outputBuffer[_outputTail++] = 'n';
chunksBeforeLF = b64variant.getMaxLineLength() >> 2;
}
}
// And then we may have 1 or 2 leftover bytes to encode
if (bytesLeft > 0) {
inputEnd = _readMore(data, readBuffer, inputPtr, inputEnd, bytesLeft);
inputPtr = 0;
if (inputEnd > 0) { // yes, but do we have room for output?
if (_outputTail > safeOutputEnd) { // don't really need 6 bytes but...
_flushBuffer();
}
int b24 = (readBuffer[inputPtr++]) << 16;
int amount;
if (inputPtr < inputEnd) {
b24 |= ((readBuffer[inputPtr]) & 0xFF) << 8;
amount = 2;
} else {
amount = 1;
}
_outputTail = b64variant.encodeBase64Partial(b24, amount, _outputBuffer, _outputTail);
bytesLeft -= amount;
}
}
return bytesLeft;
}
// write method when length is unknown
/**
 * Base64-encodes the full contents of {@code data} (length unknown up
 * front), refilling {@code readBuffer} as needed.
 *
 * @return total number of input bytes that were encoded
 */
protected final int _writeBinary(Base64Variant b64variant,
InputStream data, byte[] readBuffer)
throws JacksonException
{
int inputPtr = 0;
int inputEnd = 0;
int lastFullOffset = -3;
int bytesDone = 0;
// Let's also reserve room for possible (and quoted) LF char each round
int safeOutputEnd = _outputEnd - 6;
int chunksBeforeLF = b64variant.getMaxLineLength() >> 2;
// Ok, first we loop through all full triplets of data:
while (true) {
if (inputPtr > lastFullOffset) { // need to load more
inputEnd = _readMore(data, readBuffer, inputPtr, inputEnd, readBuffer.length);
inputPtr = 0;
if (inputEnd < 3) { // required to try to read to have at least 3 bytes
break;
}
lastFullOffset = inputEnd-3;
}
if (_outputTail > safeOutputEnd) { // need to flush
_flushBuffer();
}
// First, mash 3 bytes into lsb of 32-bit int
int b24 = (readBuffer[inputPtr++]) << 8;
b24 |= (readBuffer[inputPtr++]) & 0xFF;
b24 = (b24 << 8) | ((readBuffer[inputPtr++]) & 0xFF);
bytesDone += 3;
_outputTail = b64variant.encodeBase64Chunk(b24, _outputBuffer, _outputTail);
if (--chunksBeforeLF <= 0) {
_outputBuffer[_outputTail++] = '\\';
_outputBuffer[_outputTail++] = 'n';
chunksBeforeLF = b64variant.getMaxLineLength() >> 2;
}
}
// And then we may have 1 or 2 leftover bytes to encode
if (inputPtr < inputEnd) { // yes, but do we have room for output?
if (_outputTail > safeOutputEnd) { // don't really need 6 bytes but...
_flushBuffer();
}
int b24 = (readBuffer[inputPtr++]) << 16;
int amount = 1;
if (inputPtr < inputEnd) {
b24 |= ((readBuffer[inputPtr]) & 0xFF) << 8;
amount = 2;
}
bytesDone += amount;
_outputTail = b64variant.encodeBase64Partial(b24, amount, _outputBuffer, _outputTail);
}
return bytesDone;
}
/**
 * Refills {@code readBuffer}: shifts any unconsumed bytes
 * ({@code [inputPtr..inputEnd)}) to the front, then reads from {@code in}
 * until at least 3 bytes are buffered, {@code maxRead} is reached, or EOF.
 *
 * @return new {@code inputEnd} (number of valid bytes at buffer start);
 *   may be under 3 only at end-of-input
 */
private int _readMore(InputStream in,
byte[] readBuffer, int inputPtr, int inputEnd,
int maxRead) throws JacksonException
{
// anything to shift to front?
int i = 0;
while (inputPtr < inputEnd) {
readBuffer[i++] = readBuffer[inputPtr++];
}
inputPtr = 0;
inputEnd = i;
maxRead = Math.min(maxRead, readBuffer.length);
do {
int length = maxRead - inputEnd;
if (length == 0) {
break;
}
int count;
try {
count = in.read(readBuffer, inputEnd, length);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
if (count < 0) {
// EOF: return whatever we have (possibly fewer than 3 bytes)
return inputEnd;
}
inputEnd += count;
} while (inputEnd < 3);
return inputEnd;
}
/*
/**********************************************************************
/* Internal methods, low-level writing, other
/**********************************************************************
*/
// Appends the 4-char literal "null" to the output buffer (no context checks).
private final void _writeNull() throws JacksonException
{
    // Need 4 contiguous chars
    if ((_outputTail + 4) >= _outputEnd) {
        _flushBuffer();
    }
    final char[] buf = _outputBuffer;
    int ptr = _outputTail;
    buf[ptr++] = 'n';
    buf[ptr++] = 'u';
    buf[ptr++] = 'l';
    buf[ptr++] = 'l';
    _outputTail = ptr;
}
/*
/**********************************************************************
/* Internal methods, low-level writing, escapes
/**********************************************************************
*/
/**
 * Method called to try to either prepend character escape at front of
 * given buffer; or if not possible, to write it out directly.
 * Uses head and tail pointers (and updates as necessary)
 *<p>
 * Escape layout in the shared entity buffer (see
 * {@code _allocateEntityBuffer()}): [0..1] two-char escape, [2..7]
 * {@code \\u00XX}, [8..13] {@code \\uXXXX}.
 *
 * @param ch character being escaped
 * @param escCode escape code: &gt;= 0 for a two-char escape whose second
 *   char is the code itself; ESCAPE_STANDARD for \\uXXXX; ESCAPE_CUSTOM
 *   for an escape sequence from the configured CharacterEscapes
 */
private void _prependOrWriteCharacterEscape(char ch, int escCode)
throws JacksonException
{
if (escCode >= 0) { // \\N (2 char)
if (_outputTail >= 2) { // fits, just prepend
int ptr = _outputTail - 2;
_outputHead = ptr;
_outputBuffer[ptr++] = '\\';
_outputBuffer[ptr] = (char) escCode;
return;
}
// won't fit, write
char[] buf = _entityBuffer;
if (buf == null) {
buf = _allocateEntityBuffer();
}
_outputHead = _outputTail;
buf[1] = (char) escCode;
try {
_writer.write(buf, 0, 2);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
return;
}
if (escCode != CharacterEscapes.ESCAPE_CUSTOM) { // std, \\uXXXX
char[] HEX_CHARS = getHexChars();
if (_outputTail >= 6) { // fits, prepend to buffer
char[] buf = _outputBuffer;
int ptr = _outputTail - 6;
_outputHead = ptr;
buf[ptr] = '\\';
buf[++ptr] = 'u';
// We know it's a control char, so only the last 2 chars are non-0
if (ch > 0xFF) { // beyond 8 bytes
int hi = (ch >> 8) & 0xFF;
buf[++ptr] = HEX_CHARS[hi >> 4];
buf[++ptr] = HEX_CHARS[hi & 0xF];
ch &= 0xFF;
} else {
buf[++ptr] = '0';
buf[++ptr] = '0';
}
buf[++ptr] = HEX_CHARS[ch >> 4];
buf[++ptr] = HEX_CHARS[ch & 0xF];
return;
}
// won't fit, flush and write
char[] buf = _entityBuffer;
if (buf == null) {
buf = _allocateEntityBuffer();
}
_outputHead = _outputTail;
try {
if (ch > 0xFF) { // beyond 8 bytes
int hi = (ch >> 8) & 0xFF;
int lo = ch & 0xFF;
buf[10] = HEX_CHARS[hi >> 4];
buf[11] = HEX_CHARS[hi & 0xF];
buf[12] = HEX_CHARS[lo >> 4];
buf[13] = HEX_CHARS[lo & 0xF];
_writer.write(buf, 8, 6);
} else { // We know it's a control char, so only the last 2 chars are non-0
buf[6] = HEX_CHARS[ch >> 4];
buf[7] = HEX_CHARS[ch & 0xF];
_writer.write(buf, 2, 6);
}
} catch (IOException e) {
throw _wrapIOFailure(e);
}
return;
}
// Custom escape: prefer the one stashed in _currentEscape by the caller
String escape;
if (_currentEscape == null) {
escape = _characterEscapes.getEscapeSequence(ch).getValue();
} else {
escape = _currentEscape.getValue();
_currentEscape = null;
}
int len = escape.length();
if (_outputTail >= len) { // fits in, prepend
int ptr = _outputTail - len;
_outputHead = ptr;
escape.getChars(0, len, _outputBuffer, ptr);
return;
}
// won't fit, write separately
_outputHead = _outputTail;
try {
_writer.write(escape);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
}
/**
 * Method called to try to either prepend character escape at front of
 * given buffer; or if not possible, to write it out directly.
 *<p>
 * Unlike the no-buffer variant, this one operates on an explicit caller
 * buffer and returns the (possibly moved-back) start pointer instead of
 * updating {@code _outputHead}.
 *
 * @return Pointer to start of prepended entity (if prepended); or 'ptr'
 * if not.
 */
private int _prependOrWriteCharacterEscape(char[] buffer, int ptr, int end,
char ch, int escCode)
throws JacksonException
{
if (escCode >= 0) { // \\N (2 char)
if (ptr > 1 && ptr < end) { // fits, just prepend
ptr -= 2;
buffer[ptr] = '\\';
buffer[ptr+1] = (char) escCode;
} else { // won't fit, write
char[] ent = _entityBuffer;
if (ent == null) {
ent = _allocateEntityBuffer();
}
ent[1] = (char) escCode;
try {
_writer.write(ent, 0, 2);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
}
return ptr;
}
if (escCode != CharacterEscapes.ESCAPE_CUSTOM) { // std, \\uXXXX
char[] HEX_CHARS = getHexChars();
if (ptr > 5 && ptr < end) { // fits, prepend to buffer
ptr -= 6;
buffer[ptr++] = '\\';
buffer[ptr++] = 'u';
// We know it's a control char, so only the last 2 chars are non-0
if (ch > 0xFF) { // beyond 8 bytes
int hi = (ch >> 8) & 0xFF;
buffer[ptr++] = HEX_CHARS[hi >> 4];
buffer[ptr++] = HEX_CHARS[hi & 0xF];
ch &= 0xFF;
} else {
buffer[ptr++] = '0';
buffer[ptr++] = '0';
}
buffer[ptr++] = HEX_CHARS[ch >> 4];
buffer[ptr] = HEX_CHARS[ch & 0xF];
ptr -= 5;
} else {
// won't fit, flush and write
char[] ent = _entityBuffer;
if (ent == null) {
ent = _allocateEntityBuffer();
}
_outputHead = _outputTail;
try {
if (ch > 0xFF) { // beyond 8 bytes
int hi = (ch >> 8) & 0xFF;
int lo = ch & 0xFF;
ent[10] = HEX_CHARS[hi >> 4];
ent[11] = HEX_CHARS[hi & 0xF];
ent[12] = HEX_CHARS[lo >> 4];
ent[13] = HEX_CHARS[lo & 0xF];
_writer.write(ent, 8, 6);
} else { // We know it's a control char, so only the last 2 chars are non-0
ent[6] = HEX_CHARS[ch >> 4];
ent[7] = HEX_CHARS[ch & 0xF];
_writer.write(ent, 2, 6);
}
} catch (IOException e) {
throw _wrapIOFailure(e);
}
}
return ptr;
}
// Custom escape: prefer the one stashed in _currentEscape by the caller
String escape;
if (_currentEscape == null) {
escape = _characterEscapes.getEscapeSequence(ch).getValue();
} else {
escape = _currentEscape.getValue();
_currentEscape = null;
}
int len = escape.length();
if (ptr >= len && ptr < end) { // fits in, prepend
ptr -= len;
escape.getChars(0, len, buffer, ptr);
} else { // won't fit, write separately
try {
_writer.write(escape);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
}
return ptr;
}
/**
 * Method called to append escape sequence for given character, at the
 * end of standard output buffer; or if not possible, write out directly.
 *
 * @param ch character being escaped
 * @param escCode escape code: &gt;= 0 for a two-char escape, or
 *   ESCAPE_STANDARD / ESCAPE_CUSTOM (see CharacterEscapes)
 */
private void _appendCharacterEscape(char ch, int escCode)
throws JacksonException
{
if (escCode >= 0) { // \\N (2 char)
if ((_outputTail + 2) > _outputEnd) {
_flushBuffer();
}
_outputBuffer[_outputTail++] = '\\';
_outputBuffer[_outputTail++] = (char) escCode;
return;
}
if (escCode != CharacterEscapes.ESCAPE_CUSTOM) { // std, \\uXXXX
if ((_outputTail + 5) >= _outputEnd) {
_flushBuffer();
}
int ptr = _outputTail;
char[] buf = _outputBuffer;
char[] HEX_CHARS = getHexChars();
buf[ptr++] = '\\';
buf[ptr++] = 'u';
// We know it's a control char, so only the last 2 chars are non-0
if (ch > 0xFF) { // beyond 8 bytes
int hi = (ch >> 8) & 0xFF;
buf[ptr++] = HEX_CHARS[hi >> 4];
buf[ptr++] = HEX_CHARS[hi & 0xF];
ch &= 0xFF;
} else {
buf[ptr++] = '0';
buf[ptr++] = '0';
}
buf[ptr++] = HEX_CHARS[ch >> 4];
buf[ptr++] = HEX_CHARS[ch & 0xF];
_outputTail = ptr;
return;
}
// Custom escape: prefer the one stashed in _currentEscape by the caller
String escape;
if (_currentEscape == null) {
escape = _characterEscapes.getEscapeSequence(ch).getValue();
} else {
escape = _currentEscape.getValue();
_currentEscape = null;
}
int len = escape.length();
if ((_outputTail + len) > _outputEnd) {
_flushBuffer();
if (len > _outputEnd) { // very very long escape; unlikely but theoretically possible
try {
_writer.write(escape);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
return;
}
}
escape.getChars(0, len, _outputBuffer, _outputTail);
_outputTail += len;
}
// Lazily builds the shared 14-char scratch buffer for escape sequences.
// Layout (three fixed slots, constant prefixes pre-filled here):
//   [0..1]  two-char escapes such as \n
//   [2..7]  \\u00XX escapes for 8-bit (mostly control) chars
//   [8..13] \\uXXXX escapes for chars above 0xFF
private char[] _allocateEntityBuffer()
{
    final char[] buf = new char[14];
    buf[0] = '\\';
    buf[2] = '\\';
    buf[3] = 'u';
    buf[4] = '0';
    buf[5] = '0';
    buf[8] = '\\';
    buf[9] = 'u';
    return (_entityBuffer = buf);
}
// Lazily allocates (and caches) the 2000-char name-copy buffer.
private char[] _allocateCopyBuffer() {
    char[] buf = _copyBuffer;
    if (buf == null) {
        buf = _ioContext.allocNameCopyBuffer(2000);
        _copyBuffer = buf;
    }
    return buf;
}
// Writes the pending span [_outputHead, _outputTail) to the Writer and
// resets both pointers to 0. No-op when nothing is pending.
protected void _flushBuffer() throws JacksonException
{
    final int len = _outputTail - _outputHead;
    if (len > 0) {
        final int offset = _outputHead;
        // Reset pointers before writing so buffer state stays consistent
        // even if the write fails
        _outputTail = _outputHead = 0;
        try {
            _writer.write(_outputBuffer, offset, len);
        } catch (IOException e) {
            throw _wrapIOFailure(e);
        }
    }
}
}
|
WriterBasedJsonGenerator
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderFactory.java
|
{
"start": 4050,
"end": 4133
}
|
class ____ create BlockReader implementations.
*/
@InterfaceAudience.Private
public
|
to
|
java
|
qos-ch__slf4j
|
slf4j-api/src/main/java/org/slf4j/MarkerFactory.java
|
{
"start": 1696,
"end": 1761
}
|
class ____ static.
*
* @author Ceki Gülcü
*/
public
|
are
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/MethodCanBeStaticTest.java
|
{
"start": 3393,
"end": 3985
}
|
class ____ {
private static final int FOO = 1;
private static int a() {
return FOO;
}
private static int b() {
return a();
}
private static int c() {
return b();
}
private static int d() {
return c();
}
}
""")
.doTest();
}
@Test
public void positiveChain_oneFix() {
testHelper
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bootstrap/binding/annotations/override/InheritedAttributeOverridingTest.java
|
{
"start": 2039,
"end": 2356
}
|
class ____ {
private Integer id;
private String name;
@Id
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Entity( name = "B" )
public static
|
A
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/factory/DefaultListableBeanFactoryTests.java
|
{
"start": 145241,
"end": 145328
}
|
class ____ implements PriorityService {
}
@Priority(5)
private static
|
PriorityService3
|
java
|
google__guice
|
core/src/com/google/inject/internal/aop/ChildClassDefiner.java
|
{
"start": 1391,
"end": 2519
}
|
class ____ {
static final LoadingCache<ClassLoader, ChildLoader> CHILD_LOADER_CACHE =
CacheBuilder.newBuilder()
.weakKeys()
.weakValues()
.build(CacheLoader.from(ChildClassDefiner::childLoader));
}
@Override
public Class<?> define(Class<?> hostClass, byte[] bytecode) throws Exception {
ClassLoader hostLoader = hostClass.getClassLoader();
ChildLoader childLoader =
hostLoader != null
? ChildLoaderCacheHolder.CHILD_LOADER_CACHE.get(hostLoader)
: SystemChildLoaderHolder.SYSTEM_CHILD_LOADER;
return childLoader.defineInChild(bytecode);
}
/** Utility method to remove doPrivileged ambiguity */
static <T> T doPrivileged(PrivilegedAction<T> action) {
return AccessController.doPrivileged(action);
}
/** Creates a child loader for the given host loader */
static ChildLoader childLoader(ClassLoader hostLoader) {
logger.fine("Creating a child loader for " + hostLoader);
return doPrivileged(() -> hostLoader == null ? new ChildLoader() : new ChildLoader(hostLoader));
}
/** Custom
|
ChildLoaderCacheHolder
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/CxfComponentBuilderFactory.java
|
{
"start": 7911,
"end": 9305
}
|
class ____
extends AbstractComponentBuilder<CxfComponent>
implements CxfComponentBuilder {
@Override
protected CxfComponent buildConcreteComponent() {
return new CxfComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "bridgeErrorHandler": ((CxfComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "lazyStartProducer": ((CxfComponent) component).setLazyStartProducer((boolean) value); return true;
case "synchronous": ((CxfComponent) component).setSynchronous((boolean) value); return true;
case "allowStreaming": ((CxfComponent) component).setAllowStreaming((java.lang.Boolean) value); return true;
case "autowiredEnabled": ((CxfComponent) component).setAutowiredEnabled((boolean) value); return true;
case "headerFilterStrategy": ((CxfComponent) component).setHeaderFilterStrategy((org.apache.camel.spi.HeaderFilterStrategy) value); return true;
case "useGlobalSslContextParameters": ((CxfComponent) component).setUseGlobalSslContextParameters((boolean) value); return true;
default: return false;
}
}
}
}
|
CxfComponentBuilderImpl
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/commons/util/AnnotationUtilsTests.java
|
{
"start": 29068,
"end": 29161
}
|
class ____ extends MultiComposedExtensionClass {
}
|
ContainerPlusSubMultiComposedExtensionClass
|
java
|
junit-team__junit5
|
junit-platform-commons/src/main/java/org/junit/platform/commons/util/ClassUtils.java
|
{
"start": 1183,
"end": 1282
}
|
class ____ name should be retrieved, potentially
* {@code null}
* @return the fully qualified
|
whose
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/Hashing.java
|
{
"start": 980,
"end": 2568
}
|
class ____ {
private Hashing() {}
/*
* These should be ints, but we need to use longs to force GWT to do the multiplications with
* enough precision.
*/
private static final long C1 = 0xcc9e2d51;
private static final long C2 = 0x1b873593;
/*
* This method was rewritten in Java from an intermediate step of the Murmur hash function in
* http://code.google.com/p/smhasher/source/browse/trunk/MurmurHash3.cpp, which contained the
* following header:
*
* MurmurHash3 was written by Austin Appleby, and is placed in the public domain. The author
* hereby disclaims copyright to this source code.
*/
static int smear(int hashCode) {
return (int) (C2 * Integer.rotateLeft((int) (hashCode * C1), 15));
}
static int smearedHash(@Nullable Object o) {
return smear((o == null) ? 0 : o.hashCode());
}
private static final int MAX_TABLE_SIZE = Ints.MAX_POWER_OF_TWO;
static int closedTableSize(int expectedEntries, double loadFactor) {
// Get the recommended table size.
// Round down to the nearest power of 2.
expectedEntries = max(expectedEntries, 2);
int tableSize = Integer.highestOneBit(expectedEntries);
// Check to make sure that we will not exceed the maximum load factor.
if (expectedEntries > (int) (loadFactor * tableSize)) {
tableSize <<= 1;
return (tableSize > 0) ? tableSize : MAX_TABLE_SIZE;
}
return tableSize;
}
static boolean needsResizing(int size, int tableSize, double loadFactor) {
return size > loadFactor * tableSize && tableSize < MAX_TABLE_SIZE;
}
}
|
Hashing
|
java
|
apache__flink
|
flink-python/src/main/java/org/apache/beam/runners/fnexecution/control/DefaultJobBundleFactory.java
|
{
"start": 39238,
"end": 39869
}
|
class ____ {
abstract Builder setControlServer(GrpcFnServer<FnApiControlClientPoolService> server);
abstract Builder setLoggingServer(GrpcFnServer<GrpcLoggingService> server);
abstract Builder setRetrievalServer(GrpcFnServer<ArtifactRetrievalService> server);
abstract Builder setProvisioningServer(GrpcFnServer<StaticGrpcProvisionService> server);
abstract Builder setDataServer(GrpcFnServer<GrpcDataService> server);
abstract Builder setStateServer(GrpcFnServer<GrpcStateService> server);
abstract ServerInfo build();
}
}
}
|
Builder
|
java
|
elastic__elasticsearch
|
libs/native/src/main/java/org/elasticsearch/nativeaccess/AbstractNativeAccess.java
|
{
"start": 781,
"end": 2100
}
|
class ____ implements NativeAccess {
protected static final Logger logger = LogManager.getLogger(NativeAccess.class);
private final String name;
private final JavaLibrary javaLib;
private final Zstd zstd;
protected boolean isMemoryLocked = false;
protected ExecSandboxState execSandboxState = ExecSandboxState.NONE;
protected AbstractNativeAccess(String name, NativeLibraryProvider libraryProvider) {
this.name = name;
this.javaLib = libraryProvider.getLibrary(JavaLibrary.class);
this.zstd = new Zstd(libraryProvider.getLibrary(ZstdLibrary.class));
}
String getName() {
return name;
}
@Override
public Systemd systemd() {
return null;
}
@Override
public Zstd getZstd() {
return zstd;
}
@Override
public CloseableByteBuffer newSharedBuffer(int len) {
assert len > 0;
return javaLib.newSharedBuffer(len);
}
@Override
public CloseableByteBuffer newConfinedBuffer(int len) {
assert len > 0;
return javaLib.newConfinedBuffer(len);
}
@Override
public boolean isMemoryLocked() {
return isMemoryLocked;
}
@Override
public ExecSandboxState getExecSandboxState() {
return execSandboxState;
}
}
|
AbstractNativeAccess
|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/parallel/ResourceLocksProvider.java
|
{
"start": 1546,
"end": 1732
}
|
class ____ annotated with
* {@code @ResourceLock(providers)}.
*
* @apiNote Adding {@linkplain Lock a shared resource} via this method has
* the same semantics as annotating a test
|
is
|
java
|
apache__camel
|
dsl/camel-xml-io-dsl/src/test/java/org/apache/camel/dsl/xml/io/beans/MyBean.java
|
{
"start": 854,
"end": 1230
}
|
class ____ {
private String field1;
private String field2;
private int age;
public MyBean(String field1, String field2, int age) {
this.field1 = field1;
this.field2 = field2;
this.age = age;
}
public String hi(String body) {
return body + " " + field1 + ". I am " + field2 + " and " + age + " years old!";
}
}
|
MyBean
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/DoubleBraceInitializationTest.java
|
{
"start": 7363,
"end": 7921
}
|
class ____ {
static final ImmutableCollection<Integer> a = ImmutableList.of(1, 2);
static final ImmutableCollection<Integer> b = ImmutableList.of(1, 2);
Deque<Integer> c = new ArrayDeque<Integer>(ImmutableList.of(1, 2));
}
""")
.doTest();
}
@Test
public void map() {
testHelper
.addInputLines(
"in/Test.java",
"""
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
|
Test
|
java
|
bumptech__glide
|
samples/imgur/src/main/java/com/bumptech/glide/samples/imgur/api/ImgurService.java
|
{
"start": 690,
"end": 804
}
|
enum ____ {
hot,
top,
user
}
/** The sort order for content within a particular section. */
|
Section
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/parser/deser/asm/TestASM.java
|
{
"start": 323,
"end": 1334
}
|
class ____ extends TestCase {
public void test_asm() throws Exception {
String text = JSON.toJSONString(EishayEncode.mediaContent);
System.out.println(text);
}
public void test_0() throws Exception {
Department department = new Department();
Person person = new Person();
person.setId(123);
person.setName("刘伟加");
person.setAge(40);
person.setSalary(new BigDecimal("123456"));
person.getValues().add("A");
person.getValues().add("B");
person.getValues().add("C");
department.getPersons().add(person);
department.getPersons().add(new Person());
department.getPersons().add(new Person());
{
String text = JSON.toJSONString(department);
System.out.println(text);
}
{
String text = JSON.toJSONString(department, SerializerFeature.WriteMapNullValue);
System.out.println(text);
}
}
public static
|
TestASM
|
java
|
apache__camel
|
core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedThrottlingExceptionRoutePolicy.java
|
{
"start": 1273,
"end": 4210
}
|
class ____ extends ManagedService
implements ManagedThrottlingExceptionRoutePolicyMBean {
private final ThrottlingExceptionRoutePolicy policy;
public ManagedThrottlingExceptionRoutePolicy(CamelContext context, ThrottlingExceptionRoutePolicy policy) {
super(context, policy);
this.policy = policy;
}
public ThrottlingExceptionRoutePolicy getPolicy() {
return policy;
}
@Override
public String[] getExceptionTypes() {
if (policy.getThrottledExceptions() != null) {
List<String> types = policy.getThrottledExceptions().stream().map(Class::getName)
.toList();
return types.toArray(new String[0]);
} else {
return null;
}
}
@Override
public Long getHalfOpenAfter() {
return getPolicy().getHalfOpenAfter();
}
@Override
public void setHalfOpenAfter(Long milliseconds) {
getPolicy().setHalfOpenAfter(milliseconds);
}
@Override
public Long getFailureWindow() {
return getPolicy().getFailureWindow();
}
@Override
public void setFailureWindow(Long milliseconds) {
getPolicy().setFailureWindow(milliseconds);
}
@Override
public Integer getFailureThreshold() {
return getPolicy().getFailureThreshold();
}
@Override
public void setFailureThreshold(Integer numberOfFailures) {
getPolicy().setFailureThreshold(numberOfFailures);
}
@Override
public boolean getKeepOpen() {
return getPolicy().getKeepOpen();
}
@Override
public void setKeepOpen(boolean keepOpen) {
getPolicy().setKeepOpen(keepOpen);
}
@Override
public String getStateLoggingLevel() {
return getPolicy().getStateLoggingLevel().name();
}
@Override
public void setStateLoggingLevel(String stateLoggingLevel) {
getPolicy().setStateLoggingLevel(stateLoggingLevel);
}
@Override
public String currentState() {
return getPolicy().dumpState();
}
@Override
public String getHalfOpenHandlerName() {
ThrottlingExceptionHalfOpenHandler obj = getPolicy().getHalfOpenHandler();
if (obj != null) {
return obj.getClass().getSimpleName();
} else {
return "";
}
}
@Override
public Integer getCurrentFailures() {
return getPolicy().getFailures();
}
@Override
public Long getLastFailure() {
if (getPolicy().getLastFailure() == 0) {
return 0L;
} else {
return System.currentTimeMillis() - getPolicy().getLastFailure();
}
}
@Override
public Long getOpenAt() {
if (getPolicy().getOpenedAt() == 0) {
return 0L;
} else {
return System.currentTimeMillis() - getPolicy().getOpenedAt();
}
}
}
|
ManagedThrottlingExceptionRoutePolicy
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/state/internals/TimestampedWindowStoreBuilder.java
|
{
"start": 4976,
"end": 8922
}
|
class ____
extends WrappedStateStore<WindowStore<Bytes, byte[]>, Bytes, byte[]>
implements WindowStore<Bytes, byte[]>, TimestampedBytesStore {
private InMemoryTimestampedWindowStoreMarker(final WindowStore<Bytes, byte[]> wrapped) {
super(wrapped);
if (wrapped.persistent()) {
throw new IllegalArgumentException("Provided store must not be a persistent store, but it is.");
}
}
@Override
public void put(final Bytes key,
final byte[] value,
final long windowStartTimestamp) {
wrapped().put(key, value, windowStartTimestamp);
}
@Override
public byte[] fetch(final Bytes key,
final long time) {
return wrapped().fetch(key, time);
}
@Override
public WindowStoreIterator<byte[]> fetch(final Bytes key,
final long timeFrom,
final long timeTo) {
return wrapped().fetch(key, timeFrom, timeTo);
}
@Override
public WindowStoreIterator<byte[]> backwardFetch(final Bytes key,
final long timeFrom,
final long timeTo) {
return wrapped().backwardFetch(key, timeFrom, timeTo);
}
@Override
public KeyValueIterator<Windowed<Bytes>, byte[]> fetch(final Bytes keyFrom,
final Bytes keyTo,
final long timeFrom,
final long timeTo) {
return wrapped().fetch(keyFrom, keyTo, timeFrom, timeTo);
}
@Override
public KeyValueIterator<Windowed<Bytes>, byte[]> backwardFetch(final Bytes keyFrom,
final Bytes keyTo,
final long timeFrom,
final long timeTo) {
return wrapped().backwardFetch(keyFrom, keyTo, timeFrom, timeTo);
}
@Override
public KeyValueIterator<Windowed<Bytes>, byte[]> fetchAll(final long timeFrom,
final long timeTo) {
return wrapped().fetchAll(timeFrom, timeTo);
}
@Override
public KeyValueIterator<Windowed<Bytes>, byte[]> backwardFetchAll(final long timeFrom,
final long timeTo) {
return wrapped().backwardFetchAll(timeFrom, timeTo);
}
@Override
public KeyValueIterator<Windowed<Bytes>, byte[]> all() {
return wrapped().all();
}
@Override
public KeyValueIterator<Windowed<Bytes>, byte[]> backwardAll() {
return wrapped().backwardAll();
}
@Override
public <R> QueryResult<R> query(final Query<R> query,
final PositionBound positionBound,
final QueryConfig config) {
final long start = config.isCollectExecutionInfo() ? System.nanoTime() : -1L;
final QueryResult<R> result = wrapped().query(query, positionBound, config);
if (config.isCollectExecutionInfo()) {
final long end = System.nanoTime();
result.addExecutionInfo("Handled in " + getClass() + " in " + (end - start) + "ns");
}
return result;
}
@Override
public boolean persistent() {
return false;
}
}
}
|
InMemoryTimestampedWindowStoreMarker
|
java
|
apache__camel
|
components/camel-cxf/camel-cxf-spring-soap/src/test/java/org/apache/camel/component/cxf/mtom/CxfMtomPOJOProducerTest.java
|
{
"start": 2024,
"end": 4248
}
|
class ____ {
static int port = CXFTestSupport.getPort1();
private static final Logger LOG = LoggerFactory.getLogger(CxfMtomPOJOProducerTest.class);
@Autowired
protected CamelContext context;
private Endpoint endpoint;
@BeforeEach
public void setUp() throws Exception {
endpoint = Endpoint.publish("http://localhost:" + port + "/CxfMtomPOJOProducerTest/jaxws-mtom/hello", getImpl());
SOAPBinding binding = (SOAPBinding) endpoint.getBinding();
binding.setMTOMEnabled(true);
}
@AfterEach
public void tearDown() throws Exception {
if (endpoint != null) {
endpoint.stop();
}
}
@SuppressWarnings("unchecked")
@Test
public void testInvokingServiceFromCxfProducer() throws Exception {
if (MtomTestHelper.isAwtHeadless(null, LOG)) {
return;
}
final Holder<byte[]> photo = new Holder<>(MtomTestHelper.REQ_PHOTO_DATA);
final Holder<Image> image = new Holder<>(getImage("/java.jpg"));
Exchange exchange = context.createProducerTemplate().send("direct://testEndpoint", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setBody(new Object[] { photo, image });
}
});
assertEquals(2, exchange.getMessage(AttachmentMessage.class).getAttachments().size(),
"The attachement size should be 2");
Object[] result = exchange.getMessage().getBody(Object[].class);
Holder<byte[]> photo1 = (Holder<byte[]>) result[1];
assertArrayEquals(MtomTestHelper.RESP_PHOTO_DATA, photo1.value);
Holder<Image> image1 = (Holder<Image>) result[2];
assertNotNull(image1.value);
if (image.value instanceof BufferedImage) {
assertEquals(560, ((BufferedImage) image1.value).getWidth());
assertEquals(300, ((BufferedImage) image1.value).getHeight());
}
}
private Image getImage(String name) throws Exception {
return ImageIO.read(getClass().getResource(name));
}
protected Object getImpl() {
return new HelloImpl();
}
}
|
CxfMtomPOJOProducerTest
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/util/InstantiatorTests.java
|
{
"start": 5835,
"end": 5953
}
|
class ____ extends WithDefaultConstructor {
}
@Order(Ordered.LOWEST_PRECEDENCE)
static
|
WithDefaultConstructorSubclass
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/MvcUriComponentsBuilderTests.java
|
{
"start": 27044,
"end": 27205
}
|
class ____ {
@RequestMapping("/create")
public String showCreate(@PathVariable Integer userId) {
return null;
}
}
abstract static
|
UserContactController
|
java
|
quarkusio__quarkus
|
extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/binder/RedisClientMetricsDisabledTest.java
|
{
"start": 389,
"end": 1166
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withConfigurationResource("test-logging.properties")
.overrideConfigKey("quarkus.micrometer.binder.redis.enabled", "false")
.overrideConfigKey("quarkus.micrometer.binder-enabled-default", "false")
.overrideConfigKey("quarkus.micrometer.registry-enabled-default", "false")
.overrideConfigKey("quarkus.redis.devservices.enabled", "false")
.withEmptyApplication();
@Inject
Instance<ObservableRedisMetrics> bean;
@Test
void testNoInstancePresentIfNoRedisClientsClass() {
assertTrue(bean.isUnsatisfied(),
"No redis metrics bean");
}
}
|
RedisClientMetricsDisabledTest
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/GrapeEndpointBuilderFactory.java
|
{
"start": 1478,
"end": 1599
}
|
interface ____ {
/**
* Builder for endpoint for the Grape component.
*/
public
|
GrapeEndpointBuilderFactory
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/request/AcceptHeaders.java
|
{
"start": 317,
"end": 10433
}
|
class ____ {
/**
* Gets the strings from a comma-separated list.
* All "*" entries are replaced with {@code null} keys.
*
* @param header the header value.
* @return the listed items in order of appearance or {@code null} if the header didn't contain any entries.
*/
public static Map<String, QualityValue> getStringQualityValues(String header) {
if (header == null) {
return null;
}
header = header.trim();
if (header.length() == 0) {
return null;
}
Map<String, QualityValue> result = new LinkedHashMap<String, QualityValue>();
int offset = 0;
while (true) {
int endIndex = header.indexOf(',', offset);
String content;
if (endIndex < 0) {
content = header.substring(offset);
} else {
content = header.substring(offset, endIndex);
}
QualityValue qualityValue = QualityValue.DEFAULT;
int qualityIndex = content.indexOf(';');
if (qualityIndex >= 0) {
String parameter = content.substring(qualityIndex + 1);
content = content.substring(0, qualityIndex);
int equalsIndex = parameter.indexOf('=');
if (equalsIndex < 0) {
throw new BadRequestException("Malformed parameter: " + parameter);
}
String name = parameter.substring(0, equalsIndex).trim();
if (!"q".equals(name)) {
throw new BadRequestException("Unsupported parameter: " + parameter);
}
String value = parameter.substring(equalsIndex + 1).trim();
qualityValue = QualityValue.valueOf(value);
}
content = content.trim();
if (content.length() == 0) {
throw new BadRequestException("Empty Field in header: " + header);
}
if (content.equals("*")) {
result.put(null, qualityValue);
} else {
result.put(content, qualityValue);
}
if (endIndex < 0) {
break;
}
offset = endIndex + 1;
}
return result;
}
/**
* Gets the locales from a comma-separated list.
* Any "*" entries are replaced with {@code null} keys.
*
* @param header the header value.
* @return the listed items in order of appearance or {@code null} if the header didn't contain any entries.
*/
public static Map<Locale, QualityValue> getLocaleQualityValues(String header) {
Map<String, QualityValue> stringResult = getStringQualityValues(header);
if (stringResult == null)
return null;
Map<Locale, QualityValue> result = new LinkedHashMap<Locale, QualityValue>(stringResult.size() * 2);
for (Entry<String, QualityValue> entry : stringResult.entrySet()) {
QualityValue quality = entry.getValue();
Locale locale = null;
String value = entry.getKey();
if (value != null) {
int length = value.length();
if (length == 2) {
locale = new Locale(value);
} else if (length == 5 && value.charAt(2) == '-') {
String language = value.substring(0, 2);
String country = value.substring(3, 5);
locale = new Locale(language, country);
} else {
//LogMessages.LOGGER.ignoringUnsupportedLocale(value);
continue;
}
}
result.put(locale, quality);
}
//LogMessages.LOGGER.debug(result.toString());
return result;
}
/**
* Gets the media types from a comma-separated list.
*
* @param header the header value.
* @return the listed items in order of appearance or {@code null} if the header didn't contain any entries.
*/
public static Map<MediaType, QualityValue> getMediaTypeQualityValues(String header) {
if (header == null)
return null;
header = header.trim();
if (header.length() == 0)
return null;
Map<MediaType, QualityValue> result = new LinkedHashMap<MediaType, QualityValue>();
int offset = 0;
while (offset >= 0) {
int slashIndex = header.indexOf('/', offset);
if (slashIndex < 0)
throw new BadRequestException("Malformed media type: " + header);
String type = header.substring(offset, slashIndex);
String subtype;
Map<String, String> parameters = null;
QualityValue qualityValue = QualityValue.DEFAULT;
offset = slashIndex + 1;
int parameterStartIndex = header.indexOf(';', offset);
int itemEndIndex = header.indexOf(',', offset);
if (parameterStartIndex == itemEndIndex) {
assert itemEndIndex == -1;
subtype = header.substring(offset);
offset = -1;
} else if (itemEndIndex < 0 || (parameterStartIndex >= 0 && parameterStartIndex < itemEndIndex)) {
subtype = header.substring(offset, parameterStartIndex);
offset = parameterStartIndex + 1;
parameters = new LinkedHashMap<String, String>();
offset = parseParameters(parameters, header, offset);
qualityValue = evaluateAcceptParameters(parameters);
} else {
subtype = header.substring(offset, itemEndIndex);
offset = itemEndIndex + 1;
}
result.put(new MediaType(type.trim(), subtype.trim(), parameters), qualityValue);
}
//LogMessages.LOGGER.debug(result.toString());
return result;
}
private static int parseParameters(Map<String, String> parameters, String header, int offset) {
while (true) {
int equalsIndex = header.indexOf('=', offset);
if (equalsIndex < 0)
throw new BadRequestException("Malformed parameters: " + header);
String name = header.substring(offset, equalsIndex).trim();
offset = equalsIndex + 1;
if (header.charAt(offset) == '"') {
int end = offset;
++offset;
do {
end = header.indexOf('"', ++end);
if (end < 0)
throw new BadRequestException("Unclosed quotes:" + header);
} while (header.charAt(end - 1) == '\\');
String value = header.substring(offset, end);
parameters.put(name, value);
offset = end + 1;
int parameterEndIndex = header.indexOf(';', offset);
int itemEndIndex = header.indexOf(',', offset);
if (parameterEndIndex == itemEndIndex) {
assert itemEndIndex == -1;
if (header.substring(offset).trim().length() != 0)
throw new BadRequestException("Extra characters after quoted string:" + header);
return -1;
} else if (parameterEndIndex < 0 || (itemEndIndex >= 0 && itemEndIndex < parameterEndIndex)) {
if (header.substring(offset, itemEndIndex).trim().length() != 0)
throw new BadRequestException("Extra characters after quoted string:" + header);
return itemEndIndex + 1;
} else {
if (header.substring(offset, parameterEndIndex).trim().length() != 0)
throw new BadRequestException("Extra characters after quoted string:" + header);
offset = parameterEndIndex + 1;
}
} else {
int parameterEndIndex = header.indexOf(';', offset);
int itemEndIndex = header.indexOf(',', offset);
if (parameterEndIndex == itemEndIndex) {
assert itemEndIndex == -1;
String value = header.substring(offset).trim();
parameters.put(name, value);
return -1;
} else if (parameterEndIndex < 0 || (itemEndIndex >= 0 && itemEndIndex < parameterEndIndex)) {
String value = header.substring(offset, itemEndIndex).trim();
parameters.put(name, value);
return itemEndIndex + 1;
} else {
String value = header.substring(offset, parameterEndIndex).trim();
parameters.put(name, value);
offset = parameterEndIndex + 1;
}
}
}
}
/**
* Evaluates and removes the accept parameters.
*
* <pre>
* accept-params = ";" "q" "=" qvalue *( accept-extension )
* accept-extension = ";" token [ "=" ( token | quoted-string ) ]
* </pre>
*
* @param parameters all parameters in order of appearance.
* @return the qvalue.
* @see "accept-params
*/
private static QualityValue evaluateAcceptParameters(Map<String, String> parameters) {
Iterator<String> i = parameters.keySet().iterator();
while (i.hasNext()) {
String name = i.next();
if ("q".equals(name)) {
if (i.hasNext()) {
//LogMessages.LOGGER.acceptExtensionsNotSupported();
i.remove();
do {
i.next();
i.remove();
} while (i.hasNext());
return QualityValue.NOT_ACCEPTABLE;
} else {
String value = parameters.get(name);
i.remove();
return QualityValue.valueOf(value);
}
}
}
return QualityValue.DEFAULT;
}
}
|
AcceptHeaders
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/SchemaCompatibility.java
|
{
"start": 19557,
"end": 19755
}
|
enum ____ {
NAME_MISMATCH, FIXED_SIZE_MISMATCH, MISSING_ENUM_SYMBOLS, READER_FIELD_MISSING_DEFAULT_VALUE, TYPE_MISMATCH,
MISSING_UNION_BRANCH;
}
/**
* Immutable
|
SchemaIncompatibilityType
|
java
|
spring-projects__spring-security
|
web/src/main/java/org/springframework/security/web/authentication/AuthenticationEntryPointFailureHandler.java
|
{
"start": 1258,
"end": 2667
}
|
class ____ implements AuthenticationFailureHandler {
private boolean rethrowAuthenticationServiceException = true;
private final AuthenticationEntryPoint authenticationEntryPoint;
public AuthenticationEntryPointFailureHandler(AuthenticationEntryPoint authenticationEntryPoint) {
Assert.notNull(authenticationEntryPoint, "authenticationEntryPoint cannot be null");
this.authenticationEntryPoint = authenticationEntryPoint;
}
@Override
public void onAuthenticationFailure(HttpServletRequest request, HttpServletResponse response,
AuthenticationException exception) throws IOException, ServletException {
if (!this.rethrowAuthenticationServiceException) {
this.authenticationEntryPoint.commence(request, response, exception);
return;
}
if (!AuthenticationServiceException.class.isAssignableFrom(exception.getClass())) {
this.authenticationEntryPoint.commence(request, response, exception);
return;
}
throw exception;
}
/**
* Set whether to rethrow {@link AuthenticationServiceException}s (defaults to true)
* @param rethrowAuthenticationServiceException whether to rethrow
* {@link AuthenticationServiceException}s
* @since 5.8
*/
public void setRethrowAuthenticationServiceException(boolean rethrowAuthenticationServiceException) {
this.rethrowAuthenticationServiceException = rethrowAuthenticationServiceException;
}
}
|
AuthenticationEntryPointFailureHandler
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/factory/xml/ResourceEntityResolverTests.java
|
{
"start": 1241,
"end": 3089
}
|
class ____ {
@ParameterizedTest
@ValueSource(strings = { "https://example.org/schema/", "https://example.org/schema.xml" })
void resolveEntityDoesNotCallFallbackIfNotSchema(String systemId) throws Exception {
ConfigurableFallbackEntityResolver resolver = new ConfigurableFallbackEntityResolver(true);
assertThat(resolver.resolveEntity("testPublicId", systemId)).isNull();
assertThat(resolver.fallbackInvoked).isFalse();
}
@ParameterizedTest
@ValueSource(strings = { "https://example.org/schema.dtd", "https://example.org/schema.xsd" })
void resolveEntityCallsFallbackThatReturnsNull(String systemId) throws Exception {
ConfigurableFallbackEntityResolver resolver = new ConfigurableFallbackEntityResolver(null);
assertThat(resolver.resolveEntity("testPublicId", systemId)).isNull();
assertThat(resolver.fallbackInvoked).isTrue();
}
@ParameterizedTest
@ValueSource(strings = { "https://example.org/schema.dtd", "https://example.org/schema.xsd" })
void resolveEntityCallsFallbackThatThrowsException(String systemId) {
ConfigurableFallbackEntityResolver resolver = new ConfigurableFallbackEntityResolver(true);
assertThatExceptionOfType(ResolutionRejectedException.class)
.isThrownBy(() -> resolver.resolveEntity("testPublicId", systemId));
assertThat(resolver.fallbackInvoked).isTrue();
}
@ParameterizedTest
@ValueSource(strings = { "https://example.org/schema.dtd", "https://example.org/schema.xsd" })
void resolveEntityCallsFallbackThatReturnsInputSource(String systemId) throws Exception {
InputSource expected = mock();
ConfigurableFallbackEntityResolver resolver = new ConfigurableFallbackEntityResolver(expected);
assertThat(resolver.resolveEntity("testPublicId", systemId)).isSameAs(expected);
assertThat(resolver.fallbackInvoked).isTrue();
}
private static final
|
ResourceEntityResolverTests
|
java
|
apache__maven
|
compat/maven-model-builder/src/main/java/org/apache/maven/model/inheritance/DefaultInheritanceAssembler.java
|
{
"start": 1817,
"end": 5736
}
|
class ____ implements InheritanceAssembler {
private InheritanceModelMerger merger = new InheritanceModelMerger();
private static final String CHILD_DIRECTORY = "child-directory";
private static final String CHILD_DIRECTORY_PROPERTY = "project.directory";
@Override
public void assembleModelInheritance(
Model child, Model parent, ModelBuildingRequest request, ModelProblemCollector problems) {
Map<Object, Object> hints = new HashMap<>();
String childPath = child.getProperties().getProperty(CHILD_DIRECTORY_PROPERTY, child.getArtifactId());
hints.put(CHILD_DIRECTORY, childPath);
hints.put(MavenModelMerger.CHILD_PATH_ADJUSTMENT, getChildPathAdjustment(child, parent, childPath));
merger.merge(child, parent, false, hints);
}
/**
* Calculates the relative path from the base directory of the parent to the parent directory of the base directory
* of the child. The general idea is to adjust inherited URLs to match the project layout (in SCM).
*
* <p>This calculation is only a heuristic based on our conventions.
* In detail, the algo relies on the following assumptions: <ul>
* <li>The parent uses aggregation and refers to the child via the modules section</li>
* <li>The module path to the child is considered to
* point at the POM rather than its base directory if the path ends with ".xml" (ignoring case)</li>
* <li>The name of the child's base directory matches the artifact id of the child.</li>
* </ul>
* Note that for the sake of independence from the user
* environment, the filesystem is intentionally not used for the calculation.</p>
*
* @param child The child model, must not be <code>null</code>.
* @param parent The parent model, may be <code>null</code>.
* @param childDirectory The directory defined in child model, may be <code>null</code>.
* @return The path adjustment, can be empty but never <code>null</code>.
*/
private String getChildPathAdjustment(Model child, Model parent, String childDirectory) {
String adjustment = "";
if (parent != null) {
String childName = child.getArtifactId();
/*
* This logic (using filesystem, against wanted independence from the user environment) exists only for the
* sake of backward-compat with 2.x (MNG-5000). In general, it is wrong to
* base URL inheritance on the module directory names as this information is unavailable for POMs in the
* repository. In other words, modules where artifactId != moduleDirName will see different effective URLs
* depending on how the model was constructed (from filesystem or from repository).
*/
if (child.getProjectDirectory() != null) {
childName = child.getProjectDirectory().getName();
}
for (String module : parent.getModules()) {
module = module.replace('\\', '/');
if (module.regionMatches(true, module.length() - 4, ".xml", 0, 4)) {
module = module.substring(0, module.lastIndexOf('/') + 1);
}
String moduleName = module;
if (moduleName.endsWith("/")) {
moduleName = moduleName.substring(0, moduleName.length() - 1);
}
int lastSlash = moduleName.lastIndexOf('/');
moduleName = moduleName.substring(lastSlash + 1);
if ((moduleName.equals(childName) || (moduleName.equals(childDirectory))) && lastSlash >= 0) {
adjustment = module.substring(0, lastSlash);
break;
}
}
}
return adjustment;
}
/**
* InheritanceModelMerger
*/
protected static
|
DefaultInheritanceAssembler
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleIndexerAction.java
|
{
"start": 1802,
"end": 6847
}
|
/**
 * Internal broadcast request for a single downsampling round: carries the user-level
 * {@link DownsampleAction.Request} plus the resolved dimension/metric/label field names and
 * the epoch-milli time bounds of the source index.
 */
class ____ extends BroadcastRequest<Request> implements IndicesRequest, ToXContentObject {
    // The originating user-level request; also supplies indices() and indicesOptions().
    private DownsampleAction.Request downsampleRequest;
    // Source-index time bounds in epoch millis; both 0 when received from a pre-8.10 node.
    private long indexStartTimeMillis;
    private long indexEndTimeMillis;
    private String[] dimensionFields;
    private String[] metricFields;
    private String[] labelFields;
    public Request(
        DownsampleAction.Request downsampleRequest,
        final long indexStartTimeMillis,
        final long indexEndTimeMillis,
        final String[] dimensionFields,
        final String[] metricFields,
        final String[] labelFields
    ) {
        super(downsampleRequest.indices());
        this.indexStartTimeMillis = indexStartTimeMillis;
        this.indexEndTimeMillis = indexEndTimeMillis;
        this.downsampleRequest = downsampleRequest;
        this.dimensionFields = dimensionFields;
        this.metricFields = metricFields;
        this.labelFields = labelFields;
    }
    public Request() {}
    // Wire format: optional (flag-guarded) time bounds first on 8.10+, then the wrapped request
    // and the three field-name arrays. Must mirror writeTo exactly.
    public Request(StreamInput in) throws IOException {
        super(in);
        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) && in.readBoolean()) {
            this.indexStartTimeMillis = in.readVLong();
            this.indexEndTimeMillis = in.readVLong();
        } else {
            // Older senders never wrote the bounds; fall back to 0/0.
            this.indexStartTimeMillis = 0;
            this.indexEndTimeMillis = 0;
        }
        this.downsampleRequest = new DownsampleAction.Request(in);
        this.dimensionFields = in.readStringArray();
        this.metricFields = in.readStringArray();
        this.labelFields = in.readStringArray();
    }
    @Override
    public String[] indices() {
        return downsampleRequest.indices();
    }
    @Override
    public IndicesOptions indicesOptions() {
        return downsampleRequest.indicesOptions();
    }
    public DownsampleAction.Request getDownsampleRequest() {
        return downsampleRequest;
    }
    public long getIndexStartTimeMillis() {
        return indexStartTimeMillis;
    }
    public long getIndexEndTimeMillis() {
        return indexEndTimeMillis;
    }
    public String[] getDimensionFields() {
        return this.dimensionFields;
    }
    public String[] getMetricFields() {
        return this.metricFields;
    }
    public String[] getLabelFields() {
        return labelFields;
    }
    // Creates the task that tracks this downsampling operation.
    @Override
    public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
        return new DownsampleTask(
            id,
            type,
            action,
            parentTaskId,
            downsampleRequest.getTargetIndex(),
            downsampleRequest.getDownsampleConfig(),
            headers
        );
    }
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) {
            // The boolean mirrors the readBoolean() guard in the stream constructor.
            out.writeBoolean(true);
            out.writeVLong(indexStartTimeMillis);
            out.writeVLong(indexEndTimeMillis);
        } else {
            out.writeBoolean(false);
        }
        downsampleRequest.writeTo(out);
        out.writeStringArray(dimensionFields);
        out.writeStringArray(metricFields);
        out.writeStringArray(labelFields);
    }
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field("downsample_request", downsampleRequest);
        builder.array("dimension_fields", dimensionFields);
        builder.array("metric_fields", metricFields);
        builder.array("label_fields", labelFields);
        builder.endObject();
        return builder;
    }
    // NOTE(review): indexStartTimeMillis/indexEndTimeMillis are excluded from both hashCode and
    // equals, although they are serialized — presumably intentional; confirm before relying on it.
    @Override
    public int hashCode() {
        int result = downsampleRequest.hashCode();
        result = 31 * result + Arrays.hashCode(dimensionFields);
        result = 31 * result + Arrays.hashCode(metricFields);
        result = 31 * result + Arrays.hashCode(labelFields);
        return result;
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Request request = (Request) o;
        if (downsampleRequest.equals(request.downsampleRequest) == false) return false;
        if (Arrays.equals(dimensionFields, request.dimensionFields) == false) return false;
        if (Arrays.equals(labelFields, request.labelFields) == false) return false;
        return Arrays.equals(metricFields, request.metricFields);
    }
}
public static
|
Request
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
|
{
"start": 83755,
"end": 84223
}
|
class ____
    extends BaseTransition {
  /**
   * Handles a container-finished event that arrives while the attempt is
   * already in a final state: the container is simply recorded in the
   * attempt's just-finished containers list.
   */
  @Override
  public void
  transition(RMAppAttemptImpl appAttempt, RMAppAttemptEvent event) {
    // Events routed to this transition always carry a container-finished payload.
    final RMAppAttemptContainerFinishedEvent finishedEvent =
        (RMAppAttemptContainerFinishedEvent) event;
    // Treat it as an ordinary completed container and record it.
    addJustFinishedContainer(appAttempt, finishedEvent);
  }
}
private static
|
ContainerFinishedAtFinalStateTransition
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/transport/TransportInterceptor.java
|
{
"start": 772,
"end": 2186
}
|
interface ____ {
    /**
     * Called once for each handler registered via
     * {@link TransportService#registerRequestHandler(String, Executor, boolean, boolean, Reader, TransportRequestHandler)} or
     * {@link TransportService#registerRequestHandler(String, Executor, Reader, TransportRequestHandler)}. The handler
     * returned here is registered instead of the one passed in, allowing interceptors to wrap or replace it.
     * The default implementation returns the provided handler unchanged.
     */
    default <T extends TransportRequest> TransportRequestHandler<T> interceptHandler(
        String action,
        Executor executor,
        boolean forceExecution,
        TransportRequestHandler<T> actualHandler
    ) {
        // No-op by default: register the handler as-is.
        return actualHandler;
    }
    /**
     * Called up-front with the actual low-level {@link AsyncSender} that performs the raw send.
     * The returned sender is used for every request that goes through
     * {@link TransportService#sendRequest(DiscoveryNode, String, TransportRequest, TransportResponseHandler)} or
     * {@link TransportService#sendRequest(DiscoveryNode, String, TransportRequest, TransportRequestOptions, TransportResponseHandler)},
     * which lets plugins act on each outgoing request (e.g. modify the request context).
     * The default implementation returns the provided sender unchanged.
     */
    default AsyncSender interceptSender(AsyncSender sender) {
        // No-op by default: send requests through the original sender.
        return sender;
    }
/**
* A simple
|
TransportInterceptor
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/sqm/internal/SqmCriteriaNodeBuilder.java
|
{
"start": 76524,
"end": 199776
}
|
enum ____ to a PostgreSQL named 'enum' type
}
	// Wraps the (literal) collection of map values as a value expression.
	@Override
	public <V, M extends Map<?, V>> Expression<Collection<V>> values(M map) {
		return value( map.values() );
	}
	// SIZE() over a plural path — evaluated in the database.
	@Override
	public <C extends Collection<?>> SqmExpression<Integer> size(Expression<C> collection) {
		return new SqmCollectionSize( (SqmPath<C>) collection, this );
	}
	// SIZE() of an in-memory collection — folded to a literal at build time.
	@Override
	public <C extends Collection<?>> SqmExpression<Integer> size(C collection) {
		return new SqmLiteral<>( collection.size(), getIntegerType(), this );
	}
	@Override
	public <T> SqmCoalesce<T> coalesce() {
		return new SqmCoalesce<>( this );
	}
	// Two-argument COALESCE; the result type is the highest-precedence type of the operands.
	@Override
	public <Y> JpaCoalesce<Y> coalesce(Expression<? extends Y> x, Expression<? extends Y> y) {
		@SuppressWarnings("unchecked")
		final var sqmExpressible = (SqmBindableType<Y>) highestPrecedenceType(
				( (SqmExpression<? extends Y>) x ).getExpressible(),
				( (SqmExpression<? extends Y>) y ).getExpressible()
		);
		return new SqmCoalesce<>( sqmExpressible, 2, this ).value(x).value(y);
	}
	// Literal second operand: bind it using the first operand's type as the inference anchor.
	@Override
	public <Y> JpaCoalesce<Y> coalesce(Expression<? extends Y> x, Y y) {
		return coalesce( x, value( y, (SqmExpression<? extends Y>) x ) );
	}
	@Override
	public <Y> SqmExpression<Y> nullif(Expression<Y> x, Expression<?> y) {
		//noinspection unchecked
		return createNullifFunctionNode( (SqmExpression<Y>) x, (SqmExpression<Y>) y );
	}
	@Override
	public <Y> SqmExpression<Y> nullif(Expression<Y> x, Y y) {
		return createNullifFunctionNode( (SqmExpression<Y>) x, value( y, (SqmExpression<Y>) x ) );
	}
	// Builds the NULLIF function call, resolving the result type from the operands' types.
	private <Y> SqmExpression<Y> createNullifFunctionNode(SqmExpression<Y> first, SqmExpression<Y> second) {
		final SqmBindableType<? extends Y> bindableType =
				highestPrecedenceType( first.getExpressible(), second.getExpressible() );
		@SuppressWarnings("unchecked")
		final ReturnableType<Y> resultType =
				bindableType == null ? null : (ReturnableType<Y>) bindableType.getSqmType();
		return getFunctionDescriptor( "nullif" ).generateSqmExpression(
				asList( first, second ),
				resultType,
				getQueryEngine()
		);
	}
	// Registry lookups for (set-returning) function descriptors by name.
	private SqmFunctionDescriptor getFunctionDescriptor(String name) {
		return queryEngine.getSqmFunctionRegistry().findFunctionDescriptor( name );
	}
	private SqmSetReturningFunctionDescriptor getSetReturningFunctionDescriptor(String name) {
		return queryEngine.getSqmFunctionRegistry().findSetReturningFunctionDescriptor( name );
	}
	// Simple CASE: compares the given operand against WHEN values.
	@Override
	public <C, R> SqmCaseSimple<C, R> selectCase(Expression<? extends C> expression) {
		//noinspection unchecked
		return new SqmCaseSimple<>( (SqmExpression<C>) expression, this );
	}
	// Searched CASE: each WHEN carries its own predicate.
	@Override
	public <R> SqmCaseSearched<R> selectCase() {
		return new SqmCaseSearched<>( this );
	}
	@Override
	public <M extends Map<?, ?>> SqmExpression<Integer> mapSize(JpaExpression<M> mapExpression) {
		return new SqmCollectionSize( (SqmPath<?>) mapExpression, this );
	}
	// Size of an in-memory map — folded to a literal at build time.
	@Override
	public <M extends Map<?, ?>> SqmExpression<Integer> mapSize(M map) {
		return new SqmLiteral<>( map.size(), getIntegerType(), this );
	}
	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
	// Predicates
	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@Override
public SqmPredicate and(Expression<Boolean> x, Expression<Boolean> y) {
return new SqmJunctionPredicate(
Predicate.BooleanOperator.AND,
wrap( x ),
wrap( y ),
this
);
}
@Override
public SqmPredicate and(Predicate... restrictions) {
if ( restrictions == null || restrictions.length == 0 ) {
return conjunction();
}
final List<SqmPredicate> predicates = new ArrayList<>( restrictions.length );
for ( Predicate expression : restrictions ) {
predicates.add( (SqmPredicate) expression );
}
return new SqmJunctionPredicate( Predicate.BooleanOperator.AND, predicates, this );
}
@Override
public SqmPredicate and(List<Predicate> restrictions) {
if ( restrictions == null || restrictions.isEmpty() ) {
return conjunction();
}
final List<SqmPredicate> predicates = new ArrayList<>( restrictions.size() );
for ( Predicate expression : restrictions ) {
predicates.add( (SqmPredicate) expression );
}
return new SqmJunctionPredicate( Predicate.BooleanOperator.AND, predicates, this );
}
@Override
public SqmPredicate or(Expression<Boolean> x, Expression<Boolean> y) {
return new SqmJunctionPredicate(
Predicate.BooleanOperator.OR,
wrap( x ),
wrap( y ),
this
);
}
@Override
public SqmPredicate or(Predicate... restrictions) {
if ( restrictions == null || restrictions.length == 0 ) {
return disjunction();
}
final List<SqmPredicate> predicates = new ArrayList<>( restrictions.length );
for ( Predicate expression : restrictions ) {
predicates.add( (SqmPredicate) expression );
}
return new SqmJunctionPredicate( Predicate.BooleanOperator.OR, predicates, this );
}
@Override
public SqmPredicate or(List<Predicate> restrictions) {
if ( restrictions == null || restrictions.isEmpty() ) {
return disjunction();
}
final List<SqmPredicate> predicates = new ArrayList<>( restrictions.size() );
for ( Predicate expression : restrictions ) {
predicates.add( (SqmPredicate) expression );
}
return new SqmJunctionPredicate( Predicate.BooleanOperator.OR, predicates, this );
}
@Override
public SqmPredicate not(Expression<Boolean> restriction) {
return wrap( restriction ).not();
}
@Override
public SqmPredicate conjunction() {
return new SqmComparisonPredicate(
new SqmLiteral<>( 1, getIntegerType(), this ),
ComparisonOperator.EQUAL,
new SqmLiteral<>( 1, getIntegerType(), this ),
this
);
}
@Override
public SqmPredicate disjunction() {
return new SqmComparisonPredicate(
new SqmLiteral<>( 1, getIntegerType(), this ),
ComparisonOperator.NOT_EQUAL,
new SqmLiteral<>( 1, getIntegerType(), this ),
this
);
}
@Override
public SqmPredicate isTrue(Expression<Boolean> x) {
return wrap( x );
}
@Override
public SqmPredicate isFalse(Expression<Boolean> x) {
return wrap( x ).not();
}
@Override
public SqmPredicate isNull(Expression<?> x) {
return new SqmNullnessPredicate( (SqmExpression<?>) x, this );
}
@Override
public SqmPredicate isNotNull(Expression<?> x) {
return new SqmNullnessPredicate( (SqmExpression<?>) x, this ).not();
}
	// BETWEEN with expression bounds (non-negated form).
	@Override
	public <Y extends Comparable<? super Y>> SqmPredicate between(Expression<? extends Y> value, Expression<? extends Y> lower, Expression<? extends Y> upper) {
		return new SqmBetweenPredicate(
				(SqmExpression<? extends Y>) value,
				(SqmExpression<? extends Y>) lower,
				(SqmExpression<? extends Y>) upper,
				false,
				this
		);
	}
	// BETWEEN with literal bounds: the tested expression anchors type inference for both bounds.
	@Override
	public <Y extends Comparable<? super Y>> SqmPredicate between(Expression<? extends Y> value, Y lower, Y upper) {
		final SqmExpression<? extends Y> valueExpression = (SqmExpression<? extends Y>) value;
		final SqmExpression<?> lowerExpr = value( lower, valueExpression );
		final SqmExpression<?> upperExpr = value( upper, valueExpression );
		return new SqmBetweenPredicate(
				valueExpression,
				lowerExpr,
				upperExpr,
				false,
				this
		);
	}
	@Override
	public SqmPredicate equal(Expression<?> x, Expression<?> y) {
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.EQUAL,
				(SqmExpression<?>) y,
				this
		);
	}
	// Literal RHS: bind it using the LHS expression as the inference anchor.
	@Override
	public SqmPredicate equal(Expression<?> x, Object y) {
		final SqmExpression<?> yExpr = value( y, (SqmExpression<?>) x );
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.EQUAL,
				yExpr,
				this
		);
	}
	@Override
	public SqmPredicate notEqual(Expression<?> x, Expression<?> y) {
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.NOT_EQUAL,
				(SqmExpression<?>) y,
				this
		);
	}
	@Override
	public SqmPredicate notEqual(Expression<?> x, Object y) {
		final SqmExpression<?> yExpr = value( y, (SqmExpression<?>) x );
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.NOT_EQUAL,
				yExpr,
				this
		);
	}
	// IS DISTINCT FROM: null-safe inequality (null vs null compares as not distinct).
	@Override
	public SqmPredicate distinctFrom(Expression<?> x, Expression<?> y) {
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.DISTINCT_FROM,
				(SqmExpression<?>) y,
				this
		);
	}
	@Override
	public SqmPredicate distinctFrom(Expression<?> x, Object y) {
		final SqmExpression<?> yExpr = value( y, (SqmExpression<?>) x );
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.DISTINCT_FROM,
				yExpr,
				this
		);
	}
	// IS NOT DISTINCT FROM: null-safe equality.
	@Override
	public SqmPredicate notDistinctFrom(Expression<?> x, Expression<?> y) {
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.NOT_DISTINCT_FROM,
				(SqmExpression<?>) y,
				this
		);
	}
	@Override
	public SqmPredicate notDistinctFrom(Expression<?> x, Object y) {
		final SqmExpression<?> yExpr = value( y, (SqmExpression<?>) x );
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.NOT_DISTINCT_FROM,
				yExpr,
				this
		);
	}
	// Ordering comparisons over Comparable operands; the literal overloads bind the RHS
	// using the LHS expression as the type-inference anchor.
	@Override
	public <Y extends Comparable<? super Y>> SqmPredicate greaterThan(Expression<? extends Y> x, Expression<? extends Y> y) {
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.GREATER_THAN,
				(SqmExpression<?>) y,
				this
		);
	}
	@Override
	public <Y extends Comparable<? super Y>> SqmPredicate greaterThan(Expression<? extends Y> x, Y y) {
		final SqmExpression<?> yExpr = value( y, (SqmExpression<?>) x );
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.GREATER_THAN,
				yExpr,
				this
		);
	}
	@Override
	public <Y extends Comparable<? super Y>> SqmPredicate greaterThanOrEqualTo(Expression<? extends Y> x, Expression<? extends Y> y) {
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.GREATER_THAN_OR_EQUAL,
				(SqmExpression<?>) y,
				this
		);
	}
	@Override
	public <Y extends Comparable<? super Y>> SqmPredicate greaterThanOrEqualTo(Expression<? extends Y> x, Y y) {
		final SqmExpression<?> yExpr = value( y, (SqmExpression<?>) x );
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.GREATER_THAN_OR_EQUAL,
				yExpr,
				this
		);
	}
	@Override
	public <Y extends Comparable<? super Y>> SqmPredicate lessThan(Expression<? extends Y> x, Expression<? extends Y> y) {
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.LESS_THAN,
				(SqmExpression<?>) y,
				this
		);
	}
	@Override
	public <Y extends Comparable<? super Y>> SqmPredicate lessThan(Expression<? extends Y> x, Y y) {
		final SqmExpression<?> yExpr = value( y, (SqmExpression<?>) x );
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.LESS_THAN,
				yExpr,
				this
		);
	}
	@Override
	public <Y extends Comparable<? super Y>> SqmPredicate lessThanOrEqualTo(Expression<? extends Y> x, Expression<? extends Y> y) {
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.LESS_THAN_OR_EQUAL,
				(SqmExpression<?>) y,
				this
		);
	}
	@Override
	public <Y extends Comparable<? super Y>> SqmPredicate lessThanOrEqualTo(Expression<? extends Y> x, Y y) {
		final SqmExpression<?> yExpr = value( y, (SqmExpression<?>) x );
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.LESS_THAN_OR_EQUAL,
				yExpr,
				this
		);
	}
	// Numeric shorthands (gt/ge/lt/le) mirroring the Comparable-based overloads above.
	@Override
	public SqmPredicate gt(Expression<? extends Number> x, Expression<? extends Number> y) {
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.GREATER_THAN,
				(SqmExpression<?>) y,
				this
		);
	}
	@Override
	public SqmPredicate gt(Expression<? extends Number> x, Number y) {
		final SqmExpression<?> yExpr = value( y, (SqmExpression<?>) x );
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.GREATER_THAN,
				yExpr,
				this
		);
	}
	@Override
	public SqmPredicate ge(Expression<? extends Number> x, Expression<? extends Number> y) {
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.GREATER_THAN_OR_EQUAL,
				(SqmExpression<?>) y,
				this
		);
	}
	@Override
	public SqmPredicate ge(Expression<? extends Number> x, Number y) {
		final SqmExpression<?> yExpr = value( y, (SqmExpression<?>) x );
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.GREATER_THAN_OR_EQUAL,
				yExpr,
				this
		);
	}
	@Override
	public SqmPredicate lt(Expression<? extends Number> x, Expression<? extends Number> y) {
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.LESS_THAN,
				(SqmExpression<?>) y,
				this
		);
	}
	@Override
	public SqmPredicate lt(Expression<? extends Number> x, Number y) {
		final SqmExpression<?> yExpr = value( y, (SqmExpression<?>) x );
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.LESS_THAN,
				yExpr,
				this
		);
	}
	@Override
	public SqmPredicate le(Expression<? extends Number> x, Expression<? extends Number> y) {
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.LESS_THAN_OR_EQUAL,
				(SqmExpression<?>) y,
				this
		);
	}
	@Override
	public SqmPredicate le(Expression<? extends Number> x, Number y) {
		final SqmExpression<?> yExpr = value( y, (SqmExpression<?>) x );
		return new SqmComparisonPredicate(
				(SqmExpression<?>) x,
				ComparisonOperator.LESS_THAN_OR_EQUAL,
				yExpr,
				this
		);
	}
	// IS [NOT] EMPTY over a plural path; the second constructor argument negates.
	@Override
	public <C extends Collection<?>> SqmPredicate isEmpty(Expression<C> collection) {
		return new SqmEmptinessPredicate( (SqmPluralValuedSimplePath<C>) collection, false, this );
	}
	@Override
	public <C extends Collection<?>> SqmPredicate isNotEmpty(Expression<C> collection) {
		return new SqmEmptinessPredicate( (SqmPluralValuedSimplePath<C>) collection, true, this );
	}
	// [NOT] MEMBER OF; literal-element overloads bind the element as a value first.
	@Override
	public <E, C extends Collection<E>> SqmPredicate isMember(Expression<E> elem, Expression<C> collection) {
		return createSqmMemberOfPredicate( (SqmExpression<?>) elem, (SqmPath<?>) collection, false);
	}
	@Override
	public <E, C extends Collection<E>> SqmPredicate isMember(E elem, Expression<C> collection) {
		return createSqmMemberOfPredicate( value( elem ), (SqmPath<?>) collection, false);
	}
	@Override
	public <E, C extends Collection<E>> SqmPredicate isNotMember(Expression<E> elem, Expression<C> collection) {
		return createSqmMemberOfPredicate( (SqmExpression<?>) elem, (SqmPath<?>) collection, true);
	}
	@Override
	public <E, C extends Collection<E>> SqmPredicate isNotMember(E elem, Expression<C> collection) {
		return createSqmMemberOfPredicate( value( elem ), (SqmPath<?>) collection, true);
	}
	// MEMBER OF only makes sense against a plural path; anything else is a semantic error.
	private SqmMemberOfPredicate createSqmMemberOfPredicate(SqmExpression<?> elem, SqmPath<?> collection, boolean negated) {
		if ( collection instanceof SqmPluralValuedSimplePath<?> pluralValuedSimplePath ) {
			return new SqmMemberOfPredicate( elem, pluralValuedSimplePath, negated, this );
		}
		else {
			throw new SemanticException( "Operand of 'member of' operator must be a plural path" );
		}
	}
	// LIKE family: overloads cover expression/literal patterns and optional escape characters.
	// Literal patterns and escapes are bound as values (inference anchored on the search string)
	// or as literals respectively.
	@Override
	public SqmPredicate like(Expression<String> searchString, Expression<String> pattern) {
		return new SqmLikePredicate(
				(SqmExpression<?>) searchString,
				(SqmExpression<?>) pattern,
				this
		);
	}
	@Override
	public SqmPredicate like(Expression<String> searchString, String pattern) {
		return new SqmLikePredicate(
				(SqmExpression<?>) searchString,
				value( pattern, (SqmExpression<?>) searchString ),
				this
		);
	}
	@Override
	public SqmPredicate like(Expression<String> searchString, Expression<String> pattern, Expression<Character> escapeChar) {
		return new SqmLikePredicate(
				(SqmExpression<?>) searchString,
				(SqmExpression<?>) pattern,
				(SqmExpression<?>) escapeChar,
				this
		);
	}
	@Override
	public SqmPredicate like(Expression<String> searchString, Expression<String> pattern, char escapeChar) {
		return new SqmLikePredicate(
				(SqmExpression<?>) searchString,
				(SqmExpression<?>) pattern,
				literal( escapeChar ),
				this
		);
	}
	@Override
	public SqmPredicate like(Expression<String> searchString, String pattern, Expression<Character> escapeChar) {
		return new SqmLikePredicate(
				(SqmExpression<?>) searchString,
				value( pattern, (SqmExpression<?>) searchString ),
				(SqmExpression<?>) escapeChar,
				this
		);
	}
	@Override
	public SqmPredicate like(Expression<String> searchString, String pattern, char escapeChar) {
		return new SqmLikePredicate(
				(SqmExpression<?>) searchString,
				value( pattern, (SqmExpression<?>) searchString ),
				literal( escapeChar ),
				this
		);
	}
	// ILIKE family: same shape as LIKE, but the two trailing booleans select the
	// case-insensitive variant of SqmLikePredicate.
	@Override
	public SqmPredicate ilike(Expression<String> searchString, Expression<String> pattern) {
		return new SqmLikePredicate(
				(SqmExpression<?>) searchString,
				(SqmExpression<?>) pattern,
				false,
				false,
				this
		);
	}
	@Override
	public SqmPredicate ilike(Expression<String> searchString, String pattern) {
		return new SqmLikePredicate(
				(SqmExpression<?>) searchString,
				value( pattern, (SqmExpression<?>) searchString ),
				false,
				false,
				this
		);
	}
	@Override
	public SqmPredicate ilike(
			Expression<String> searchString,
			Expression<String> pattern,
			Expression<Character> escapeChar) {
		return new SqmLikePredicate(
				(SqmExpression<?>) searchString,
				(SqmExpression<?>) pattern,
				(SqmExpression<?>) escapeChar,
				false,
				false,
				this
		);
	}
	@Override
	public SqmPredicate ilike(Expression<String> searchString, Expression<String> pattern, char escapeChar) {
		return new SqmLikePredicate(
				(SqmExpression<?>) searchString,
				(SqmExpression<?>) pattern,
				literal( escapeChar ),
				false,
				false,
				this
		);
	}
	@Override
	public SqmPredicate ilike(Expression<String> searchString, String pattern, Expression<Character> escapeChar) {
		return new SqmLikePredicate(
				(SqmExpression<?>) searchString,
				value( pattern, (SqmExpression<?>) searchString ),
				(SqmExpression<?>) escapeChar,
				false,
				false,
				this
		);
	}
	@Override
	public SqmPredicate ilike(Expression<String> searchString, String pattern, char escapeChar) {
		return new SqmLikePredicate(
				(SqmExpression<?>) searchString,
				value( pattern, (SqmExpression<?>) searchString ),
				literal( escapeChar ),
				false,
				false,
				this
		);
	}
@Override
public SqmPredicate notLike(Expression<String> x, Expression<String> pattern) {
return not( like( x, pattern ) );
}
@Override
public SqmPredicate notLike(Expression<String> x, String pattern) {
return not( like( x, pattern ) );
}
@Override
public SqmPredicate notLike(Expression<String> x, Expression<String> pattern, Expression<Character> escapeChar) {
return not( like( x, pattern, escapeChar ) );
}
@Override
public SqmPredicate notLike(Expression<String> x, Expression<String> pattern, char escapeChar) {
return not( like( x, pattern, escapeChar ) );
}
@Override
public SqmPredicate notLike(Expression<String> x, String pattern, Expression<Character> escapeChar) {
return not( like( x, pattern, escapeChar ) );
}
@Override
public SqmPredicate notLike(Expression<String> x, String pattern, char escapeChar) {
return not( like( x, pattern, escapeChar ) );
}
@Override
public SqmPredicate notIlike(Expression<String> x, Expression<String> pattern) {
return not( ilike( x, pattern ) );
}
@Override
public SqmPredicate notIlike(Expression<String> x, String pattern) {
return not( ilike( x, pattern ) );
}
@Override
public SqmPredicate notIlike(Expression<String> x, Expression<String> pattern, Expression<Character> escapeChar) {
return not( ilike( x, pattern, escapeChar ) );
}
@Override
public SqmPredicate notIlike(Expression<String> x, Expression<String> pattern, char escapeChar) {
return not( ilike( x, pattern, escapeChar ) );
}
@Override
public SqmPredicate notIlike(Expression<String> x, String pattern, Expression<Character> escapeChar) {
return not( ilike( x, pattern, escapeChar ) );
}
@Override
public SqmPredicate notIlike(Expression<String> x, String pattern, char escapeChar) {
return not( ilike( x, pattern, escapeChar ) );
}
	// Regex predicates are modeled as boolean-valued calls to the "regexp_like" function;
	// the "i" flag literal selects case-insensitive matching, and the extra boolean argument
	// on the "not" variants negates the wrapped expression.
	@Override
	public JpaPredicate likeRegexp(Expression<String> x, String pattern) {
		return new SqmBooleanExpressionPredicate(
				getFunctionDescriptor( "regexp_like" )
						.generateSqmExpression(
								asList( (SqmExpression<String>) x,
										literal( pattern ) ),
								null,
								getQueryEngine()
						),
				this
		);
	}
	@Override
	public JpaPredicate ilikeRegexp(Expression<String> x, String pattern) {
		return new SqmBooleanExpressionPredicate(
				getFunctionDescriptor( "regexp_like" )
						.generateSqmExpression(
								asList( (SqmExpression<String>) x,
										literal( pattern ),
										literal( "i" ) ),
								null,
								getQueryEngine()
						),
				this
		);
	}
	@Override
	public JpaPredicate notLikeRegexp(Expression<String> x, String pattern) {
		return new SqmBooleanExpressionPredicate(
				getFunctionDescriptor( "regexp_like" )
						.generateSqmExpression(
								asList( (SqmExpression<String>) x,
										literal( pattern ) ),
								null,
								getQueryEngine()
						),
				true,
				this
		);
	}
	@Override
	public JpaPredicate notIlikeRegexp(Expression<String> x, String pattern) {
		return new SqmBooleanExpressionPredicate(
				getFunctionDescriptor( "regexp_like" )
						.generateSqmExpression(
								asList( (SqmExpression<String>) x,
										literal( pattern ),
										literal( "i" ) ),
								null,
								getQueryEngine()
						),
				true,
				this
		);
	}
	// IN family: literal values are bound using the tested expression as the
	// type-inference anchor; the subquery overload produces an IN (subquery) predicate.
	@Override
	@SuppressWarnings("unchecked")
	public <T> SqmInPredicate<T> in(Expression<? extends T> expression) {
		return new SqmInListPredicate<>( (SqmExpression<T>) expression, this );
	}
	@Override
	@SuppressWarnings("unchecked")
	public <T> SqmInPredicate<T> in(Expression<? extends T> expression, Expression<? extends T>... values) {
		final List<SqmExpression<T>> listExpressions = new ArrayList<>( values.length );
		for ( Expression<? extends T> value : values ) {
			listExpressions.add( (SqmExpression<T>) value );
		}
		return new SqmInListPredicate<>( (SqmExpression<T>) expression, listExpressions, this );
	}
	@Override
	@SuppressWarnings("unchecked")
	public <T> SqmInPredicate<T> in(Expression<? extends T> expression, T... values) {
		final SqmExpression<T> sqmExpression = (SqmExpression<T>) expression;
		final List<SqmExpression<T>> listExpressions = new ArrayList<>( values.length );
		for ( T value : values ) {
			listExpressions.add( value( value, sqmExpression ) );
		}
		return new SqmInListPredicate<>( sqmExpression, listExpressions, this );
	}
	@Override
	@SuppressWarnings("unchecked")
	public <T> SqmInPredicate<T> in(Expression<? extends T> expression, Collection<T> values) {
		final SqmExpression<T> sqmExpression = (SqmExpression<T>) expression;
		final List<SqmExpression<T>> listExpressions = new ArrayList<>( values.size() );
		for ( T value : values ) {
			listExpressions.add( value( value, sqmExpression ) );
		}
		return new SqmInListPredicate<>( sqmExpression, listExpressions, this );
	}
	@Override
	@SuppressWarnings("unchecked")
	public <T> SqmInPredicate<T> in(Expression<? extends T> expression, SqmSubQuery<T> subQuery) {
		return new SqmInSubQueryPredicate<>( (SqmExpression<T>) expression, subQuery, this );
	}
	// EXISTS (subquery).
	@Override
	public SqmPredicate exists(Subquery<?> subQuery) {
		return new SqmExistsPredicate( (SqmExpression<?>) subQuery, this );
	}
	// Map emptiness over a plural path; the boolean argument negates.
	@Override
	public <M extends Map<?, ?>> SqmPredicate isMapEmpty(JpaExpression<M> mapExpression) {
		return new SqmEmptinessPredicate( (SqmPluralValuedSimplePath<?>) mapExpression, false, this );
	}
	@Override
	public <M extends Map<?, ?>> SqmPredicate isMapNotEmpty(JpaExpression<M> mapExpression) {
		return new SqmEmptinessPredicate( (SqmPluralValuedSimplePath<?>) mapExpression, true, this );
	}
	/**
	 * Custom serialization hook defined by Java spec. Used when the node builder is directly deserialized.
	 * Here we resolve the uuid/name read from the stream previously to resolve the SessionFactory
	 * instance to use based on the registrations with the {@link SessionFactoryRegistry}
	 *
	 * @return The resolved node builder to use.
	 *
	 * @throws InvalidObjectException Thrown if we could not resolve the factory by uuid/name.
	 */
	@Serial
	private Object readResolve() throws InvalidObjectException {
		CORE_LOGGER.trace( "Resolving serialized SqmCriteriaNodeBuilder" );
		return locateSessionFactoryOnDeserialization( uuid, name ).getCriteriaBuilder();
	}
	/**
	 * Resolves the {@code SessionFactory} for a deserialized builder: first by UUID, then —
	 * to cover deserialization in a different JVM — by registered name, if one was given.
	 *
	 * @throws InvalidObjectException if neither lookup succeeds.
	 */
	private static SessionFactory locateSessionFactoryOnDeserialization(String uuid, String name) throws InvalidObjectException{
		final SessionFactory uuidResult = SessionFactoryRegistry.INSTANCE.getSessionFactory( uuid );
		if ( uuidResult != null ) {
			CORE_LOGGER.tracef( "Resolved SessionFactory by UUID [%s]", uuid );
			return uuidResult;
		}
		// in case we were deserialized in a different JVM, look for an instance with the same name
		// (provided we were given a name)
		if ( name != null ) {
			final SessionFactory namedResult = SessionFactoryRegistry.INSTANCE.getNamedSessionFactory( name );
			if ( namedResult != null ) {
				CORE_LOGGER.tracef( "Resolved SessionFactory by name [%s]", name );
				return namedResult;
			}
		}
		throw new InvalidObjectException( "Could not find a SessionFactory [uuid=" + uuid + ",name=" + name + "]" );
	}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Non-standard HQL functions
@Override
public <T> SqmFunction<T> sql(String pattern, Class<T> type, Expression<?>... arguments) {
final List<SqmExpression<?>> sqmArguments = new ArrayList<>( expressionList( arguments ) );
sqmArguments.add( 0, literal( pattern ) );
return getFunctionDescriptor( "sql" ).generateSqmExpression(
sqmArguments,
getTypeConfiguration().standardBasicTypeForJavaType( type ),
queryEngine
);
}
@Override
public SqmFunction<String> format(Expression<? extends TemporalAccessor> datetime, String pattern) {
final SqmFormat sqmFormat = new SqmFormat( pattern, getStringType(), this );
return getFunctionDescriptor( "format" ).generateSqmExpression(
asList( (SqmExpression<?>) datetime, sqmFormat ),
null,
getQueryEngine()
);
}
	@Override
	public <N, T extends Temporal> SqmExpression<N> extract(TemporalField<N, T> field, Expression<T> temporal) {
		// Maps the JPA TemporalField (matched via its string form) to Hibernate's
		// TemporalUnit. Most fields extract an Integer; "second" extracts a Double
		// (presumably to allow fractional seconds — note the Float-returning
		// second() shortcut below uses a different result type), and "date"/"time"
		// extract the whole LocalDate/LocalTime part.
		Class<?> resultType = Integer.class;
		final TemporalUnit temporalUnit;
		switch ( field.toString() ) {
			case "year":
				temporalUnit = TemporalUnit.YEAR;
				break;
			case "quarter":
				temporalUnit = TemporalUnit.QUARTER;
				break;
			case "month":
				temporalUnit = TemporalUnit.MONTH;
				break;
			case "week":
				temporalUnit = TemporalUnit.WEEK;
				break;
			case "day":
				temporalUnit = TemporalUnit.DAY;
				break;
			case "hour":
				temporalUnit = TemporalUnit.HOUR;
				break;
			case "minute":
				temporalUnit = TemporalUnit.MINUTE;
				break;
			case "second":
				temporalUnit = TemporalUnit.SECOND;
				resultType = Double.class;
				break;
			case "date":
				temporalUnit = TemporalUnit.DATE;
				resultType = LocalDate.class;
				break;
			case "time":
				temporalUnit = TemporalUnit.TIME;
				resultType = LocalTime.class;
				break;
			default:
				throw new IllegalArgumentException( "Invalid temporal field [" + field + "]" );
		}
		// Cast is safe by the TemporalField contract pairing field and result type.
		//noinspection unchecked
		return extract( temporal, temporalUnit, (Class<N>) resultType );
	}
	// Shared implementation for all extract-style functions: builds
	// extract(<unit> from <datetime>) with the requested result type.
	private <T> SqmFunction<T> extract(
			Expression<? extends TemporalAccessor> datetime,
			TemporalUnit temporalUnit,
			Class<T> type) {
		return getFunctionDescriptor( "extract" ).generateSqmExpression(
				asList(
						new SqmExtractUnit<>(
								temporalUnit,
								getTypeConfiguration().standardBasicTypeForJavaType( type ),
								this
						),
						(SqmTypedNode<?>) datetime
				),
				null,
				queryEngine
		);
	}
	// Convenience shortcuts for extracting the common datetime fields.
	@Override
	public SqmFunction<Integer> year(Expression<? extends TemporalAccessor> datetime) {
		return extract( datetime, TemporalUnit.YEAR, Integer.class );
	}
	@Override
	public SqmFunction<Integer> month(Expression<? extends TemporalAccessor> datetime) {
		return extract( datetime, TemporalUnit.MONTH, Integer.class );
	}
	@Override
	public SqmFunction<Integer> day(Expression<? extends TemporalAccessor> datetime) {
		return extract( datetime, TemporalUnit.DAY, Integer.class );
	}
	@Override
	public SqmFunction<Integer> hour(Expression<? extends TemporalAccessor> datetime) {
		return extract( datetime, TemporalUnit.HOUR, Integer.class );
	}
	@Override
	public SqmFunction<Integer> minute(Expression<? extends TemporalAccessor> datetime) {
		return extract( datetime, TemporalUnit.MINUTE, Integer.class );
	}
	// NOTE: returns Float here, while extract(TemporalField) maps "second" to Double.
	@Override
	public SqmFunction<Float> second(Expression<? extends TemporalAccessor> datetime) {
		return extract( datetime, TemporalUnit.SECOND, Float.class );
	}
	@Override
	public <T extends TemporalAccessor> SqmFunction<T> truncate(Expression<T> datetime, TemporalUnit temporalUnit) {
		// Truncates a datetime to the given unit; note the underlying function
		// descriptor is registered as "trunc", not "truncate".
		return getFunctionDescriptor( "trunc" ).generateSqmExpression(
				asList(
						(SqmTypedNode<?>) datetime,
						new SqmExtractUnit<>( temporalUnit, getIntegerType(), this )
				),
				null,
				queryEngine
		);
	}
	// overlay(string PLACING replacement FROM start [FOR length]) — all overloads
	// below wrap literal arguments with value(...) and delegate to the core
	// overlay(Expression, Expression, Expression, Expression) implementation,
	// passing null when no length was given.
	@Override
	public SqmFunction<String> overlay(Expression<String> string, String replacement, int start) {
		return overlay( string, replacement, value( start ), null );
	}
	@Override
	public SqmFunction<String> overlay(Expression<String> string, Expression<String> replacement, int start) {
		return overlay( string, replacement, value( start ), null );
	}
	@Override
	public SqmFunction<String> overlay(Expression<String> string, String replacement, Expression<Integer> start) {
		return overlay( string, value( replacement ), start, null );
	}
	@Override
	public SqmFunction<String> overlay(
			Expression<String> string,
			Expression<String> replacement,
			Expression<Integer> start) {
		return overlay( string, replacement, start, null );
	}
	@Override
	public SqmFunction<String> overlay(Expression<String> string, String replacement, int start, int length) {
		return overlay( string, value( replacement ), value( start ), value( length ) );
	}
	@Override
	public SqmFunction<String> overlay(
			Expression<String> string,
			Expression<String> replacement,
			int start,
			int length) {
		return overlay( string, replacement, value( start ), value( length ) );
	}
	@Override
	public SqmFunction<String> overlay(
			Expression<String> string,
			String replacement,
			Expression<Integer> start,
			int length) {
		return overlay( string, value( replacement ), start, value( length ) );
	}
	@Override
	public SqmFunction<String> overlay(
			Expression<String> string,
			Expression<String> replacement,
			Expression<Integer> start,
			int length) {
		return overlay( string, replacement, start, value( length ) );
	}
	@Override
	public SqmFunction<String> overlay(
			Expression<String> string,
			String replacement,
			int start,
			Expression<Integer> length) {
		return overlay( string, value( replacement ), value( start ), length );
	}
	@Override
	public SqmFunction<String> overlay(
			Expression<String> string,
			Expression<String> replacement,
			int start,
			Expression<Integer> length) {
		return overlay( string, replacement, value( start ), length );
	}
	@Override
	public SqmFunction<String> overlay(
			Expression<String> string,
			String replacement,
			Expression<Integer> start,
			Expression<Integer> length) {
		return overlay( string, value( replacement ), start, length );
	}
@Override
public SqmFunction<String> overlay(
Expression<String> string,
Expression<String> replacement,
Expression<Integer> start,
@Nullable Expression<Integer> length) {
SqmExpression<String> sqmString = (SqmExpression<String>) string;
SqmExpression<String> sqmReplacement = (SqmExpression<String>) replacement;
SqmExpression<Integer> sqmStart = (SqmExpression<Integer>) start;
return getFunctionDescriptor( "overlay" ).generateSqmExpression(
( length == null
? asList( sqmString, sqmReplacement, sqmStart )
: asList( sqmString, sqmReplacement, sqmStart, (SqmExpression<Integer>) length ) ),
null,
getQueryEngine()
);
}
	// pad(x, length [, padChar]) with an optional Trimspec — all overloads wrap
	// literal arguments with value(...) and delegate to the core
	// pad(Trimspec, Expression, Expression, Expression) implementation; null
	// Trimspec / pad character mean "use the defaults".
	@Override
	public SqmFunction<String> pad(Expression<String> x, int length) {
		return pad( null, x, value( length ), null );
	}
	@Override
	public SqmFunction<String> pad(Trimspec ts, Expression<String> x, int length) {
		return pad( ts, x, value( length ), null );
	}
	@Override
	public SqmFunction<String> pad(Expression<String> x, Expression<Integer> length) {
		return pad( null, x, length, null );
	}
	@Override
	public SqmFunction<String> pad(Trimspec ts, Expression<String> x, Expression<Integer> length) {
		return pad( ts, x, length, null );
	}
	@Override
	public SqmFunction<String> pad(Expression<String> x, int length, char padChar) {
		return pad( null, x, value( length ), value( padChar ) );
	}
	@Override
	public SqmFunction<String> pad(Trimspec ts, Expression<String> x, int length, char padChar) {
		return pad( ts, x, value( length ), value( padChar ) );
	}
	@Override
	public SqmFunction<String> pad(Expression<String> x, int length, Expression<Character> padChar) {
		return pad( null, x, value( length ), padChar );
	}
	@Override
	public SqmFunction<String> pad(Trimspec ts, Expression<String> x, int length, Expression<Character> padChar) {
		return pad( ts, x, value( length ), padChar );
	}
	@Override
	public SqmFunction<String> pad(Expression<String> x, Expression<Integer> length, char padChar) {
		return pad( null, x, length, value( padChar ) );
	}
	@Override
	public SqmFunction<String> pad(Trimspec ts, Expression<String> x, Expression<Integer> length, char padChar) {
		return pad( ts, x, length, value( padChar ) );
	}
	@Override
	public SqmFunction<String> pad(Expression<String> x, Expression<Integer> length, Expression<Character> padChar) {
		return pad( null, x, length, padChar );
	}
@Override
public SqmFunction<String> pad(
@Nullable Trimspec ts,
Expression<String> x,
Expression<Integer> length,
@Nullable Expression<Character> padChar) {
SqmExpression<String> source = (SqmExpression<String>) x;
SqmExpression<Integer> sqmLength = (SqmExpression<Integer>) length;
SqmTrimSpecification padSpec = new SqmTrimSpecification(
ts == null ? TrimSpec.TRAILING : fromCriteriaTrimSpec( ts ),
this
);
return getFunctionDescriptor( "pad" ).generateSqmExpression(
padChar != null
? asList( source, sqmLength, padSpec, (SqmExpression<Character>) padChar )
: asList( source, sqmLength, padSpec ),
null,
getQueryEngine()
);
}
@Override
public JpaFunction<String> repeat(Expression<String> x, Expression<Integer> times) {
return getFunctionDescriptor( "repeat" ).generateSqmExpression(
asList( (SqmExpression<String>) x, (SqmExpression<Integer>) times ),
null,
getQueryEngine()
);
}
@Override
public JpaFunction<String> repeat(Expression<String> x, int times) {
return repeat( x, value( times ) );
}
@Override
public JpaFunction<String> repeat(String x, Expression<Integer> times) {
return repeat( value( x), times );
}
	// left(x, length) / right(x, length) — leading/trailing substring functions.
	@Override
	public SqmFunction<String> left(Expression<String> x, int length) {
		return left( x, value( length ) );
	}
	@Override
	public SqmFunction<String> left(Expression<String> x, Expression<Integer> length) {
		return getFunctionDescriptor( "left" ).generateSqmExpression(
				asList( (SqmExpression<String>) x, (SqmExpression<Integer>) length ),
				null,
				getQueryEngine()
		);
	}
	@Override
	public SqmFunction<String> right(Expression<String> x, int length) {
		return right( x, value( length ) );
	}
	@Override
	public SqmFunction<String> right(Expression<String> x, Expression<Integer> length) {
		return getFunctionDescriptor( "right" ).generateSqmExpression(
				asList( (SqmExpression<String>) x, (SqmExpression<Integer>) length ),
				null,
				getQueryEngine()
		);
	}
	// replace(x, pattern, replacement) — literal overloads wrap their arguments
	// with value(...) and delegate to the all-Expression form.
	@Override
	public SqmFunction<String> replace(Expression<String> x, String pattern, String replacement) {
		// The pattern expression is passed to value(...) so the replacement
		// literal picks up its type inference.
		SqmExpression<String> sqmPattern = value( pattern );
		return replace( x, sqmPattern, value( replacement, sqmPattern ) );
	}
	@Override
	public SqmFunction<String> replace(Expression<String> x, String pattern, Expression<String> replacement) {
		return replace( x, value( pattern ), replacement );
	}
	@Override
	public SqmFunction<String> replace(Expression<String> x, Expression<String> pattern, String replacement) {
		return replace( x, pattern, value( replacement ) );
	}
	@Override
	public SqmFunction<String> replace(
			Expression<String> x,
			Expression<String> pattern,
			Expression<String> replacement) {
		return getFunctionDescriptor( "replace" ).generateSqmExpression(
				asList(
						(SqmExpression<String>) x,
						(SqmExpression<String>) pattern,
						(SqmExpression<String>) replacement
				),
				null,
				getQueryEngine()
		);
	}
@Override
public SqmFunction<String> collate(Expression<String> x, String collation) {
final SqmCollation sqmCollation = new SqmCollation( collation, null, this );
return getFunctionDescriptor( "collate" ).generateSqmExpression(
asList( (SqmExpression<String>) x, sqmCollation ),
null,
getQueryEngine()
);
}
	// Standard math functions — each delegates to the like-named SQM function
	// descriptor; Number-literal overloads wrap the literal with value(...).
	@Override
	public SqmFunction<Double> log10(Expression<? extends Number> x) {
		return getFunctionDescriptor( "log10" ).generateSqmExpression(
				(SqmTypedNode<?>) x,
				null,
				queryEngine
		);
	}
	@Override
	public SqmFunction<Double> log(Number b, Expression<? extends Number> x) {
		return log( value( b ), x );
	}
	@Override
	public SqmFunction<Double> log(Expression<? extends Number> b, Expression<? extends Number> x) {
		// log of x to base b (argument order: base first).
		return getFunctionDescriptor( "log" ).generateSqmExpression(
				asList( (SqmTypedNode<?>) b, (SqmTypedNode<?>) x ),
				null,
				queryEngine
		);
	}
	@Override
	public SqmFunction<Double> pi() {
		// Zero-argument function.
		return getFunctionDescriptor( "pi" ).generateSqmExpression(
				null,
				queryEngine
		);
	}
	@Override
	public SqmFunction<Double> sin(Expression<? extends Number> x) {
		return getFunctionDescriptor( "sin" ).generateSqmExpression(
				(SqmTypedNode<?>) x,
				null,
				queryEngine
		);
	}
	@Override
	public SqmFunction<Double> cos(Expression<? extends Number> x) {
		return getFunctionDescriptor( "cos" ).generateSqmExpression(
				(SqmTypedNode<?>) x,
				null,
				queryEngine
		);
	}
	@Override
	public SqmFunction<Double> tan(Expression<? extends Number> x) {
		return getFunctionDescriptor( "tan" ).generateSqmExpression(
				(SqmTypedNode<?>) x,
				null,
				queryEngine
		);
	}
	@Override
	public SqmFunction<Double> asin(Expression<? extends Number> x) {
		return getFunctionDescriptor( "asin" ).generateSqmExpression(
				(SqmTypedNode<?>) x,
				null,
				queryEngine
		);
	}
	@Override
	public SqmFunction<Double> acos(Expression<? extends Number> x) {
		return getFunctionDescriptor( "acos" ).generateSqmExpression(
				(SqmTypedNode<?>) x,
				null,
				queryEngine
		);
	}
	@Override
	public SqmFunction<Double> atan(Expression<? extends Number> x) {
		return getFunctionDescriptor( "atan" ).generateSqmExpression(
				(SqmTypedNode<?>) x,
				null,
				queryEngine
		);
	}
	@Override
	public SqmFunction<Double> atan2(Number y, Expression<? extends Number> x) {
		return atan2( value( y ), x );
	}
	@Override
	public SqmFunction<Double> atan2(Expression<? extends Number> y, Number x) {
		return atan2( y, value( x ) );
	}
	@Override
	public SqmFunction<Double> atan2(Expression<? extends Number> y, Expression<? extends Number> x) {
		// Two-argument arctangent (y first, then x).
		return getFunctionDescriptor( "atan2" ).generateSqmExpression(
				asList( (SqmTypedNode<?>) y, (SqmTypedNode<?>) x ),
				null,
				queryEngine
		);
	}
	@Override
	public SqmFunction<Double> sinh(Expression<? extends Number> x) {
		return getFunctionDescriptor( "sinh" ).generateSqmExpression(
				(SqmTypedNode<?>) x,
				null,
				queryEngine
		);
	}
	@Override
	public SqmFunction<Double> cosh(Expression<? extends Number> x) {
		return getFunctionDescriptor( "cosh" ).generateSqmExpression(
				(SqmTypedNode<?>) x,
				null,
				queryEngine
		);
	}
	@Override
	public SqmFunction<Double> tanh(Expression<? extends Number> x) {
		return getFunctionDescriptor( "tanh" ).generateSqmExpression(
				(SqmTypedNode<?>) x,
				null,
				queryEngine
		);
	}
	@Override
	public SqmFunction<Double> degrees(Expression<? extends Number> x) {
		return getFunctionDescriptor( "degrees" ).generateSqmExpression(
				(SqmTypedNode<?>) x,
				null,
				queryEngine
		);
	}
	@Override
	public SqmFunction<Double> radians(Expression<? extends Number> x) {
		return getFunctionDescriptor( "radians" ).generateSqmExpression(
				(SqmTypedNode<?>) x,
				null,
				queryEngine
		);
	}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Window functions
	// Window-specification factories: an empty window, and the frame-boundary
	// nodes used to build RANGE/ROWS frames.
	@Override
	public SqmWindow createWindow() {
		return new SqmWindow( this );
	}
	@Override
	public SqmWindowFrame frameUnboundedPreceding() {
		return new SqmWindowFrame( this, FrameKind.UNBOUNDED_PRECEDING );
	}
	@Override
	public SqmWindowFrame frameBetweenPreceding(int offset) {
		return new SqmWindowFrame( this, FrameKind.OFFSET_PRECEDING, literal( offset ) );
	}
	@Override
	public SqmWindowFrame frameBetweenPreceding(Expression<?> offset) {
		return new SqmWindowFrame( this, FrameKind.OFFSET_PRECEDING, (SqmExpression<?>) offset );
	}
	@Override
	public SqmWindowFrame frameCurrentRow() {
		return new SqmWindowFrame( this, FrameKind.CURRENT_ROW );
	}
	@Override
	public SqmWindowFrame frameBetweenFollowing(int offset) {
		return new SqmWindowFrame( this, FrameKind.OFFSET_FOLLOWING, literal( offset ) );
	}
	@Override
	public SqmWindowFrame frameBetweenFollowing(Expression<?> offset) {
		return new SqmWindowFrame( this, FrameKind.OFFSET_FOLLOWING, (SqmExpression<?>) offset );
	}
	@Override
	public SqmWindowFrame frameUnboundedFollowing() {
		return new SqmWindowFrame( this, FrameKind.UNBOUNDED_FOLLOWING );
	}
	// Generic entry point for any window function: builds the named function
	// over the given arguments and wraps it in an OVER (window) node.
	@Override
	public <T> SqmExpression<T> windowFunction(String name, Class<T> type, JpaWindow window, Expression<?>... args) {
		SqmExpression<T> function = getFunctionDescriptor( name ).generateSqmExpression(
				expressionList( args ),
				null,
				queryEngine
		);
		return new SqmOver<>( function, (SqmWindow) window );
	}
}
	// Convenience wrappers around windowFunction(...) for the standard window
	// functions. The unchecked casts rely on the argument's Java type matching
	// the function's result type.
	@Override
	public SqmExpression<Long> rowNumber(JpaWindow window) {
		return windowFunction( "row_number", Long.class, window );
	}
	@Override
	@SuppressWarnings("unchecked")
	public <T> SqmExpression<T> firstValue(Expression<T> argument, JpaWindow window) {
		return (SqmExpression<T>) windowFunction( "first_value", argument.getJavaType(), window, argument );
	}
	@Override
	@SuppressWarnings("unchecked")
	public <T> SqmExpression<T> lastValue(Expression<T> argument, JpaWindow window) {
		return (SqmExpression<T>) windowFunction( "last_value", argument.getJavaType(), window, argument );
	}
	@Override
	public <T> SqmExpression<T> nthValue(Expression<T> argument, int n, JpaWindow window) {
		return nthValue( argument, literal( n ), window );
	}
	@Override
	@SuppressWarnings("unchecked")
	public <T> SqmExpression<T> nthValue(Expression<T> argument, Expression<Integer> n, JpaWindow window) {
		return (SqmExpression<T>) windowFunction( "nth_value", argument.getJavaType(), window, argument, n );
	}
	@Override
	public SqmExpression<Long> rank(JpaWindow window) {
		return windowFunction( "rank", Long.class, window );
	}
	@Override
	public SqmExpression<Long> denseRank(JpaWindow window) {
		return windowFunction( "dense_rank", Long.class, window );
	}
	@Override
	public SqmExpression<Double> percentRank(JpaWindow window) {
		return windowFunction( "percent_rank", Double.class, window );
	}
	@Override
	public SqmExpression<Double> cumeDist(JpaWindow window) {
		return windowFunction( "cume_dist", Double.class, window );
	}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Aggregate functions
	// functionAggregate(...) overloads — delegate to the core form below, passing
	// null for whichever of filter/window was not supplied.
	@Override
	public <T> SqmExpression<T> functionAggregate(
			String name,
			Class<T> type,
			JpaPredicate filter,
			Expression<?>... args) {
		return functionAggregate( name, type, filter, null, args );
	}
	@Override
	public <T> SqmExpression<T> functionAggregate(
			String name,
			Class<T> type,
			JpaWindow window,
			Expression<?>... args) {
		return functionAggregate( name, type, null, window, args );
	}
@Override
public <T> SqmExpression<T> functionAggregate(
String name,
Class<T> type,
JpaPredicate filter,
JpaWindow window,
Expression<?>... args) {
SqmPredicate sqmFilter = filter != null ? (SqmPredicate) filter : null;
SqmExpression<T> function = getFunctionDescriptor( name ).generateAggregateSqmExpression(
expressionList( args ),
sqmFilter,
null,
queryEngine
);
if ( window == null ) {
return function;
}
else {
return new SqmOver<>( function, (SqmWindow) window );
}
}
	// Filter/window-aware variants of the basic aggregates, all delegating to
	// functionAggregate(...).
	@Override
	public <N extends Number> SqmExpression<Number> sum(Expression<N> argument, JpaPredicate filter) {
		return sum( argument, filter, null );
	}
	@Override
	public <N extends Number> SqmExpression<Number> sum(Expression<N> argument, JpaWindow window) {
		return sum( argument, null, window );
	}
	@Override
	public <N extends Number> SqmExpression<Number> sum(Expression<N> argument, JpaPredicate filter, JpaWindow window) {
		return functionAggregate( "sum", Number.class, filter, window, argument );
	}
	@Override
	public <N extends Number> SqmExpression<Double> avg(Expression<N> argument, JpaPredicate filter) {
		return avg( argument, filter, null );
	}
	@Override
	public <N extends Number> SqmExpression<Double> avg(Expression<N> argument, JpaWindow window) {
		return avg( argument, null, window );
	}
	@Override
	public <N extends Number> SqmExpression<Double> avg(Expression<N> argument, JpaPredicate filter, JpaWindow window) {
		return functionAggregate( "avg", Double.class, filter, window, argument );
	}
	@Override
	public SqmExpression<Long> count(Expression<?> argument, JpaPredicate filter) {
		return count( argument, filter, null );
	}
	@Override
	public SqmExpression<Long> count(Expression<?> argument, JpaWindow window) {
		return count( argument, null, window );
	}
	@Override
	public SqmExpression<Long> count(Expression<?> argument, JpaPredicate filter, JpaWindow window) {
		return functionAggregate( "count", Long.class, filter, window, argument );
	}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Ordered-Set Aggregate functions
	// functionWithinGroup(...) overloads — delegate to the core form below,
	// passing null for whichever of filter/window was not supplied.
	@Override
	public <T> SqmExpression<T> functionWithinGroup(String name, Class<T> type, JpaOrder order, Expression<?>... args) {
		return functionWithinGroup( name, type, order, null, null, args );
	}
	@Override
	public <T> SqmExpression<T> functionWithinGroup(
			String name,
			Class<T> type,
			JpaOrder order,
			JpaPredicate filter,
			Expression<?>... args) {
		return functionWithinGroup( name, type, order, filter, null, args );
	}
	@Override
	public <T> SqmExpression<T> functionWithinGroup(
			String name,
			Class<T> type,
			JpaOrder order,
			JpaWindow window,
			Expression<?>... args) {
		return functionWithinGroup( name, type, order, null, window, args );
	}
@Override
public <T> SqmExpression<T> functionWithinGroup(
String name,
Class<T> type,
@Nullable JpaOrder order,
@Nullable JpaPredicate filter,
@Nullable JpaWindow window,
Expression<?>... args) {
SqmOrderByClause withinGroupClause = new SqmOrderByClause();
if ( order != null ) {
withinGroupClause.addSortSpecification( (SqmSortSpecification) order );
}
SqmPredicate sqmFilter = filter != null ? (SqmPredicate) filter : null;
SqmExpression<T> function = getFunctionDescriptor( name ).generateOrderedSetAggregateSqmExpression(
expressionList( args ),
sqmFilter,
withinGroupClause,
null,
queryEngine
);
if ( window == null ) {
return function;
}
else {
return new SqmOver<>( function, (SqmWindow) window );
}
}
	// listagg(argument, separator) WITHIN GROUP (order) — all overloads funnel
	// into the final Expression-separator form, which delegates to
	// functionWithinGroup(...).
	@Override
	public SqmExpression<String> listagg(JpaOrder order, Expression<String> argument, String separator) {
		return listagg( order, null, null, argument, separator );
	}
	@Override
	public SqmExpression<String> listagg(JpaOrder order, Expression<String> argument, Expression<String> separator) {
		return listagg( order, null, null, argument, separator );
	}
	@Override
	public SqmExpression<String> listagg(
			JpaOrder order,
			JpaPredicate filter,
			Expression<String> argument,
			String separator) {
		return listagg( order, filter, null, argument, separator );
	}
	@Override
	public SqmExpression<String> listagg(
			JpaOrder order,
			JpaPredicate filter,
			Expression<String> argument,
			Expression<String> separator) {
		return listagg( order, filter, null, argument, separator );
	}
	@Override
	public SqmExpression<String> listagg(
			JpaOrder order,
			JpaWindow window,
			Expression<String> argument,
			String separator) {
		return listagg( order, null, window, argument, separator );
	}
	@Override
	public SqmExpression<String> listagg(
			JpaOrder order,
			JpaWindow window,
			Expression<String> argument,
			Expression<String> separator) {
		return listagg( order, null, window, argument, separator );
	}
	@Override
	public SqmExpression<String> listagg(
			JpaOrder order,
			@Nullable JpaPredicate filter,
			@Nullable JpaWindow window,
			Expression<String> argument,
			String separator) {
		// Literal separators become literal expressions, not parameters.
		return listagg( order, filter, window, argument, literal( separator ) );
	}
	@Override
	public SqmExpression<String> listagg(
			JpaOrder order,
			@Nullable JpaPredicate filter,
			@Nullable JpaWindow window,
			Expression<String> argument,
			Expression<String> separator) {
		return functionWithinGroup( "listagg", String.class, order, filter, window, argument, separator );
	}
	// mode() WITHIN GROUP (ORDER BY sortExpression) — overloads delegate to the
	// final form, which builds the sort specification and calls
	// functionWithinGroup(...).
	@Override
	public <T> SqmExpression<T> mode(Expression<T> sortExpression, SortDirection sortOrder, Nulls nullPrecedence) {
		return mode( null, null, sortExpression, sortOrder, nullPrecedence );
	}
	@Override
	public <T> SqmExpression<T> mode(
			JpaPredicate filter,
			Expression<T> sortExpression,
			SortDirection sortOrder,
			Nulls nullPrecedence) {
		return mode( filter, null, sortExpression, sortOrder, nullPrecedence );
	}
	@Override
	public <T> SqmExpression<T> mode(
			JpaWindow window,
			Expression<T> sortExpression,
			SortDirection sortOrder,
			Nulls nullPrecedence) {
		return mode( null, window, sortExpression, sortOrder, nullPrecedence );
	}
	@Override
	@SuppressWarnings("unchecked")
	public <T> SqmExpression<T> mode(
			@Nullable JpaPredicate filter,
			@Nullable JpaWindow window,
			Expression<T> sortExpression,
			SortDirection sortOrder,
			Nulls nullPrecedence) {
		// The result type follows the sort expression's Java type; cast is
		// unchecked because functionWithinGroup is parameterized on Class<?> here.
		return (SqmExpression<T>) functionWithinGroup(
				"mode",
				sortExpression.getJavaType(),
				sort( (SqmExpression<T>) sortExpression, sortOrder, nullPrecedence ),
				filter,
				window
		);
	}
	// percentile_cont(argument) WITHIN GROUP (ORDER BY sortExpression) —
	// continuous (interpolating) percentile; overloads delegate to the final form.
	@Override
	public <T> SqmExpression<T> percentileCont(
			Expression<? extends Number> argument,
			Expression<T> sortExpression,
			SortDirection sortOrder,
			Nulls nullPrecedence) {
		return percentileCont( argument, null, null, sortExpression, sortOrder, nullPrecedence );
	}
	@Override
	public <T> SqmExpression<T> percentileCont(
			Expression<? extends Number> argument,
			JpaPredicate filter,
			Expression<T> sortExpression,
			SortDirection sortOrder,
			Nulls nullPrecedence) {
		return percentileCont( argument, filter, null, sortExpression, sortOrder, nullPrecedence );
	}
	@Override
	public <T> SqmExpression<T> percentileCont(
			Expression<? extends Number> argument,
			JpaWindow window,
			Expression<T> sortExpression,
			SortDirection sortOrder,
			Nulls nullPrecedence) {
		return percentileCont( argument, null, window, sortExpression, sortOrder, nullPrecedence );
	}
	@Override
	@SuppressWarnings("unchecked")
	public <T> SqmExpression<T> percentileCont(
			Expression<? extends Number> argument,
			@Nullable JpaPredicate filter,
			@Nullable JpaWindow window,
			Expression<T> sortExpression,
			SortDirection sortOrder,
			Nulls nullPrecedence) {
		// Result type follows the sort expression's Java type.
		return (SqmExpression<T>) functionWithinGroup(
				"percentile_cont",
				sortExpression.getJavaType(),
				sort( (SqmExpression<T>) sortExpression, sortOrder, nullPrecedence ),
				filter,
				window,
				argument
		);
	}
	// percentile_disc(argument) WITHIN GROUP (ORDER BY sortExpression) —
	// discrete percentile; mirrors the percentileCont overload family.
	@Override
	public <T> SqmExpression<T> percentileDisc(
			Expression<? extends Number> argument,
			Expression<T> sortExpression,
			SortDirection sortOrder,
			Nulls nullPrecedence) {
		return percentileDisc( argument, null, null, sortExpression, sortOrder, nullPrecedence );
	}
	@Override
	public <T> SqmExpression<T> percentileDisc(
			Expression<? extends Number> argument,
			JpaPredicate filter,
			Expression<T> sortExpression,
			SortDirection sortOrder,
			Nulls nullPrecedence) {
		return percentileDisc( argument, filter, null, sortExpression, sortOrder, nullPrecedence );
	}
	@Override
	public <T> SqmExpression<T> percentileDisc(
			Expression<? extends Number> argument,
			JpaWindow window,
			Expression<T> sortExpression,
			SortDirection sortOrder,
			Nulls nullPrecedence) {
		return percentileDisc( argument, null, window, sortExpression, sortOrder, nullPrecedence );
	}
	@Override
	@SuppressWarnings("unchecked")
	public <T> SqmExpression<T> percentileDisc(
			Expression<? extends Number> argument,
			@Nullable JpaPredicate filter,
			@Nullable JpaWindow window,
			Expression<T> sortExpression,
			SortDirection sortOrder,
			Nulls nullPrecedence) {
		// Result type follows the sort expression's Java type.
		return (SqmExpression<T>) functionWithinGroup(
				"percentile_disc",
				sortExpression.getJavaType(),
				sort( (SqmExpression<T>) sortExpression, sortOrder, nullPrecedence ),
				filter,
				window,
				argument
		);
	}
	// Hypothetical-set (ordered-set) rank and percent_rank — delegate to
	// functionWithinGroup(...).
	@Override
	public SqmExpression<Long> rank(JpaOrder order, Expression<?>... arguments) {
		return functionWithinGroup( "rank", Long.class, order, null, null, arguments );
	}
	@Override
	public SqmExpression<Long> rank(JpaOrder order, JpaPredicate filter, Expression<?>... arguments) {
		return functionWithinGroup( "rank", Long.class, order, filter, null, arguments );
	}
	@Override
	public SqmExpression<Long> rank(JpaOrder order, JpaWindow window, Expression<?>... arguments) {
		return functionWithinGroup( "rank", Long.class, order, null, window, arguments );
	}
	@Override
	public SqmExpression<Long> rank(JpaOrder order, JpaPredicate filter, JpaWindow window, Expression<?>... arguments) {
		return functionWithinGroup( "rank", Long.class, order, filter, window, arguments );
	}
	@Override
	public SqmExpression<Double> percentRank(JpaOrder order, Expression<?>... arguments) {
		return percentRank( order, null, null, arguments );
	}
	@Override
	public SqmExpression<Double> percentRank(JpaOrder order, JpaPredicate filter, Expression<?>... arguments) {
		return percentRank( order, filter, null, arguments );
	}
	@Override
	public SqmExpression<Double> percentRank(JpaOrder order, JpaWindow window, Expression<?>... arguments) {
		return percentRank( order, null, window, arguments );
	}
	@Override
	public SqmExpression<Double> percentRank(
			JpaOrder order,
			@Nullable JpaPredicate filter,
			@Nullable JpaWindow window,
			Expression<?>... arguments) {
		return functionWithinGroup( "percent_rank", Double.class, order, filter, window, arguments );
	}
	// array_agg WITHIN GROUP (order) — collects values into an array; overloads
	// delegate to the final form.
	@Override
	public <T> SqmExpression<T[]> arrayAgg(JpaOrder order, Expression<? extends T> argument) {
		return arrayAgg( order, null, null, argument );
	}
	@Override
	public <T> SqmExpression<T[]> arrayAgg(JpaOrder order, JpaPredicate filter, Expression<? extends T> argument) {
		return arrayAgg( order, filter, null, argument );
	}
	@Override
	public <T> SqmExpression<T[]> arrayAgg(JpaOrder order, JpaWindow window, Expression<? extends T> argument) {
		return arrayAgg( order, null, window, argument );
	}
	@Override
	public <T> SqmExpression<T[]> arrayAgg(
			JpaOrder order,
			@Nullable JpaPredicate filter,
			@Nullable JpaWindow window,
			Expression<? extends T> argument) {
		// The result type class is passed as null; the return type is inferred
		// from the function descriptor / argument.
		return functionWithinGroup( "array_agg", null, order, filter, window, argument );
	}
	@Override
	public <T> SqmExpression<T[]> arrayLiteral(T... elements) {
		// Builds an array constructor ("array") whose arguments are literal nodes
		// for each supplied element.
		return getFunctionDescriptor( "array" ).generateSqmExpression(
				literals( elements ),
				null,
				queryEngine
		);
	}
	// array_position / array_positions / array_positions_list — locate an element
	// in an array. Literal-element overloads wrap the element with value(...).
	@Override
	public <T> SqmExpression<Integer> arrayPosition(Expression<T[]> arrayExpression, T element) {
		return getFunctionDescriptor( "array_position" ).generateSqmExpression(
				asList( (SqmExpression<?>) arrayExpression, value( element ) ),
				null,
				queryEngine
		);
	}
	@Override
	public <T> SqmExpression<Integer> arrayPosition(
			Expression<T[]> arrayExpression,
			Expression<T> elementExpression) {
		return getFunctionDescriptor( "array_position" ).generateSqmExpression(
				asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) elementExpression ),
				null,
				queryEngine
		);
	}
	@Override
	public <T> SqmExpression<int[]> arrayPositions(
			Expression<T[]> arrayExpression,
			Expression<T> elementExpression) {
		return getFunctionDescriptor( "array_positions" ).generateSqmExpression(
				asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) elementExpression ),
				null,
				queryEngine
		);
	}
	@Override
	public <T> SqmExpression<int[]> arrayPositions(Expression<T[]> arrayExpression, T element) {
		return getFunctionDescriptor( "array_positions" ).generateSqmExpression(
				asList( (SqmExpression<?>) arrayExpression, value( element ) ),
				null,
				queryEngine
		);
	}
	@Override
	public <T> SqmExpression<List<Integer>> arrayPositionsList(
			Expression<T[]> arrayExpression,
			Expression<T> elementExpression) {
		return getFunctionDescriptor( "array_positions_list" ).generateSqmExpression(
				asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) elementExpression ),
				null,
				queryEngine
		);
	}
	@Override
	public <T> SqmExpression<List<Integer>> arrayPositionsList(Expression<T[]> arrayExpression, T element) {
		return getFunctionDescriptor( "array_positions_list" ).generateSqmExpression(
				asList( (SqmExpression<?>) arrayExpression, value( element ) ),
				null,
				queryEngine
		);
	}
@Override
public <T> SqmExpression<Integer> arrayLength(Expression<T[]> arrayExpression) {
return getFunctionDescriptor( "array_length" ).generateSqmExpression(
Collections.singletonList( (SqmExpression<?>) arrayExpression ),
null,
queryEngine
);
}
	// array_concat — concatenates two arrays. Literal-array overloads wrap the
	// literal with value(..., expr) so it picks up the other operand's type.
	@Override
	public <T> SqmExpression<T[]> arrayConcat(
			Expression<T[]> arrayExpression1,
			Expression<T[]> arrayExpression2) {
		return getFunctionDescriptor( "array_concat" ).generateSqmExpression(
				asList( (SqmExpression<?>) arrayExpression1, (SqmExpression<?>) arrayExpression2 ),
				null,
				queryEngine
		);
	}
	@Override
	public <T> SqmExpression<T[]> arrayConcat(Expression<T[]> arrayExpression1, T[] array2) {
		return getFunctionDescriptor( "array_concat" ).generateSqmExpression(
				asList( (SqmExpression<?>) arrayExpression1, value( array2, (SqmExpression<?>) arrayExpression1 ) ),
				null,
				queryEngine
		);
	}
	@Override
	public <T> SqmExpression<T[]> arrayConcat(T[] array1, Expression<T[]> arrayExpression2) {
		return getFunctionDescriptor( "array_concat" ).generateSqmExpression(
				asList( value( array1, (SqmExpression<?>) arrayExpression2 ), (SqmExpression<?>) arrayExpression2 ),
				null,
				queryEngine
		);
	}
	// array_append / array_prepend — add one element at the end / front of an
	// array. Literal-element overloads wrap the element with value(...).
	@Override
	public <T> SqmExpression<T[]> arrayAppend(Expression<T[]> arrayExpression, Expression<T> elementExpression) {
		return getFunctionDescriptor( "array_append" ).generateSqmExpression(
				asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) elementExpression ),
				null,
				queryEngine
		);
	}
	@Override
	public <T> SqmExpression<T[]> arrayAppend(Expression<T[]> arrayExpression, T element) {
		return getFunctionDescriptor( "array_append" ).generateSqmExpression(
				asList( (SqmExpression<?>) arrayExpression, value( element ) ),
				null,
				queryEngine
		);
	}
	@Override
	public <T> SqmExpression<T[]> arrayPrepend(Expression<T> elementExpression, Expression<T[]> arrayExpression) {
		return getFunctionDescriptor( "array_prepend" ).generateSqmExpression(
				asList( (SqmExpression<?>) elementExpression, (SqmExpression<?>) arrayExpression ),
				null,
				queryEngine
		);
	}
	@Override
	public <T> SqmExpression<T[]> arrayPrepend(T element, Expression<T[]> arrayExpression) {
		return getFunctionDescriptor( "array_prepend" ).generateSqmExpression(
				asList( value( element ), (SqmExpression<?>) arrayExpression ),
				null,
				queryEngine
		);
	}
	// array_contains / array_contains_nullable — element-membership predicates.
	// Each builds the boolean-valued function and wraps it with isTrue(...) to
	// produce a predicate.
	@Override
	public <T> SqmPredicate arrayContains(Expression<T[]> arrayExpression, Expression<T> elementExpression) {
		return isTrue( getFunctionDescriptor( "array_contains" ).generateSqmExpression(
				asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) elementExpression ),
				null,
				queryEngine
		) );
	}
	@Override
	public <T> SqmPredicate arrayContains(Expression<T[]> arrayExpression, T element) {
		return isTrue( getFunctionDescriptor( "array_contains" ).generateSqmExpression(
				asList( (SqmExpression<?>) arrayExpression, value( element ) ),
				null,
				queryEngine
		) );
	}
	@Override
	public <T> SqmPredicate arrayContains(T[] array, Expression<T> elementExpression) {
		return isTrue( getFunctionDescriptor( "array_contains" ).generateSqmExpression(
				asList( value( array ), (SqmExpression<?>) elementExpression ),
				null,
				queryEngine
		) );
	}
	@Override
	public <T> SqmPredicate arrayContainsNullable(
			Expression<T[]> arrayExpression,
			Expression<T> elementExpression) {
		return isTrue( getFunctionDescriptor( "array_contains_nullable" ).generateSqmExpression(
				asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) elementExpression ),
				null,
				queryEngine
		) );
	}
	@Override
	public <T> SqmPredicate arrayContainsNullable(Expression<T[]> arrayExpression, T element) {
		return isTrue( getFunctionDescriptor( "array_contains_nullable" ).generateSqmExpression(
				asList( (SqmExpression<?>) arrayExpression, value( element ) ),
				null,
				queryEngine
		) );
	}
	@Override
	public <T> SqmPredicate arrayContainsNullable(T[] array, Expression<T> elementExpression) {
		return isTrue( getFunctionDescriptor( "array_contains_nullable" ).generateSqmExpression(
				asList( value( array ), (SqmExpression<?>) elementExpression ),
				null,
				queryEngine
		) );
	}
@Override
public <T> SqmPredicate arrayIncludes(
		Expression<T[]> arrayExpression,
		Expression<T[]> subArrayExpression) {
	// Predicate testing whether the array includes all elements of the sub-array ("array_includes")
	return isTrue( getFunctionDescriptor( "array_includes" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) subArrayExpression ),
			null,
			queryEngine
	) );
}
@Override
public <T> SqmPredicate arrayIncludes(Expression<T[]> arrayExpression, T[] subArray) {
	// Literal sub-array: the array expression is passed to value() as a type-inference anchor for the bind
	return isTrue( getFunctionDescriptor( "array_includes" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, value( subArray, (SqmExpression<?>) arrayExpression ) ),
			null,
			queryEngine
	) );
}
@Override
public <T> SqmPredicate arrayIncludes(T[] array, Expression<T[]> subArrayExpression) {
	// Literal array: the sub-array expression is used as the type-inference anchor
	return isTrue( getFunctionDescriptor( "array_includes" ).generateSqmExpression(
			asList( value( array, (SqmExpression<?>) subArrayExpression ), (SqmExpression<?>) subArrayExpression ),
			null,
			queryEngine
	) );
}
@Override
public <T> SqmPredicate arrayIncludesNullable(
		Expression<T[]> arrayExpression,
		Expression<T[]> subArrayExpression) {
	// Null-aware variant delegating to "array_includes_nullable"
	return isTrue( getFunctionDescriptor( "array_includes_nullable" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) subArrayExpression ),
			null,
			queryEngine
	) );
}
@Override
public <T> SqmPredicate arrayIncludesNullable(Expression<T[]> arrayExpression, T[] subArray) {
	// Null-aware overload for a literal sub-array (array expression anchors the bind type)
	return isTrue( getFunctionDescriptor( "array_includes_nullable" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, value( subArray, (SqmExpression<?>) arrayExpression ) ),
			null,
			queryEngine
	) );
}
@Override
public <T> SqmPredicate arrayIncludesNullable(T[] array, Expression<T[]> subArrayExpression) {
	// Null-aware overload for a literal array (sub-array expression anchors the bind type)
	return isTrue( getFunctionDescriptor( "array_includes_nullable" ).generateSqmExpression(
			asList( value( array, (SqmExpression<?>) subArrayExpression ), (SqmExpression<?>) subArrayExpression ),
			null,
			queryEngine
	) );
}
@Override
public <T> SqmPredicate arrayIntersects(Expression<T[]> arrayExpression1, Expression<T[]> arrayExpression2) {
	// Predicate testing whether two arrays share at least one element ("array_intersects")
	return isTrue( getFunctionDescriptor( "array_intersects" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression1, (SqmExpression<?>) arrayExpression2 ),
			null,
			queryEngine
	) );
}
@Override
public <T> SqmPredicate arrayIntersects(Expression<T[]> arrayExpression1, T[] array2) {
	// Literal second array: first expression anchors the bind type for value()
	return isTrue( getFunctionDescriptor( "array_intersects" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression1, value( array2, (SqmExpression<?>) arrayExpression1 ) ),
			null,
			queryEngine
	) );
}
@Override
public <T> SqmPredicate arrayIntersects(T[] array1, Expression<T[]> arrayExpression2) {
	// Literal first array: second expression anchors the bind type for value()
	return isTrue( getFunctionDescriptor( "array_intersects" ).generateSqmExpression(
			asList( value( array1, (SqmExpression<?>) arrayExpression2 ), (SqmExpression<?>) arrayExpression2 ),
			null,
			queryEngine
	) );
}
@Override
public <T> SqmPredicate arrayIntersectsNullable(
		Expression<T[]> arrayExpression1,
		Expression<T[]> arrayExpression2) {
	// Null-aware variant delegating to "array_intersects_nullable"
	return isTrue( getFunctionDescriptor( "array_intersects_nullable" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression1, (SqmExpression<?>) arrayExpression2 ),
			null,
			queryEngine
	) );
}
@Override
public <T> SqmPredicate arrayIntersectsNullable(Expression<T[]> arrayExpression1, T[] array2) {
	// Null-aware overload for a literal second array
	return isTrue( getFunctionDescriptor( "array_intersects_nullable" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression1, value( array2, (SqmExpression<?>) arrayExpression1 ) ),
			null,
			queryEngine
	) );
}
@Override
public <T> SqmPredicate arrayIntersectsNullable(T[] array1, Expression<T[]> arrayExpression2) {
	// Null-aware overload for a literal first array
	return isTrue( getFunctionDescriptor( "array_intersects_nullable" ).generateSqmExpression(
			asList( value( array1, (SqmExpression<?>) arrayExpression2 ), (SqmExpression<?>) arrayExpression2 ),
			null,
			queryEngine
	) );
}
@Override
public <T> SqmExpression<T> arrayGet(Expression<T[]> arrayExpression, Expression<Integer> indexExpression) {
	// Reads the element at the given index via the "array_get" SQM function
	return getFunctionDescriptor( "array_get" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) indexExpression ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T> arrayGet(Expression<T[]> arrayExpression, Integer index) {
	// Overload for a literal index, bound via value()
	return getFunctionDescriptor( "array_get" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, value( index ) ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arraySet(
		Expression<T[]> arrayExpression,
		Expression<Integer> indexExpression,
		Expression<T> elementExpression) {
	// Produces a copy of the array with the element at the given index replaced ("array_set")
	return getFunctionDescriptor( "array_set" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) indexExpression, (SqmExpression<?>) elementExpression ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arraySet(
		Expression<T[]> arrayExpression,
		Expression<Integer> indexExpression,
		T element) {
	// Overload: literal element, expression index
	return getFunctionDescriptor( "array_set" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) indexExpression, value( element ) ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arraySet(
		Expression<T[]> arrayExpression,
		Integer index,
		Expression<T> elementExpression) {
	// Overload: literal index, expression element
	return getFunctionDescriptor( "array_set" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, value( index ), (SqmExpression<?>) elementExpression ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arraySet(Expression<T[]> arrayExpression, Integer index, T element) {
	// Overload: both index and element are literals
	return getFunctionDescriptor( "array_set" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, value( index ), value( element ) ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arrayRemove(Expression<T[]> arrayExpression, Expression<T> elementExpression) {
	// Removes occurrences of an element from the array via "array_remove"
	return getFunctionDescriptor( "array_remove" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) elementExpression ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arrayRemove(Expression<T[]> arrayExpression, T element) {
	// Overload for a literal element, bound via value()
	return getFunctionDescriptor( "array_remove" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, value( element ) ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arrayRemoveIndex(
		Expression<T[]> arrayExpression,
		Expression<Integer> indexExpression) {
	// Removes the element at the given index via "array_remove_index"
	return getFunctionDescriptor( "array_remove_index" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) indexExpression ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arrayRemoveIndex(Expression<T[]> arrayExpression, Integer index) {
	// Overload for a literal index, bound via value()
	return getFunctionDescriptor( "array_remove_index" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, value( index ) ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arraySlice(
		Expression<T[]> arrayExpression,
		Expression<Integer> lowerIndexExpression,
		Expression<Integer> upperIndexExpression) {
	// Extracts the sub-array between the lower and upper index via "array_slice"
	return getFunctionDescriptor( "array_slice" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) lowerIndexExpression, (SqmExpression<?>) upperIndexExpression ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arraySlice(
		Expression<T[]> arrayExpression,
		Expression<Integer> lowerIndexExpression,
		Integer upperIndex) {
	// Overload: literal upper index
	return getFunctionDescriptor( "array_slice" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) lowerIndexExpression, value( upperIndex ) ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arraySlice(
		Expression<T[]> arrayExpression,
		Integer lowerIndex,
		Expression<Integer> upperIndexExpression) {
	// Overload: literal lower index
	return getFunctionDescriptor( "array_slice" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, value( lowerIndex ), (SqmExpression<?>) upperIndexExpression ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arraySlice(
		Expression<T[]> arrayExpression,
		Integer lowerIndex,
		Integer upperIndex) {
	// Overload: both bounds are literals
	return getFunctionDescriptor( "array_slice" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, value( lowerIndex ), value( upperIndex ) ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arrayReplace(
		Expression<T[]> arrayExpression,
		Expression<T> oldElementExpression,
		Expression<T> newElementExpression) {
	// Replaces occurrences of one element with another via "array_replace"
	return getFunctionDescriptor( "array_replace" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) oldElementExpression, (SqmExpression<?>) newElementExpression ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arrayReplace(
		Expression<T[]> arrayExpression,
		Expression<T> oldElementExpression,
		T newElement) {
	// Overload: literal replacement element
	return getFunctionDescriptor( "array_replace" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) oldElementExpression, value( newElement ) ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arrayReplace(
		Expression<T[]> arrayExpression,
		T oldElement,
		Expression<T> newElementExpression) {
	// Overload: literal element to replace
	return getFunctionDescriptor( "array_replace" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, value( oldElement ), (SqmExpression<?>) newElementExpression ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arrayReplace(Expression<T[]> arrayExpression, T oldElement, T newElement) {
	// Overload: both old and new elements are literals
	return getFunctionDescriptor( "array_replace" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, value( oldElement ), value( newElement ) ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arrayTrim(
		Expression<T[]> arrayExpression,
		Expression<Integer> elementCountExpression) {
	// Removes the given number of elements from the end of the array ("array_trim")
	return getFunctionDescriptor( "array_trim" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) elementCountExpression ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arrayTrim(Expression<T[]> arrayExpression, Integer elementCount) {
	// Overload for a literal element count, bound via value()
	return getFunctionDescriptor( "array_trim" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, value( elementCount ) ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arrayFill(
		Expression<T> elementExpression,
		Expression<Integer> elementCountExpression) {
	// Builds an array containing the element repeated elementCount times ("array_fill")
	return getFunctionDescriptor( "array_fill" ).generateSqmExpression(
			asList( (SqmExpression<?>) elementExpression, (SqmExpression<?>) elementCountExpression ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arrayFill(Expression<T> elementExpression, Integer elementCount) {
	// Overload: literal count
	return getFunctionDescriptor( "array_fill" ).generateSqmExpression(
			asList( (SqmExpression<?>) elementExpression, value( elementCount ) ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arrayFill(T element, Expression<Integer> elementCountExpression) {
	// Overload: literal element
	return getFunctionDescriptor( "array_fill" ).generateSqmExpression(
			asList( value( element ), (SqmExpression<?>) elementCountExpression ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<T[]> arrayFill(T element, Integer elementCount) {
	// Overload: both element and count are literals
	return getFunctionDescriptor( "array_fill" ).generateSqmExpression(
			asList( value( element ), value( elementCount ) ),
			null,
			queryEngine
	);
}
@Override
public SqmExpression<String> arrayToString(
		Expression<? extends Object[]> arrayExpression,
		Expression<String> separatorExpression) {
	// Joins array elements into a string with the given separator ("array_to_string")
	return getFunctionDescriptor( "array_to_string" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) separatorExpression ),
			null,
			queryEngine
	);
}
@Override
public SqmExpression<String> arrayToString(
		Expression<? extends Object[]> arrayExpression,
		String separator) {
	// Overload for a literal separator, bound via value()
	return getFunctionDescriptor( "array_to_string" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, value( separator ) ),
			null,
			queryEngine
	);
}
@Override
public SqmExpression<String> arrayToString(Expression<? extends Object[]> arrayExpression, Expression<String> separatorExpression, Expression<String> defaultExpression) {
	// Three-argument form: the third argument supplies the replacement used for null elements
	return getFunctionDescriptor( "array_to_string" ).generateSqmExpression(
			asList( (SqmExpression<?>) arrayExpression, (SqmExpression<?>) separatorExpression, (SqmExpression<?>) defaultExpression ),
			null,
			queryEngine
	);
}
@Override
public SqmExpression<String> arrayToString(Expression<? extends Object[]> arrayExpression, Expression<String> separatorExpression, String defaultValue) {
	// Convenience overload delegating to the all-expression form
	return arrayToString( arrayExpression, separatorExpression, value( defaultValue ) );
}
@Override
public SqmExpression<String> arrayToString(Expression<? extends Object[]> arrayExpression, String separator, Expression<String> defaultExpression) {
	// Convenience overload delegating to the all-expression form
	return arrayToString( arrayExpression, value( separator ), defaultExpression );
}
@Override
public SqmExpression<String> arrayToString(Expression<? extends Object[]> arrayExpression, String separator, String defaultValue) {
	// Convenience overload delegating to the all-expression form
	return arrayToString( arrayExpression, value( separator ), value( defaultValue ) );
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Array functions for collection types
@Override
public <E, C extends Collection<E>> SqmExpression<C> collectionLiteral(E... elements) {
	// Builds a collection-typed literal from the varargs elements via "array_list";
	// literals() converts each element into an SQM literal expression
	return getFunctionDescriptor( "array_list" ).generateSqmExpression(
			literals( elements ),
			null,
			queryEngine
	);
}
@Override
public <E> SqmExpression<Integer> collectionPosition(
		Expression<? extends Collection<? extends E>> collectionExpression,
		E element) {
	// 1-based position of the element in the collection via "array_position" (literal element)
	return getFunctionDescriptor( "array_position" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( element ) ),
			null,
			queryEngine
	);
}
@Override
public <E> SqmExpression<Integer> collectionPosition(
		Expression<? extends Collection<? extends E>> collectionExpression,
		Expression<E> elementExpression) {
	// Expression-element overload of "array_position"
	return getFunctionDescriptor( "array_position" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) elementExpression ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<int[]> collectionPositions(
		Expression<? extends Collection<? super T>> collectionExpression,
		Expression<T> elementExpression) {
	// All positions of the element, returned as an int[] ("array_positions")
	return getFunctionDescriptor( "array_positions" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) elementExpression ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<int[]> collectionPositions(
		Expression<? extends Collection<? super T>> collectionExpression,
		T element) {
	// Literal-element overload of "array_positions"
	return getFunctionDescriptor( "array_positions" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( element ) ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<List<Integer>> collectionPositionsList(
		Expression<? extends Collection<? super T>> collectionExpression,
		Expression<T> elementExpression) {
	// Same as collectionPositions but typed as List<Integer> ("array_positions_list")
	return getFunctionDescriptor( "array_positions_list" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) elementExpression ),
			null,
			queryEngine
	);
}
@Override
public <T> SqmExpression<List<Integer>> collectionPositionsList(
		Expression<? extends Collection<? super T>> collectionExpression,
		T element) {
	// Literal-element overload of "array_positions_list"
	return getFunctionDescriptor( "array_positions_list" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( element ) ),
			null,
			queryEngine
	);
}
@Override
public SqmExpression<Integer> collectionLength(Expression<? extends Collection<?>> collectionExpression) {
	// Number of elements in the collection via the single-argument "array_length" function
	return getFunctionDescriptor( "array_length" ).generateSqmExpression(
			Collections.singletonList( (SqmExpression<?>) collectionExpression ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionConcat(
		Expression<C> collectionExpression1,
		Expression<? extends Collection<? extends E>> collectionExpression2) {
	// Concatenates two collections via "array_concat"
	return getFunctionDescriptor( "array_concat" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression1, (SqmExpression<?>) collectionExpression2 ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionConcat(
		Expression<C> collectionExpression1,
		Collection<? extends E> collection2) {
	// Literal second collection: first expression anchors the bind type for value()
	return getFunctionDescriptor( "array_concat" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression1, value( collection2, (SqmExpression<?>) collectionExpression1 ) ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionConcat(
		C collection1,
		Expression<? extends Collection<? extends E>> collectionExpression2) {
	// Literal first collection: second expression anchors the bind type for value()
	return getFunctionDescriptor( "array_concat" ).generateSqmExpression(
			asList( value( collection1, (SqmExpression<?>) collectionExpression2 ), (SqmExpression<?>) collectionExpression2 ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionAppend(
		Expression<C> collectionExpression,
		Expression<? extends E> elementExpression) {
	// Appends an element to a collection; reuses the "array_append" SQM function
	return getFunctionDescriptor( "array_append" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) elementExpression ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionAppend(
		Expression<C> collectionExpression,
		E element) {
	// Overload for a literal element, bound via value()
	return getFunctionDescriptor( "array_append" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( element ) ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionPrepend(
		Expression<? extends E> elementExpression,
		Expression<C> collectionExpression) {
	// Prepends an element; reuses "array_prepend" (element argument first)
	return getFunctionDescriptor( "array_prepend" ).generateSqmExpression(
			asList( (SqmExpression<?>) elementExpression, (SqmExpression<?>) collectionExpression ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionPrepend(
		E element,
		Expression<C> collectionExpression) {
	// Overload for a literal element, bound via value()
	return getFunctionDescriptor( "array_prepend" ).generateSqmExpression(
			asList( value( element ), (SqmExpression<?>) collectionExpression ),
			null,
			queryEngine
	);
}
@Override
public <E> SqmPredicate collectionContains(
		Expression<? extends Collection<E>> collectionExpression,
		Expression<? extends E> elementExpression) {
	// Predicate reusing "array_contains"; isTrue() converts the boolean expression to a predicate
	return isTrue( getFunctionDescriptor( "array_contains" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) elementExpression ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionContains(
		Expression<? extends Collection<E>> collectionExpression,
		E element) {
	// Overload for a literal element, bound via value()
	return isTrue( getFunctionDescriptor( "array_contains" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( element ) ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionContains(
		Collection<E> collection,
		Expression<E> elementExpression) {
	// Literal collection: collectionValue() binds it, with the element expression
	// as the type-inference anchor for the collection's element type
	return isTrue( getFunctionDescriptor( "array_contains" ).generateSqmExpression(
			asList( collectionValue( collection, (SqmExpression<E>) elementExpression ), (SqmExpression<?>) elementExpression ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionContainsNullable(
		Expression<? extends Collection<E>> collectionExpression,
		Expression<? extends E> elementExpression) {
	// Null-aware variant delegating to "array_contains_nullable"
	return isTrue( getFunctionDescriptor( "array_contains_nullable" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) elementExpression ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionContainsNullable(
		Expression<? extends Collection<E>> collectionExpression,
		E element) {
	// Null-aware overload for a literal element
	return isTrue( getFunctionDescriptor( "array_contains_nullable" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( element ) ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionContainsNullable(
		Collection<E> collection,
		Expression<E> elementExpression) {
	// Null-aware overload for a literal collection (bound via collectionValue())
	return isTrue( getFunctionDescriptor( "array_contains_nullable" ).generateSqmExpression(
			asList( collectionValue( collection, (SqmExpression<E>) elementExpression ), (SqmExpression<?>) elementExpression ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionIncludes(
		Expression<? extends Collection<E>> collectionExpression,
		Expression<? extends Collection<? extends E>> subCollectionExpression) {
	// Predicate testing whether the collection includes all elements of the sub-collection ("array_includes")
	return isTrue( getFunctionDescriptor( "array_includes" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) subCollectionExpression ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionIncludes(
		Expression<? extends Collection<E>> collectionExpression,
		Collection<? extends E> subCollection) {
	// Literal sub-collection: collection expression anchors the bind type for value()
	return isTrue( getFunctionDescriptor( "array_includes" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( subCollection, (SqmExpression<?>) collectionExpression ) ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionIncludes(
		Collection<E> collection,
		Expression<? extends Collection<? extends E>> subCollectionExpression) {
	// Literal collection: sub-collection expression anchors the bind type for value()
	return isTrue( getFunctionDescriptor( "array_includes" ).generateSqmExpression(
			asList( value( collection, (SqmExpression<?>) subCollectionExpression ), (SqmExpression<?>) subCollectionExpression ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionIncludesNullable(
		Expression<? extends Collection<E>> collectionExpression,
		Expression<? extends Collection<? extends E>> subCollectionExpression) {
	// Null-aware variant delegating to "array_includes_nullable"
	return isTrue( getFunctionDescriptor( "array_includes_nullable" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) subCollectionExpression ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionIncludesNullable(
		Expression<? extends Collection<E>> collectionExpression,
		Collection<? extends E> subCollection) {
	// Null-aware overload for a literal sub-collection
	return isTrue( getFunctionDescriptor( "array_includes_nullable" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( subCollection, (SqmExpression<?>) collectionExpression ) ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionIncludesNullable(
		Collection<E> collection,
		Expression<? extends Collection<? extends E>> subCollectionExpression) {
	// Null-aware overload for a literal collection
	return isTrue( getFunctionDescriptor( "array_includes_nullable" ).generateSqmExpression(
			asList( value( collection, (SqmExpression<?>) subCollectionExpression ), (SqmExpression<?>) subCollectionExpression ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionIntersects(
		Expression<? extends Collection<E>> collectionExpression1,
		Expression<? extends Collection<? extends E>> collectionExpression2) {
	// Predicate testing whether the two collections share at least one element ("array_intersects")
	return isTrue( getFunctionDescriptor( "array_intersects" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression1, (SqmExpression<?>) collectionExpression2 ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionIntersects(
		Expression<? extends Collection<E>> collectionExpression1,
		Collection<? extends E> collection2) {
	// Literal second collection: first expression anchors the bind type for value()
	return isTrue( getFunctionDescriptor( "array_intersects" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression1, value( collection2, (SqmExpression<?>) collectionExpression1 ) ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionIntersects(
		Collection<E> collection1,
		Expression<? extends Collection<? extends E>> collectionExpression2) {
	// Literal first collection: second expression anchors the bind type for value()
	return isTrue( getFunctionDescriptor( "array_intersects" ).generateSqmExpression(
			asList( value( collection1, (SqmExpression<?>) collectionExpression2 ), (SqmExpression<?>) collectionExpression2 ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionIntersectsNullable(
		Expression<? extends Collection<E>> collectionExpression1,
		Expression<? extends Collection<? extends E>> collectionExpression2) {
	// Null-aware variant delegating to "array_intersects_nullable"
	return isTrue( getFunctionDescriptor( "array_intersects_nullable" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression1, (SqmExpression<?>) collectionExpression2 ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionIntersectsNullable(
		Expression<? extends Collection<E>> collectionExpression1,
		Collection<? extends E> collection2) {
	// Null-aware overload for a literal second collection
	return isTrue( getFunctionDescriptor( "array_intersects_nullable" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression1, value( collection2, (SqmExpression<?>) collectionExpression1 ) ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmPredicate collectionIntersectsNullable(
		Collection<E> collection1,
		Expression<? extends Collection<? extends E>> collectionExpression2) {
	// Null-aware overload for a literal first collection
	return isTrue( getFunctionDescriptor( "array_intersects_nullable" ).generateSqmExpression(
			asList( value( collection1, (SqmExpression<?>) collectionExpression2 ), (SqmExpression<?>) collectionExpression2 ),
			null,
			queryEngine
	) );
}
@Override
public <E> SqmExpression<E> collectionGet(
		Expression<? extends Collection<E>> collectionExpression,
		Expression<Integer> indexExpression) {
	// Reads the element at the given index; reuses the "array_get" SQM function
	return getFunctionDescriptor( "array_get" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) indexExpression ),
			null,
			queryEngine
	);
}
@Override
public <E> SqmExpression<E> collectionGet(Expression<? extends Collection<E>> collectionExpression, Integer index) {
	// Overload for a literal index, bound via value()
	return getFunctionDescriptor( "array_get" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( index ) ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionSet(
		Expression<C> collectionExpression,
		Expression<Integer> indexExpression,
		Expression<? extends E> elementExpression) {
	// Produces a copy with the element at the given index replaced; reuses "array_set"
	return getFunctionDescriptor( "array_set" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) indexExpression, (SqmExpression<?>) elementExpression ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionSet(
		Expression<C> collectionExpression,
		Expression<Integer> indexExpression,
		E element) {
	// Overload: literal element, expression index
	return getFunctionDescriptor( "array_set" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) indexExpression, value( element ) ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionSet(
		Expression<C> collectionExpression,
		Integer index,
		Expression<? extends E> elementExpression) {
	// Overload: literal index, expression element
	return getFunctionDescriptor( "array_set" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( index ), (SqmExpression<?>) elementExpression ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionSet(
		Expression<C> collectionExpression,
		Integer index,
		E element) {
	// Overload: both index and element are literals
	return getFunctionDescriptor( "array_set" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( index ), value( element ) ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionRemove(
		Expression<C> collectionExpression,
		Expression<? extends E> elementExpression) {
	// Removes occurrences of the element; reuses the "array_remove" SQM function
	return getFunctionDescriptor( "array_remove" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) elementExpression ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionRemove(
		Expression<C> collectionExpression,
		E element) {
	// Overload for a literal element, bound via value()
	return getFunctionDescriptor( "array_remove" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( element ) ),
			null,
			queryEngine
	);
}
@Override
public <C extends Collection<?>> SqmExpression<C> collectionRemoveIndex(
		Expression<C> collectionExpression,
		Expression<Integer> indexExpression) {
	// Removes the element at the given index; reuses "array_remove_index"
	return getFunctionDescriptor( "array_remove_index" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) indexExpression ),
			null,
			queryEngine
	);
}
@Override
public <C extends Collection<?>> SqmExpression<C> collectionRemoveIndex(
		Expression<C> collectionExpression,
		Integer index) {
	// Overload for a literal index, bound via value()
	return getFunctionDescriptor( "array_remove_index" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( index ) ),
			null,
			queryEngine
	);
}
@Override
public <C extends Collection<?>> SqmExpression<C> collectionSlice(
		Expression<C> collectionExpression,
		Expression<Integer> lowerIndexExpression,
		Expression<Integer> upperIndexExpression) {
	// Extracts the sub-collection between the lower and upper index; reuses "array_slice"
	return getFunctionDescriptor( "array_slice" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) lowerIndexExpression, (SqmExpression<?>) upperIndexExpression ),
			null,
			queryEngine
	);
}
@Override
public <C extends Collection<?>> SqmExpression<C> collectionSlice(
		Expression<C> collectionExpression,
		Expression<Integer> lowerIndexExpression,
		Integer upperIndex) {
	// Overload: literal upper index
	return getFunctionDescriptor( "array_slice" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) lowerIndexExpression, value( upperIndex ) ),
			null,
			queryEngine
	);
}
@Override
public <C extends Collection<?>> SqmExpression<C> collectionSlice(
		Expression<C> collectionExpression,
		Integer lowerIndex,
		Expression<Integer> upperIndexExpression) {
	// Overload: literal lower index
	return getFunctionDescriptor( "array_slice" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( lowerIndex ), (SqmExpression<?>) upperIndexExpression ),
			null,
			queryEngine
	);
}
@Override
public <C extends Collection<?>> SqmExpression<C> collectionSlice(
		Expression<C> collectionExpression,
		Integer lowerIndex,
		Integer upperIndex) {
	// Overload: both bounds are literals
	return getFunctionDescriptor( "array_slice" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( lowerIndex ), value( upperIndex ) ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionReplace(
		Expression<C> collectionExpression,
		Expression<? extends E> oldElementExpression,
		Expression<? extends E> newElementExpression) {
	// Replaces occurrences of one element with another; reuses "array_replace"
	return getFunctionDescriptor( "array_replace" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) oldElementExpression, (SqmExpression<?>) newElementExpression ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionReplace(
		Expression<C> collectionExpression,
		Expression<? extends E> oldElementExpression,
		E newElement) {
	// Overload: literal replacement element
	return getFunctionDescriptor( "array_replace" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) oldElementExpression, value( newElement ) ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionReplace(
		Expression<C> collectionExpression,
		E oldElement,
		Expression<? extends E> newElementExpression) {
	// Overload: literal element to replace
	return getFunctionDescriptor( "array_replace" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( oldElement ), (SqmExpression<?>) newElementExpression ),
			null,
			queryEngine
	);
}
@Override
public <E, C extends Collection<? super E>> SqmExpression<C> collectionReplace(
		Expression<C> collectionExpression,
		E oldElement,
		E newElement) {
	// Overload: both old and new elements are literals
	return getFunctionDescriptor( "array_replace" ).generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, value( oldElement ), value( newElement ) ),
			null,
			queryEngine
	);
}
/**
 * Produces an {@code array_trim} function expression over the collection and
 * index arguments.
 */
@Override
public <C extends Collection<?>> SqmExpression<C> collectionTrim(
		Expression<C> collectionExpression,
		Expression<Integer> indexExpression) {
	final var trimFunction = getFunctionDescriptor( "array_trim" );
	return trimFunction.generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) indexExpression ),
			null,
			queryEngine
	);
}

// Overload with a constant index; wraps it and delegates to the expression form.
@Override
public <C extends Collection<?>> SqmExpression<C> collectionTrim(
		Expression<C> collectionExpression,
		Integer index) {
	return collectionTrim( collectionExpression, value( index ) );
}
/**
 * Produces an {@code array_fill_list} function expression from an element and an
 * element count. Each overload wraps its constant arguments via {@link #value}.
 */
@Override
public <T> SqmExpression<Collection<T>> collectionFill(
		Expression<T> elementExpression,
		Expression<Integer> elementCountExpression) {
	final var fillFunction = getFunctionDescriptor( "array_fill_list" );
	return fillFunction.generateSqmExpression(
			asList( (SqmExpression<?>) elementExpression, (SqmExpression<?>) elementCountExpression ),
			null,
			queryEngine
	);
}

// Constant element count.
@Override
public <T> SqmExpression<Collection<T>> collectionFill(Expression<T> elementExpression, Integer elementCount) {
	final var fillFunction = getFunctionDescriptor( "array_fill_list" );
	return fillFunction.generateSqmExpression(
			asList( (SqmExpression<?>) elementExpression, value( elementCount ) ),
			null,
			queryEngine
	);
}

// Constant element.
@Override
public <T> SqmExpression<Collection<T>> collectionFill(T element, Expression<Integer> elementCountExpression) {
	final var fillFunction = getFunctionDescriptor( "array_fill_list" );
	return fillFunction.generateSqmExpression(
			asList( value( element ), (SqmExpression<?>) elementCountExpression ),
			null,
			queryEngine
	);
}

// Constant element and count.
@Override
public <T> SqmExpression<Collection<T>> collectionFill(T element, Integer elementCount) {
	final var fillFunction = getFunctionDescriptor( "array_fill_list" );
	return fillFunction.generateSqmExpression(
			asList( value( element ), value( elementCount ) ),
			null,
			queryEngine
	);
}
/**
 * Produces an {@code array_to_string} function expression joining the elements of
 * a collection with a separator.
 */
@Override
public SqmExpression<String> collectionToString(
		Expression<? extends Collection<?>> collectionExpression,
		Expression<String> separatorExpression) {
	final var toStringFunction = getFunctionDescriptor( "array_to_string" );
	return toStringFunction.generateSqmExpression(
			asList( (SqmExpression<?>) collectionExpression, (SqmExpression<?>) separatorExpression ),
			null,
			queryEngine
	);
}

// Constant separator; wraps it and delegates to the expression form.
@Override
public SqmExpression<String> collectionToString(
		Expression<? extends Collection<?>> collectionExpression,
		String separator) {
	return collectionToString( collectionExpression, value( separator ) );
}

/**
 * Variant taking a third, default-value argument that is passed through to the
 * {@code array_to_string} function.
 */
@Override
public SqmExpression<String> collectionToString(Expression<? extends Collection<?>> collectionExpression, Expression<String> separatorExpression, Expression<String> defaultExpression) {
	final var toStringFunction = getFunctionDescriptor( "array_to_string" );
	return toStringFunction.generateSqmExpression(
			asList(
					(SqmExpression<?>) collectionExpression,
					(SqmExpression<?>) separatorExpression,
					(SqmExpression<?>) defaultExpression
			),
			null,
			queryEngine
	);
}

// The remaining overloads wrap constant arguments and delegate to the three-argument expression form.
@Override
public SqmExpression<String> collectionToString(Expression<? extends Collection<?>> collectionExpression, Expression<String> separatorExpression, String defaultValue) {
	return collectionToString( collectionExpression, separatorExpression, value( defaultValue ) );
}

@Override
public SqmExpression<String> collectionToString(Expression<? extends Collection<?>> collectionExpression, String separator, Expression<String> defaultExpression) {
	return collectionToString( collectionExpression, value( separator ), defaultExpression );
}

@Override
public SqmExpression<String> collectionToString(Expression<? extends Collection<?>> collectionExpression, String separator, String defaultValue) {
	return collectionToString( collectionExpression, value( separator ), value( defaultValue ) );
}
/**
 * Produces a {@code json_value} expression with a constant JSON path and no
 * explicit result type.
 */
@Override
public SqmJsonValueExpression<String> jsonValue(Expression<?> jsonDocument, String jsonPath) {
	return jsonValue( jsonDocument, value( jsonPath ), null );
}

// Constant path with an explicit result type.
@Override
public <T> SqmJsonValueExpression<T> jsonValue(
		Expression<?> jsonDocument,
		String jsonPath,
		Class<T> returningType) {
	return jsonValue( jsonDocument, value( jsonPath ), returningType );
}

// Expression path without an explicit result type.
@Override
public SqmJsonValueExpression<String> jsonValue(Expression<?> jsonDocument, Expression<String> jsonPath) {
	return jsonValue( jsonDocument, jsonPath, null );
}

/**
 * Produces a {@code json_value} expression. When {@code returningType} is given,
 * a cast target for the resolved basic type is appended and the function is
 * generated with that type; otherwise no type is passed.
 */
@Override
@SuppressWarnings("unchecked")
public <T> SqmJsonValueExpression<T> jsonValue(
		Expression<?> jsonDocument,
		Expression<String> jsonPath,
		@Nullable Class<T> returningType) {
	final var jsonValueFunction = getFunctionDescriptor( "json_value" );
	if ( returningType != null ) {
		final BasicType<T> type = getTypeConfiguration().standardBasicTypeForJavaType( returningType );
		return (SqmJsonValueExpression<T>) jsonValueFunction.generateSqmExpression(
				asList( (SqmTypedNode<?>) jsonDocument, (SqmTypedNode<?>) jsonPath, new SqmCastTarget<>( type, this ) ),
				type,
				queryEngine
		);
	}
	return (SqmJsonValueExpression<T>) jsonValueFunction.generateSqmExpression(
			asList( (SqmTypedNode<?>) jsonDocument, (SqmTypedNode<?>) jsonPath ),
			null,
			queryEngine
	);
}
/**
 * Produces a {@code json_query} expression with a constant JSON path.
 */
@Override
public SqmJsonQueryExpression jsonQuery(Expression<?> jsonDocument, String jsonPath) {
	return jsonQuery( jsonDocument, value( jsonPath ) );
}

/**
 * Produces a {@code json_query} expression over the document and path arguments.
 */
@Override
public SqmJsonQueryExpression jsonQuery(Expression<?> jsonDocument, Expression<String> jsonPath) {
	final var jsonQueryFunction = getFunctionDescriptor( "json_query" );
	return (SqmJsonQueryExpression) jsonQueryFunction.<String>generateSqmExpression(
			asList( (SqmTypedNode<?>) jsonDocument, (SqmTypedNode<?>) jsonPath ),
			null,
			queryEngine
	);
}

/**
 * Produces a {@code json_exists} expression with a constant JSON path.
 */
@Override
public SqmJsonExistsExpression jsonExists(Expression<?> jsonDocument, String jsonPath) {
	return jsonExists( jsonDocument, value( jsonPath ) );
}

/**
 * Produces a {@code json_exists} expression over the document and path arguments.
 */
@Override
public SqmJsonExistsExpression jsonExists(Expression<?> jsonDocument, Expression<String> jsonPath) {
	final var jsonExistsFunction = getFunctionDescriptor( "json_exists" );
	return (SqmJsonExistsExpression) jsonExistsFunction.<Boolean>generateSqmExpression(
			asList( (SqmTypedNode<?>) jsonDocument, (SqmTypedNode<?>) jsonPath ),
			null,
			queryEngine
	);
}
/**
 * Produces a {@code json_array} expression, appending the
 * {@link SqmJsonNullBehavior#NULL} marker as a trailing argument.
 */
@Override
public SqmExpression<String> jsonArrayWithNulls(Expression<?>... values) {
	final var arguments = new ArrayList<SqmTypedNode<?>>( values.length + 1 );
	for ( int i = 0; i < values.length; i++ ) {
		arguments.add( (SqmTypedNode<?>) values[i] );
	}
	arguments.add( SqmJsonNullBehavior.NULL );
	return getFunctionDescriptor( "json_array" ).generateSqmExpression( arguments, null, queryEngine );
}

/**
 * Produces a {@code json_array} expression over the given values.
 */
@Override
public SqmExpression<String> jsonArray(Expression<?>... values) {
	// Build a typed list explicitly rather than double-casting the asList view.
	final var arguments = new ArrayList<SqmTypedNode<?>>( values.length );
	for ( Expression<?> expression : values ) {
		arguments.add( (SqmTypedNode<?>) expression );
	}
	return getFunctionDescriptor( "json_array" ).generateSqmExpression( arguments, null, queryEngine );
}
// json_arrayagg overloads: all fan out to the private jsonArrayAgg(...) helper.
// The "WithNulls" variants pass the SqmJsonNullBehavior.NULL marker; filter and
// order-by arguments are forwarded when present.
@Override
public SqmExpression<String> jsonArrayAgg(Expression<?> value) {
return jsonArrayAgg( (SqmExpression<?>) value, null, null, null );
}
@Override
public SqmExpression<String> jsonArrayAgg(Expression<?> value, Predicate filter, JpaOrder... orderBy) {
return jsonArrayAgg( (SqmExpression<?>) value, null, (SqmPredicate) filter, orderByClause( orderBy ) );
}
@Override
public SqmExpression<String> jsonArrayAgg(Expression<?> value, Predicate filter) {
return jsonArrayAgg( (SqmExpression<?>) value, null, (SqmPredicate) filter, null );
}
@Override
public SqmExpression<String> jsonArrayAgg(Expression<?> value, JpaOrder... orderBy) {
return jsonArrayAgg( (SqmExpression<?>) value, null, null, orderByClause( orderBy ) );
}
@Override
public SqmExpression<String> jsonArrayAggWithNulls(Expression<?> value) {
return jsonArrayAgg( (SqmExpression<?>) value, SqmJsonNullBehavior.NULL, null, null );
}
@Override
public SqmExpression<String> jsonArrayAggWithNulls(Expression<?> value, Predicate filter, JpaOrder... orderBy) {
return jsonArrayAgg(
(SqmExpression<?>) value,
SqmJsonNullBehavior.NULL,
(SqmPredicate) filter,
orderByClause( orderBy )
);
}
@Override
public SqmExpression<String> jsonArrayAggWithNulls(Expression<?> value, Predicate filter) {
return jsonArrayAgg( (SqmExpression<?>) value, SqmJsonNullBehavior.NULL, (SqmPredicate) filter, null );
}
@Override
public SqmExpression<String> jsonArrayAggWithNulls(Expression<?> value, JpaOrder... orderBy) {
return jsonArrayAgg( (SqmExpression<?>) value, SqmJsonNullBehavior.NULL, null, orderByClause( orderBy ) );
}
// json_objectagg overloads: the full matrix of null-behavior / unique-keys /
// filter combinations, all delegating to the private jsonObjectAgg(...) helper.
@Override
public SqmExpression<String> jsonObjectAggWithUniqueKeysAndNulls(Expression<?> key, Expression<?> value) {
return jsonObjectAgg( key, value, SqmJsonNullBehavior.NULL, SqmJsonObjectAggUniqueKeysBehavior.WITH, null );
}
@Override
public SqmExpression<String> jsonObjectAggWithUniqueKeys(Expression<?> key, Expression<?> value) {
return jsonObjectAgg( key, value, null, SqmJsonObjectAggUniqueKeysBehavior.WITH, null );
}
@Override
public SqmExpression<String> jsonObjectAggWithNulls(Expression<?> key, Expression<?> value) {
return jsonObjectAgg( key, value, SqmJsonNullBehavior.NULL, null, null );
}
@Override
public SqmExpression<String> jsonObjectAgg(Expression<?> key, Expression<?> value) {
return jsonObjectAgg( key, value, null, null, null );
}
@Override
public SqmExpression<String> jsonObjectAggWithUniqueKeysAndNulls(
Expression<?> key,
Expression<?> value,
Predicate filter) {
return jsonObjectAgg( key, value, SqmJsonNullBehavior.NULL, SqmJsonObjectAggUniqueKeysBehavior.WITH, filter );
}
@Override
public SqmExpression<String> jsonObjectAggWithUniqueKeys(Expression<?> key, Expression<?> value, Predicate filter) {
return jsonObjectAgg( key, value, null, SqmJsonObjectAggUniqueKeysBehavior.WITH, filter );
}
@Override
public SqmExpression<String> jsonObjectAggWithNulls(Expression<?> key, Expression<?> value, Predicate filter) {
return jsonObjectAgg( key, value, SqmJsonNullBehavior.NULL, null, filter );
}
@Override
public SqmExpression<String> jsonObjectAgg(Expression<?> key, Expression<?> value, Predicate filter) {
return jsonObjectAgg( key, value, null, null, filter );
}
/**
 * Shared implementation for the {@code json_objectagg} overloads.
 *
 * @param key the JSON key expression
 * @param value the JSON value expression
 * @param nullBehavior optional null-behavior marker, appended when present
 * @param uniqueKeysBehavior optional unique-keys marker, appended when present
 * @param filterPredicate optional aggregate filter
 */
private SqmExpression<String> jsonObjectAgg(
		Expression<?> key,
		Expression<?> value,
		@Nullable SqmJsonNullBehavior nullBehavior,
		@Nullable SqmJsonObjectAggUniqueKeysBehavior uniqueKeysBehavior,
		@Nullable Predicate filterPredicate) {
	// At most four arguments: key, value, plus the two optional markers.
	final var arguments = new ArrayList<SqmTypedNode<?>>( 4 );
	arguments.add( (SqmTypedNode<?>) key );
	arguments.add( (SqmTypedNode<?>) value );
	if ( nullBehavior != null ) {
		arguments.add( nullBehavior );
	}
	if ( uniqueKeysBehavior != null ) {
		arguments.add( uniqueKeysBehavior );
	}
	return getFunctionDescriptor( "json_objectagg" ).generateAggregateSqmExpression(
			arguments,
			(SqmPredicate) filterPredicate,
			null,
			queryEngine
	);
}
/**
 * Converts the given JPA sort specifications into an SQM order-by clause,
 * or returns {@code null} when no ordering was requested.
 */
private @Nullable SqmOrderByClause orderByClause(JpaOrder[] orderBy) {
	if ( orderBy.length == 0 ) {
		return null;
	}
	final var clause = new SqmOrderByClause( orderBy.length );
	for ( int i = 0; i < orderBy.length; i++ ) {
		clause.addSortSpecification( (SqmSortSpecification) orderBy[i] );
	}
	return clause;
}
/**
 * Shared implementation for the {@code json_arrayagg} overloads.
 * <p>
 * Fix: the passed {@code nullBehavior} is now forwarded instead of the
 * hard-coded {@code SqmJsonNullBehavior.NULL} constant. Equivalent for the
 * current callers (which only ever pass {@code NULL} or {@code null}), but the
 * old form silently ignored the parameter and would break if another behavior
 * were introduced; this also matches how {@code jsonObjectAgg} forwards its
 * behavior markers.
 *
 * @param value the aggregated value expression
 * @param nullBehavior optional null-behavior marker, appended when present
 * @param filterPredicate optional aggregate filter
 * @param orderByClause optional within-group ordering
 */
private SqmExpression<String> jsonArrayAgg(
		SqmExpression<?> value,
		@Nullable SqmJsonNullBehavior nullBehavior,
		@Nullable SqmPredicate filterPredicate,
		@Nullable SqmOrderByClause orderByClause) {
	return getFunctionDescriptor( "json_arrayagg" ).generateOrderedSetAggregateSqmExpression(
			nullBehavior == null
					? Collections.singletonList( value )
					: asList( value, nullBehavior ),
			filterPredicate,
			orderByClause,
			null,
			queryEngine
	);
}
/**
 * Produces a {@code json_object} expression that additionally passes the
 * {@link SqmJsonNullBehavior#NULL} marker.
 */
@Override
public SqmExpression<String> jsonObjectWithNulls(Map<?, ? extends Expression<?>> keyValues) {
	final var arguments = keyValuesAsAlternatingList( keyValues );
	arguments.add( SqmJsonNullBehavior.NULL );
	return getFunctionDescriptor( "json_object" ).generateSqmExpression(
			arguments,
			null,
			queryEngine
	);
}

/**
 * Produces a {@code json_object} expression from the given key/value map.
 */
@Override
public SqmExpression<String> jsonObject(Map<?, ? extends Expression<?>> keyValues) {
	return getFunctionDescriptor( "json_object" ).generateSqmExpression(
			keyValuesAsAlternatingList( keyValues ),
			null,
			queryEngine
	);
}

/**
 * Flattens the map into the alternating {@code key, value, key, value, ...}
 * argument list expected by the {@code json_object} function; keys are wrapped
 * as literal parameters via {@link #value}.
 */
private ArrayList<SqmTypedNode<?>> keyValuesAsAlternatingList(Map<?, ? extends Expression<?>> keyValues) {
	// Fix: two nodes are added per map entry (plus possibly one trailing
	// null-behavior marker by jsonObjectWithNulls), so presize for that instead
	// of keyValues.size(), which guaranteed at least one internal regrow.
	final var list = new ArrayList<SqmTypedNode<?>>( 2 * keyValues.size() + 1 );
	for ( Map.Entry<?, ? extends Expression<?>> entry : keyValues.entrySet() ) {
		list.add( value( entry.getKey() ) );
		list.add( (SqmTypedNode<?>) entry.getValue() );
	}
	return list;
}
/**
 * Produces a {@code json_set} expression with a constant value.
 */
@Override
public SqmExpression<String> jsonSet(Expression<?> jsonDocument, Expression<String> jsonPath, Object value) {
	return jsonSet( jsonDocument, jsonPath, value( value ) );
}

// Constant path and constant value.
@Override
public SqmExpression<String> jsonSet(Expression<?> jsonDocument, String jsonPath, Object value) {
	return jsonSet( jsonDocument, value( jsonPath ), value( value ) );
}

// Constant path, expression value.
@Override
public SqmExpression<String> jsonSet(Expression<?> jsonDocument, String jsonPath, Expression<?> value) {
	return jsonSet( jsonDocument, value( jsonPath ), value );
}

/**
 * Produces a {@code json_set} expression over document, path and value arguments.
 */
@Override
public SqmExpression<String> jsonSet(Expression<?> jsonDocument, Expression<String> jsonPath, Expression<?> value) {
	// Cast each argument individually instead of double-casting the asList view.
	return getFunctionDescriptor( "json_set" ).generateSqmExpression(
			asList( (SqmTypedNode<?>) jsonDocument, (SqmTypedNode<?>) jsonPath, (SqmTypedNode<?>) value ),
			null,
			queryEngine
	);
}

/**
 * Produces a {@code json_remove} expression with a constant JSON path.
 */
@Override
public SqmExpression<String> jsonRemove(Expression<?> jsonDocument, String jsonPath) {
	return jsonRemove( jsonDocument, value( jsonPath ) );
}

/**
 * Produces a {@code json_remove} expression over the document and path arguments.
 */
@Override
public SqmExpression<String> jsonRemove(Expression<?> jsonDocument, Expression<String> jsonPath) {
	return getFunctionDescriptor( "json_remove" ).generateSqmExpression(
			asList( (SqmTypedNode<?>) jsonDocument, (SqmTypedNode<?>) jsonPath ),
			null,
			queryEngine
	);
}
/**
 * Produces a {@code json_insert} expression with a constant value.
 */
@Override
public SqmExpression<String> jsonInsert(Expression<?> jsonDocument, Expression<String> jsonPath, Object value) {
	return jsonInsert( jsonDocument, jsonPath, value( value ) );
}

// Constant path and constant value.
@Override
public SqmExpression<String> jsonInsert(Expression<?> jsonDocument, String jsonPath, Object value) {
	return jsonInsert( jsonDocument, value( jsonPath ), value );
}

// Constant path, expression value.
@Override
public SqmExpression<String> jsonInsert(Expression<?> jsonDocument, String jsonPath, Expression<?> value) {
	return jsonInsert( jsonDocument, value( jsonPath ), value );
}

/**
 * Produces a {@code json_insert} expression over document, path and value arguments.
 */
@Override
public SqmExpression<String> jsonInsert(
		Expression<?> jsonDocument,
		Expression<String> jsonPath,
		Expression<?> value) {
	// Cast each argument individually instead of double-casting the asList view.
	return getFunctionDescriptor( "json_insert" ).generateSqmExpression(
			asList( (SqmTypedNode<?>) jsonDocument, (SqmTypedNode<?>) jsonPath, (SqmTypedNode<?>) value ),
			null,
			queryEngine
	);
}

/**
 * Produces a {@code json_replace} expression with a constant value.
 */
@Override
public SqmExpression<String> jsonReplace(Expression<?> jsonDocument, Expression<String> jsonPath, Object value) {
	return jsonReplace( jsonDocument, jsonPath, value( value ) );
}

// Constant path and constant value.
@Override
public SqmExpression<String> jsonReplace(Expression<?> jsonDocument, String jsonPath, Object value) {
	return jsonReplace( jsonDocument, value( jsonPath ), value );
}

// Constant path, expression value.
@Override
public SqmExpression<String> jsonReplace(Expression<?> jsonDocument, String jsonPath, Expression<?> value) {
	return jsonReplace( jsonDocument, value( jsonPath ), value );
}

/**
 * Produces a {@code json_replace} expression over document, path and value arguments.
 */
@Override
public SqmExpression<String> jsonReplace(
		Expression<?> jsonDocument,
		Expression<String> jsonPath,
		Expression<?> value) {
	return getFunctionDescriptor( "json_replace" ).generateSqmExpression(
			asList( (SqmTypedNode<?>) jsonDocument, (SqmTypedNode<?>) jsonPath, (SqmTypedNode<?>) value ),
			null,
			queryEngine
	);
}
/**
 * Produces a {@code json_mergepatch} expression with a constant document.
 */
@Override
public SqmExpression<String> jsonMergepatch(String document, Expression<?> patch) {
	return jsonMergepatch( value( document ), patch );
}

// Constant patch.
@Override
public SqmExpression<String> jsonMergepatch(Expression<?> document, String patch) {
	return jsonMergepatch( document, value( patch ) );
}

/**
 * Produces a {@code json_mergepatch} expression over the document and patch arguments.
 */
@Override
public SqmExpression<String> jsonMergepatch(Expression<?> document, Expression<?> patch) {
	return getFunctionDescriptor( "json_mergepatch" ).generateSqmExpression(
			asList( (SqmTypedNode<?>) document, (SqmTypedNode<?>) patch ),
			null,
			queryEngine
	);
}
/**
 * Produces an {@code xmlelement} expression for the named element.
 * NOTE(review): the argument list is sized 3 although only the name literal is
 * added here — presumably so further arguments can be appended via the returned
 * {@code SqmXmlElementExpression}; confirm before changing.
 */
@Override
public SqmXmlElementExpression xmlelement(String elementName) {
	final List<SqmTypedNode<?>> arguments = new ArrayList<>( 3 );
	arguments.add( new SqmLiteral<>( elementName, getStringType(), this ) );
	return (SqmXmlElementExpression) getFunctionDescriptor( "xmlelement" ).<String>generateSqmExpression(
			arguments,
			null,
			queryEngine
	);
}

/**
 * Produces an {@code xmlcomment} expression for the given comment text.
 */
@Override
public SqmExpression<String> xmlcomment(String comment) {
	return getFunctionDescriptor( "xmlcomment" ).generateSqmExpression(
			Collections.singletonList( value( comment ) ),
			null,
			queryEngine
	);
}

/**
 * Wraps an expression with an explicit name, e.g. for use with {@code xmlforest}.
 */
@Override
public <T> SqmExpression<T> named(Expression<T> expression, String name) {
	return new SqmNamedExpression<>( (SqmExpression<T>) expression, name );
}
/**
 * Produces an {@code xmlforest} expression. Every argument must either carry an
 * explicit name (see {@link #named}) or be a path expression referring to a
 * persistent attribute, from which the XML element name is derived.
 *
 * @throws SemanticException if an unnamed, non-path expression is passed
 */
@Override
public SqmExpression<String> xmlforest(Expression<?>... elements) {
	return xmlforest( asList( elements ) );
}

@Override
public SqmExpression<String> xmlforest(List<? extends Expression<?>> elements) {
	final ArrayList<SqmExpression<?>> arguments = new ArrayList<>( elements.size() );
	for ( Expression<?> expression : elements ) {
		if ( expression instanceof SqmNamedExpression<?> ) {
			arguments.add( (SqmNamedExpression<?>) expression );
		}
		else {
			if ( !( expression instanceof SqmPath<?> path ) || !( path.getModel() instanceof PersistentAttribute<?, ?> attribute ) ) {
				// Fix: the quote around the rendered expression was unbalanced
				// ("'..." with no closing quote before "without").
				throw new SemanticException(
						"Can't use expression '" + expression + "' without explicit name in xmlforest function" +
								", because XML element names can only be derived from path expressions."
				);
			}
			// Derive the element name from the persistent attribute the path refers to.
			arguments.add( new SqmNamedExpression<>( (SqmExpression<?>) expression, attribute.getName() ) );
		}
	}
	return getFunctionDescriptor( "xmlforest" ).generateSqmExpression(
			arguments,
			null,
			queryEngine
	);
}
/**
 * Produces an {@code xmlconcat} expression concatenating the given XML fragments.
 */
@Override
public SqmExpression<String> xmlconcat(Expression<?>... elements) {
	return xmlconcat( asList( elements ) );
}

/**
 * Produces an {@code xmlconcat} expression concatenating the given XML fragments.
 * <p>
 * Fix: this previously looked up the {@code "xmlforest"} descriptor, so calling
 * {@code xmlconcat(...)} actually generated an XMLFOREST function call.
 */
@Override
public SqmExpression<String> xmlconcat(List<? extends Expression<?>> elements) {
	return getFunctionDescriptor( "xmlconcat" ).generateSqmExpression(
			(List<? extends SqmTypedNode<?>>) elements,
			null,
			queryEngine
	);
}
/**
 * Produces an {@code xmlpi} (processing instruction) expression for the named target.
 */
@Override
public SqmExpression<String> xmlpi(String elementName) {
	final var piFunction = getFunctionDescriptor( "xmlpi" );
	return piFunction.generateSqmExpression(
			Collections.singletonList( literal( elementName ) ),
			null,
			queryEngine
	);
}

// Variant with explicit processing-instruction content.
@Override
public SqmExpression<String> xmlpi(String elementName, Expression<String> content) {
	final var piFunction = getFunctionDescriptor( "xmlpi" );
	return piFunction.generateSqmExpression(
			asList( literal( elementName ), (SqmTypedNode<?>) content ),
			null,
			queryEngine
	);
}

/**
 * Produces an {@code xmlquery} expression with a constant query string.
 */
@Override
public SqmExpression<String> xmlquery(String query, Expression<?> xmlDocument) {
	return xmlquery( value( query ), xmlDocument );
}

/**
 * Produces an {@code xmlquery} expression over the query and document arguments.
 */
@Override
public SqmExpression<String> xmlquery(Expression<String> query, Expression<?> xmlDocument) {
	return getFunctionDescriptor( "xmlquery" ).generateSqmExpression(
			asList( (SqmTypedNode<?>) query, (SqmTypedNode<?>) xmlDocument ),
			null,
			queryEngine
	);
}

/**
 * Produces an {@code xmlexists} expression with a constant query string.
 */
@Override
public SqmExpression<Boolean> xmlexists(String query, Expression<?> xmlDocument) {
	return xmlexists( value( query ), xmlDocument );
}

/**
 * Produces an {@code xmlexists} expression over the query and document arguments.
 */
@Override
public SqmExpression<Boolean> xmlexists(Expression<String> query, Expression<?> xmlDocument) {
	return getFunctionDescriptor( "xmlexists" ).generateSqmExpression(
			asList( (SqmTypedNode<?>) query, (SqmTypedNode<?>) xmlDocument ),
			null,
			queryEngine
	);
}
// xmlagg overloads: each delegates to the four-argument form, which builds the
// within-group function call via functionWithinGroup.
@Override
public SqmExpression<String> xmlagg(JpaOrder order, Expression<?> argument) {
return xmlagg( order, null, null, argument );
}
@Override
public SqmExpression<String> xmlagg(JpaOrder order, JpaPredicate filter, Expression<?> argument) {
return xmlagg( order, filter, null, argument );
}
@Override
public SqmExpression<String> xmlagg(JpaOrder order, JpaWindow window, Expression<?> argument) {
return xmlagg( order, null, window, argument );
}
// Base form: filter and window are both optional.
@Override
public SqmExpression<String> xmlagg(JpaOrder order, @Nullable JpaPredicate filter, @Nullable JpaWindow window, Expression<?> argument) {
return functionWithinGroup( "xmlagg", String.class, order, filter, window, argument );
}
/**
 * Produces a set-returning function expression for an arbitrary function name.
 */
@Override
public <E> SqmSetReturningFunction<E> setReturningFunction(String name, Expression<?>... args) {
	final var descriptor = getSetReturningFunctionDescriptor( name );
	return descriptor.generateSqmExpression( expressionList( args ), queryEngine );
}

/**
 * Produces an {@code unnest} set-returning expression over an array argument.
 */
@Override
public <E> SqmSetReturningFunction<E> unnestArray(Expression<E[]> array) {
	final var unnestFunction = getSetReturningFunctionDescriptor( "unnest" );
	return unnestFunction.generateSqmExpression(
			Collections.singletonList( (SqmTypedNode<?>) array ),
			queryEngine
	);
}

/**
 * Produces an {@code unnest} set-returning expression over a collection argument.
 */
@Override
public <E> SqmSetReturningFunction<E> unnestCollection(Expression<? extends Collection<E>> collection) {
	final var unnestFunction = getSetReturningFunctionDescriptor( "unnest" );
	return unnestFunction.generateSqmExpression(
			Collections.singletonList( (SqmTypedNode<?>) collection ),
			queryEngine
	);
}
// generate_series overloads (temporal form). The Expression-based overload builds
// the SQM call directly; every other overload wraps its constant arguments via
// value() and delegates, covering each constant/expression combination.
@Override
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(Expression<E> start, Expression<E> stop, Expression<? extends TemporalAmount> step) {
return getSetReturningFunctionDescriptor( "generate_series" ).generateSqmExpression(
asList( (SqmTypedNode<?>) start, (SqmTypedNode<?>) stop, (SqmTypedNode<?>) step ),
queryEngine
);
}
@Override
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(E start, E stop, TemporalAmount step) {
return generateTimeSeries( value( start ), value( stop ), value( step ) );
}
@Override
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(E start, Expression<E> stop, TemporalAmount step) {
return generateTimeSeries( value( start ), stop, value( step ) );
}
@Override
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(Expression<E> start, E stop, TemporalAmount step) {
return generateTimeSeries( start, value( stop ), value( step ) );
}
@Override
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(Expression<E> start, Expression<E> stop, TemporalAmount step) {
return generateTimeSeries( start, stop, value( step ) );
}
@Override
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(E start, E stop, Expression<? extends TemporalAmount> step) {
return generateTimeSeries( value( start ), value( stop ), step );
}
@Override
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(Expression<E> start, E stop, Expression<? extends TemporalAmount> step) {
return generateTimeSeries( start, value( stop ), step );
}
@Override
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(E start, Expression<E> stop, Expression<? extends TemporalAmount> step) {
return generateTimeSeries( value( start ), stop, step );
}
// generate_series overloads (numeric form), same delegation pattern as above.
@Override
public <E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, Expression<E> stop, Expression<E> step) {
return getSetReturningFunctionDescriptor( "generate_series" ).generateSqmExpression(
asList( (SqmTypedNode<?>) start, (SqmTypedNode<?>) stop, (SqmTypedNode<?>) step ),
queryEngine
);
}
@Override
public <E extends Number> SqmSetReturningFunction<E> generateSeries(E start, E stop, E step) {
return generateSeries( value( start ), value( stop ), value( step ) );
}
@Override
public <E extends Number> SqmSetReturningFunction<E> generateSeries(E start, E stop, Expression<E> step) {
return generateSeries( value( start ), value( stop ), step );
}
@Override
public <E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, E stop, E step) {
return generateSeries( start, value( stop ), value( step ) );
}
@Override
public <E extends Number> SqmSetReturningFunction<E> generateSeries(E start, Expression<E> stop, E step) {
return generateSeries( value( start ), stop, value( step ) );
}
@Override
public <E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, Expression<E> stop, E step) {
return generateSeries( start, stop, value( step ) );
}
@Override
public <E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, E stop, Expression<E> step) {
return generateSeries( start, value( stop ), step );
}
@Override
public <E extends Number> SqmSetReturningFunction<E> generateSeries(E start, Expression<E> stop, Expression<E> step) {
return generateSeries( value( start ), stop, step );
}
// Two-argument numeric form: no explicit step is passed to generate_series.
@Override
public <E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, Expression<E> stop) {
return getSetReturningFunctionDescriptor( "generate_series" ).generateSqmExpression(
asList( (SqmTypedNode<?>) start, (SqmTypedNode<?>) stop ),
queryEngine
);
}
@Override
public <E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, E stop) {
return generateSeries( start, value( stop ) );
}
@Override
public <E extends Number> SqmSetReturningFunction<E> generateSeries(E start, Expression<E> stop) {
return generateSeries( value( start ), stop );
}
@Override
public <E extends Number> SqmSetReturningFunction<E> generateSeries(E start, E stop) {
return generateSeries( value( start ), value( stop ) );
}
/**
 * Produces a {@code json_table} expression over the whole document (no path).
 */
@Override
public SqmJsonTableFunction<?> jsonTable(Expression<?> jsonDocument) {
	return jsonTable( jsonDocument, (Expression<String>) null );
}

/**
 * Produces a {@code json_table} expression with a constant JSON path.
 */
@Override
public SqmJsonTableFunction<?> jsonTable(Expression<?> jsonDocument, String jsonPath) {
	return jsonTable( jsonDocument, value( jsonPath ) );
}

/**
 * Produces a {@code json_table} expression; the path argument is optional and is
 * omitted from the function argument list when {@code null}.
 */
@Override
public SqmJsonTableFunction<?> jsonTable(Expression<?> jsonDocument, @Nullable Expression<String> jsonPath) {
	final List<SqmTypedNode<?>> arguments = jsonPath == null
			? Collections.singletonList( (SqmTypedNode<?>) jsonDocument )
			: asList( (SqmTypedNode<?>) jsonDocument, (SqmTypedNode<?>) jsonPath );
	return (SqmJsonTableFunction<?>) getSetReturningFunctionDescriptor( "json_table" ).generateSqmExpression(
			arguments,
			queryEngine
	);
}

/**
 * Produces an {@code xmltable} expression with a constant XPath.
 */
@Override
public SqmXmlTableFunction<?> xmlTable(String xpath, Expression<?> xmlDocument) {
	return xmlTable( value( xpath ), xmlDocument );
}

/**
 * Produces an {@code xmltable} expression over the XPath and document arguments.
 */
@Override
public SqmXmlTableFunction<?> xmlTable(Expression<String> xpath, Expression<?> xmlDocument) {
	return (SqmXmlTableFunction<?>) getSetReturningFunctionDescriptor( "xmltable" ).generateSqmExpression(
			asList( (SqmTypedNode<?>) xpath, (SqmTypedNode<?>) xmlDocument ),
			queryEngine
	);
}
}
|
mapped
|
java
|
google__guava
|
android/guava-tests/benchmark/com/google/common/cache/LoadingCacheSingleThreadBenchmark.java
|
{
"start": 1064,
"end": 3497
}
|
// Single-threaded Caliper benchmark of LoadingCache behavior under a skewed key
// distribution: uniform random integers in [0, distinctKeys^concentration) are
// raised to 1/concentration, making higher keys appear more often.
class ____ {
@Param({"1000", "2000"})
int maximumSize;
@Param("5000")
int distinctKeys;
@Param("4")
int segments;
// 1 means uniform likelihood of keys; higher means some keys are more popular
// tweak this to control hit rate
@Param("2.5")
double concentration;
Random random = new Random();
LoadingCache<Integer, Integer> cache;
int max;
// Counters shared between benchmark phases so tearDown can report the hit rate.
static AtomicLong requests = new AtomicLong(0);
static AtomicLong misses = new AtomicLong(0);
@BeforeExperiment
void setUp() {
// random integers will be generated in this range, then raised to the
// power of (1/concentration) and floor()ed
max = Ints.checkedCast((long) Math.pow(distinctKeys, concentration));
// Each cache miss returns the current miss count as the loaded value.
cache =
CacheBuilder.newBuilder()
.concurrencyLevel(segments)
.maximumSize(maximumSize)
.build(
new CacheLoader<Integer, Integer>() {
@Override
public Integer load(Integer from) {
return (int) misses.incrementAndGet();
}
});
// To start, fill up the cache.
// Each miss both increments the counter and causes the map to grow by one,
// so until evictions begin, the size of the map is the greatest return
// value seen so far
while (cache.getUnchecked(nextRandomKey()) < maximumSize) {}
// Reset counters so the measured phase starts from zero.
requests.set(0);
misses.set(0);
}
@Benchmark
int time(int reps) {
// Accumulate into a dummy so the JIT cannot eliminate the cache lookups.
int dummy = 0;
for (int i = 0; i < reps; i++) {
dummy += cache.getUnchecked(nextRandomKey());
}
requests.addAndGet(reps);
return dummy;
}
private int nextRandomKey() {
int a = random.nextInt(max);
/*
* For example, if concentration=2.0, the following takes the square root of
* the uniformly-distributed random integer, then truncates any fractional
* part, so higher integers would appear (in this case linearly) more often
* than lower ones.
*/
return (int) Math.pow(a, 1.0 / concentration);
}
@AfterExperiment
void tearDown() {
double req = requests.get();
double hit = req - misses.get();
// Currently, this is going into /dev/null, but I'll fix that
System.out.println("hit rate: " + hit / req);
}
// for proper distributions later:
// import JSci.maths.statistics.ProbabilityDistribution;
// int key = (int) dist.inverse(random.nextDouble());
}
|
LoadingCacheSingleThreadBenchmark
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/common/component/LifecycleTests.java
|
{
"start": 1205,
"end": 3553
}
|
class ____ extends ESTestCase {
// Exercises both legal transition sequences: INITIALIZED -> CLOSED directly, and
// INITIALIZED -> STARTED -> STOPPED -> CLOSED.
public void testTransitions() {
doTransitionTest(false);
doTransitionTest(true);
}
private void doTransitionTest(boolean startBeforeClosing) {
final var lifecycle = new Lifecycle();
// A fresh lifecycle may move to either STARTED or CLOSED.
assertState(lifecycle, Lifecycle.State.INITIALIZED);
assertTrue(lifecycle.canMoveToStarted());
assertTrue(lifecycle.canMoveToClosed());
if (startBeforeClosing) {
assertTrue(lifecycle.moveToStarted());
assertState(lifecycle, Lifecycle.State.STARTED);
// Starting twice is not permitted; stopping is the only forward move.
assertFalse(lifecycle.canMoveToStarted());
assertTrue(lifecycle.canMoveToStopped());
assertTrue(lifecycle.moveToStopped());
assertState(lifecycle, Lifecycle.State.STOPPED);
assertFalse(lifecycle.canMoveToStopped());
assertTrue(lifecycle.canMoveToClosed());
}
// CLOSED is terminal: no further transitions are possible.
assertTrue(lifecycle.moveToClosed());
assertState(lifecycle, Lifecycle.State.CLOSED);
assertFalse(lifecycle.canMoveToClosed());
}
// Verifies that every state predicate agrees with the expected state.
private static void assertState(Lifecycle lifecycle, Lifecycle.State expectedState) {
assertEquals(expectedState, lifecycle.state());
assertEquals(expectedState == Lifecycle.State.INITIALIZED, lifecycle.initialized());
assertEquals(expectedState == Lifecycle.State.STARTED, lifecycle.started());
assertEquals(expectedState == Lifecycle.State.STOPPED, lifecycle.stopped());
assertEquals(expectedState == Lifecycle.State.CLOSED, lifecycle.closed());
assertEquals(expectedState == Lifecycle.State.STOPPED || expectedState == Lifecycle.State.CLOSED, lifecycle.stoppedOrClosed());
}
// Drives each transition from multiple threads via the harness (defined below)
// and checks that exactly one thread wins each transition.
public void testThreadSafety() {
final var lifecycle = new Lifecycle();
try (var testHarness = new ThreadSafetyTestHarness(between(1, 10))) {
assertState(lifecycle, Lifecycle.State.INITIALIZED);
testHarness.testTransition(lifecycle::moveToStarted);
assertState(lifecycle, Lifecycle.State.STARTED);
testHarness.testTransition(lifecycle::moveToStopped);
assertState(lifecycle, Lifecycle.State.STOPPED);
testHarness.testTransition(lifecycle::moveToClosed);
assertState(lifecycle, Lifecycle.State.CLOSED);
}
}
private static
|
LifecycleTests
|
java
|
apache__camel
|
components/camel-ai/camel-neo4j/src/test/java/org/apache/camel/component/neo4j/it/Neo4jVectorEmbeddingsIT.java
|
{
"start": 6197,
"end": 6984
}
|
// Test fixtures pairing a node id and label with a two-element embedding vector.
// NOTE(review): the enum header shows a placeholder name while the constructor is
// named TestData — the declared name appears lost in extraction; confirm upstream.
enum ____ {
VECTOR_1(9, "VECTOR_1", List.of(0.8f, 0.6f)),
VECTOR_2(10, "VECTOR_2", List.of(0.1f, 0.9f)),
VECTOR_3(11, "VECTOR_3", List.of(0.7f, 0.7f)),
VECTOR_4(12, "VECTOR_4", List.of(-0.3f, -0.9f)),
VECTOR_5(13, "VECTOR_5", List.of(1.2f, 0.8f));
// Numeric id used by the tests.
private final int id;
// Text payload; mirrors the constant name.
private final String text;
// Two-dimensional embedding vector.
private final List<Float> vectors;
TestData(int id, String text, List<Float> vectors) {
this.id = id;
this.text = text;
this.vectors = vectors;
}
public int getId() {
return id;
}
public List<Float> getVectors() {
return vectors;
}
public String getText() {
return text;
}
}
}
|
TestData
|
java
|
micronaut-projects__micronaut-core
|
test-suite/src/test/java/io/micronaut/docs/factories/primitive/CylinderFactory.java
|
{
"start": 232,
"end": 387
}
|
class ____ {
@Bean
@Named("V8") // <1>
final int v8 = 8;
@Bean
@Named("V6") // <1>
final int v6 = 6;
}
// end::class[]
|
CylinderFactory
|
java
|
elastic__elasticsearch
|
x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java
|
{
"start": 1905,
"end": 10169
}
|
class ____ extends HandledTransportAction<GetPipelineRequest, GetPipelineResponse> {
private static final Logger logger = LogManager.getLogger(TransportGetPipelineAction.class);
private static final Integer SIZE = 10000;
private static final String WILDCARD = "*";
private static final Pattern WILDCARD_PATTERN = Pattern.compile("[^*]+|(\\*)");
private final Client client;
// All pipeline reads go through an origin-setting client so they run with the
// Logstash management origin rather than the calling user's context.
@Inject
public TransportGetPipelineAction(TransportService transportService, ActionFilters actionFilters, Client client) {
super(GetPipelineAction.NAME, transportService, actionFilters, GetPipelineRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE);
this.client = new OriginSettingClient(client, LOGSTASH_MANAGEMENT_ORIGIN);
}
// Splits the requested ids into explicit ids and wildcard patterns. Explicit-only
// requests are served by a multi-get; otherwise (wildcards present, or no ids at
// all) a scroll search over the whole .logstash index is filtered client-side.
@Override
protected void doExecute(Task task, GetPipelineRequest request, ActionListener<GetPipelineResponse> listener) {
final Set<String> explicitPipelineIds = request.ids()
.stream()
.filter(pipeline -> pipeline.contains(WILDCARD) == false)
.collect(Collectors.toSet());
final Set<Pattern> wildcardPipelinePatterns = request.ids()
.stream()
.filter(pipeline -> pipeline.contains(WILDCARD))
.map(TransportGetPipelineAction::toWildcardPipelineIdPattern)
.map(Pattern::compile)
.collect(Collectors.toSet());
// Fast path: only explicit ids requested.
if (explicitPipelineIds.size() > 0 && wildcardPipelinePatterns.size() == 0) {
getPipelinesByIds(explicitPipelineIds, listener);
return;
}
client.prepareSearch(Logstash.LOGSTASH_CONCRETE_INDEX_NAME)
.setSource(
SearchSourceBuilder.searchSource().fetchSource(true).query(QueryBuilders.matchAllQuery()).size(SIZE).trackTotalHits(true)
)
.setScroll(TimeValue.timeValueMinutes(1L))
.execute(ActionListener.wrap(searchResponse -> {
final int numHits = Math.toIntExact(searchResponse.getHits().getTotalHits().value());
final Map<String, BytesReference> pipelineSources = Maps.newMapWithExpectedSize(numHits);
// Best-effort scroll cleanup: failures are only logged, never propagated.
final Consumer<SearchResponse> clearScroll = (response) -> {
if (response != null && response.getScrollId() != null) {
ClearScrollRequest clearScrollRequest = new ClearScrollRequest();
clearScrollRequest.addScrollId(response.getScrollId());
client.clearScroll(
clearScrollRequest,
ActionListener.wrap(
(r) -> {},
e -> logger.warn(() -> "clear scroll failed for scroll id [" + response.getScrollId() + "]", e)
)
);
}
};
// Recursively pages through the scroll, filtering hits against the
// explicit ids and wildcard patterns.
handleFilteringSearchResponse(
searchResponse,
pipelineSources,
explicitPipelineIds,
wildcardPipelinePatterns,
0,
clearScroll,
listener
);
}, e -> handleFailure(e, listener)));
}
private void getPipelinesByIds(Set<String> ids, ActionListener<GetPipelineResponse> listener) {
client.prepareMultiGet().addIds(Logstash.LOGSTASH_CONCRETE_INDEX_NAME, ids).execute(ActionListener.wrap(mGetResponse -> {
logFailures(mGetResponse);
listener.onResponse(
new GetPipelineResponse(
Arrays.stream(mGetResponse.getResponses())
.filter(itemResponse -> itemResponse.isFailed() == false)
.map(MultiGetItemResponse::getResponse)
.filter(GetResponse::isExists)
.collect(Collectors.toMap(GetResponse::getId, GetResponse::getSourceAsBytesRef))
)
);
}, e -> handleFailure(e, listener)));
}
private static void handleFailure(Exception e, ActionListener<GetPipelineResponse> listener) {
Throwable cause = ExceptionsHelper.unwrapCause(e);
if (cause instanceof IndexNotFoundException) {
listener.onResponse(new GetPipelineResponse(Map.of()));
} else {
listener.onFailure(e);
}
}
private void handleFilteringSearchResponse(
SearchResponse searchResponse,
Map<String, BytesReference> pipelineSources,
Set<String> explicitPipelineIds,
Set<Pattern> wildcardPipelinePatterns,
int numberOfHitsSeenPreviously,
Consumer<SearchResponse> clearScroll,
ActionListener<GetPipelineResponse> listener
) {
int numberOfHitsSeenSoFar = numberOfHitsSeenPreviously + searchResponse.getHits().getHits().length;
if (numberOfHitsSeenSoFar > searchResponse.getHits().getTotalHits().value()) {
clearScroll.accept(searchResponse);
listener.onFailure(
new IllegalStateException(
"scrolling returned more hits ["
+ numberOfHitsSeenSoFar
+ "] than expected ["
+ searchResponse.getHits().getTotalHits().value()
+ "] so bailing out to prevent unbounded "
+ "memory consumption."
)
);
}
for (SearchHit hit : searchResponse.getHits().getHits()) {
if (explicitPipelineIds.isEmpty() && wildcardPipelinePatterns.isEmpty()) {
pipelineSources.put(hit.getId(), hit.getSourceRef());
continue;
}
// take if ID is in request IDs set
if (explicitPipelineIds.contains(hit.getId())) {
pipelineSources.put(hit.getId(), hit.getSourceRef());
continue;
}
// take if id matches request wildcard pattern
if (wildcardPipelinePatterns.stream().anyMatch(pattern -> pattern.matcher(hit.getId()).matches())) {
pipelineSources.put(hit.getId(), hit.getSourceRef());
}
}
if (numberOfHitsSeenSoFar == searchResponse.getHits().getTotalHits().value()) {
clearScroll.accept(searchResponse);
listener.onResponse(new GetPipelineResponse(pipelineSources));
} else {
client.prepareSearchScroll(searchResponse.getScrollId())
.setScroll(TimeValue.timeValueMinutes(1L))
.execute(
listener.delegateFailureAndWrap(
(delegate, searchResponse1) -> handleFilteringSearchResponse(
searchResponse1,
pipelineSources,
explicitPipelineIds,
wildcardPipelinePatterns,
numberOfHitsSeenSoFar,
clearScroll,
delegate
)
)
);
}
}
private static void logFailures(MultiGetResponse multiGetResponse) {
List<String> ids = Arrays.stream(multiGetResponse.getResponses())
.filter(MultiGetItemResponse::isFailed)
.filter(itemResponse -> itemResponse.getFailure() != null)
.map(itemResponse -> itemResponse.getFailure().getId())
.collect(Collectors.toList());
if (ids.isEmpty() == false) {
logger.info("Could not retrieve logstash pipelines with ids: {}", ids);
}
}
private static String toWildcardPipelineIdPattern(String wildcardPipelineId) {
Matcher matcher = WILDCARD_PATTERN.matcher(wildcardPipelineId);
StringBuilder stringBuilder = new StringBuilder();
while (matcher.find()) {
if (matcher.group(1) != null) {
matcher.appendReplacement(stringBuilder, ".*");
} else {
matcher.appendReplacement(stringBuilder, "\\\\Q" + matcher.group(0) + "\\\\E");
}
}
matcher.appendTail(stringBuilder);
return stringBuilder.toString();
}
}
|
TransportGetPipelineAction
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/connector/format/ProjectableDecodingFormat.java
|
{
"start": 1466,
"end": 3181
}
|
interface ____<I> extends DecodingFormat<I> {
/** Returns whether this format supports nested projection. */
default boolean supportsNestedProjection() {
return false;
}
/**
* Creates runtime decoder implementation that is configured to produce data of type {@code
* Projection.of(projections).project(physicalDataType)}. For more details on the usage, check
* {@link DecodingFormat} documentation.
*
* @param context the context provides several utilities required to instantiate the runtime
* decoder implementation of the format
* @param physicalDataType For more details check {@link DecodingFormat}
* @param projections the projections array. The array represents the mapping of the fields of
* the original {@link DataType}, including nested rows. For example, {@code [[0, 2, 1],
* ...]} specifies to include the 2nd field of the 3rd field of the 1st field in the
* top-level row. It's guaranteed that this array won't contain nested projections if {@link
* #supportsNestedProjection()} returns {@code false}. For more details, check {@link
* Projection} as well.
* @return the runtime decoder
* @see DecodingFormat
*/
I createRuntimeDecoder(
DynamicTableSource.Context context, DataType physicalDataType, int[][] projections);
default I createRuntimeDecoder(
DynamicTableSource.Context context, DataType projectedPhysicalDataType) {
return createRuntimeDecoder(
context,
projectedPhysicalDataType,
Projection.all(projectedPhysicalDataType).toNestedIndexes());
}
}
|
ProjectableDecodingFormat
|
java
|
apache__logging-log4j2
|
log4j-api/src/main/java/org/apache/logging/log4j/message/EntryMessage.java
|
{
"start": 901,
"end": 961
}
|
interface ____ extends FlowMessage {
// empty
}
|
EntryMessage
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/SimpleModelValidator.java
|
{
"start": 585,
"end": 1200
}
|
class ____ implements ModelValidator {
private final ServiceIntegrationValidator serviceIntegrationValidator;
public SimpleModelValidator(ServiceIntegrationValidator serviceIntegrationValidator) {
this.serviceIntegrationValidator = serviceIntegrationValidator;
}
@Override
public void validate(InferenceService service, Model model, TimeValue timeout, ActionListener<Model> listener) {
serviceIntegrationValidator.validate(service, model, timeout, listener.delegateFailureAndWrap((delegate, r) -> {
delegate.onResponse(model);
}));
}
}
|
SimpleModelValidator
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesScanRegistrarTests.java
|
{
"start": 1648,
"end": 6425
}
|
class ____ {
private final DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
private final ConfigurationPropertiesScanRegistrar registrar = new ConfigurationPropertiesScanRegistrar(
new MockEnvironment(), null);
@Test
void registerBeanDefinitionsShouldScanForConfigurationProperties() throws IOException {
this.registrar.registerBeanDefinitions(getAnnotationMetadata(ConfigurationPropertiesScanConfiguration.class),
this.beanFactory);
BeanDefinition bingDefinition = this.beanFactory.getBeanDefinition(
"bing-org.springframework.boot.context.properties.scan.valid.ConfigurationPropertiesScanConfiguration$BingProperties");
BeanDefinition fooDefinition = this.beanFactory.getBeanDefinition(
"foo-org.springframework.boot.context.properties.scan.valid.ConfigurationPropertiesScanConfiguration$FooProperties");
BeanDefinition barDefinition = this.beanFactory.getBeanDefinition(
"bar-org.springframework.boot.context.properties.scan.valid.ConfigurationPropertiesScanConfiguration$BarProperties");
assertThat(bingDefinition).satisfies(hasBindMethod(BindMethod.JAVA_BEAN));
assertThat(fooDefinition).satisfies(hasBindMethod(BindMethod.JAVA_BEAN));
assertThat(barDefinition).satisfies(hasBindMethod(BindMethod.VALUE_OBJECT));
}
@Test
void scanWhenBeanDefinitionExistsShouldSkip() throws IOException {
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
beanFactory.setAllowBeanDefinitionOverriding(false);
this.registrar.registerBeanDefinitions(
getAnnotationMetadata(ConfigurationPropertiesScanConfiguration.TestConfiguration.class), beanFactory);
assertThat(beanFactory.containsBeanDefinition(
"foo-org.springframework.boot.context.properties.scan.valid.ConfigurationPropertiesScanConfiguration$FooProperties"))
.isTrue();
assertThat(beanFactory.getBeanDefinitionNames())
.filteredOn((name) -> name.toLowerCase(Locale.ENGLISH).contains("fooproperties"))
.hasSize(1);
}
@Test
void scanWhenBasePackagesAndBasePackageClassesProvidedShouldUseThat() throws IOException {
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
beanFactory.setAllowBeanDefinitionOverriding(false);
this.registrar.registerBeanDefinitions(
getAnnotationMetadata(ConfigurationPropertiesScanConfiguration.DifferentPackageConfiguration.class),
beanFactory);
assertThat(beanFactory.containsBeanDefinition(
"foo-org.springframework.boot.context.properties.scan.valid.ConfigurationPropertiesScanConfiguration$FooProperties"))
.isFalse();
BeanDefinition aDefinition = beanFactory.getBeanDefinition(
"a-org.springframework.boot.context.properties.scan.valid.a.AScanConfiguration$AProperties");
BeanDefinition bFirstDefinition = beanFactory.getBeanDefinition(
"b.first-org.springframework.boot.context.properties.scan.valid.b.BScanConfiguration$BFirstProperties");
BeanDefinition bSecondDefinition = beanFactory.getBeanDefinition(
"b.second-org.springframework.boot.context.properties.scan.valid.b.BScanConfiguration$BSecondProperties");
assertThat(aDefinition).satisfies(hasBindMethod(BindMethod.JAVA_BEAN));
// Constructor injection
assertThat(bFirstDefinition).satisfies(hasBindMethod(BindMethod.VALUE_OBJECT));
// Post-processing injection
assertThat(bSecondDefinition).satisfies(hasBindMethod(BindMethod.JAVA_BEAN));
}
@Test
void scanWhenComponentAnnotationPresentShouldSkipType() throws IOException {
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
beanFactory.setAllowBeanDefinitionOverriding(false);
this.registrar.registerBeanDefinitions(getAnnotationMetadata(CombinedScanConfiguration.class), beanFactory);
assertThat(beanFactory.getBeanDefinitionCount()).isZero();
}
@Test
void scanWhenOtherComponentAnnotationPresentShouldSkipType() throws IOException {
DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
beanFactory.setAllowBeanDefinitionOverriding(false);
this.registrar.registerBeanDefinitions(getAnnotationMetadata(OtherCombinedScanConfiguration.class),
beanFactory);
assertThat(beanFactory.getBeanDefinitionCount()).isZero();
}
private Consumer<BeanDefinition> hasBindMethod(BindMethod bindMethod) {
return (definition) -> {
assertThat(definition.hasAttribute(BindMethod.class.getName())).isTrue();
assertThat(definition.getAttribute(BindMethod.class.getName())).isEqualTo(bindMethod);
};
}
private AnnotationMetadata getAnnotationMetadata(Class<?> source) throws IOException {
return new SimpleMetadataReaderFactory().getMetadataReader(source.getName()).getAnnotationMetadata();
}
@ConfigurationPropertiesScan(basePackageClasses = CombinedConfiguration.class)
static
|
ConfigurationPropertiesScanRegistrarTests
|
java
|
google__guava
|
android/guava/src/com/google/common/reflect/Invokable.java
|
{
"start": 12181,
"end": 12516
}
|
class ____ T's raw class, or one of its supertypes.
@Override
public final Class<? super T> getDeclaringClass() {
return (Class<? super T>) member.getDeclaringClass();
}
/** Returns the type of {@code T}. */
// Overridden in TypeToken#method() and TypeToken#constructor()
@SuppressWarnings("unchecked") // The declaring
|
is
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/service/connection/ConnectionDetailsFactoriesTests.java
|
{
"start": 6305,
"end": 6559
}
|
class ____
implements ConnectionDetailsFactory<String, OtherConnectionDetails> {
@Override
public OtherConnectionDetails getConnectionDetails(String source) {
return new OtherConnectionDetailsImpl();
}
}
private
|
OtherConnectionDetailsFactory
|
java
|
apache__dubbo
|
dubbo-registry/dubbo-registry-api/src/test/java/org/apache/dubbo/registry/client/metadata/store/ExcludedParamsFilter.java
|
{
"start": 1059,
"end": 1571
}
|
class ____ implements MetadataParamsFilter {
@Override
public String[] serviceParamsIncluded() {
return new String[0];
}
@Override
public String[] serviceParamsExcluded() {
return new String[0];
}
/**
* Not included in this test
*/
@Override
public String[] instanceParamsIncluded() {
return new String[0];
}
@Override
public String[] instanceParamsExcluded() {
return new String[] {SIDE_KEY};
}
}
|
ExcludedParamsFilter
|
java
|
quarkusio__quarkus
|
extensions/smallrye-fault-tolerance/runtime/src/main/java/io/quarkus/smallrye/faulttolerance/runtime/config/SmallRyeFaultToleranceBuildTimeConfig.java
|
{
"start": 719,
"end": 1005
}
|
interface ____ {
/**
* Configuration of fault tolerance strategies; either global, per class, or per method.
* Keys are:
*
* <ul>
* <li>{@code global}: for global configuration</li>
* <li>{@code "<classname>"}: for per
|
SmallRyeFaultToleranceBuildTimeConfig
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/window/groupwindow/operator/WindowOperator.java
|
{
"start": 4054,
"end": 5595
}
|
class ____ {@link AggregateWindowOperator} and {@link
* TableAggregateWindowOperator}. The big difference between {@link AggregateWindowOperator} and
* {@link TableAggregateWindowOperator} is {@link AggregateWindowOperator} emits only one result for
* each aggregate group, while {@link TableAggregateWindowOperator} can emit multi results for each
* aggregate group.
*
* <p>When an element arrives it gets assigned a key using a {@link KeySelector} and it gets
* assigned to zero or more windows using a {@link GroupWindowAssigner}. Based on this, the element
* is put into panes. A pane is the bucket of elements that have the same key and same {@code
* Window}. An element can be in multiple panes if it was assigned to multiple windows by the {@code
* WindowAssigner}.
*
* <p>Each pane gets its own instance of the provided {@code Trigger}. This trigger determines when
* the contents of the pane should be processed to emit results. When a trigger fires, the given
* {@link org.apache.flink.table.runtime.generated.NamespaceAggsHandleFunctionBase} is invoked to
* produce the results that are emitted for the pane to which the {@code Trigger} belongs.
*
* <p>The parameter types: {@code <IN>}: RowData {@code <OUT>}: JoinedRowData(KEY, AGG_RESULT)
* {@code <KEY>}: GenericRowData {@code <AGG_RESULT>}: GenericRowData {@code <ACC>}: GenericRowData
*
* @param <K> The type of key returned by the {@code KeySelector}.
* @param <W> The type of {@code Window} that the {@code WindowAssigner} assigns.
*/
public abstract
|
for
|
java
|
grpc__grpc-java
|
netty/src/test/java/io/grpc/netty/NettyAdaptiveCumulatorTest.java
|
{
"start": 26676,
"end": 29450
}
|
class ____ {
private final ByteBufAllocator alloc = new PooledByteBufAllocator();
/**
* Test the issue with {@link CompositeByteBuf#component(int)} returning a ByteBuf with
* the indexes out-of-sync with {@code CompositeByteBuf.Component} offsets.
*/
@Test
public void mergeWithCompositeTail_outOfSyncComposite() {
NettyAdaptiveCumulator cumulator = new NettyAdaptiveCumulator(1024);
// Create underlying buffer spacious enough for the test data.
ByteBuf buf = alloc.buffer(32).writeBytes("---01234".getBytes(US_ASCII));
// Start with a regular cumulation and add the buf as the only component.
CompositeByteBuf composite1 = alloc.compositeBuffer(8).addFlattenedComponents(true, buf);
// Read composite1 buf to the beginning of the numbers.
assertThat(composite1.readCharSequence(3, US_ASCII).toString()).isEqualTo("---");
// Wrap composite1 into another cumulation. This is similar to
// what NettyAdaptiveCumulator.cumulate() does in the case the cumulation has refCnt != 1.
CompositeByteBuf composite2 =
alloc.compositeBuffer(8).addFlattenedComponents(true, composite1);
assertThat(composite2.toString(US_ASCII)).isEqualTo("01234");
assume().withMessage("Netty 4.1.111 doesn't work with NettyAdaptiveCumulator")
.that(usingPre4_1_111_Netty()).isTrue();
// The previous operation does not adjust the read indexes of the underlying buffers,
// only the internal Component offsets. When the cumulator attempts to append the input to
// the tail buffer, it extracts it from the cumulation, writes to it, and then adds it back.
// Because the readerIndex on the tail buffer is not adjusted during the read operation
// on the CompositeByteBuf, adding the tail back results in the discarded bytes of the tail
// to be added back to the cumulator as if they were never read.
//
// If the reader index of the tail is not manually corrected, the resulting
// cumulation will contain the discarded part of the tail: "---".
// If it's corrected, it will only contain the numbers.
CompositeByteBuf cumulation = (CompositeByteBuf) cumulator.cumulate(alloc, composite2,
ByteBufUtil.writeAscii(alloc, "56789"));
assertThat(cumulation.toString(US_ASCII)).isEqualTo("0123456789");
// Correctness check: we still have a single component, and this component is still the
// original underlying buffer.
assertThat(cumulation.numComponents()).isEqualTo(1);
// Replace '2' with '*', and '8' with '$'.
buf.setByte(5, '*').setByte(11, '$');
assertThat(cumulation.toString(US_ASCII)).isEqualTo("01*34567$9");
}
}
}
|
MergeWithCompositeTailMiscTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java
|
{
"start": 6236,
"end": 81277
}
|
class ____ extends InferenceServiceTestCase {
private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS);
private final MockWebServer webServer = new MockWebServer();
private ThreadPool threadPool;
private HttpClientManager clientManager;
@Before
public void init() throws Exception {
webServer.start();
threadPool = createThreadPool(inferenceUtilityExecutors());
clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class));
}
@After
public void shutdown() throws IOException {
clientManager.close();
terminate(threadPool);
webServer.close();
}
public void testParseRequestConfig_CreatesACohereEmbeddingsModel() throws IOException {
try (var service = createCohereService()) {
ActionListener<Model> modelListener = ActionListener.wrap(model -> {
assertThat(model, instanceOf(CohereEmbeddingsModel.class));
var embeddingsModel = (CohereEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model"));
assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT));
assertThat(
embeddingsModel.getTaskSettings(),
is(new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.START))
);
assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret"));
}, e -> fail("Model parsing should have succeeded " + e.getMessage()));
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(
CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", CohereEmbeddingType.FLOAT),
getTaskSettingsMap(InputType.INGEST, CohereTruncation.START),
getSecretSettingsMap("secret")
),
modelListener
);
}
}
public void testParseRequestConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsProvided() throws IOException {
try (var service = createCohereService()) {
ActionListener<Model> modelListener = ActionListener.wrap(model -> {
assertThat(model, instanceOf(CohereEmbeddingsModel.class));
var embeddingsModel = (CohereEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model"));
assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT));
assertThat(
embeddingsModel.getTaskSettings(),
is(new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.START))
);
assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret"));
}, e -> fail("Model parsing should have succeeded " + e.getMessage()));
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(
CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", CohereEmbeddingType.FLOAT),
getTaskSettingsMap(InputType.INGEST, CohereTruncation.START),
createRandomChunkingSettingsMap(),
getSecretSettingsMap("secret")
),
modelListener
);
}
}
public void testParseRequestConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException {
try (var service = createCohereService()) {
ActionListener<Model> modelListener = ActionListener.wrap(model -> {
assertThat(model, instanceOf(CohereEmbeddingsModel.class));
var embeddingsModel = (CohereEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model"));
assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT));
assertThat(
embeddingsModel.getTaskSettings(),
is(new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.START))
);
assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret"));
}, e -> fail("Model parsing should have succeeded " + e.getMessage()));
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(
CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", CohereEmbeddingType.FLOAT),
getTaskSettingsMap(InputType.INGEST, CohereTruncation.START),
getSecretSettingsMap("secret")
),
modelListener
);
}
}
public void testParseRequestConfig_OptionalTaskSettings() throws IOException {
try (var service = createCohereService()) {
ActionListener<Model> modelListener = ActionListener.wrap(model -> {
assertThat(model, instanceOf(CohereEmbeddingsModel.class));
var embeddingsModel = (CohereEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model"));
assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT));
assertThat(embeddingsModel.getTaskSettings(), equalTo(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS));
assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret"));
}, e -> fail("Model parsing should have succeeded " + e.getMessage()));
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(
CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", CohereEmbeddingType.FLOAT),
getSecretSettingsMap("secret")
),
modelListener
);
}
}
public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOException {
try (var service = createCohereService()) {
var failureListener = getModelListenerForException(
ElasticsearchStatusException.class,
"The [cohere] service does not support task type [sparse_embedding]"
);
service.parseRequestConfig(
"id",
TaskType.SPARSE_EMBEDDING,
getRequestConfigMap(
CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null),
getTaskSettingsMapEmpty(),
getSecretSettingsMap("secret")
),
failureListener
);
}
}
private static ActionListener<Model> getModelListenerForException(Class<?> exceptionClass, String expectedMessage) {
return ActionListener.<Model>wrap((model) -> fail("Model parsing should have failed"), e -> {
assertThat(e, instanceOf(exceptionClass));
assertThat(e.getMessage(), is(expectedMessage));
});
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws IOException {
try (var service = createCohereService()) {
var serviceSettings = CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap(null, null, null);
serviceSettings.put(CohereServiceSettings.MODEL_ID, "foo");
var config = getRequestConfigMap(serviceSettings, getTaskSettingsMapEmpty(), getSecretSettingsMap("secret"));
config.put("extra_key", "value");
var failureListener = getModelListenerForException(
ElasticsearchStatusException.class,
"Configuration contains settings [{extra_key=value}] unknown to the [cohere] service"
);
service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, failureListener);
}
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInServiceSettingsMap() throws IOException {
try (var service = createCohereService()) {
var serviceSettings = CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null);
serviceSettings.put("extra_key", "value");
var config = getRequestConfigMap(serviceSettings, getTaskSettingsMap(null, null), getSecretSettingsMap("secret"));
var failureListener = getModelListenerForException(
ElasticsearchStatusException.class,
"Configuration contains settings [{extra_key=value}] unknown to the [cohere] service"
);
service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, failureListener);
}
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInTaskSettingsMap() throws IOException {
try (var service = createCohereService()) {
var taskSettingsMap = getTaskSettingsMap(InputType.INGEST, null);
taskSettingsMap.put("extra_key", "value");
var config = getRequestConfigMap(
CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null),
taskSettingsMap,
getSecretSettingsMap("secret")
);
var failureListener = getModelListenerForException(
ElasticsearchStatusException.class,
"Configuration contains settings [{extra_key=value}] unknown to the [cohere] service"
);
service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, failureListener);
}
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInSecretSettingsMap() throws IOException {
try (var service = createCohereService()) {
var secretSettingsMap = getSecretSettingsMap("secret");
secretSettingsMap.put("extra_key", "value");
var serviceSettings = CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap(null, null, null);
serviceSettings.put(CohereServiceSettings.MODEL_ID, "foo");
var config = getRequestConfigMap(serviceSettings, getTaskSettingsMapEmpty(), secretSettingsMap);
var failureListener = getModelListenerForException(
ElasticsearchStatusException.class,
"Configuration contains settings [{extra_key=value}] unknown to the [cohere] service"
);
service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, failureListener);
}
}
public void testParseRequestConfig_CreatesACohereEmbeddingsModelWithoutUrl() throws IOException {
try (var service = createCohereService()) {
var modelListener = ActionListener.<Model>wrap((model) -> {
assertThat(model, instanceOf(CohereEmbeddingsModel.class));
var embeddingsModel = (CohereEmbeddingsModel) model;
assertNull(embeddingsModel.getServiceSettings().getCommonSettings().uri());
assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS));
assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret"));
}, (e) -> fail("Model parsing should have succeeded " + e.getMessage()));
var serviceSettings = CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap(null, null, null);
serviceSettings.put(CohereServiceSettings.MODEL_ID, "foo");
service.parseRequestConfig(
"id",
TaskType.TEXT_EMBEDDING,
getRequestConfigMap(serviceSettings, getTaskSettingsMapEmpty(), getSecretSettingsMap("secret")),
modelListener
);
}
}
public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModel() throws IOException {
try (var service = createCohereService()) {
var persistedConfig = getPersistedConfigMap(
CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null),
getTaskSettingsMap(null, null),
getSecretSettingsMap("secret")
);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(CohereEmbeddingsModel.class));
var embeddingsModel = (CohereEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model"));
assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null)));
assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret"));
}
}
public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWhenChunkingSettingsProvided() throws IOException {
try (var service = createCohereService()) {
var persistedConfig = getPersistedConfigMap(
CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null),
getTaskSettingsMap(null, null),
createRandomChunkingSettingsMap(),
getSecretSettingsMap("secret")
);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(CohereEmbeddingsModel.class));
var embeddingsModel = (CohereEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model"));
assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null)));
assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret"));
}
}
    // Parses a persisted config WITHOUT chunking settings; the parsed model is still expected to
    // expose some ChunkingSettings instance (presumably a default — TODO confirm against the service).
    public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException {
        try (var service = createCohereService()) {
            var persistedConfig = getPersistedConfigMap(
                CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null),
                getTaskSettingsMap(null, null),
                getSecretSettingsMap("secret")
            );

            var model = service.parsePersistedConfigWithSecrets(
                "id",
                TaskType.TEXT_EMBEDDING,
                persistedConfig.config(),
                persistedConfig.secrets()
            );

            assertThat(model, instanceOf(CohereEmbeddingsModel.class));

            var embeddingsModel = (CohereEmbeddingsModel) model;
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model"));
            assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null)));
            assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
            assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret"));
        }
    }
public void testParsePersistedConfigWithSecrets_ThrowsErrorTryingToParseInvalidModel() throws IOException {
try (var service = createCohereService()) {
var persistedConfig = getPersistedConfigMap(
CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null),
getTaskSettingsMapEmpty(),
getSecretSettingsMap("secret")
);
var thrownException = expectThrows(
ElasticsearchStatusException.class,
() -> service.parsePersistedConfigWithSecrets(
"id",
TaskType.SPARSE_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
)
);
assertThat(thrownException.getMessage(), containsString("Failed to parse stored model [id] for [cohere] service"));
assertThat(thrownException.getMessage(), containsString("The [cohere] service does not support task type [sparse_embedding]"));
}
}
public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWithoutUrl() throws IOException {
try (var service = createCohereService()) {
var persistedConfig = getPersistedConfigMap(
CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap(null, null, null),
getTaskSettingsMap(InputType.INGEST, null),
getSecretSettingsMap("secret")
);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.TEXT_EMBEDDING,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(CohereEmbeddingsModel.class));
var embeddingsModel = (CohereEmbeddingsModel) model;
assertNull(embeddingsModel.getServiceSettings().getCommonSettings().uri());
assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.INGEST, null)));
assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret"));
}
}
    // Unknown top-level keys in a persisted config must be ignored (not rejected), so that
    // previously stored configs keep parsing after the schema evolves.
    public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException {
        try (var service = createCohereService()) {
            var persistedConfig = getPersistedConfigMap(
                CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", DenseVectorFieldMapper.ElementType.BYTE),
                getTaskSettingsMap(InputType.SEARCH, CohereTruncation.NONE),
                getSecretSettingsMap("secret")
            );
            persistedConfig.config().put("extra_key", "value"); // unknown key injected at the top level

            var model = service.parsePersistedConfigWithSecrets(
                "id",
                TaskType.TEXT_EMBEDDING,
                persistedConfig.config(),
                persistedConfig.secrets()
            );

            assertThat(model, instanceOf(CohereEmbeddingsModel.class));

            var embeddingsModel = (CohereEmbeddingsModel) model;
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model"));
            assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.BYTE));
            assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.SEARCH, CohereTruncation.NONE)));
            assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret"));
        }
    }
    // Unknown keys inside the secret settings map must be ignored during parsing.
    public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInSecretsSettings() throws IOException {
        try (var service = createCohereService()) {
            var secretSettingsMap = getSecretSettingsMap("secret");
            secretSettingsMap.put("extra_key", "value"); // unknown key inside the secret settings

            var persistedConfig = getPersistedConfigMap(
                CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null),
                getTaskSettingsMapEmpty(),
                secretSettingsMap
            );

            var model = service.parsePersistedConfigWithSecrets(
                "id",
                TaskType.TEXT_EMBEDDING,
                persistedConfig.config(),
                persistedConfig.secrets()
            );

            assertThat(model, instanceOf(CohereEmbeddingsModel.class));

            var embeddingsModel = (CohereEmbeddingsModel) model;
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
            assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS));
            assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret"));
        }
    }
    // Unknown keys at the top level of the secrets map (outside secret_settings) must be ignored.
    public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInSecrets() throws IOException {
        try (var service = createCohereService()) {
            var persistedConfig = getPersistedConfigMap(
                CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null),
                getTaskSettingsMap(null, null),
                getSecretSettingsMap("secret")
            );
            persistedConfig.secrets().put("extra_key", "value"); // unknown key in the secrets envelope

            var model = service.parsePersistedConfigWithSecrets(
                "id",
                TaskType.TEXT_EMBEDDING,
                persistedConfig.config(),
                persistedConfig.secrets()
            );

            assertThat(model, instanceOf(CohereEmbeddingsModel.class));

            var embeddingsModel = (CohereEmbeddingsModel) model;
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model"));
            assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null)));
            assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret"));
        }
    }
    // Unknown keys inside the service settings map must be ignored during parsing.
    public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException {
        try (var service = createCohereService()) {
            var serviceSettingsMap = CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null);
            serviceSettingsMap.put("extra_key", "value"); // unknown key inside the service settings

            var persistedConfig = getPersistedConfigMap(serviceSettingsMap, getTaskSettingsMapEmpty(), getSecretSettingsMap("secret"));

            var model = service.parsePersistedConfigWithSecrets(
                "id",
                TaskType.TEXT_EMBEDDING,
                persistedConfig.config(),
                persistedConfig.secrets()
            );

            assertThat(model, instanceOf(CohereEmbeddingsModel.class));

            var embeddingsModel = (CohereEmbeddingsModel) model;
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
            assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS));
            assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret"));
        }
    }
    // Unknown keys inside the task settings map must be ignored during parsing.
    public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException {
        try (var service = createCohereService()) {
            var taskSettingsMap = getTaskSettingsMap(InputType.SEARCH, null);
            taskSettingsMap.put("extra_key", "value"); // unknown key inside the task settings

            var persistedConfig = getPersistedConfigMap(
                CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null),
                taskSettingsMap,
                getSecretSettingsMap("secret")
            );

            var model = service.parsePersistedConfigWithSecrets(
                "id",
                TaskType.TEXT_EMBEDDING,
                persistedConfig.config(),
                persistedConfig.secrets()
            );

            assertThat(model, instanceOf(CohereEmbeddingsModel.class));

            var embeddingsModel = (CohereEmbeddingsModel) model;
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model"));
            assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.SEARCH, null)));
            assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret"));
        }
    }
public void testParsePersistedConfig_CreatesACohereEmbeddingsModel() throws IOException {
try (var service = createCohereService()) {
var persistedConfig = getPersistedConfigMap(
CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null),
getTaskSettingsMap(null, CohereTruncation.NONE)
);
var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config());
assertThat(model, instanceOf(CohereEmbeddingsModel.class));
var embeddingsModel = (CohereEmbeddingsModel) model;
assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model"));
assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, CohereTruncation.NONE)));
assertNull(embeddingsModel.getSecretSettings());
}
}
    // Secret-less parse path with explicit chunking settings: chunking settings are retained,
    // secret settings stay null.
    public void testParsePersistedConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsProvided() throws IOException {
        try (var service = createCohereService()) {
            var persistedConfig = getPersistedConfigMap(
                CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null),
                getTaskSettingsMap(null, CohereTruncation.NONE),
                createRandomChunkingSettingsMap()
            );

            var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config());

            assertThat(model, instanceOf(CohereEmbeddingsModel.class));

            var embeddingsModel = (CohereEmbeddingsModel) model;
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model"));
            assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, CohereTruncation.NONE)));
            assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
            assertNull(embeddingsModel.getSecretSettings());
        }
    }
    // Secret-less parse path without chunking settings: the model still exposes a ChunkingSettings
    // instance (presumably a default — TODO confirm against the service implementation).
    public void testParsePersistedConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException {
        try (var service = createCohereService()) {
            var persistedConfig = getPersistedConfigMap(
                CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null),
                getTaskSettingsMap(null, CohereTruncation.NONE)
            );

            var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config());

            assertThat(model, instanceOf(CohereEmbeddingsModel.class));

            var embeddingsModel = (CohereEmbeddingsModel) model;
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model"));
            assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, CohereTruncation.NONE)));
            assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class));
            assertNull(embeddingsModel.getSecretSettings());
        }
    }
public void testParsePersistedConfig_ThrowsErrorTryingToParseInvalidModel() throws IOException {
try (var service = createCohereService()) {
var persistedConfig = getPersistedConfigMap(
CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null),
getTaskSettingsMapEmpty()
);
var thrownException = expectThrows(
ElasticsearchStatusException.class,
() -> service.parsePersistedConfig("id", TaskType.SPARSE_EMBEDDING, persistedConfig.config())
);
assertThat(thrownException.getMessage(), containsString("Failed to parse stored model [id] for [cohere] service"));
assertThat(thrownException.getMessage(), containsString("The [cohere] service does not support task type [sparse_embedding]"));
}
}
public void testParsePersistedConfig_CreatesACohereEmbeddingsModelWithoutUrl() throws IOException {
try (var service = createCohereService()) {
var persistedConfig = getPersistedConfigMap(
CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap(null, "model", CohereEmbeddingType.FLOAT),
getTaskSettingsMap(null, null)
);
var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config());
assertThat(model, instanceOf(CohereEmbeddingsModel.class));
var embeddingsModel = (CohereEmbeddingsModel) model;
assertNull(embeddingsModel.getServiceSettings().getCommonSettings().uri());
assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model"));
assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT));
assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null)));
assertNull(embeddingsModel.getSecretSettings());
}
}
    // Unknown top-level config keys must be ignored by the secret-less parse path.
    public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException {
        try (var service = createCohereService()) {
            var persistedConfig = getPersistedConfigMap(
                CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null),
                getTaskSettingsMapEmpty()
            );
            persistedConfig.config().put("extra_key", "value"); // unknown key injected at the top level

            var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config());

            assertThat(model, instanceOf(CohereEmbeddingsModel.class));

            var embeddingsModel = (CohereEmbeddingsModel) model;
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
            assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS));
            assertNull(embeddingsModel.getSecretSettings());
        }
    }
    // Unknown keys inside the service settings must be ignored by the secret-less parse path.
    public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException {
        try (var service = createCohereService()) {
            var serviceSettingsMap = CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null);
            serviceSettingsMap.put("extra_key", "value"); // unknown key inside the service settings

            var persistedConfig = getPersistedConfigMap(serviceSettingsMap, getTaskSettingsMap(InputType.SEARCH, null));

            var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config());

            assertThat(model, instanceOf(CohereEmbeddingsModel.class));

            var embeddingsModel = (CohereEmbeddingsModel) model;
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
            assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.SEARCH, null)));
            assertNull(embeddingsModel.getSecretSettings());
        }
    }
    // Unknown keys inside the task settings must be ignored by the secret-less parse path.
    public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException {
        try (var service = createCohereService()) {
            var taskSettingsMap = getTaskSettingsMap(InputType.INGEST, null);
            taskSettingsMap.put("extra_key", "value"); // unknown key inside the task settings

            var persistedConfig = getPersistedConfigMap(
                CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null),
                taskSettingsMap
            );

            var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config());

            assertThat(model, instanceOf(CohereEmbeddingsModel.class));

            var embeddingsModel = (CohereEmbeddingsModel) model;
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url"));
            assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model"));
            assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.INGEST, null)));
            assertNull(embeddingsModel.getSecretSettings());
        }
    }
    // infer() with a model that is not a Cohere model must fail fast with an invalid-model error.
    // Also verifies the sender lifecycle: created and started once, then closed when the service closes.
    public void testInfer_ThrowsErrorWhenModelIsNotCohereModel() throws IOException {
        var sender = createMockSender();

        var factory = mock(HttpRequestSender.Factory.class);
        when(factory.createSender()).thenReturn(sender);

        var mockModel = getInvalidModel("model_id", "service_name");

        try (var service = new CohereService(factory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
            PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
            service.infer(
                mockModel,
                null,
                null,
                null,
                List.of(""),
                false,
                new HashMap<>(),
                InputType.INGEST,
                InferenceAction.Request.DEFAULT_TIMEOUT,
                listener
            );

            var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT));
            assertThat(
                thrownException.getMessage(),
                is("The internal model was invalid, please delete the service [service_name] with id [model_id] and add it again.")
            );

            verify(factory, times(1)).createSender();
            verify(sender, times(1)).startAsynchronously(any());
        }

        // Closing the service must close the sender exactly once, with no other interactions.
        verify(sender, times(1)).close();
        verifyNoMoreInteractions(factory);
        verifyNoMoreInteractions(sender);
    }
    // End-to-end embeddings request against a local mock web server: verifies the parsed float
    // result, the auth header, and the exact JSON request body sent to Cohere.
    public void testInfer_SendsRequest() throws IOException {
        var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);

        try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
            // Canned Cohere embed response with a single float embedding.
            String responseJson = """
                {
                    "id": "de37399c-5df6-47cb-bc57-e3c5680c977b",
                    "texts": [
                        "hello"
                    ],
                    "embeddings": {
                        "float": [
                            [
                                0.123,
                                -0.123
                            ]
                        ]
                    },
                    "meta": {
                        "api_version": {
                            "version": "1"
                        },
                        "billed_units": {
                            "input_tokens": 1
                        }
                    },
                    "response_type": "embeddings_by_type"
                }
                """;
            webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));

            var model = CohereEmbeddingsModelTests.createModel(
                getUrl(webServer),
                "secret",
                new CohereEmbeddingsTaskSettings(InputType.INGEST, null),
                1024,
                1024,
                "model",
                null
            );
            PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
            service.infer(
                model,
                null,
                null,
                null,
                List.of("abc"),
                false,
                new HashMap<>(),
                InputType.INGEST,
                InferenceAction.Request.DEFAULT_TIMEOUT,
                listener
            );

            var result = listener.actionGet(TIMEOUT);

            assertThat(result.asMap(), Matchers.is(buildExpectationFloat(List.of(new float[] { 0.123F, -0.123F }))));

            assertThat(webServer.requests(), hasSize(1));
            assertNull(webServer.requests().get(0).getUri().getQuery());
            assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType()));
            assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret"));

            // InputType.INGEST is serialized as Cohere's "search_document" input type.
            var requestMap = entityAsMap(webServer.requests().get(0).getBody());
            assertThat(
                requestMap,
                is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document", "embedding_types", List.of("float")))
            );
        }
    }
    // A rerank request with a null query must be rejected client-side with a ValidationException
    // before any HTTP request is made.
    public void testInfer_ReturnsValidationException_WhenSendingRerankRequest_WithoutQueryField() throws IOException {
        var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);

        try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
            var secret = "secret";
            var modelName = "model";
            var model = new CohereRerankModel(
                "id",
                new CohereRerankServiceSettings("abc", modelName, null, CohereServiceSettings.CohereApiVersion.V2),
                new CohereRerankTaskSettings(null, null, null),
                new DefaultSecretSettings(new SecureString(secret.toCharArray()))
            );
            PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
            service.infer(
                model,
                // null query string will trigger validation error
                null,
                null,
                null,
                List.of("abc"),
                false,
                new HashMap<>(),
                InputType.INGEST,
                InferenceAction.Request.DEFAULT_TIMEOUT,
                listener
            );

            var exception = expectThrows(ValidationException.class, () -> listener.actionGet(TIMEOUT));
            assertThat(exception.getMessage(), containsString("Rerank task type requires a non-null query field"));
        }
    }
    // End-to-end rerank request against a local mock web server: verifies the (empty) rerank
    // result, the auth header, and the exact JSON request body (query + documents + model).
    public void testInfer_SendsRerankRequest() throws IOException {
        var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);

        try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
            // Canned Cohere rerank response with no results.
            String responseJson = """
                {
                    "index": "d0760819-5a73-4d58-b163-3956d3648b62",
                    "results": [
                    ],
                    "meta": {
                        "api_version": {
                            "version": "1"
                        },
                        "billed_units": {
                            "search_units": 1
                        }
                    }
                }
                """;
            webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));

            var secret = "secret";
            var modelName = "model";
            var model = new CohereRerankModel(
                "id",
                new CohereRerankServiceSettings(getUrl(webServer), modelName, null, CohereServiceSettings.CohereApiVersion.V2),
                new CohereRerankTaskSettings(null, null, null),
                new DefaultSecretSettings(new SecureString(secret.toCharArray()))
            );
            var queryString = "a query";
            PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
            service.infer(
                model,
                queryString,
                null,
                null,
                List.of("abc"),
                false,
                new HashMap<>(),
                InputType.INGEST,
                InferenceAction.Request.DEFAULT_TIMEOUT,
                listener
            );

            var result = listener.actionGet(TIMEOUT);

            assertThat(result.asMap(), Matchers.is(buildExpectationRerank(List.of())));

            assertThat(webServer.requests(), hasSize(1));
            assertNull(webServer.requests().get(0).getUri().getQuery());
            assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType()));
            assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), is(Strings.format("Bearer %s", secret)));

            var requestMap = entityAsMap(webServer.requests().get(0).getBody());
            assertThat(requestMap, is(Map.of("query", queryString, "documents", List.of("abc"), "model", modelName)));
        }
    }
public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IOException {
var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);
try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
var model = CohereCompletionModelTests.createModel(randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10));
assertThrows(
ElasticsearchStatusException.class,
() -> { service.updateModelWithEmbeddingDetails(model, randomNonNegativeInt()); }
);
}
}
    // Null similarity in the original model: the update should fall back to the embedding type's default.
    public void testUpdateModelWithEmbeddingDetails_NullSimilarityInOriginalModel() throws IOException {
        testUpdateModelWithEmbeddingDetails_Successful(null);
    }
    // Non-null similarity in the original model: the update should preserve it unchanged.
    public void testUpdateModelWithEmbeddingDetails_NonNullSimilarityInOriginalModel() throws IOException {
        testUpdateModelWithEmbeddingDetails_Successful(randomFrom(SimilarityMeasure.values()));
    }
    // Shared driver for the two tests above: updates an embeddings model with a new embedding size
    // and checks that the resulting similarity is the original one, or the embedding type's default
    // when the original was null, and that the dimensions are overwritten with the new size.
    private void testUpdateModelWithEmbeddingDetails_Successful(SimilarityMeasure similarityMeasure) throws IOException {
        var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);

        try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
            var embeddingSize = randomNonNegativeInt();
            var embeddingType = randomFrom(CohereEmbeddingType.values());
            var model = CohereEmbeddingsModelTests.createModel(
                randomAlphaOfLength(10),
                randomAlphaOfLength(10),
                CohereEmbeddingsTaskSettings.EMPTY_SETTINGS,
                randomNonNegativeInt(),
                randomNonNegativeInt(),
                randomAlphaOfLength(10),
                embeddingType,
                similarityMeasure
            );

            Model updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize);

            SimilarityMeasure expectedSimilarityMeasure = similarityMeasure == null
                ? CohereService.defaultSimilarity(embeddingType)
                : similarityMeasure;
            assertEquals(expectedSimilarityMeasure, updatedModel.getServiceSettings().similarity());
            assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue());
        }
    }
    // A 401 from the Cohere endpoint must surface as an ElasticsearchException whose message
    // includes both the auth-failure classification and the upstream error message.
    public void testInfer_UnauthorisedResponse() throws IOException {
        var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);

        try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
            String responseJson = """
                {
                    "message": "invalid api token"
                }
                """;
            webServer.enqueue(new MockResponse().setResponseCode(401).setBody(responseJson));

            var model = CohereEmbeddingsModelTests.createModel(
                getUrl(webServer),
                "secret",
                CohereEmbeddingsTaskSettings.EMPTY_SETTINGS,
                1024,
                1024,
                "coheremodel",
                null
            );
            PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
            service.infer(
                model,
                null,
                null,
                null,
                List.of("abc"),
                false,
                new HashMap<>(),
                InputType.INGEST,
                InferenceAction.Request.DEFAULT_TIMEOUT,
                listener
            );

            var error = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT));
            assertThat(error.getMessage(), containsString("Received an authentication error status code for request"));
            assertThat(error.getMessage(), containsString("Error message: [invalid api token]"));
            assertThat(webServer.requests(), hasSize(1));
        }
    }
    // When the model's task settings are empty, the per-request InputType.INGEST parameter wins and
    // is serialized as Cohere's "search_document" input type in the request body.
    public void testInfer_SetsInputTypeToIngest_FromInferParameter_WhenTaskSettingsAreEmpty() throws IOException {
        var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);

        try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
            String responseJson = """
                {
                    "id": "de37399c-5df6-47cb-bc57-e3c5680c977b",
                    "texts": [
                        "hello"
                    ],
                    "embeddings": {
                        "float": [
                            [
                                0.123,
                                -0.123
                            ]
                        ]
                    },
                    "meta": {
                        "api_version": {
                            "version": "1"
                        },
                        "billed_units": {
                            "input_tokens": 1
                        }
                    },
                    "response_type": "embeddings_by_type"
                }
                """;
            webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));

            var model = CohereEmbeddingsModelTests.createModel(
                getUrl(webServer),
                "secret",
                CohereEmbeddingsTaskSettings.EMPTY_SETTINGS,
                1024,
                1024,
                "model",
                null
            );
            PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
            service.infer(
                model,
                null,
                null,
                null,
                List.of("abc"),
                false,
                new HashMap<>(),
                InputType.INGEST,
                InferenceAction.Request.DEFAULT_TIMEOUT,
                listener
            );

            var result = listener.actionGet(TIMEOUT);

            assertEquals(buildExpectationFloat(List.of(new float[] { 0.123F, -0.123F })), result.asMap());
            assertThat(webServer.requests(), hasSize(1));
            assertNull(webServer.requests().get(0).getUri().getQuery());
            assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType()));
            assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret"));

            var requestMap = entityAsMap(webServer.requests().get(0).getBody());
            assertThat(
                requestMap,
                is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document", "embedding_types", List.of("float")))
            );
        }
    }
    // The per-request InputType.INGEST parameter takes precedence over an InputType.SEARCH value
    // supplied via the request-level task settings overrides, so the body carries "search_document".
    public void testInfer_SetsInputTypeToIngestFromInferParameter_WhenModelSettingIsNull_AndRequestTaskSettingsIsSearch()
        throws IOException {
        var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);

        try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
            String responseJson = """
                {
                    "id": "de37399c-5df6-47cb-bc57-e3c5680c977b",
                    "texts": [
                        "hello"
                    ],
                    "embeddings": {
                        "float": [
                            [
                                0.123,
                                -0.123
                            ]
                        ]
                    },
                    "meta": {
                        "api_version": {
                            "version": "1"
                        },
                        "billed_units": {
                            "input_tokens": 1
                        }
                    },
                    "response_type": "embeddings_by_type"
                }
                """;
            webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson))
;
            var model = CohereEmbeddingsModelTests.createModel(
                getUrl(webServer),
                "secret",
                new CohereEmbeddingsTaskSettings(null, null),
                1024,
                1024,
                "model",
                null
            );
            PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
            service.infer(
                model,
                null,
                null,
                null,
                List.of("abc"),
                false,
                CohereEmbeddingsTaskSettingsTests.getTaskSettingsMap(InputType.SEARCH, null),
                InputType.INGEST,
                InferenceAction.Request.DEFAULT_TIMEOUT,
                listener
            );

            var result = listener.actionGet(TIMEOUT);

            assertThat(result.asMap(), Matchers.is(buildExpectationFloat(List.of(new float[] { 0.123F, -0.123F }))));
            assertThat(webServer.requests(), hasSize(1));
            assertNull(webServer.requests().get(0).getUri().getQuery());
            assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType()));
            assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret"));

            var requestMap = entityAsMap(webServer.requests().get(0).getBody());
            assertThat(
                requestMap,
                is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document", "embedding_types", List.of("float")))
            );
        }
    }
    // v1 API: with no input type in the task settings and InputType.UNSPECIFIED on the request,
    // the request body must omit the "input_type" field entirely.
    public void testInfer_DoesNotSetInputType_WhenNotPresentInTaskSettings_AndUnspecifiedIsPassedInRequest_v1API() throws IOException {
        var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);

        try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
            String responseJson = """
                {
                    "id": "de37399c-5df6-47cb-bc57-e3c5680c977b",
                    "texts": [
                        "hello"
                    ],
                    "embeddings": {
                        "float": [
                            [
                                0.123,
                                -0.123
                            ]
                        ]
                    },
                    "meta": {
                        "api_version": {
                            "version": "1"
                        },
                        "billed_units": {
                            "input_tokens": 1
                        }
                    },
                    "response_type": "embeddings_by_type"
                }
                """;
            webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));

            var model = CohereEmbeddingsModelTests.createModel(
                getUrl(webServer),
                "secret",
                new CohereEmbeddingsTaskSettings(null, null),
                1024,
                1024,
                "model",
                null,
                CohereServiceSettings.CohereApiVersion.V1
            );
            PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
            service.infer(
                model,
                null,
                null,
                null,
                List.of("abc"),
                false,
                new HashMap<>(),
                InputType.UNSPECIFIED,
                InferenceAction.Request.DEFAULT_TIMEOUT,
                listener
            );

            var result = listener.actionGet(TIMEOUT);

            assertThat(result.asMap(), Matchers.is(buildExpectationFloat(List.of(new float[] { 0.123F, -0.123F }))));
            assertThat(webServer.requests(), hasSize(1));
            assertNull(webServer.requests().get(0).getUri().getQuery());
            assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType()));
            assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret"));

            // No "input_type" key expected in the v1 request body.
            var requestMap = entityAsMap(webServer.requests().get(0).getBody());
            assertThat(requestMap, is(Map.of("texts", List.of("abc"), "model", "model", "embedding_types", List.of("float"))));
        }
    }
    // v2 API: with no input type in the task settings and InputType.UNSPECIFIED on the request,
    // the request body must default "input_type" to "search_query" (v2 requires the field).
    public void testInfer_DefaultsInputType_WhenNotPresentInTaskSettings_AndUnspecifiedIsPassedInRequest_v2API() throws IOException {
        var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);

        try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
            String responseJson = """
                {
                    "id": "de37399c-5df6-47cb-bc57-e3c5680c977b",
                    "texts": [
                        "hello"
                    ],
                    "embeddings": {
                        "float": [
                            [
                                0.123,
                                -0.123
                            ]
                        ]
                    },
                    "meta": {
                        "api_version": {
                            "version": "1"
                        },
                        "billed_units": {
                            "input_tokens": 1
                        }
                    },
                    "response_type": "embeddings_by_type"
                }
                """;
            webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));

            var model = CohereEmbeddingsModelTests.createModel(
                getUrl(webServer),
                "secret",
                new CohereEmbeddingsTaskSettings(null, null),
                1024,
                1024,
                "model",
                null,
                CohereServiceSettings.CohereApiVersion.V2
            );
            PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
            service.infer(
                model,
                null,
                null,
                null,
                List.of("abc"),
                false,
                new HashMap<>(),
                InputType.UNSPECIFIED,
                InferenceAction.Request.DEFAULT_TIMEOUT,
                listener
            );

            listener.actionGet(TIMEOUT);

            assertThat(webServer.requests(), hasSize(1));
            var requestMap = entityAsMap(webServer.requests().get(0).getBody());
            assertThat(
                requestMap,
                is(Map.of("texts", List.of("abc"), "model", "model", "embedding_types", List.of("float"), "input_type", "search_query"))
            );
        }
    }
public void testChunkedInfer_BatchesCallsChunkingSettingsSet() throws IOException {
var model = CohereEmbeddingsModelTests.createModel(
getUrl(webServer),
"secret",
new CohereEmbeddingsTaskSettings(null, null),
createRandomChunkingSettings(),
1024,
1024,
"model",
null
);
testChunkedInfer(model);
}
public void testChunkedInfer_ChunkingSettingsNotSet() throws IOException {
var model = CohereEmbeddingsModelTests.createModel(
getUrl(webServer),
"secret",
new CohereEmbeddingsTaskSettings(null, null),
null,
1024,
1024,
"model",
null
);
testChunkedInfer(model);
}
/**
 * Shared driver for the chunked-inference tests: sends a single {@code chunkedInfer} call with
 * two inputs ("a" and "bb") to a mock web server that answers with two float embeddings, then
 * verifies both chunked results and the shape of the outgoing HTTP request body.
 */
private void testChunkedInfer(CohereEmbeddingsModel model) throws IOException {
var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);
try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
// Batching will call the service with 2 inputs
String responseJson = """
{
"id": "de37399c-5df6-47cb-bc57-e3c5680c977b",
"texts": [
"hello"
],
"embeddings": {
"float": [
[
0.123,
-0.123
],
[
0.223,
-0.223
]
]
},
"meta": {
"api_version": {
"version": "1"
},
"billed_units": {
"input_tokens": 1
}
},
"response_type": "embeddings_by_type"
}
""";
webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));
PlainActionFuture<List<ChunkedInference>> listener = new PlainActionFuture<>();
// 2 inputs
service.chunkedInfer(
model,
null,
List.of(new ChunkInferenceInput("a"), new ChunkInferenceInput("bb")),
new HashMap<>(),
InputType.UNSPECIFIED,
InferenceAction.Request.DEFAULT_TIMEOUT,
listener
);
var results = listener.actionGet(TIMEOUT);
assertThat(results, hasSize(2));
// First input ("a", length 1) maps to the first embedding in the canned response.
{
assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class));
var floatResult = (ChunkedInferenceEmbedding) results.get(0);
assertThat(floatResult.chunks(), hasSize(1));
assertEquals(new ChunkedInference.TextOffset(0, 1), floatResult.chunks().get(0).offset());
assertArrayEquals(
new float[] { 0.123f, -0.123f },
((DenseEmbeddingFloatResults.Embedding) floatResult.chunks().get(0).embedding()).values(),
0.0f
);
}
// Second input ("bb", length 2) maps to the second embedding.
{
assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class));
var floatResult = (ChunkedInferenceEmbedding) results.get(1);
assertThat(floatResult.chunks(), hasSize(1));
assertEquals(new ChunkedInference.TextOffset(0, 2), floatResult.chunks().get(0).offset());
assertArrayEquals(
new float[] { 0.223f, -0.223f },
((DenseEmbeddingFloatResults.Embedding) floatResult.chunks().get(0).embedding()).values(),
0.0f
);
}
// Both inputs were batched into one HTTP request with the expected headers and JSON body.
assertThat(webServer.requests(), hasSize(1));
assertNull(webServer.requests().get(0).getUri().getQuery());
assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType()));
assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret"));
var requestMap = entityAsMap(webServer.requests().get(0).getBody());
assertThat(
requestMap,
is(Map.of("texts", List.of("a", "bb"), "model", "model", "embedding_types", List.of("float"), "input_type", "search_query"))
);
}
}
/**
 * Same batching scenario as {@code testChunkedInfer}, but with a BYTE embedding type: the mock
 * server returns int8 embeddings and the results must surface as byte embeddings, with the
 * request body asking for {@code "int8"} embedding types.
 */
public void testChunkedInfer_BatchesCalls_Bytes() throws IOException {
var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);
try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
// Batching will call the service with 2 inputs
String responseJson = """
{
"id": "de37399c-5df6-47cb-bc57-e3c5680c977b",
"texts": [
"hello"
],
"embeddings": {
"int8": [
[
23,
-23
],
[
24,
-24
]
]
},
"meta": {
"api_version": {
"version": "1"
},
"billed_units": {
"input_tokens": 1
}
},
"response_type": "embeddings_by_type"
}
""";
webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));
var model = CohereEmbeddingsModelTests.createModel(
getUrl(webServer),
"secret",
new CohereEmbeddingsTaskSettings(null, null),
1024,
1024,
"model",
CohereEmbeddingType.BYTE
);
PlainActionFuture<List<ChunkedInference>> listener = new PlainActionFuture<>();
// 2 inputs
service.chunkedInfer(
model,
null,
List.of(new ChunkInferenceInput("a"), new ChunkInferenceInput("bb")),
new HashMap<>(),
InputType.UNSPECIFIED,
InferenceAction.Request.DEFAULT_TIMEOUT,
listener
);
var results = listener.actionGet(TIMEOUT);
assertThat(results, hasSize(2));
// First input ("a", length 1) maps to the first int8 embedding.
{
assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class));
var byteResult = (ChunkedInferenceEmbedding) results.get(0);
assertThat(byteResult.chunks(), hasSize(1));
assertEquals(new ChunkedInference.TextOffset(0, 1), byteResult.chunks().get(0).offset());
assertThat(byteResult.chunks().get(0).embedding(), instanceOf(DenseEmbeddingByteResults.Embedding.class));
assertArrayEquals(
new byte[] { 23, -23 },
((DenseEmbeddingByteResults.Embedding) byteResult.chunks().get(0).embedding()).values()
);
}
// Second input ("bb", length 2) maps to the second int8 embedding.
{
assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class));
var byteResult = (ChunkedInferenceEmbedding) results.get(1);
assertThat(byteResult.chunks(), hasSize(1));
assertEquals(new ChunkedInference.TextOffset(0, 2), byteResult.chunks().get(0).offset());
assertThat(byteResult.chunks().get(0).embedding(), instanceOf(DenseEmbeddingByteResults.Embedding.class));
assertArrayEquals(
new byte[] { 24, -24 },
((DenseEmbeddingByteResults.Embedding) byteResult.chunks().get(0).embedding()).values()
);
}
// Both inputs batched into one HTTP request; body must request "int8" embedding types.
assertThat(webServer.requests(), hasSize(1));
assertNull(webServer.requests().get(0).getUri().getQuery());
assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType()));
assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret"));
var requestMap = entityAsMap(webServer.requests().get(0).getBody());
assertThat(
requestMap,
is(Map.of("texts", List.of("a", "bb"), "model", "model", "embedding_types", List.of("int8"), "input_type", "search_query"))
);
}
}
public void testDefaultSimilarity_BinaryEmbedding() {
    // Binary-style embedding types default to L2-norm similarity.
    for (var embeddingType : List.of(CohereEmbeddingType.BINARY, CohereEmbeddingType.BIT)) {
        assertEquals(SimilarityMeasure.L2_NORM, CohereService.defaultSimilarity(embeddingType));
    }
}
public void testDefaultSimilarity_NotBinaryEmbedding() {
    // Non-binary embedding types default to cosine similarity.
    for (var embeddingType : List.of(CohereEmbeddingType.FLOAT, CohereEmbeddingType.BYTE, CohereEmbeddingType.INT8)) {
        assertEquals(SimilarityMeasure.COSINE, CohereService.defaultSimilarity(embeddingType));
    }
}
/**
 * Streaming completion happy path: two text-generation events from the mock server are surfaced
 * as one completion payload with one delta per event.
 */
public void testInfer_StreamRequest() throws Exception {
String responseJson = """
{"event_type":"text-generation", "text":"hello"}
{"event_type":"text-generation", "text":"there"}
""";
webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));
streamChatCompletion().hasNoErrors().hasEvent("""
{"completion":[{"delta":"hello"},{"delta":"there"}]}""");
}
/**
 * Helper for the streaming tests: issues a streaming ({@code stream=true}) completion request
 * against the mock web server and returns an assertion wrapper over the resulting event stream.
 * Callers enqueue the desired server response before invoking this.
 */
private InferenceEventsAssertion streamChatCompletion() throws Exception {
var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);
try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
var model = CohereCompletionModelTests.createModel(getUrl(webServer), "secret", "model");
PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
service.infer(
model,
null,
null,
null,
List.of("abc"),
true,
new HashMap<>(),
InputType.INGEST,
InferenceAction.Request.DEFAULT_TIMEOUT,
listener
);
// Block until the stream is fully consumed before the service is closed.
return InferenceEventsAssertion.assertThat(listener.actionGet(TIMEOUT)).hasFinishedStream();
}
}
/**
 * Streaming completion error path: a stream-end event with finish_reason ERROR must produce no
 * events and surface as a 500 error containing the server-provided text.
 */
public void testInfer_StreamRequest_ErrorResponse() throws Exception {
String responseJson = """
{ "event_type":"stream-end", "finish_reason":"ERROR", "response":{ "text": "how dare you" } }
""";
webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));
streamChatCompletion().hasNoEvents().hasErrorWithStatusCode(500).hasErrorContaining("how dare you");
}
@SuppressWarnings("checkstyle:LineLength")
/**
 * Verifies that {@code CohereService#getConfiguration()} matches the expected service
 * configuration schema (supported task types plus api_key / model_id / rate-limit fields),
 * comparing via XContent equivalence so field ordering does not matter.
 */
public void testGetConfiguration() throws Exception {
try (var service = createCohereService()) {
String content = XContentHelper.stripWhitespace("""
{
"service": "cohere",
"name": "Cohere",
"task_types": ["text_embedding", "rerank", "completion"],
"configurations": {
"api_key": {
"description": "API Key for the provider you're connecting to.",
"label": "API Key",
"required": true,
"sensitive": true,
"updatable": true,
"type": "str",
"supported_task_types": ["text_embedding", "rerank", "completion"]
},
"model_id": {
"description": "The name of the model to use for the inference task.",
"label": "Model ID",
"required": true,
"sensitive": false,
"updatable": false,
"type": "str",
"supported_task_types": ["text_embedding", "rerank", "completion"]
},
"rate_limit.requests_per_minute": {
"description": "Minimize the number of rate limit errors.",
"label": "Rate Limit",
"required": false,
"sensitive": false,
"updatable": false,
"type": "int",
"supported_task_types": ["text_embedding", "rerank", "completion"]
}
}
}
""");
// Parse the expected schema, then compare shuffled-field renderings for structural equality.
InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes(
new BytesArray(content),
XContentType.JSON
);
boolean humanReadable = true;
BytesReference originalBytes = toShuffledXContent(configuration, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable);
InferenceServiceConfiguration serviceConfiguration = service.getConfiguration();
assertToXContentEquivalent(
originalBytes,
toXContent(serviceConfiguration, XContentType.JSON, humanReadable),
XContentType.JSON
);
}
}
public void testSupportsStreaming() throws IOException {
    // Only the completion task type supports streaming; TaskType.ANY must not.
    try (var service = new CohereService(mock(), createWithEmptySettings(mock()), mockClusterServiceEmpty())) {
        var streamingTasks = service.supportedStreamingTasks();
        assertThat(streamingTasks, is(EnumSet.of(TaskType.COMPLETION)));
        assertFalse(service.canStream(TaskType.ANY));
    }
}
/**
 * Variant of {@code getRequestConfigMap} that also attaches chunking settings to the
 * request-style configuration map.
 */
private Map<String, Object> getRequestConfigMap(
    Map<String, Object> serviceSettings,
    Map<String, Object> taskSettings,
    Map<String, Object> chunkingSettings,
    Map<String, Object> secretSettings
) {
    var configMap = getRequestConfigMap(serviceSettings, taskSettings, secretSettings);
    configMap.put(ModelConfigurations.CHUNKING_SETTINGS, chunkingSettings);
    return configMap;
}
/**
 * Builds a request-style configuration map containing the service settings (merged with the
 * secret settings) plus the task settings, mirroring an inference PUT request body.
 */
private Map<String, Object> getRequestConfigMap(
    Map<String, Object> serviceSettings,
    Map<String, Object> taskSettings,
    Map<String, Object> secretSettings
) {
    // Explicitly typed: `var m = new HashMap<>()` would infer HashMap<Object, Object>.
    // The copy constructor replaces the separate construct-then-putAll step.
    Map<String, Object> builtServiceSettings = new HashMap<>(serviceSettings);
    builtServiceSettings.putAll(secretSettings);
    return new HashMap<>(
        Map.of(ModelConfigurations.SERVICE_SETTINGS, builtServiceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)
    );
}
/**
 * Builds a request-style configuration map containing only the service settings merged with the
 * secret settings (no task settings).
 */
private Map<String, Object> getRequestConfigMap(Map<String, Object> serviceSettings, Map<String, Object> secretSettings) {
    // Explicitly typed: `var m = new HashMap<>()` would infer HashMap<Object, Object>.
    Map<String, Object> builtServiceSettings = new HashMap<>(serviceSettings);
    builtServiceSettings.putAll(secretSettings);
    return new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, builtServiceSettings));
}
// Builds a CohereService with a mocked sender factory and empty settings, for tests that never
// issue real HTTP requests.
private CohereService createCohereService() {
return new CohereService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool), mockClusterServiceEmpty());
}
// Hook for the shared base-class test suite: supplies the service under test.
@Override
public InferenceService createInferenceService() {
return createCohereService();
}
// Cohere reports a fixed reranker window size of 2800 regardless of the model id.
@Override
protected void assertRerankerWindowSize(RerankingInferenceService rerankingInferenceService) {
assertThat(rerankingInferenceService.rerankerWindowSize("any model"), is(2800));
}
}
|
CohereServiceTests
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/convert/ConversionServiceArguments.java
|
{
"start": 1389,
"end": 2599
}
|
class ____ {
private ConversionServiceArguments() {
}
public static Stream<? extends Arguments> with(Formatter<?> formatter) {
return with((conversionService) -> conversionService.addFormatter(formatter));
}
public static Stream<? extends Arguments> with(GenericConverter converter) {
return with((conversionService) -> conversionService.addConverter(converter));
}
public static Stream<? extends Arguments> with(Consumer<FormattingConversionService> initializer) {
FormattingConversionService withoutDefaults = new FormattingConversionService();
initializer.accept(withoutDefaults);
return Stream.of(
Arguments.of(new NamedConversionService(withoutDefaults, "Without defaults conversion service")),
Arguments.of(new NamedConversionService(new ApplicationConversionService(),
"Application conversion service")));
}
public static boolean isApplicationConversionService(ConversionService conversionService) {
if (conversionService instanceof NamedConversionService namedConversionService) {
return isApplicationConversionService(namedConversionService.delegate);
}
return conversionService instanceof ApplicationConversionService;
}
static
|
ConversionServiceArguments
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.