language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__camel | components/camel-cometd/src/main/java/org/apache/camel/component/cometd/CometdComponent.java | {
"start": 3320,
"end": 14158
} | class ____ {
Connector connector;
CometDServlet servlet;
Server server;
int refCount;
ConnectorRef(Connector connector, CometDServlet servlet, Server server) {
this.connector = connector;
this.servlet = servlet;
this.server = server;
increment();
}
public int increment() {
return ++refCount;
}
public int decrement() {
return --refCount;
}
}
public CometdComponent() {
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
CometdEndpoint endpoint = new CometdEndpoint(this, uri, remaining);
setProperties(endpoint, parameters);
return endpoint;
}
/**
* Connects the URL specified on the endpoint to the specified processor.
*/
public void connect(CometdProducerConsumer prodcon) throws Exception {
Server server = null;
// Make sure that there is a connector for the requested endpoint.
CometdEndpoint endpoint = prodcon.getEndpoint();
String connectorKey = endpoint.getProtocol() + ":" + endpoint.getUri().getHost() + ":" + endpoint.getPort();
connectorsLock.lock();
try {
ConnectorRef connectorRef = connectors.get(connectorKey);
if (connectorRef == null) {
ServerConnector connector;
server = createServer();
if ("cometds".equals(endpoint.getProtocol())) {
connector = getSslSocketConnector(server);
} else {
connector = new ServerConnector(server);
}
connector.setPort(endpoint.getPort());
connector.setHost(endpoint.getUri().getHost());
if ("localhost".equalsIgnoreCase(endpoint.getUri().getHost())) {
LOG.warn("You use localhost interface! It means that no external connections will be available."
+ " Don't you want to use 0.0.0.0 instead (all network interfaces)?");
}
server.addConnector(connector);
CometDServlet servlet = createServletForConnector(server, endpoint);
connectorRef = new ConnectorRef(connector, servlet, server);
server.start();
connectors.put(connectorKey, connectorRef);
} else {
connectorRef.increment();
}
BayeuxServerImpl bayeux = (BayeuxServerImpl) connectorRef.servlet.getBayeuxServer();
if (securityPolicy != null) {
bayeux.setSecurityPolicy(securityPolicy);
}
if (extensions != null) {
for (BayeuxServer.Extension extension : extensions) {
bayeux.addExtension(extension);
}
}
if (serverListeners != null) {
for (BayeuxServer.BayeuxServerListener serverListener : serverListeners) {
bayeux.addListener(serverListener);
}
}
prodcon.setBayeux(bayeux);
} finally {
connectorsLock.unlock();
}
}
/**
* Disconnects the URL specified on the endpoint from the specified processor.
*/
public void disconnect(CometdProducerConsumer prodcon) throws Exception {
CometdEndpoint endpoint = prodcon.getEndpoint();
String connectorKey = endpoint.getProtocol() + ":" + endpoint.getUri().getHost() + ":" + endpoint.getPort();
connectorsLock.lock();
try {
ConnectorRef connectorRef = connectors.get(connectorKey);
if (connectorRef != null && connectorRef.decrement() == 0) {
connectorRef.server.removeConnector(connectorRef.connector);
connectorRef.connector.stop();
connectorRef.server.stop();
connectors.remove(connectorKey);
}
} finally {
connectorsLock.unlock();
}
}
protected CometDServlet createServletForConnector(Server server, CometdEndpoint endpoint)
throws Exception {
CometDServlet servlet = new CometDServlet();
ServletContextHandler context
= new ServletContextHandler("/", false, false);
server.setHandler(context);
ServletHolder holder = new ServletHolder();
holder.setServlet(servlet);
holder.setAsyncSupported(true);
// Use baseResource to pass as a parameter the url
// pointing to by example classpath:webapp
if (endpoint.getBaseResource() != null) {
String[] resources = endpoint.getBaseResource().split(":");
LOG.debug(">>> Protocol found: {}, and resource: {}", resources[0], resources[1]);
if (resources[0].equals("file")) {
context.setBaseResource(new MountedPathResourceFactory().newResource(resources[1]));
} else if (resources[0].equals("classpath")) {
// Create a URL handler using classpath protocol
URL url = this.getCamelContext().getClassResolver().loadResourceAsURL(resources[1]);
context.setBaseResource(new MountedPathResourceFactory().newResource(url));
}
}
applyCrossOriginFiltering(endpoint, context);
context.addServlet(holder, "/cometd/*");
context.addServlet("org.eclipse.jetty.ee10.servlet.DefaultServlet", "/");
context.setSessionHandler(new SessionHandler());
holder.setInitParameter("timeout", Integer.toString(endpoint.getTimeout()));
holder.setInitParameter("interval", Integer.toString(endpoint.getInterval()));
holder.setInitParameter("maxInterval", Integer.toString(endpoint.getMaxInterval()));
holder.setInitParameter("multiFrameInterval", Integer.toString(endpoint.getMultiFrameInterval()));
holder.setInitParameter("JSONCommented", Boolean.toString(endpoint.isJsonCommented()));
holder.setInitParameter("logLevel", Integer.toString(endpoint.getLogLevel()));
return servlet;
}
protected ServerConnector getSslSocketConnector(Server server) throws Exception {
ServerConnector sslSocketConnector = null;
SSLContextParameters sslParams = this.sslContextParameters;
if (sslParams == null) {
sslParams = retrieveGlobalSslContextParameters();
}
SslContextFactory.Server sslContextFactory = new SslContextFactory.Server();
sslContextFactory.setEndpointIdentificationAlgorithm(null);
if (sslParams != null) {
sslContextFactory.setSslContext(sslParams.createSSLContext(getCamelContext()));
} else {
sslContextFactory.setKeyStorePassword(sslKeyPassword);
sslContextFactory.setKeyManagerPassword(sslPassword);
if (sslKeystore != null) {
sslContextFactory.setKeyStorePath(sslKeystore);
}
}
sslSocketConnector = new ServerConnector(server, sslContextFactory);
return sslSocketConnector;
}
public String getSslKeyPassword() {
return sslKeyPassword;
}
public String getSslPassword() {
return sslPassword;
}
public String getSslKeystore() {
return sslKeystore;
}
/**
* The password for the keystore when using SSL.
*/
public void setSslKeyPassword(String sslKeyPassword) {
this.sslKeyPassword = sslKeyPassword;
}
/**
* The password when using SSL.
*/
public void setSslPassword(String sslPassword) {
this.sslPassword = sslPassword;
}
/**
* The path to the keystore.
*/
public void setSslKeystore(String sslKeystore) {
this.sslKeystore = sslKeystore;
}
/**
* To use a custom configured SecurityPolicy to control authorization
*/
public void setSecurityPolicy(SecurityPolicy securityPolicy) {
this.securityPolicy = securityPolicy;
}
public SecurityPolicy getSecurityPolicy() {
return securityPolicy;
}
public List<BayeuxServer.Extension> getExtensions() {
return extensions;
}
/**
* To use a list of custom BayeuxServer.Extension that allows modifying incoming and outgoing requests.
*/
public void setExtensions(List<BayeuxServer.Extension> extensions) {
this.extensions = extensions;
}
public void addExtension(BayeuxServer.Extension extension) {
if (extensions == null) {
extensions = new ArrayList<>();
}
extensions.add(extension);
}
public void addServerListener(BayeuxServer.BayeuxServerListener serverListener) {
if (serverListeners == null) {
serverListeners = new ArrayList<>();
}
serverListeners.add(serverListener);
}
public SSLContextParameters getSslContextParameters() {
return sslContextParameters;
}
/**
* To configure security using SSLContextParameters
*/
public void setSslContextParameters(SSLContextParameters sslContextParameters) {
this.sslContextParameters = sslContextParameters;
}
@Override
public boolean isUseGlobalSslContextParameters() {
return this.useGlobalSslContextParameters;
}
/**
* Enable usage of global SSL context parameters.
*/
@Override
public void setUseGlobalSslContextParameters(boolean useGlobalSslContextParameters) {
this.useGlobalSslContextParameters = useGlobalSslContextParameters;
}
protected Server createServer() {
Server server = new Server();
ContextHandlerCollection collection = new ContextHandlerCollection();
server.setHandler(collection);
return server;
}
@Override
protected void doStop() throws Exception {
connectorsLock.lock();
try {
for (ConnectorRef connectorRef : connectors.values()) {
connectorRef.connector.stop();
}
connectors.clear();
} finally {
connectorsLock.unlock();
}
super.doStop();
}
private void applyCrossOriginFiltering(CometdEndpoint endpoint, ServletContextHandler context) {
if (endpoint.isCrossOriginFilterOn()) {
FilterHolder filterHolder = new FilterHolder();
CrossOriginFilter filter = new CrossOriginFilter();
filterHolder.setFilter(filter);
filterHolder.setInitParameter("allowedOrigins", endpoint.getAllowedOrigins());
context.addFilter(filterHolder, endpoint.getFilterPath(), EnumSet.allOf(DispatcherType.class));
}
}
}
| ConnectorRef |
java | apache__rocketmq | remoting/src/main/java/org/apache/rocketmq/remoting/protocol/BrokerSyncInfo.java | {
"start": 858,
"end": 2204
} | class ____ extends RemotingSerializable {
/**
* For slave online sync, retrieve HA address before register
*/
private String masterHaAddress;
private long masterFlushOffset;
private String masterAddress;
public BrokerSyncInfo(String masterHaAddress, long masterFlushOffset, String masterAddress) {
this.masterHaAddress = masterHaAddress;
this.masterFlushOffset = masterFlushOffset;
this.masterAddress = masterAddress;
}
public String getMasterHaAddress() {
return masterHaAddress;
}
public void setMasterHaAddress(String masterHaAddress) {
this.masterHaAddress = masterHaAddress;
}
public long getMasterFlushOffset() {
return masterFlushOffset;
}
public void setMasterFlushOffset(long masterFlushOffset) {
this.masterFlushOffset = masterFlushOffset;
}
public String getMasterAddress() {
return masterAddress;
}
public void setMasterAddress(String masterAddress) {
this.masterAddress = masterAddress;
}
@Override
public String toString() {
return "BrokerSyncInfo{" +
"masterHaAddress='" + masterHaAddress + '\'' +
", masterFlushOffset=" + masterFlushOffset +
", masterAddress=" + masterAddress +
'}';
}
}
| BrokerSyncInfo |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/driver/impl/StateStoreMySQLImpl.java | {
"start": 14333,
"end": 14923
} | class ____ validate is this is one of the supported
* record types.
* @param clazz Class of the record.
* @return Table name for this record class.
*/
private <T extends BaseRecord> String getAndValidateTableNameForClass(final Class<T> clazz) {
String tableName = StateStoreUtils.getRecordName(clazz);
if (VALID_TABLES.contains(tableName)) {
return tableName;
} else {
throw new IllegalArgumentException(tableName + " is not a valid table name");
}
}
/**
* Class that relies on a HikariDataSource to provide SQL connections.
*/
static | and |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java | {
"start": 2375,
"end": 2783
} | class ____ extends ActionResponse implements ToXContentObject, QlStatusResponse.AsyncStatus {
private final Hits hits;
private final long tookInMillis;
private final boolean isTimeout;
private final String asyncExecutionId;
private final boolean isRunning;
private final boolean isPartial;
private final ShardSearchFailure[] shardFailures;
private static final | EqlSearchResponse |
java | netty__netty | codec-http3/src/test/java/io/netty/handler/codec/http3/example/Http3ClientExample.java | {
"start": 1844,
"end": 5146
} | class ____ {
private Http3ClientExample() { }
public static void main(String... args) throws Exception {
EventLoopGroup group = new MultiThreadIoEventLoopGroup(1, NioIoHandler.newFactory());
try {
QuicSslContext context = QuicSslContextBuilder.forClient()
.trustManager(InsecureTrustManagerFactory.INSTANCE)
.applicationProtocols(Http3.supportedApplicationProtocols()).build();
ChannelHandler codec = Http3.newQuicClientCodecBuilder()
.sslContext(context)
.maxIdleTimeout(5000, TimeUnit.MILLISECONDS)
.initialMaxData(10000000)
.initialMaxStreamDataBidirectionalLocal(1000000)
.build();
Bootstrap bs = new Bootstrap();
Channel channel = bs.group(group)
.channel(NioDatagramChannel.class)
.handler(codec)
.bind(0).sync().channel();
QuicChannel quicChannel = QuicChannel.newBootstrap(channel)
.handler(new Http3ClientConnectionHandler())
.remoteAddress(new InetSocketAddress(NetUtil.LOCALHOST4, Http3ServerExample.PORT))
.connect()
.get();
QuicStreamChannel streamChannel = Http3.newRequestStream(quicChannel,
new Http3RequestStreamInboundHandler() {
@Override
protected void channelRead(ChannelHandlerContext ctx, Http3HeadersFrame frame) {
ReferenceCountUtil.release(frame);
}
@Override
protected void channelRead(ChannelHandlerContext ctx, Http3DataFrame frame) {
System.err.print(frame.content().toString(CharsetUtil.US_ASCII));
ReferenceCountUtil.release(frame);
}
@Override
protected void channelInputClosed(ChannelHandlerContext ctx) {
ctx.close();
}
}).sync().getNow();
// Write the Header frame and send the FIN to mark the end of the request.
// After this its not possible anymore to write any more data.
Http3HeadersFrame frame = new DefaultHttp3HeadersFrame();
frame.headers().method("GET").path("/")
.authority(NetUtil.LOCALHOST4.getHostAddress() + ":" + Http3ServerExample.PORT)
.scheme("https");
streamChannel.writeAndFlush(frame)
.addListener(QuicStreamChannel.SHUTDOWN_OUTPUT).sync();
// Wait for the stream channel and quic channel to be closed (this will happen after we received the FIN).
// After this is done we will close the underlying datagram channel.
streamChannel.closeFuture().sync();
// After we received the response lets also close the underlying QUIC channel and datagram channel.
quicChannel.close().sync();
channel.close().sync();
} finally {
group.shutdownGracefully();
}
}
}
| Http3ClientExample |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/Long2DArraysBaseTest.java | {
"start": 1106,
"end": 1781
} | class ____ {
/**
* is initialized with {@link #initActualArray()} with default value = {{0, 2, 4}, {6, 8, 10}}
*/
protected long[][] actual;
protected Failures failures;
protected Long2DArrays long2dArrays;
protected Arrays2D arrays2d;
protected AssertionInfo info = someInfo();
@BeforeEach
public void setUp() {
failures = spy(Failures.instance());
long2dArrays = new Long2DArrays();
long2dArrays.failures = failures;
arrays2d = mock(Arrays2D.class);
long2dArrays.setArrays(arrays2d);
initActualArray();
}
protected void initActualArray() {
actual = new long[][] { { 0, 2, 4 }, { 6, 8, 10 } };
}
}
| Long2DArraysBaseTest |
java | spring-projects__spring-boot | buildSrc/src/main/java/org/springframework/boot/build/antora/LocalAggregateContentContribution.java | {
"start": 985,
"end": 1548
} | class ____ extends ContentContribution {
protected LocalAggregateContentContribution(Project project, String name) {
super(project, name, "local-aggregate");
}
@Override
void produceFrom(CopySpec copySpec) {
super.configureProduction(copySpec);
configurePlaybookGeneration(this::addToAlwaysInclude);
}
private void addToAlwaysInclude(GenerateAntoraPlaybook task) {
task.getAntoraExtensions()
.getZipContentsCollector()
.getAlwaysInclude()
.add(new AlwaysInclude(getName(), "local-aggregate-content"));
}
}
| LocalAggregateContentContribution |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/internal/util/collections/BoundedConcurrentHashMap.java | {
"start": 10609,
"end": 14185
} | class ____<K, V> extends LinkedHashMap<HashEntry<K, V>, V> implements EvictionPolicy<K, V> {
private final ConcurrentLinkedQueue<HashEntry<K, V>> accessQueue;
private final AtomicInteger accessQueueSize;
private final Segment<K, V> segment;
private final int maxBatchQueueSize;
private final int trimDownSize;
private final float batchThresholdFactor;
private final Set<HashEntry<K, V>> evicted;
LRU(Segment<K, V> s, int capacity, float lf, int maxBatchSize, float batchThresholdFactor) {
super( capacity, lf, true );
this.segment = s;
this.trimDownSize = capacity;
this.maxBatchQueueSize = Math.min( maxBatchSize, MAX_BATCH_SIZE );
this.batchThresholdFactor = batchThresholdFactor;
this.accessQueue = new ConcurrentLinkedQueue<>();
this.evicted = new HashSet<>();
this.accessQueueSize = new AtomicInteger();
}
@Override
public void execute() {
assert segment.isHeldByCurrentThread();
int removed = 0;
HashEntry<K, V> e;
try {
while ((e = accessQueue.poll()) != null) {
removed++;
put(e, e.value);
}
}
finally {
// guarantee that under OOM size won't be broken
accessQueueSize.addAndGet(-removed);
}
}
@Override
public void onEntryMiss(HashEntry<K, V> e) {
assert segment.isHeldByCurrentThread();
put( e, e.value );
if ( !evicted.isEmpty() ) {
evicted.clear();
}
}
/*
* Invoked without holding a lock on Segment
*/
@Override
public boolean onEntryHit(HashEntry<K, V> e) {
accessQueue.add( e );
// counter-intuitive:
// Why not placing this *before* appending the entry to the access queue?
// we don't want the eviction to kick-in if the access queue doesn't contain enough entries.
final int size = accessQueueSize.incrementAndGet();
return size >= maxBatchQueueSize * batchThresholdFactor;
}
/*
* Invoked without holding a lock on Segment
*/
@Override
public boolean thresholdExpired() {
return accessQueueSize.get() >= maxBatchQueueSize;
}
@Override
public void onEntryRemove(HashEntry<K, V> e) {
assert segment.isHeldByCurrentThread();
remove( e );
// we could have multiple instances of e in accessQueue; remove them all
int removed = 0;
while ( accessQueue.remove( e ) ) {
removed--;
}
accessQueueSize.addAndGet(-removed);
}
@Override
public void clear() {
assert segment.isHeldByCurrentThread();
super.clear();
int removed = 0;
while (accessQueue.poll() != null) {
removed++;
}
accessQueueSize.addAndGet(-removed);
}
@Override
public Eviction strategy() {
return Eviction.LRU;
}
private boolean isAboveThreshold() {
return size() > trimDownSize;
}
protected boolean removeEldestEntry(Map.Entry<HashEntry<K, V>, V> eldest) {
assert segment.isHeldByCurrentThread();
boolean aboveThreshold = isAboveThreshold();
if ( aboveThreshold ) {
HashEntry<K, V> evictedEntry = eldest.getKey();
segment.remove( evictedEntry.key, evictedEntry.hash, null );
evicted.add( evictedEntry );
}
return aboveThreshold;
}
@Override
public HashEntry<K, V> createNewEntry(K key, int hash, HashEntry<K, V> next, V value) {
return new HashEntry<>( key, hash, next, value );
}
}
/**
* Adapted to Infinispan BoundedConcurrentHashMap using LIRS implementation ideas from Charles Fry (fry@google.com)
* See http://code.google.com/p/concurrentlinkedhashmap/source/browse/trunk/src/test/java/com/googlecode/concurrentlinkedhashmap/caches/LirsMap.java
* for original sources
*/
private static final | LRU |
java | alibaba__nacos | api/src/main/java/com/alibaba/nacos/api/selector/Selector.java | {
"start": 1524,
"end": 2303
} | interface ____<R, C, E> extends Serializable {
/**
* parse the selector, build the inner info which used by {@link #select(Object)}.
*
* @param expression expression.
* @return selector.
* @throws NacosException parse failed exception.
*/
Selector<R, C, E> parse(E expression) throws NacosException;
/**
* select the target result.
*
* @param context selector context.
* @return select result.
*/
R select(C context);
/**
* Get the selector type.
*
* @return selector type.
*/
String getType();
/**
* Get the select context which used by {@link #select(Object)}.
*
* @return selector context type.
*/
String getContextType();
}
| Selector |
java | apache__kafka | clients/src/test/java/org/apache/kafka/clients/consumer/MockConsumerTest.java | {
"start": 1646,
"end": 11328
} | class ____ {
private final MockConsumer<String, String> consumer = new MockConsumer<>(AutoOffsetResetStrategy.EARLIEST.name());
@Test
public void testSimpleMock() {
consumer.subscribe(Collections.singleton("test"));
assertEquals(0, consumer.poll(Duration.ZERO).count());
consumer.rebalance(Arrays.asList(new TopicPartition("test", 0), new TopicPartition("test", 1)));
// Mock consumers need to seek manually since they cannot automatically reset offsets
HashMap<TopicPartition, Long> beginningOffsets = new HashMap<>();
beginningOffsets.put(new TopicPartition("test", 0), 0L);
beginningOffsets.put(new TopicPartition("test", 1), 0L);
consumer.updateBeginningOffsets(beginningOffsets);
consumer.seek(new TopicPartition("test", 0), 0);
ConsumerRecord<String, String> rec1 = new ConsumerRecord<>("test", 0, 0, 0L, TimestampType.CREATE_TIME,
0, 0, "key1", "value1", new RecordHeaders(), Optional.empty());
ConsumerRecord<String, String> rec2 = new ConsumerRecord<>("test", 0, 1, 0L, TimestampType.CREATE_TIME,
0, 0, "key2", "value2", new RecordHeaders(), Optional.empty());
consumer.addRecord(rec1);
consumer.addRecord(rec2);
ConsumerRecords<String, String> recs = consumer.poll(Duration.ofMillis(1));
Iterator<ConsumerRecord<String, String>> iter = recs.iterator();
assertEquals(rec1, iter.next());
assertEquals(rec2, iter.next());
assertFalse(iter.hasNext());
final TopicPartition tp = new TopicPartition("test", 0);
assertEquals(2L, consumer.position(tp));
assertEquals(1, recs.nextOffsets().size());
assertEquals(new OffsetAndMetadata(2, Optional.empty(), ""), recs.nextOffsets().get(tp));
consumer.commitSync();
assertEquals(2L, consumer.committed(Collections.singleton(tp)).get(tp).offset());
}
@Test
public void testConsumerRecordsIsEmptyWhenReturningNoRecords() {
TopicPartition partition = new TopicPartition("test", 0);
consumer.assign(Collections.singleton(partition));
consumer.addRecord(new ConsumerRecord<>("test", 0, 0, null, null));
consumer.updateEndOffsets(Collections.singletonMap(partition, 1L));
consumer.seekToEnd(Collections.singleton(partition));
ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1));
assertEquals(0, records.count());
assertTrue(records.isEmpty());
}
@Test
public void shouldNotClearRecordsForPausedPartitions() {
TopicPartition partition0 = new TopicPartition("test", 0);
Collection<TopicPartition> testPartitionList = Collections.singletonList(partition0);
consumer.assign(testPartitionList);
consumer.addRecord(new ConsumerRecord<>("test", 0, 0, null, null));
consumer.updateBeginningOffsets(Collections.singletonMap(partition0, 0L));
consumer.seekToBeginning(testPartitionList);
consumer.pause(testPartitionList);
consumer.poll(Duration.ofMillis(1));
consumer.resume(testPartitionList);
ConsumerRecords<String, String> recordsSecondPoll = consumer.poll(Duration.ofMillis(1));
assertEquals(1, recordsSecondPoll.count());
assertEquals(1, recordsSecondPoll.nextOffsets().size());
assertEquals(new OffsetAndMetadata(1, Optional.empty(), ""), recordsSecondPoll.nextOffsets().get(new TopicPartition("test", 0)));
}
@Test
public void endOffsetsShouldBeIdempotent() {
TopicPartition partition = new TopicPartition("test", 0);
consumer.updateEndOffsets(Collections.singletonMap(partition, 10L));
// consumer.endOffsets should NOT change the value of end offsets
assertEquals(10L, (long) consumer.endOffsets(Collections.singleton(partition)).get(partition));
assertEquals(10L, (long) consumer.endOffsets(Collections.singleton(partition)).get(partition));
assertEquals(10L, (long) consumer.endOffsets(Collections.singleton(partition)).get(partition));
consumer.updateEndOffsets(Collections.singletonMap(partition, 11L));
// consumer.endOffsets should NOT change the value of end offsets
assertEquals(11L, (long) consumer.endOffsets(Collections.singleton(partition)).get(partition));
assertEquals(11L, (long) consumer.endOffsets(Collections.singleton(partition)).get(partition));
assertEquals(11L, (long) consumer.endOffsets(Collections.singleton(partition)).get(partition));
}
@Test
public void testDurationBasedOffsetReset() {
MockConsumer<String, String> consumer = new MockConsumer<>("by_duration:PT1H");
consumer.subscribe(Collections.singleton("test"));
consumer.rebalance(Arrays.asList(new TopicPartition("test", 0), new TopicPartition("test", 1)));
HashMap<TopicPartition, Long> durationBasedOffsets = new HashMap<>();
durationBasedOffsets.put(new TopicPartition("test", 0), 10L);
durationBasedOffsets.put(new TopicPartition("test", 1), 11L);
consumer.updateDurationOffsets(durationBasedOffsets);
ConsumerRecord<String, String> rec1 = new ConsumerRecord<>("test", 0, 10L, 0L, TimestampType.CREATE_TIME,
0, 0, "key1", "value1", new RecordHeaders(), Optional.empty());
ConsumerRecord<String, String> rec2 = new ConsumerRecord<>("test", 0, 11L, 0L, TimestampType.CREATE_TIME,
0, 0, "key2", "value2", new RecordHeaders(), Optional.empty());
consumer.addRecord(rec1);
consumer.addRecord(rec2);
ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1));
Iterator<ConsumerRecord<String, String>> iter = records.iterator();
assertEquals(rec1, iter.next());
assertEquals(rec2, iter.next());
assertFalse(iter.hasNext());
}
@Test
public void testRebalanceListener() {
final List<TopicPartition> revoked = new ArrayList<>();
final List<TopicPartition> assigned = new ArrayList<>();
ConsumerRebalanceListener consumerRebalanceListener = new ConsumerRebalanceListener() {
@Override
public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
revoked.clear();
revoked.addAll(partitions);
}
@Override
public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
if (partitions.isEmpty()) {
return;
}
assigned.clear();
assigned.addAll(partitions);
}
};
consumer.subscribe(Collections.singleton("test"), consumerRebalanceListener);
assertEquals(0, consumer.poll(Duration.ZERO).count());
List<TopicPartition> topicPartitionList = Arrays.asList(new TopicPartition("test", 0), new TopicPartition("test", 1));
consumer.rebalance(topicPartitionList);
assertTrue(revoked.isEmpty());
assertEquals(2, assigned.size());
assertTrue(assigned.contains(topicPartitionList.get(0)));
assertTrue(assigned.contains(topicPartitionList.get(1)));
consumer.rebalance(Collections.emptyList());
assertEquals(2, assigned.size());
assertTrue(revoked.contains(topicPartitionList.get(0)));
assertTrue(revoked.contains(topicPartitionList.get(1)));
consumer.rebalance(Collections.singletonList(topicPartitionList.get(0)));
assertEquals(1, assigned.size());
assertTrue(assigned.contains(topicPartitionList.get(0)));
consumer.rebalance(Collections.singletonList(topicPartitionList.get(1)));
assertEquals(1, assigned.size());
assertTrue(assigned.contains(topicPartitionList.get(1)));
assertEquals(1, revoked.size());
assertTrue(revoked.contains(topicPartitionList.get(0)));
}
@Test
public void testRe2JPatternSubscription() {
assertThrows(IllegalArgumentException.class, () -> consumer.subscribe((SubscriptionPattern) null));
assertThrows(IllegalArgumentException.class, () -> consumer.subscribe(new SubscriptionPattern("")));
SubscriptionPattern pattern = new SubscriptionPattern("t.*");
assertThrows(IllegalArgumentException.class, () -> consumer.subscribe(pattern, null));
consumer.subscribe(pattern);
assertTrue(consumer.subscription().isEmpty());
// Check that the subscription to pattern was successfully applied in the mock consumer (using a different
// subscription type should fail)
assertThrows(IllegalStateException.class, () -> consumer.subscribe(List.of("topic1")));
}
@Test
public void shouldReturnMaxPollRecords() {
TopicPartition partition = new TopicPartition("test", 0);
consumer.assign(Collections.singleton(partition));
consumer.updateBeginningOffsets(Collections.singletonMap(partition, 0L));
IntStream.range(0, 10).forEach(offset -> consumer.addRecord(new ConsumerRecord<>("test", 0, offset, null, null)));
consumer.setMaxPollRecords(2L);
ConsumerRecords<String, String> records;
records = consumer.poll(Duration.ofMillis(1));
assertEquals(2, records.count());
records = consumer.poll(Duration.ofMillis(1));
assertEquals(2, records.count());
consumer.setMaxPollRecords(Long.MAX_VALUE);
records = consumer.poll(Duration.ofMillis(1));
assertEquals(6, records.count());
records = consumer.poll(Duration.ofMillis(1));
assertTrue(records.isEmpty());
}
}
| MockConsumerTest |
java | google__dagger | javatests/dagger/functional/kotlin/CompanionModuleTest.java | {
"start": 816,
"end": 1569
} | class ____ {
@Test
public void verifyCompanionModule() {
TestKotlinComponentWithCompanionModule component =
DaggerTestKotlinComponentWithCompanionModule.create();
assertThat(component.getDataA()).isNotNull();
assertThat(component.getDataB()).isNotNull();
assertThat(component.getBoolean()).isTrue();
assertThat(component.getStringType()).isNotNull();
assertThat(component.getCatNamedStringType()).isEqualTo("Cat");
assertThat(component.getDogNamedStringType()).isEqualTo("Dog");
assertThat(component.getInterface()).isNotNull();
assertThat(component.getLong()).isEqualTo(4L);
assertThat(component.getDouble()).isEqualTo(1.0);
assertThat(component.getInteger()).isEqualTo(2);
}
}
| CompanionModuleTest |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/ContextPropagation.java | {
"start": 1608,
"end": 8949
} | class ____ {
static final Function<Context, Context> NO_OP = c -> c;
static final Function<Context, Context> WITH_GLOBAL_REGISTRY_NO_PREDICATE;
@SuppressWarnings("DataFlowIssue") // only accessed when not null
static ContextSnapshotFactory globalContextSnapshotFactory = null;
static {
WITH_GLOBAL_REGISTRY_NO_PREDICATE = ContextPropagationSupport.isContextPropagationAvailable() ?
new ContextCaptureNoPredicate() : NO_OP;
if (ContextPropagationSupport.isContextPropagation103Available()) {
globalContextSnapshotFactory = ContextSnapshotFactory.builder()
.clearMissing(false)
.build();
}
}
static <T> Flux<T> fluxRestoreThreadLocals(Flux<? extends T> flux, boolean fuseable) {
return fuseable ?
new FluxContextWriteRestoringThreadLocalsFuseable<>(flux, Function.identity())
: new FluxContextWriteRestoringThreadLocals<>(flux, Function.identity());
}
static <T> Mono<T> monoRestoreThreadLocals(Mono<? extends T> mono) {
return new MonoContextWriteRestoringThreadLocals<>(mono, Function.identity());
}
static void configureContextSnapshotFactory(boolean clearMissing) {
if (ContextPropagationSupport.isContextPropagation103OnClasspath) {
globalContextSnapshotFactory = ContextSnapshotFactory.builder()
.clearMissing(clearMissing)
.build();
}
}
@SuppressWarnings("unchecked")
static <C> ContextSnapshot.Scope setThreadLocals(Object context) {
if (ContextPropagationSupport.isContextPropagation103OnClasspath) {
return globalContextSnapshotFactory.setThreadLocalsFrom(context);
}
else {
ContextRegistry registry = ContextRegistry.getInstance();
ContextAccessor<?, ?> contextAccessor = registry.getContextAccessorForRead(context);
Map<Object, @Nullable Object> previousValues = null;
for (ThreadLocalAccessor<?> threadLocalAccessor : registry.getThreadLocalAccessors()) {
Object key = threadLocalAccessor.key();
Object value = ((ContextAccessor<C, ?>) contextAccessor).readValue((C) context, key);
previousValues = setThreadLocal(key, value, threadLocalAccessor, previousValues);
}
if (ContextPropagationSupport.isContextPropagation101Available()) {
return ReactorScopeImpl.from(previousValues, registry);
}
return ReactorScopeImpl100.from(previousValues, registry);
}
}
@SuppressWarnings({"unchecked", "deprecation"})
private static <V> Map<Object, @Nullable Object> setThreadLocal(Object key, @Nullable V value,
ThreadLocalAccessor<?> accessor, @Nullable Map<Object, @Nullable Object> previousValues) {
previousValues = (previousValues != null ? previousValues : new HashMap<>());
previousValues.put(key, accessor.getValue());
if (value != null) {
((ThreadLocalAccessor<V>) accessor).setValue(value);
}
else {
accessor.reset();
}
return previousValues;
}
@SuppressWarnings("deprecation")
static ContextSnapshot captureThreadLocals() {
if (ContextPropagationSupport.isContextPropagation103OnClasspath) {
return globalContextSnapshotFactory.captureAll();
}
else {
return ContextSnapshot.captureAll();
}
}
public static Function<Runnable, Runnable> scopePassingOnScheduleHook() {
return delegate -> {
ContextSnapshot contextSnapshot = captureThreadLocals();
return contextSnapshot.wrap(delegate);
};
}
/**
* Create a support function that takes a snapshot of thread locals and merges them with the
* provided {@link Context}, resulting in a new {@link Context} which includes entries
* captured from threadLocals by the Context-Propagation API.
* <p>
* This variant uses the implicit global {@code ContextRegistry} and captures from all
* available {@code ThreadLocalAccessors}. It is the same variant backing {@link Flux#contextCapture()}
* and {@link Mono#contextCapture()}.
*
* @return the {@link Context} augmented with captured entries
*/
static Function<Context, Context> contextCapture() {
return WITH_GLOBAL_REGISTRY_NO_PREDICATE;
}
static Context contextCaptureToEmpty() {
return contextCapture().apply(Context.empty());
}
/**
* When <a href="https://github.com/micrometer-metrics/context-propagation">context-propagation library</a>
* is available on the classpath, the provided {@link BiConsumer handler} will be
* called with {@link ThreadLocal} values restored from the provided {@link Context}.
* @param handler user provided handler
* @param contextSupplier supplies the potentially modified {@link Context} to
* restore {@link ThreadLocal} values from
* @return potentially wrapped {@link BiConsumer} or the original
* @param <T> type of handled values
* @param <R> the transformed type
*/
@SuppressWarnings("try")
static <T, R> BiConsumer<T, SynchronousSink<R>> contextRestoreForHandle(BiConsumer<T, SynchronousSink<R>> handler, Supplier<Context> contextSupplier) {
if (ContextPropagationSupport.shouldRestoreThreadLocalsInSomeOperators()) {
final Context ctx = contextSupplier.get();
if (ctx.isEmpty()) {
return handler;
}
if (ContextPropagationSupport.isContextPropagation103OnClasspath) {
return (v, sink) -> {
try (ContextSnapshot.Scope ignored =
globalContextSnapshotFactory.setThreadLocalsFrom(ctx)) {
handler.accept(v, sink);
}
};
}
else {
return (v, sink) -> {
try (@SuppressWarnings("deprecation") ContextSnapshot.Scope ignored =
ContextSnapshot.setAllThreadLocalsFrom(ctx)) {
handler.accept(v, sink);
}
};
}
}
else {
return handler;
}
}
/**
* When <a href="https://github.com/micrometer-metrics/context-propagation">context-propagation library</a>
* is available on the classpath, the provided {@link SignalListener} will be wrapped
* with another one that restores {@link ThreadLocal} values from the provided
* {@link Context}.
* <p><strong>Note, this is only applied to {@link FluxTap}, {@link FluxTapFuseable},
* {@link MonoTap}, and {@link MonoTapFuseable}.</strong> The automatic propagation
* variants: {@link FluxTapRestoringThreadLocals} and
* {@link MonoTapRestoringThreadLocals} do not use this method.
* @param original the original {@link SignalListener} from the user
* @param contextSupplier supplies the potentially modified {@link Context} to
* restore {@link ThreadLocal} values from
* @return potentially wrapped {@link SignalListener} or the original
* @param <T> type of handled values
*/
static <T> SignalListener<T> contextRestoreForTap(final SignalListener<T> original, Supplier<Context> contextSupplier) {
if (!ContextPropagationSupport.isContextPropagationAvailable()) {
return original;
}
final Context ctx = contextSupplier.get();
if (ctx.isEmpty()) {
return original;
}
if (ContextPropagationSupport.isContextPropagation103OnClasspath) {
return new ContextRestore103SignalListener<>(original, ctx, globalContextSnapshotFactory);
}
else {
return new ContextRestoreSignalListener<>(original, ctx);
}
}
//the SignalListener implementation can be tested independently with a test-specific ContextRegistry
static | ContextPropagation |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/annotations/BatchSize.java | {
"start": 1186,
"end": 1879
} | class ____ {
* ...
* }
* </pre>
* <p>
* will initialize up to 100 unfetched {@code Product} proxies in each
* trip to the database.
* <p>
* Similarly:
* <pre>
* @OneToMany
* @BatchSize(size = 5) /
* Set<Product> getProducts() { ... };
* </pre>
* <p>
* will initialize up to 5 unfetched collections of {@code Product}s in
* each SQL {@code select}.
*
* @see org.hibernate.cfg.AvailableSettings#DEFAULT_BATCH_FETCH_SIZE
*
* @author Emmanuel Bernard
* @author Steve Ebersole
*/
@AttributeBinderType(binder = BatchSizeBinder.class)
@TypeBinderType(binder = BatchSizeBinder.class)
@Target({TYPE, METHOD, FIELD})
@Retention(RUNTIME)
public @ | Product |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java | {
"start": 21253,
"end": 22103
} | interface ____ {
/**
* @param node The (expected) remote node, for error reporting and passing to
* {@link TransportMessageListener#onRequestSent}.
* @param channel The TCP channel to use to send the handshake request.
* @param requestId The transport request ID, for matching up the response.
* @param handshakeTransportVersion The {@link TransportVersion} to use for the handshake request, which will be
* {@link TransportHandshaker#V8_HANDSHAKE_VERSION} in production.
*/
void sendRequest(DiscoveryNode node, TcpChannel channel, long requestId, TransportVersion handshakeTransportVersion)
throws IOException;
}
}
| HandshakeRequestSender |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/filter/CompositeFilter.java | {
"start": 2971,
"end": 3778
} | class ____ implements FilterChain {
private final FilterChain originalChain;
private final List<? extends Filter> additionalFilters;
private int currentPosition = 0;
public VirtualFilterChain(FilterChain chain, List<? extends Filter> additionalFilters) {
this.originalChain = chain;
this.additionalFilters = additionalFilters;
}
@Override
public void doFilter(final ServletRequest request, final ServletResponse response)
throws IOException, ServletException {
if (this.currentPosition == this.additionalFilters.size()) {
this.originalChain.doFilter(request, response);
}
else {
this.currentPosition++;
Filter nextFilter = this.additionalFilters.get(this.currentPosition - 1);
nextFilter.doFilter(request, response, this);
}
}
}
}
| VirtualFilterChain |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/BoxedPrimitiveConstructorTest.java | {
"start": 8701,
"end": 9051
} | class ____ {",
" void m() {",
" new Inner();",
" }",
"}")
.withClasspath(BoxedPrimitiveConstructorTest.class, Inner.class)
.doTest();
}
@Test
public void autoboxWidening() {
compilationHelper
.addSourceLines(
"Test.java",
"""
| Test |
java | apache__camel | components/camel-ai/camel-langchain4j-tools/src/main/java/org/apache/camel/component/langchain4j/tools/spec/CamelToolSpecification.java | {
"start": 1188,
"end": 2670
} | class ____ {
private ToolSpecification toolSpecification;
private LangChain4jToolsConsumer consumer;
public CamelToolSpecification(ToolSpecification toolSpecification, LangChain4jToolsConsumer consumer) {
this.toolSpecification = toolSpecification;
this.consumer = consumer;
}
public ToolSpecification getToolSpecification() {
return toolSpecification;
}
public void setToolSpecification(ToolSpecification toolSpecification) {
this.toolSpecification = toolSpecification;
}
public LangChain4jToolsConsumer getConsumer() {
return consumer;
}
public void setConsumer(LangChain4jToolsConsumer consumer) {
this.consumer = consumer;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
CamelToolSpecification that = (CamelToolSpecification) o;
return Objects.equals(toolSpecification, that.toolSpecification) && Objects.equals(consumer,
that.consumer);
}
@Override
public int hashCode() {
return Objects.hash(toolSpecification, consumer);
}
@Override
public String toString() {
return "CamelToolSpecification{" +
"toolSpecification=" + toolSpecification +
", consumer=" + consumer +
'}';
}
}
| CamelToolSpecification |
java | elastic__elasticsearch | build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JarApiComparisonTask.java | {
"start": 2646,
"end": 3916
} | class ____ extends PrecommitTask {
@TaskAction
public void compare() {
FileCollection fileCollection = getOldJar().get();
File newJarFile = getNewJar().get().getSingleFile();
Set<String> oldJarNames = fileCollection.getFiles().stream().map(File::getName).collect(Collectors.toSet());
if (oldJarNames.size() > 1) {
throw new IllegalStateException("Expected a single original jar, but found: " + oldJarNames);
}
if (oldJarNames.contains(newJarFile.getName())) {
throw new IllegalStateException(
"We should be comparing different jars, but original and new jars were both: " + newJarFile.getAbsolutePath()
);
}
JarScanner oldJS = new JarScanner(getOldJar().get().getSingleFile().getPath());
JarScanner newJS = new JarScanner(newJarFile.getPath());
try {
JarScanner.compareSignatures(oldJS.jarSignature(), newJS.jarSignature());
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@CompileClasspath
public abstract Property<FileCollection> getOldJar();
@CompileClasspath
public abstract Property<FileCollection> getNewJar();
public static | JarApiComparisonTask |
java | spring-projects__spring-boot | cli/spring-boot-cli/src/main/java/org/springframework/boot/cli/command/CommandFactory.java | {
"start": 914,
"end": 1054
} | interface ____ {
/**
* Returns the CLI {@link Command}s.
* @return the commands
*/
Collection<Command> getCommands();
}
| CommandFactory |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImplBuilder.java | {
"start": 1222,
"end": 1300
} | class ____ to be used as a builder for {@link FsVolumeImpl} objects.
*/
public | is |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/webjar/WebJarResultsBuildItem.java | {
"start": 746,
"end": 2093
} | class ____ {
/**
* Resolved dependency of the webjar
*/
private final ResolvedDependency dependency;
/**
* Path to where the webjar content was unpacked to. For dev and test mode, the files while be unpacked to a temp
* directory on disk. In Prod Mode, the files will be available as generated resources inside this path.
*/
private final String finalDestination;
/**
* Web roots that can be used to serve web jar files from.
*/
private final List<FileSystemStaticHandler.StaticWebRootConfiguration> webRootConfigurations;
public WebJarResult(ResolvedDependency dependency, String finalDestination,
List<FileSystemStaticHandler.StaticWebRootConfiguration> webRootConfigurations) {
this.dependency = dependency;
this.finalDestination = finalDestination;
this.webRootConfigurations = webRootConfigurations;
}
public ResolvedDependency getDependency() {
return dependency;
}
public String getFinalDestination() {
return finalDestination;
}
public List<FileSystemStaticHandler.StaticWebRootConfiguration> getWebRootConfigurations() {
return webRootConfigurations;
}
}
}
| WebJarResult |
java | hibernate__hibernate-orm | hibernate-vector/src/test/java/org/hibernate/vector/BinaryVectorTest.java | {
"start": 2044,
"end": 11769
} | class ____ {
private static final byte[] V1 = new byte[]{ 1, 2, 3 };
private static final byte[] V2 = new byte[]{ 4, 5, 6 };
@BeforeEach
public void prepareData(SessionFactoryScope scope) {
scope.inTransaction( em -> {
em.persist( new VectorEntity( 1L, V1 ) );
em.persist( new VectorEntity( 2L, V2 ) );
} );
}
@AfterEach
public void cleanup(SessionFactoryScope scope) {
scope.inTransaction( em -> {
em.createMutationQuery( "delete from VectorEntity" ).executeUpdate();
} );
}
@Test
public void testRead(SessionFactoryScope scope) {
scope.inTransaction( em -> {
VectorEntity tableRecord;
tableRecord = em.find( VectorEntity.class, 1L );
assertArrayEquals( new byte[]{ 1, 2, 3 }, tableRecord.getTheVector() );
tableRecord = em.find( VectorEntity.class, 2L );
assertArrayEquals( new byte[]{ 4, 5, 6 }, tableRecord.getTheVector() );
} );
}
@Test
public void testCast(SessionFactoryScope scope) {
scope.inTransaction( em -> {
final String literal = VectorTestHelper.vectorBinaryStringLiteral( new byte[] {1, 1, 1}, em );
final Tuple vector = em.createSelectionQuery( "select cast(e.theVector as string), cast('" + literal + "' as binary_vector(3)) from VectorEntity e where e.id = 1", Tuple.class )
.getSingleResult();
assertEquals( VectorTestHelper.vectorBinaryStringLiteral( V1, em ), vector.get( 0, String.class ) );
assertArrayEquals( new byte[]{ 1, 1, 1 }, vector.get( 1, byte[].class ) );
} );
}
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsCosineDistance.class)
@SkipForDialect(dialectClass = PostgreSQLDialect.class, matchSubTypes = true, reason = "Not supported with bit vectors")
public void testCosineDistance(SessionFactoryScope scope) {
scope.inTransaction( em -> {
final byte[] vector = new byte[]{ 1, 1, 1 };
final List<Tuple> results = em.createSelectionQuery( "select e.id, cosine_distance(e.theVector, :vec) from VectorEntity e order by e.id", Tuple.class )
.setParameter( "vec", vector )
.getResultList();
assertEquals( 2, results.size() );
assertEquals( 1L, results.get( 0 ).get( 0 ) );
assertEquals( cosineDistanceBinary( V1, vector ), results.get( 0 ).get( 1, double.class ), 0.0000001D );
assertEquals( 2L, results.get( 1 ).get( 0 ) );
assertEquals( cosineDistanceBinary( V2, vector ), results.get( 1 ).get( 1, double.class ), 0.0000001D );
} );
}
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsEuclideanSquaredDistance.class)
@SkipForDialect(dialectClass = PostgreSQLDialect.class, matchSubTypes = true, reason = "Not supported with bit vectors")
public void testEuclideanDistance(SessionFactoryScope scope) {
scope.inTransaction( em -> {
final byte[] vector = new byte[]{ 1, 1, 1 };
final List<Tuple> results = em.createSelectionQuery( "select e.id, euclidean_distance(e.theVector, :vec) from VectorEntity e order by e.id", Tuple.class )
.setParameter( "vec", vector )
.getResultList();
assertEquals( 2, results.size() );
assertEquals( 1L, results.get( 0 ).get( 0 ) );
assertEquals( euclideanDistanceBinary( V1, vector ), results.get( 0 ).get( 1, double.class ), 0.000001D );
assertEquals( 2L, results.get( 1 ).get( 0 ) );
assertEquals( euclideanDistanceBinary( V2, vector ), results.get( 1 ).get( 1, double.class ), 0.000001D );
} );
}
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsEuclideanDistance.class)
@SkipForDialect(dialectClass = PostgreSQLDialect.class, matchSubTypes = true, reason = "Not supported with bit vectors")
public void testEuclideanSquaredDistance(SessionFactoryScope scope) {
scope.inTransaction( em -> {
final byte[] vector = new byte[]{ 1, 1, 1 };
final List<Tuple> results = em.createSelectionQuery( "select e.id, euclidean_squared_distance(e.theVector, :vec) from VectorEntity e order by e.id", Tuple.class )
.setParameter( "vec", vector )
.getResultList();
assertEquals( 2, results.size() );
assertEquals( 1L, results.get( 0 ).get( 0 ) );
assertEquals( euclideanSquaredDistanceBinary( V1, vector ), results.get( 0 ).get( 1, double.class ), 0.000001D );
assertEquals( 2L, results.get( 1 ).get( 0 ) );
assertEquals( euclideanSquaredDistanceBinary( V2, vector ), results.get( 1 ).get( 1, double.class ), 0.000001D );
} );
}
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsTaxicabDistance.class)
@SkipForDialect(dialectClass = PostgreSQLDialect.class, matchSubTypes = true, reason = "Not supported with bit vectors")
public void testTaxicabDistance(SessionFactoryScope scope) {
scope.inTransaction( em -> {
final byte[] vector = new byte[]{ 1, 1, 1 };
final List<Tuple> results = em.createSelectionQuery( "select e.id, taxicab_distance(e.theVector, :vec) from VectorEntity e order by e.id", Tuple.class )
.setParameter( "vec", vector )
.getResultList();
assertEquals( 2, results.size() );
assertEquals( 1L, results.get( 0 ).get( 0 ) );
assertEquals( taxicabDistanceBinary( V1, vector ), results.get( 0 ).get( 1, double.class ), 0D );
assertEquals( 2L, results.get( 1 ).get( 0 ) );
assertEquals( taxicabDistanceBinary( V2, vector ), results.get( 1 ).get( 1, double.class ), 0D );
} );
}
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsInnerProduct.class)
@SkipForDialect(dialectClass = PostgreSQLDialect.class, matchSubTypes = true, reason = "Not supported with bit vectors")
public void testInnerProduct(SessionFactoryScope scope) {
scope.inTransaction( em -> {
final byte[] vector = new byte[]{ 1, 1, 1 };
final List<Tuple> results = em.createSelectionQuery( "select e.id, inner_product(e.theVector, :vec), negative_inner_product(e.theVector, :vec) from VectorEntity e order by e.id", Tuple.class )
.setParameter( "vec", vector )
.getResultList();
assertEquals( 2, results.size() );
assertEquals( 1L, results.get( 0 ).get( 0 ) );
assertEquals( innerProductBinary( V1, vector ), results.get( 0 ).get( 1, double.class ), 0D );
assertEquals( innerProductBinary( V1, vector ) * -1, results.get( 0 ).get( 2, double.class ), 0D );
assertEquals( 2L, results.get( 1 ).get( 0 ) );
assertEquals( innerProductBinary( V2, vector ), results.get( 1 ).get( 1, double.class ), 0D );
assertEquals( innerProductBinary( V2, vector ) * -1, results.get( 1 ).get( 2, double.class ), 0D );
} );
}
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsHammingDistance.class)
public void testHammingDistance(SessionFactoryScope scope) {
scope.inTransaction( em -> {
//tag::hamming-distance-example[]
final byte[] vector = new byte[]{ 1, 1, 1 };
final List<Tuple> results = em.createSelectionQuery( "select e.id, hamming_distance(e.theVector, :vec) from VectorEntity e order by e.id", Tuple.class )
.setParameter( "vec", vector )
.getResultList();
//end::hamming-distance-example[]
assertEquals( 2, results.size() );
assertEquals( 1L, results.get( 0 ).get( 0 ) );
assertEquals( hammingDistanceBinary( V1, vector ), results.get( 0 ).get( 1, double.class ), 0D );
assertEquals( 2L, results.get( 1 ).get( 0 ) );
assertEquals( hammingDistanceBinary( V2, vector ), results.get( 1 ).get( 1, double.class ), 0D );
} );
}
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsJaccardDistance.class)
public void testJaccardDistance(SessionFactoryScope scope) {
scope.inTransaction( em -> {
//tag::jaccard-distance-example[]
final byte[] vector = new byte[]{ 1, 1, 1 };
final List<Tuple> results = em.createSelectionQuery( "select e.id, jaccard_distance(e.theVector, :vec) from VectorEntity e order by e.id", Tuple.class )
.setParameter( "vec", vector )
.getResultList();
//end::jaccard-distance-example[]
assertEquals( 2, results.size() );
assertEquals( 1L, results.get( 0 ).get( 0 ) );
assertEquals( jaccardDistanceBinary( V1, vector ), results.get( 0 ).get( 1, double.class ), 0D );
assertEquals( 2L, results.get( 1 ).get( 0 ) );
assertEquals( jaccardDistanceBinary( V2, vector ), results.get( 1 ).get( 1, double.class ), 0D );
} );
}
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsVectorDims.class)
public void testVectorDims(SessionFactoryScope scope) {
scope.inTransaction( em -> {
final List<Tuple> results = em.createSelectionQuery( "select e.id, vector_dims(e.theVector) from VectorEntity e order by e.id", Tuple.class )
.getResultList();
assertEquals( 2, results.size() );
assertEquals( 1L, results.get( 0 ).get( 0 ) );
assertEquals( V1.length * 8, results.get( 0 ).get( 1 ) );
assertEquals( 2L, results.get( 1 ).get( 0 ) );
assertEquals( V2.length * 8, results.get( 1 ).get( 1 ) );
} );
}
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsVectorNorm.class)
@SkipForDialect(dialectClass = PostgreSQLDialect.class, matchSubTypes = true, reason = "Not supported with bit vectors")
@SkipForDialect(dialectClass = OracleDialect.class, reason = "Oracle 23.9 bug")
public void testVectorNorm(SessionFactoryScope scope) {
scope.inTransaction( em -> {
final List<Tuple> results = em.createSelectionQuery( "select e.id, vector_norm(e.theVector) from VectorEntity e order by e.id", Tuple.class )
.getResultList();
assertEquals( 2, results.size() );
assertEquals( 1L, results.get( 0 ).get( 0 ) );
assertEquals( euclideanNormBinary( V1 ), results.get( 0 ).get( 1, double.class ), 0D );
assertEquals( 2L, results.get( 1 ).get( 0 ) );
assertEquals( euclideanNormBinary( V2 ), results.get( 1 ).get( 1, double.class ), 0D );
} );
}
@Entity( name = "VectorEntity" )
public static | BinaryVectorTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java | {
"start": 1991,
"end": 2515
} | class ____ extends NMView implements YarnWebParams {
@Override protected void preHead(Page.HTML<__> html) {
commonPreHead(html);
set(DATATABLES_ID, "containers");
set(initID(DATATABLES, "containers"), containersTableInit());
setTableStyles(html, "containers");
}
private String containersTableInit() {
return tableInit().append(",aoColumns:[null]}").toString();
}
@Override
protected Class<? extends SubView> content() {
return ApplicationBlock.class;
}
public static | ApplicationPage |
java | elastic__elasticsearch | x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/SubGroupCollectorTests.java | {
"start": 619,
"end": 5094
} | class ____ extends ESTestCase {
public void testNoAggs() {
TermsAggregationBuilder stackTraces = new TermsAggregationBuilder("stacktraces").field("stacktrace.id");
TraceEvent traceEvent = new TraceEvent(1L);
SubGroupCollector collector = SubGroupCollector.attach(stackTraces, new String[0]);
assertTrue("Sub aggregations attached", stackTraces.getSubAggregations().isEmpty());
SubGroupCollector.Bucket currentStackTrace = bucket("1", 5);
collector.collectResults(currentStackTrace, traceEvent);
assertNull(traceEvent.subGroups);
}
public void testMultipleAggsInSingleStackTrace() {
TermsAggregationBuilder stackTraces = new TermsAggregationBuilder("stacktraces").field("stacktrace.id");
TraceEvent traceEvent = new TraceEvent(1L);
SubGroupCollector collector = SubGroupCollector.attach(stackTraces, new String[] { "service.name", "transaction.name" });
assertFalse("No sub aggregations attached", stackTraces.getSubAggregations().isEmpty());
StaticAgg services = new StaticAgg();
SubGroupCollector.Bucket currentStackTrace = bucket("1", 5, services);
// tag::noformat
services.addBuckets(CUSTOM_EVENT_SUB_AGGREGATION_NAME + "service.name",
bucket("basket", 7L,
agg(CUSTOM_EVENT_SUB_AGGREGATION_NAME + "transaction.name",
bucket("add-to-basket", 4L),
bucket("delete-from-basket", 3L)
)
),
bucket("checkout", 4L,
agg(CUSTOM_EVENT_SUB_AGGREGATION_NAME + "transaction.name",
bucket("enter-address", 4L),
bucket("submit-order", 3L)
)
)
);
// end::noformat
collector.collectResults(currentStackTrace, traceEvent);
assertNotNull(traceEvent.subGroups);
assertEquals(Long.valueOf(7L), traceEvent.subGroups.getCount("basket"));
assertEquals(Long.valueOf(4L), traceEvent.subGroups.getCount("checkout"));
SubGroup basketTransactionNames = traceEvent.subGroups.getSubGroup("basket").getSubGroup("transaction.name");
assertEquals(Long.valueOf(4L), basketTransactionNames.getCount("add-to-basket"));
assertEquals(Long.valueOf(3L), basketTransactionNames.getCount("delete-from-basket"));
SubGroup checkoutTransactionNames = traceEvent.subGroups.getSubGroup("checkout").getSubGroup("transaction.name");
assertEquals(Long.valueOf(4L), checkoutTransactionNames.getCount("enter-address"));
assertEquals(Long.valueOf(3L), checkoutTransactionNames.getCount("submit-order"));
}
public void testSingleAggInMultipleStackTraces() {
TermsAggregationBuilder stackTraces = new TermsAggregationBuilder("stacktraces").field("stacktrace.id");
TraceEvent traceEvent = new TraceEvent(1L);
SubGroupCollector collector = SubGroupCollector.attach(stackTraces, new String[] { "service.name" });
assertFalse("No sub aggregations attached", stackTraces.getSubAggregations().isEmpty());
StaticAgg services1 = new StaticAgg();
SubGroupCollector.Bucket currentStackTrace1 = bucket("1", 5, services1);
services1.addBuckets(CUSTOM_EVENT_SUB_AGGREGATION_NAME + "service.name", bucket("basket", 7L));
collector.collectResults(currentStackTrace1, traceEvent);
StaticAgg services2 = new StaticAgg();
SubGroupCollector.Bucket currentStackTrace2 = bucket("1", 3, services2);
services2.addBuckets(CUSTOM_EVENT_SUB_AGGREGATION_NAME + "service.name", bucket("basket", 1L), bucket("checkout", 5L));
collector.collectResults(currentStackTrace2, traceEvent);
assertNotNull(traceEvent.subGroups);
assertEquals(Long.valueOf(8L), traceEvent.subGroups.getCount("basket"));
assertEquals(Long.valueOf(5L), traceEvent.subGroups.getCount("checkout"));
}
private SubGroupCollector.Bucket bucket(String key, long count) {
return bucket(key, count, null);
}
private SubGroupCollector.Bucket bucket(String key, long count, SubGroupCollector.Agg aggregations) {
return new StaticBucket(key, count, aggregations);
}
private SubGroupCollector.Agg agg(String name, SubGroupCollector.Bucket... buckets) {
StaticAgg a = new StaticAgg();
a.addBuckets(name, buckets);
return a;
}
private static | SubGroupCollectorTests |
java | google__error-prone | test_helpers/src/test/java/com/google/errorprone/CompilationTestHelperTest.java | {
"start": 14588,
"end": 14975
} | class ____ extends BugChecker implements CompilationUnitTreeMatcher {
@Override
public Description matchCompilationUnit(CompilationUnitTree tree, VisitorState state) {
if (tree.getPackage() != null) {
return describeMatch(tree.getPackage());
}
return NO_MATCH;
}
}
/** Test classes used for withClassPath tests */
public static | PackageTreeChecker |
java | apache__camel | components/camel-jsonpath/src/main/java/org/apache/camel/jsonpath/JsonPathExpression.java | {
"start": 1423,
"end": 7589
} | class ____ extends ExpressionAdapter {
private static final Logger LOG = LoggerFactory.getLogger(JsonPathExpression.class);
private final String expression;
private JsonPathEngine engine;
private boolean predicate;
private Class<?> resultType;
private boolean suppressExceptions;
private boolean allowSimple = true;
private boolean allowEasyPredicate = true;
private boolean writeAsString;
private boolean unpackArray;
private Expression source;
private Option[] options;
public JsonPathExpression(String expression) {
this.expression = expression;
}
public boolean isPredicate() {
return predicate;
}
/**
* Whether to be evaluated as a predicate
*/
public void setPredicate(boolean predicate) {
this.predicate = predicate;
}
public Class<?> getResultType() {
return resultType;
}
/**
* To configure the result type to use
*/
public void setResultType(Class<?> resultType) {
this.resultType = resultType;
}
public boolean isSuppressExceptions() {
return suppressExceptions;
}
/**
* Whether to suppress exceptions such as PathNotFoundException
*/
public void setSuppressExceptions(boolean suppressExceptions) {
this.suppressExceptions = suppressExceptions;
}
public boolean isAllowSimple() {
return allowSimple;
}
/**
* Whether to allow in inlined simple exceptions in the json path expression
*/
public void setAllowSimple(boolean allowSimple) {
this.allowSimple = allowSimple;
}
public boolean isAllowEasyPredicate() {
return allowEasyPredicate;
}
/**
* Whether to allow using the easy predicate parser to pre-parse predicates. See {@link EasyPredicateParser} for
* more details.
*/
public void setAllowEasyPredicate(boolean allowEasyPredicate) {
this.allowEasyPredicate = allowEasyPredicate;
}
public boolean isWriteAsString() {
return writeAsString;
}
/**
* Whether to write the output of each row/element as a JSON String value instead of a Map/POJO value.
*/
public void setWriteAsString(boolean writeAsString) {
this.writeAsString = writeAsString;
}
public boolean isUnpackArray() {
return unpackArray;
}
/**
* Whether to unpack a single element json-array into an object.
*/
public void setUnpackArray(boolean unpackArray) {
this.unpackArray = unpackArray;
}
public Expression getSource() {
return source;
}
public void setSource(Expression source) {
this.source = source;
}
public Option[] getOptions() {
return options;
}
/**
* To configure the json path options to use
*/
public void setOptions(Option[] options) {
this.options = options;
}
@Override
public Object evaluate(Exchange exchange) {
try {
Object result = evaluateJsonPath(exchange, engine);
boolean resultTypeIsCollection = resultType != null && Collection.class.isAssignableFrom(resultType);
if (unpackArray) {
// in some cases we get a single element that is wrapped in a List, so unwrap that
// if we for example want to grab the single entity and convert that to an int/boolean/String etc
boolean singleElement = result instanceof List && ((List<?>) result).size() == 1;
if (singleElement && !resultTypeIsCollection) {
result = ((List<?>) result).get(0);
LOG.trace("Unwrapping result: {} from single element List before converting to: {}", result,
resultType);
}
}
if (resultType == null) {
return result;
}
if (resultTypeIsCollection) {
// we want a list as output
boolean resultIsCollection = result instanceof List;
if (!resultIsCollection) {
var list = new LinkedList<>();
list.add(result);
result = list;
}
return exchange.getContext().getTypeConverter().convertTo(resultType, exchange, result);
} else if (result instanceof Collection<?> col) {
// convert each element in the list
result = col.stream()
.filter(Objects::nonNull) // skip null
.map(item -> exchange.getContext().getTypeConverter().convertTo(resultType, exchange, item))
.collect(Collectors.toList());
}
if (result instanceof Collection<?> col && col.size() == 1) {
result = col.stream().findFirst().get();
}
return exchange.getContext().getTypeConverter().convertTo(resultType, exchange, result);
} catch (Exception e) {
throw new ExpressionEvaluationException(this, exchange, e);
}
}
@Override
public void init(CamelContext context) {
String exp = expression;
if (predicate && isAllowEasyPredicate()) {
EasyPredicateParser parser = new EasyPredicateParser();
exp = parser.parse(expression);
if (!exp.equals(expression)) {
LOG.debug("EasyPredicateParser parsed {} -> {}", expression, exp);
}
}
LOG.debug("Initializing {} using: {}", predicate ? "predicate" : "expression", exp);
try {
engine = new JsonPathEngine(
exp, source, writeAsString, suppressExceptions, allowSimple, options, context);
} catch (Exception e) {
throw new ExpressionIllegalSyntaxException(exp, e);
}
}
@Override
public String toString() {
return "jsonpath[" + expression + "]";
}
private Object evaluateJsonPath(Exchange exchange, JsonPathEngine engine) throws Exception {
return engine.read(exchange);
}
}
| JsonPathExpression |
java | quarkusio__quarkus | extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/PropertyCodecProviderBuildItem.java | {
"start": 265,
"end": 674
} | class ____ extends SimpleBuildItem {
private final List<String> propertyCodecProviderClassNames;
public PropertyCodecProviderBuildItem(List<String> codecProviderClassNames) {
this.propertyCodecProviderClassNames = codecProviderClassNames;
}
public List<String> getPropertyCodecProviderClassNames() {
return propertyCodecProviderClassNames;
}
}
| PropertyCodecProviderBuildItem |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/AbstractAssert.java | {
"start": 2550,
"end": 3028
} | class ____ all assertions.
*
* @param <SELF> the "self" type of this assertion class. Please read "<a href="http://bit.ly/1IZIRcY"
* target="_blank">Emulating 'self types' using Java Generics to simplify fluent API implementation</a>"
* for more details.
* @param <ACTUAL> the type of the "actual" value.
* @author Alex Ruiz
* @author Joel Costigliola
* @author Mikhail Mazursky
* @author Nicolas François
*/
public abstract | for |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/scheduling/DelegatingSecurityContextTaskSchedulerIntegrationTests.java | {
"start": 1567,
"end": 4986
} | class ____ {
@Test
public void scheduleWhenThreadFactoryIsPlatformThenSecurityContextPropagated() throws Exception {
SecurityContext securityContext = scheduleAndReturn(Executors.defaultThreadFactory());
assertThat(securityContext.getAuthentication()).isNotNull();
}
@Test
@DisabledOnJre(JRE.JAVA_17)
public void scheduleWhenThreadFactoryIsVirtualThenSecurityContextPropagated() throws Exception {
SecurityContext securityContext = scheduleAndReturn(new VirtualThreadTaskExecutor().getVirtualThreadFactory());
assertThat(securityContext.getAuthentication()).isNotNull();
}
private SecurityContext scheduleAndReturn(ThreadFactory threadFactory) throws Exception {
// @formatter:off
return DelegatingSecurityContextTestUtils.runAndReturn(
threadFactory,
this::createTaskScheduler,
(taskScheduler, task) -> taskScheduler.schedule(task, new PeriodicTrigger(Duration.ofMillis(50)))
);
// @formatter:on
}
@Test
public void scheduleAtFixedRateWhenThreadFactoryIsPlatformThenSecurityContextPropagated() throws Exception {
SecurityContext securityContext = scheduleAtFixedRateAndReturn(Executors.defaultThreadFactory());
assertThat(securityContext.getAuthentication()).isNotNull();
}
@Test
@DisabledOnJre(JRE.JAVA_17)
public void scheduleAtFixedRateWhenThreadFactoryIsVirtualThenSecurityContextPropagated() throws Exception {
SecurityContext securityContext = scheduleAtFixedRateAndReturn(
new VirtualThreadTaskExecutor().getVirtualThreadFactory());
assertThat(securityContext.getAuthentication()).isNotNull();
}
private SecurityContext scheduleAtFixedRateAndReturn(ThreadFactory threadFactory) throws Exception {
// @formatter:off
return DelegatingSecurityContextTestUtils.runAndReturn(
threadFactory,
this::createTaskScheduler,
(taskScheduler, task) -> taskScheduler.scheduleAtFixedRate(task, Duration.ofMillis(50))
);
// @formatter:on
}
@Test
public void scheduleWithFixedDelayWhenThreadFactoryIsPlatformThenSecurityContextPropagated() throws Exception {
SecurityContext securityContext = scheduleWithFixedDelayAndReturn(Executors.defaultThreadFactory());
assertThat(securityContext.getAuthentication()).isNotNull();
}
@Test
@DisabledOnJre(JRE.JAVA_17)
public void scheduleWithFixedDelayWhenThreadFactoryIsVirtualThenSecurityContextPropagated() throws Exception {
SecurityContext securityContext = scheduleWithFixedDelayAndReturn(
new VirtualThreadTaskExecutor().getVirtualThreadFactory());
assertThat(securityContext.getAuthentication()).isNotNull();
}
private SecurityContext scheduleWithFixedDelayAndReturn(ThreadFactory threadFactory) throws Exception {
// @formatter:off
return DelegatingSecurityContextTestUtils.runAndReturn(
threadFactory,
this::createTaskScheduler,
(taskScheduler, task) -> taskScheduler.scheduleWithFixedDelay(task, Duration.ofMillis(50))
);
// @formatter:on
}
private DelegatingSecurityContextTaskScheduler createTaskScheduler(ScheduledExecutorService delegate) {
return new DelegatingSecurityContextTaskScheduler(new ConcurrentTaskScheduler(delegate), securityContext());
}
private static SecurityContext securityContext() {
SecurityContext securityContext = SecurityContextHolder.createEmptyContext();
securityContext.setAuthentication(new TestingAuthenticationToken("user", null));
return securityContext;
}
}
| DelegatingSecurityContextTaskSchedulerIntegrationTests |
java | google__dagger | hilt-android/main/java/dagger/hilt/android/internal/lifecycle/RetainedLifecycleImpl.java | {
"start": 1000,
"end": 2258
} | class ____
implements ActivityRetainedLifecycle,
ViewModelLifecycle {
private final Set<RetainedLifecycle.OnClearedListener> listeners = new HashSet<>();
private boolean onClearedDispatched = false;
public RetainedLifecycleImpl() {}
@Override
public void addOnClearedListener(@NonNull RetainedLifecycle.OnClearedListener listener) {
ThreadUtil.ensureMainThread();
throwIfOnClearedDispatched();
listeners.add(listener);
}
@Override
public void removeOnClearedListener(@NonNull RetainedLifecycle.OnClearedListener listener) {
ThreadUtil.ensureMainThread();
throwIfOnClearedDispatched();
listeners.remove(listener);
}
public void dispatchOnCleared() {
ThreadUtil.ensureMainThread();
onClearedDispatched = true;
for (RetainedLifecycle.OnClearedListener listener : listeners) {
listener.onCleared();
}
}
private void throwIfOnClearedDispatched() {
if (onClearedDispatched) {
throw new IllegalStateException(
"There was a race between the call to add/remove an OnClearedListener and onCleared(). "
+ "This can happen when posting to the Main thread from a background thread, "
+ "which is not supported.");
}
}
}
| RetainedLifecycleImpl |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/token/OAuth2TokenContext.java | {
"start": 4343,
"end": 8891
} | class ____<T extends OAuth2TokenContext, B extends AbstractBuilder<T, B>> {
private static final String PRINCIPAL_AUTHENTICATION_KEY = Authentication.class.getName().concat(".PRINCIPAL");
private static final String AUTHORIZED_SCOPE_KEY = OAuth2Authorization.class.getName()
.concat(".AUTHORIZED_SCOPE");
private static final String AUTHORIZATION_GRANT_AUTHENTICATION_KEY = Authentication.class.getName()
.concat(".AUTHORIZATION_GRANT");
private final Map<Object, Object> context = new HashMap<>();
/**
* Sets the {@link RegisteredClient registered client}.
* @param registeredClient the {@link RegisteredClient}
* @return the {@link AbstractBuilder} for further configuration
*/
public B registeredClient(RegisteredClient registeredClient) {
return put(RegisteredClient.class, registeredClient);
}
/**
* Sets the {@link Authentication} representing the {@code Principal} resource
* owner (or client).
* @param principal the {@link Authentication} representing the {@code Principal}
* resource owner (or client)
* @return the {@link AbstractBuilder} for further configuration
*/
public B principal(Authentication principal) {
return put(PRINCIPAL_AUTHENTICATION_KEY, principal);
}
/**
* Sets the {@link AuthorizationServerContext authorization server context}.
* @param authorizationServerContext the {@link AuthorizationServerContext}
* @return the {@link AbstractBuilder} for further configuration
*/
public B authorizationServerContext(AuthorizationServerContext authorizationServerContext) {
return put(AuthorizationServerContext.class, authorizationServerContext);
}
/**
* Sets the {@link OAuth2Authorization authorization}.
* @param authorization the {@link OAuth2Authorization}
* @return the {@link AbstractBuilder} for further configuration
*/
public B authorization(OAuth2Authorization authorization) {
return put(OAuth2Authorization.class, authorization);
}
/**
* Sets the authorized scope(s).
* @param authorizedScopes the authorized scope(s)
* @return the {@link AbstractBuilder} for further configuration
*/
public B authorizedScopes(Set<String> authorizedScopes) {
return put(AUTHORIZED_SCOPE_KEY, authorizedScopes);
}
/**
* Sets the {@link OAuth2TokenType token type}.
* @param tokenType the {@link OAuth2TokenType}
* @return the {@link AbstractBuilder} for further configuration
*/
public B tokenType(OAuth2TokenType tokenType) {
return put(OAuth2TokenType.class, tokenType);
}
/**
* Sets the {@link AuthorizationGrantType authorization grant type}.
* @param authorizationGrantType the {@link AuthorizationGrantType}
* @return the {@link AbstractBuilder} for further configuration
*/
public B authorizationGrantType(AuthorizationGrantType authorizationGrantType) {
return put(AuthorizationGrantType.class, authorizationGrantType);
}
/**
* Sets the {@link Authentication} representing the authorization grant.
* @param authorizationGrant the {@link Authentication} representing the
* authorization grant
* @return the {@link AbstractBuilder} for further configuration
*/
public B authorizationGrant(Authentication authorizationGrant) {
return put(AUTHORIZATION_GRANT_AUTHENTICATION_KEY, authorizationGrant);
}
/**
* Associates an attribute.
* @param key the key for the attribute
* @param value the value of the attribute
* @return the {@link AbstractBuilder} for further configuration
*/
public B put(Object key, Object value) {
Assert.notNull(key, "key cannot be null");
Assert.notNull(value, "value cannot be null");
this.context.put(key, value);
return getThis();
}
/**
* A {@code Consumer} of the attributes {@code Map} allowing the ability to add,
* replace, or remove.
* @param contextConsumer a {@link Consumer} of the attributes {@code Map}
* @return the {@link AbstractBuilder} for further configuration
*/
public B context(Consumer<Map<Object, Object>> contextConsumer) {
contextConsumer.accept(this.context);
return getThis();
}
@SuppressWarnings("unchecked")
protected <V> V get(Object key) {
return (V) this.context.get(key);
}
protected Map<Object, Object> getContext() {
return this.context;
}
@SuppressWarnings("unchecked")
protected final B getThis() {
return (B) this;
}
/**
* Builds a new {@link OAuth2TokenContext}.
* @return the {@link OAuth2TokenContext}
*/
public abstract T build();
}
}
| AbstractBuilder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/SimpleOverriddenConverterTest.java | {
"start": 1089,
"end": 1766
} | class ____ {
/**
* Test outcome of annotations exclusively.
*/
@Test
public void testSimpleConvertOverrides(SessionFactoryScope scope) {
final EntityPersister ep = scope.getSessionFactory().getMappingMetamodel().getEntityDescriptor(Sub.class.getName());
final JdbcTypeRegistry jdbcTypeRegistry = scope.getSessionFactory().getTypeConfiguration().getJdbcTypeRegistry();
BasicType<?> type = (BasicType<?>) ep.getPropertyType( "it" );
assertTyping( StringJavaType.class, type.getJavaTypeDescriptor() );
assertTyping( jdbcTypeRegistry.getDescriptor( Types.VARCHAR ).getClass(), type.getJdbcType() );
}
@MappedSuperclass
public static | SimpleOverriddenConverterTest |
java | netty__netty | handler/src/main/java/io/netty/handler/ssl/OpenSslSessionContext.java | {
"start": 1129,
"end": 8226
} | class ____ implements SSLSessionContext {
private final OpenSslSessionStats stats;
// The OpenSslKeyMaterialProvider is not really used by the OpenSslSessionContext but only be stored here
// to make it easier to destroy it later because the ReferenceCountedOpenSslContext will hold a reference
// to OpenSslSessionContext.
private final OpenSslKeyMaterialProvider provider;
final ReferenceCountedOpenSslContext context;
private final OpenSslSessionCache sessionCache;
private final long mask;
// IMPORTANT: We take the OpenSslContext and not just the long (which points the native instance) to prevent
// the GC to collect OpenSslContext as this would also free the pointer and so could result in a
// segfault when the user calls any of the methods here that try to pass the pointer down to the native
// level.
OpenSslSessionContext(ReferenceCountedOpenSslContext context, OpenSslKeyMaterialProvider provider, long mask,
OpenSslSessionCache cache) {
this.context = context;
this.provider = provider;
this.mask = mask;
stats = new OpenSslSessionStats(context);
sessionCache = cache;
SSLContext.setSSLSessionCache(context.ctx, cache);
}
final boolean useKeyManager() {
return provider != null;
}
@Override
public void setSessionCacheSize(int size) {
ObjectUtil.checkPositiveOrZero(size, "size");
sessionCache.setSessionCacheSize(size);
}
@Override
public int getSessionCacheSize() {
return sessionCache.getSessionCacheSize();
}
@Override
public void setSessionTimeout(int seconds) {
ObjectUtil.checkPositiveOrZero(seconds, "seconds");
Lock writerLock = context.ctxLock.writeLock();
writerLock.lock();
try {
SSLContext.setSessionCacheTimeout(context.ctx, seconds);
sessionCache.setSessionTimeout(seconds);
} finally {
writerLock.unlock();
}
}
@Override
public int getSessionTimeout() {
return sessionCache.getSessionTimeout();
}
@Override
public SSLSession getSession(byte[] bytes) {
return sessionCache.getSession(new OpenSslSessionId(bytes));
}
@Override
public Enumeration<byte[]> getIds() {
return new Enumeration<byte[]>() {
private final Iterator<OpenSslSessionId> ids = sessionCache.getIds().iterator();
@Override
public boolean hasMoreElements() {
return ids.hasNext();
}
@Override
public byte[] nextElement() {
return ids.next().cloneBytes();
}
};
}
/**
* Sets the SSL session ticket keys of this context.
* @deprecated use {@link #setTicketKeys(OpenSslSessionTicketKey...)}.
*/
@Deprecated
public void setTicketKeys(byte[] keys) {
if (keys.length % SessionTicketKey.TICKET_KEY_SIZE != 0) {
throw new IllegalArgumentException("keys.length % " + SessionTicketKey.TICKET_KEY_SIZE + " != 0");
}
SessionTicketKey[] tickets = new SessionTicketKey[keys.length / SessionTicketKey.TICKET_KEY_SIZE];
for (int i = 0, a = 0; i < tickets.length; i++) {
byte[] name = Arrays.copyOfRange(keys, a, SessionTicketKey.NAME_SIZE);
a += SessionTicketKey.NAME_SIZE;
byte[] hmacKey = Arrays.copyOfRange(keys, a, SessionTicketKey.HMAC_KEY_SIZE);
i += SessionTicketKey.HMAC_KEY_SIZE;
byte[] aesKey = Arrays.copyOfRange(keys, a, SessionTicketKey.AES_KEY_SIZE);
a += SessionTicketKey.AES_KEY_SIZE;
tickets[i] = new SessionTicketKey(name, hmacKey, aesKey);
}
Lock writerLock = context.ctxLock.writeLock();
writerLock.lock();
try {
SSLContext.clearOptions(context.ctx, SSL.SSL_OP_NO_TICKET);
SSLContext.setSessionTicketKeys(context.ctx, tickets);
} finally {
writerLock.unlock();
}
}
/**
* Sets the SSL session ticket keys of this context. Depending on the underlying native library you may omit the
* argument or pass an empty array and so let the native library handle the key generation and rotating for you.
* If this is supported by the underlying native library should be checked in this case. For example
* <a href="https://commondatastorage.googleapis.com/chromium-boringssl-docs/ssl.h.html#Session-tickets/">
* BoringSSL</a> is known to support this.
*/
public void setTicketKeys(OpenSslSessionTicketKey... keys) {
ObjectUtil.checkNotNull(keys, "keys");
SessionTicketKey[] ticketKeys = new SessionTicketKey[keys.length];
for (int i = 0; i < ticketKeys.length; i++) {
ticketKeys[i] = keys[i].key;
}
Lock writerLock = context.ctxLock.writeLock();
writerLock.lock();
try {
SSLContext.clearOptions(context.ctx, SSL.SSL_OP_NO_TICKET);
if (ticketKeys.length > 0) {
SSLContext.setSessionTicketKeys(context.ctx, ticketKeys);
}
} finally {
writerLock.unlock();
}
}
/**
* Enable or disable caching of SSL sessions.
*/
public void setSessionCacheEnabled(boolean enabled) {
long mode = enabled ? mask | SSL.SSL_SESS_CACHE_NO_INTERNAL_LOOKUP |
SSL.SSL_SESS_CACHE_NO_INTERNAL_STORE : SSL.SSL_SESS_CACHE_OFF;
Lock writerLock = context.ctxLock.writeLock();
writerLock.lock();
try {
SSLContext.setSessionCacheMode(context.ctx, mode);
if (!enabled) {
sessionCache.clear();
}
} finally {
writerLock.unlock();
}
}
/**
* Return {@code true} if caching of SSL sessions is enabled, {@code false} otherwise.
*/
public boolean isSessionCacheEnabled() {
Lock readerLock = context.ctxLock.readLock();
readerLock.lock();
try {
return (SSLContext.getSessionCacheMode(context.ctx) & mask) != 0;
} finally {
readerLock.unlock();
}
}
/**
* Returns the stats of this context.
*/
public OpenSslSessionStats stats() {
return stats;
}
/**
* Remove the given {@link OpenSslInternalSession} from the cache, and so not re-use it for new connections.
*/
final void removeFromCache(OpenSslSessionId id) {
sessionCache.removeSessionWithId(id);
}
final boolean isInCache(OpenSslSessionId id) {
return sessionCache.containsSessionWithId(id);
}
boolean setSessionFromCache(long ssl, OpenSslInternalSession session, String host, int port) {
return sessionCache.setSession(ssl, session, host, port);
}
final void destroy() {
if (provider != null) {
provider.destroy();
}
sessionCache.clear();
}
}
| OpenSslSessionContext |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/runtime/operators/sink/committables/CheckpointCommittableManagerImpl.java | {
"start": 1743,
"end": 10771
} | class ____<CommT> implements CheckpointCommittableManager<CommT> {
/** Mapping of subtask id to {@link SubtaskCommittableManager}. */
private final Map<Integer, SubtaskCommittableManager<CommT>> subtasksCommittableManagers;
private final long checkpointId;
private final int numberOfSubtasks;
private final SinkCommitterMetricGroup metricGroup;
private static final Logger LOG =
LoggerFactory.getLogger(CheckpointCommittableManagerImpl.class);
@VisibleForTesting
CheckpointCommittableManagerImpl(
Map<Integer, SubtaskCommittableManager<CommT>> subtasksCommittableManagers,
int numberOfSubtasks,
long checkpointId,
SinkCommitterMetricGroup metricGroup) {
this.subtasksCommittableManagers = checkNotNull(subtasksCommittableManagers);
this.numberOfSubtasks = numberOfSubtasks;
this.checkpointId = checkpointId;
this.metricGroup = metricGroup;
}
public static <CommT> CheckpointCommittableManagerImpl<CommT> forSummary(
CommittableSummary<CommT> summary, SinkCommitterMetricGroup metricGroup) {
return new CheckpointCommittableManagerImpl<>(
new HashMap<>(),
summary.getNumberOfSubtasks(),
summary.getCheckpointIdOrEOI(),
metricGroup);
}
@Override
public long getCheckpointId() {
return checkpointId;
}
@Override
public int getNumberOfSubtasks() {
return numberOfSubtasks;
}
Collection<SubtaskCommittableManager<CommT>> getSubtaskCommittableManagers() {
return subtasksCommittableManagers.values();
}
void addSummary(CommittableSummary<CommT> summary) {
long checkpointId = summary.getCheckpointIdOrEOI();
SubtaskCommittableManager<CommT> manager =
new SubtaskCommittableManager<>(
summary.getNumberOfCommittables(),
summary.getSubtaskId(),
checkpointId,
metricGroup);
// Remove branch once CommittableMessage.EOI has been removed (earliest 2.2)
if (checkpointId == CommittableMessage.EOI) {
SubtaskCommittableManager<CommT> merged =
subtasksCommittableManagers.merge(
summary.getSubtaskId(), manager, SubtaskCommittableManager::merge);
LOG.debug("Adding EOI summary (new={}}, merged={}}).", manager, merged);
} else {
SubtaskCommittableManager<CommT> existing =
subtasksCommittableManagers.putIfAbsent(summary.getSubtaskId(), manager);
if (existing != null) {
throw new UnsupportedOperationException(
String.format(
"Received duplicate committable summary for checkpoint %s + subtask %s (new=%s, old=%s). Please check the status of FLINK-25920",
checkpointId, summary.getSubtaskId(), manager, existing));
} else {
LOG.debug(
"Setting the summary for checkpointId {} with {}",
this.checkpointId,
manager);
}
}
}
void addCommittable(CommittableWithLineage<CommT> committable) {
getSubtaskCommittableManager(committable.getSubtaskId()).add(committable);
}
SubtaskCommittableManager<CommT> getSubtaskCommittableManager(int subtaskId) {
SubtaskCommittableManager<CommT> committables =
this.subtasksCommittableManagers.get(subtaskId);
return checkNotNull(committables, "Unknown subtask for %s", subtaskId);
}
@Override
public boolean isFinished() {
return subtasksCommittableManagers.values().stream()
.allMatch(SubtaskCommittableManager::isFinished);
}
@Override
public boolean hasGloballyReceivedAll() {
return subtasksCommittableManagers.size() == numberOfSubtasks
&& subtasksCommittableManagers.values().stream()
.allMatch(SubtaskCommittableManager::hasReceivedAll);
}
@Override
public void commit(Committer<CommT> committer, int maxRetries)
throws IOException, InterruptedException {
Collection<CommitRequestImpl<CommT>> requests =
getPendingRequests().collect(Collectors.toList());
for (int retry = 0; !requests.isEmpty() && retry <= maxRetries; retry++) {
requests.forEach(CommitRequestImpl::setSelected);
committer.commit(Collections.unmodifiableCollection(requests));
requests.forEach(CommitRequestImpl::setCommittedIfNoError);
requests = requests.stream().filter(r -> !r.isFinished()).collect(Collectors.toList());
}
if (!requests.isEmpty()) {
throw new IOException(
String.format(
"Failed to commit %s committables after %s retries: %s",
requests.size(), maxRetries, requests));
}
}
@Override
public Collection<CommT> getSuccessfulCommittables() {
return subtasksCommittableManagers.values().stream()
.flatMap(SubtaskCommittableManager::getSuccessfulCommittables)
.collect(Collectors.toList());
}
@Override
public int getNumFailed() {
return subtasksCommittableManagers.values().stream()
.mapToInt(SubtaskCommittableManager::getNumFailed)
.sum();
}
Stream<CommitRequestImpl<CommT>> getPendingRequests() {
return subtasksCommittableManagers.values().stream()
.peek(this::assertReceivedAll)
.flatMap(SubtaskCommittableManager::getPendingRequests);
}
/**
* For committers: Sinks don't use unaligned checkpoints, so we receive all committables of a
* given upstream task before the respective barrier. Thus, when the barrier reaches the
* committer, all committables of a specific checkpoint must have been received. Committing
* happens even later on notifyCheckpointComplete.
*
* <p>Global committers need to ensure that all committables of all subtasks have been received
* with {@link #hasGloballyReceivedAll()} before trying to commit. Naturally, this method then
* becomes a no-op.
*
* <p>Note that by transitivity, the assertion also holds for committables of subsumed
* checkpoints.
*
* <p>This assertion will fail in case of bugs in the writer or in the pre-commit topology if
* present.
*/
private void assertReceivedAll(SubtaskCommittableManager<CommT> subtask) {
Preconditions.checkArgument(
subtask.hasReceivedAll(),
"Trying to commit incomplete batch of committables subtask=%s, manager=%s",
subtask.getSubtaskId(),
this);
}
CheckpointCommittableManagerImpl<CommT> merge(CheckpointCommittableManagerImpl<CommT> other) {
checkArgument(other.checkpointId == checkpointId);
CheckpointCommittableManagerImpl<CommT> merged = copy();
for (Map.Entry<Integer, SubtaskCommittableManager<CommT>> subtaskEntry :
other.subtasksCommittableManagers.entrySet()) {
merged.subtasksCommittableManagers.merge(
subtaskEntry.getKey(),
subtaskEntry.getValue(),
SubtaskCommittableManager::merge);
}
return merged;
}
CheckpointCommittableManagerImpl<CommT> copy() {
return new CheckpointCommittableManagerImpl<>(
subtasksCommittableManagers.entrySet().stream()
.collect(Collectors.toMap(Map.Entry::getKey, (e) -> e.getValue().copy())),
numberOfSubtasks,
checkpointId,
metricGroup);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
CheckpointCommittableManagerImpl<?> that = (CheckpointCommittableManagerImpl<?>) o;
return checkpointId == that.checkpointId
&& numberOfSubtasks == that.numberOfSubtasks
&& Objects.equals(subtasksCommittableManagers, that.subtasksCommittableManagers);
}
@Override
public int hashCode() {
return Objects.hash(subtasksCommittableManagers, checkpointId, numberOfSubtasks);
}
@Override
public String toString() {
return "CheckpointCommittableManagerImpl{"
+ "numberOfSubtasks="
+ numberOfSubtasks
+ ", checkpointId="
+ checkpointId
+ ", subtasksCommittableManagers="
+ subtasksCommittableManagers
+ '}';
}
}
| CheckpointCommittableManagerImpl |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/model/ast/builder/TableUpdateBuilderStandard.java | {
"start": 842,
"end": 3135
} | class ____<O extends MutationOperation>
extends AbstractTableUpdateBuilder<O> {
private final String whereFragment;
public TableUpdateBuilderStandard(
MutationTarget<?> mutationTarget,
TableMapping tableMapping,
SessionFactoryImplementor sessionFactory) {
super( mutationTarget, tableMapping, sessionFactory );
this.whereFragment = null;
}
public TableUpdateBuilderStandard(
MutationTarget<?> mutationTarget,
MutatingTableReference tableReference,
SessionFactoryImplementor sessionFactory) {
this( mutationTarget, tableReference, sessionFactory, null );
}
public TableUpdateBuilderStandard(
MutationTarget<?> mutationTarget,
MutatingTableReference tableReference,
SessionFactoryImplementor sessionFactory,
String whereFragment) {
super( mutationTarget, tableReference, sessionFactory );
this.whereFragment = whereFragment;
}
public String getWhereFragment() {
return whereFragment;
}
//TODO: The unchecked typecasts here are horrible
@SuppressWarnings("unchecked")
@Override
public RestrictedTableMutation<O> buildMutation() {
final var valueBindings = combine( getValueBindings(), getKeyBindings(), getLobValueBindings() );
if ( valueBindings.isEmpty() ) {
return (RestrictedTableMutation<O>)
new TableUpdateNoSet( getMutatingTable(), getMutationTarget() );
}
if ( getMutatingTable().getTableMapping().getUpdateDetails().getCustomSql() != null ) {
return (RestrictedTableMutation<O>)
new TableUpdateCustomSql(
getMutatingTable(),
getMutationTarget(),
getSqlComment(),
valueBindings,
getKeyRestrictionBindings(),
getOptimisticLockBindings()
);
}
if ( getMutatingTable().getTableMapping().isOptional() ) {
return (RestrictedTableMutation<O>)
new OptionalTableUpdate(
getMutatingTable(),
getMutationTarget(),
valueBindings,
getKeyRestrictionBindings(),
getOptimisticLockBindings()
);
}
return (RestrictedTableMutation<O>)
new TableUpdateStandard(
getMutatingTable(),
getMutationTarget(),
getSqlComment(),
valueBindings,
getKeyRestrictionBindings(),
getOptimisticLockBindings(),
whereFragment,
null,
emptyList()
);
}
}
| TableUpdateBuilderStandard |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ForOverrideCheckerTest.java | {
"start": 4218,
"end": 4683
} | class ____ {
public void tryCall() {
ExtendMe extendMe = new ExtendMe();
// BUG: Diagnostic contains: must not be invoked
extendMe.overrideMe();
}
}
""")
.doTest();
}
@Test
public void userCannotCallDefault() {
compilationHelper
.addSourceLines(
"test/Test.java",
"""
package test;
public | Test |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/characters/Characters_assertLowerCase_Test.java | {
"start": 1426,
"end": 3178
} | class ____ extends CharactersBaseTest {
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> characters.assertLowerCase(someInfo(), null))
.withMessage(actualIsNull());
}
@Test
void should_pass_if_actual_is_lowercase() {
characters.assertLowerCase(someInfo(), 'a');
}
@Test
void should_fail_if_actual_is_not_lowercase() {
AssertionInfo info = someInfo();
Throwable error = catchThrowable(() -> characters.assertLowerCase(info, 'A'));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeLowerCase('A'));
}
@Test
void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> charactersWithCaseInsensitiveComparisonStrategy.assertLowerCase(someInfo(),
null))
.withMessage(actualIsNull());
}
@Test
void should_pass_if_actual_is_lowercase_whatever_custom_comparison_strategy_is() {
charactersWithCaseInsensitiveComparisonStrategy.assertLowerCase(someInfo(), 'a');
}
@Test
void should_fail_if_actual_is_not_lowercase_whatever_custom_comparison_strategy_is() {
AssertionInfo info = someInfo();
Throwable error = catchThrowable(() -> charactersWithCaseInsensitiveComparisonStrategy.assertLowerCase(info, 'A'));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeLowerCase('A'));
}
}
| Characters_assertLowerCase_Test |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/subselect/SetSubselectTest.java | {
"start": 653,
"end": 1434
} | class ____ {
@AfterEach
void tearDown(SessionFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
public void testSubselect(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( session -> {
Author b = new Author();
b.setName( "Camilleri" );
b.setId( 1 );
session.persist( b );
Book book = new Book();
book.setId( 2 );
book.setAuthorId( 1 );
book.setTitle( "Il sognaglio" );
session.persist( book );
Book book2 = new Book();
book2.setId( 3 );
book2.setAuthorId( 1 );
book2.setTitle( "Il casellante" );
session.persist( book2 );
} );
factoryScope.inTransaction( session -> {
Author author = session.find( Author.class, 1 );
assertThat( author.getBooks().size(), is( 2 ) );
} );
}
}
| SetSubselectTest |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/JCacheEndpointBuilderFactory.java | {
"start": 25830,
"end": 27340
} | interface ____
extends
EndpointProducerBuilder {
default AdvancedJCacheEndpointProducerBuilder advanced() {
return (AdvancedJCacheEndpointProducerBuilder) this;
}
/**
* The Properties for the javax.cache.spi.CachingProvider to create the
* CacheManager.
*
* The option is a: <code>java.util.Properties</code> type.
*
* Group: common
*
* @param cacheConfigurationProperties the value to set
* @return the dsl builder
*/
default JCacheEndpointProducerBuilder cacheConfigurationProperties(Properties cacheConfigurationProperties) {
doSetProperty("cacheConfigurationProperties", cacheConfigurationProperties);
return this;
}
/**
* The Properties for the javax.cache.spi.CachingProvider to create the
* CacheManager.
*
* The option will be converted to a <code>java.util.Properties</code>
* type.
*
* Group: common
*
* @param cacheConfigurationProperties the value to set
* @return the dsl builder
*/
default JCacheEndpointProducerBuilder cacheConfigurationProperties(String cacheConfigurationProperties) {
doSetProperty("cacheConfigurationProperties", cacheConfigurationProperties);
return this;
}
/**
* The fully qualified | JCacheEndpointProducerBuilder |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/FSClusterStats.java | {
"start": 1117,
"end": 2708
} | interface ____ {
/**
* an indication of the total load of the cluster.
*
* @return a count of the total number of block transfers and block
* writes that are currently occuring on the cluster.
*/
public int getTotalLoad();
/**
* Indicate whether or not the cluster is now avoiding
* to use stale DataNodes for writing.
*
* @return True if the cluster is currently avoiding using stale DataNodes
* for writing targets, and false otherwise.
*/
public boolean isAvoidingStaleDataNodesForWrite();
/**
* Indicates number of datanodes that are in service.
* @return Number of datanodes that are both alive and not decommissioned.
*/
public int getNumDatanodesInService();
/**
* An indication of the average load of non-decommission(ing|ed) nodes
* eligible for block placement.
*
* @return average of the in service number of block transfers and block
* writes that are currently occurring on the cluster.
*/
public double getInServiceXceiverAverage();
/**
* An indication of the average load of volumes at non-decommission(ing|ed)
* nodes eligible for block placement.
*
* @return average of in service number of block transfers and block
* writes that are currently occurring on the volumes of the
* cluster.
*/
double getInServiceXceiverAverageForVolume();
/**
* Indicates the storage statistics per storage type.
* @return storage statistics per storage type.
*/
Map<StorageType, StorageTypeStats> getStorageTypeStats();
}
| FSClusterStats |
java | apache__hadoop | hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RecordCreatorFactory.java | {
"start": 2357,
"end": 2491
} | interface ____<R extends Record, T> {
R create(Name name, T target);
}
/**
* An A Record creator.
*/
static | RecordCreator |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/embeddable/InheritedPropertyTest.java | {
"start": 2654,
"end": 2788
} | class ____ {
@Id
Long id;
@Embedded
Animal pet;
}
@Embeddable
@DiscriminatorColumn(name = "animal_type")
public static | Owner |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltCacheFactory.java | {
"start": 1940,
"end": 2533
} | class ____<T> implements PreBuiltCache<T> {
private T model = null;
@Override
public T get(IndexVersion version) {
return model;
}
@Override
public void put(IndexVersion version, T model) {
this.model = model;
}
@Override
public Collection<T> values() {
return model == null ? Collections.emptySet() : Collections.singleton(model);
}
}
/**
* This cache contains one version for each elasticsearch version object
*/
private static | PreBuiltCacheStrategyOne |
java | quarkusio__quarkus | extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devui/DevBeanInfo.java | {
"start": 657,
"end": 8401
} | class ____ implements Comparable<DevBeanInfo> {
public static DevBeanInfo from(BeanInfo bean, CompletedApplicationClassPredicateBuildItem predicate) {
Set<Name> qualifiers = new HashSet<>();
for (AnnotationInstance qualifier : bean.getQualifiers()) {
qualifiers.add(Name.from(qualifier));
}
Set<Name> types = new HashSet<>();
for (Type beanType : bean.getTypes()) {
types.add(Name.from(beanType));
}
Name scope = Name.from(bean.getScope().getDotName());
Name providerType = Name.from(bean.getProviderType());
List<String> interceptors;
List<InterceptorInfo> boundInterceptors = bean.getBoundInterceptors();
if (boundInterceptors.isEmpty()) {
interceptors = List.of();
} else {
interceptors = new ArrayList<>();
for (InterceptorInfo interceptor : boundInterceptors) {
interceptors.add(interceptor.getIdentifier());
}
}
if (bean.getTarget().isPresent()) {
AnnotationTarget target = bean.getTarget().get();
DevBeanKind kind;
String memberName;
boolean isApplicationBean;
boolean isGenerated = false;
Name declaringClass;
if (target.kind() == Kind.METHOD) {
MethodInfo method = target.asMethod();
memberName = method.name();
kind = DevBeanKind.METHOD;
isApplicationBean = predicate.test(bean.getDeclaringBean().getBeanClass());
declaringClass = Name.from(bean.getDeclaringBean().getBeanClass());
isGenerated = bean.getDeclaringBean().getImplClazz().isSynthetic();
} else if (target.kind() == Kind.FIELD) {
FieldInfo field = target.asField();
memberName = field.name();
kind = DevBeanKind.FIELD;
isApplicationBean = predicate.test(bean.getDeclaringBean().getBeanClass());
declaringClass = Name.from(bean.getDeclaringBean().getBeanClass());
isGenerated = bean.getDeclaringBean().getImplClazz().isSynthetic();
} else if (target.kind() == Kind.CLASS) {
ClassInfo clazz = target.asClass();
kind = DevBeanKind.CLASS;
memberName = null;
isApplicationBean = predicate.test(clazz.name());
isGenerated = clazz.isSynthetic();
declaringClass = null;
} else {
throw new IllegalArgumentException("Invalid annotation target: " + target);
}
return new DevBeanInfo(bean.getIdentifier(), kind, isApplicationBean, providerType, memberName, types, qualifiers,
scope, declaringClass,
interceptors, isGenerated);
} else {
// Synthetic bean
return new DevBeanInfo(bean.getIdentifier(), DevBeanKind.SYNTHETIC, false, providerType, null, types, qualifiers,
scope, null,
interceptors, bean.getImplClazz().isSynthetic());
}
}
public DevBeanInfo(String id, DevBeanKind kind, boolean isApplicationBean, Name providerType, String memberName,
Set<Name> types,
Set<Name> qualifiers, Name scope, Name declaringClass, List<String> boundInterceptors,
boolean isGenerated) {
this.id = id;
this.kind = kind;
this.isApplicationBean = isApplicationBean;
this.providerType = providerType;
this.memberName = memberName;
this.types = types;
this.qualifiers = qualifiers;
this.scope = scope;
this.declaringClass = declaringClass;
this.interceptors = boundInterceptors;
this.isGenerated = isGenerated;
}
private final String id;
private final DevBeanKind kind;
private final boolean isApplicationBean;
private final Name providerType;
private final String memberName;
private final Set<Name> types;
private final Set<Name> qualifiers;
private final Name scope;
private final Name declaringClass;
private final List<String> interceptors;
private final boolean isGenerated;
public String getId() {
return id;
}
public DevBeanKind getKind() {
return kind;
}
public Name getScope() {
return scope;
}
public Set<Name> getQualifiers() {
return qualifiers;
}
public Set<Name> getNonDefaultQualifiers() {
Set<Name> nonDefault = new HashSet<>();
String atDefault = DotNames.DEFAULT.toString();
String atAny = DotNames.ANY.toString();
for (Name qualifier : qualifiers) {
if (qualifier.toString().endsWith(atDefault) || qualifier.toString().endsWith(atAny)) {
continue;
}
nonDefault.add(qualifier);
}
return nonDefault;
}
public Set<Name> getTypes() {
return types;
}
public Name getProviderType() {
return providerType;
}
public String getMemberName() {
return memberName;
}
public boolean isApplicationBean() {
return isApplicationBean;
}
public Name getDeclaringClass() {
return declaringClass;
}
public List<String> getInterceptors() {
return interceptors;
}
public boolean isGenerated() {
return isGenerated;
}
// only exists to make sure that the JSON objects already have the field
// and don't have to change their shape later
public boolean isInactive() {
return false;
}
public String getDescription() {
return description(false);
}
public String getSimpleDescription() {
return description(true);
}
private String description(boolean simple) {
String typeInfo = typeInfo(simple);
switch (kind) {
case FIELD:
return typeInfo + "#" + memberName;
case METHOD:
return typeInfo + "#" + memberName + "()";
case SYNTHETIC:
return "Synthetic: " + typeInfo;
default:
return typeInfo;
}
}
public String typeInfo(boolean simple) {
String type;
switch (kind) {
case FIELD:
case METHOD:
type = declaringClass.toString();
break;
default:
type = providerType.toString();
break;
}
if (simple) {
int idx = type.lastIndexOf(".");
return idx != -1 && type.length() > 1 ? type.substring(idx + 1) : type;
}
return type;
}
@Override
public int compareTo(DevBeanInfo o) {
// application beans come first
int result = Boolean.compare(o.isApplicationBean, isApplicationBean);
if (result != 0) {
return result;
}
// generated beans comes last
result = Boolean.compare(isGenerated, o.isGenerated);
if (result != 0) {
return result;
}
// fallback to name comparison
return providerType.compareTo(o.providerType);
}
@Override
public int hashCode() {
return Objects.hash(id);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
DevBeanInfo other = (DevBeanInfo) obj;
return Objects.equals(id, other.id);
}
}
| DevBeanInfo |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/typelevel/MockitoSpyBeansTests.java | {
"start": 1528,
"end": 2585
} | class ____ {
@Test
void registrationOrderForTopLevelClass() {
Stream<Class<?>> mockedServices = getRegisteredMockTypes(MockitoSpyBeansByTypeIntegrationTests.class);
assertThat(mockedServices).containsExactly(
Service01.class, Service02.class, Service03.class, Service04.class,
Service05.class, Service06.class, Service07.class);
}
@Test
void registrationOrderForNestedClass() {
Stream<Class<?>> mockedServices = getRegisteredMockTypes(MockitoSpyBeansByTypeIntegrationTests.NestedTests.class);
assertThat(mockedServices).containsExactly(
Service01.class, Service02.class, Service03.class, Service04.class,
Service05.class, Service06.class, Service07.class, Service08.class,
Service09.class, Service10.class, Service11.class, Service12.class,
Service13.class);
}
private static Stream<Class<?>> getRegisteredMockTypes(Class<?> testClass) {
return BeanOverrideTestUtils.findAllHandlers(testClass)
.stream()
.map(BeanOverrideHandler::getBeanType)
.map(ResolvableType::getRawClass);
}
}
| MockitoSpyBeansTests |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/observable/ObservableGroupJoinTest.java | {
"start": 1472,
"end": 4150
} | class ____ extends RxJavaTest {
Observer<Object> observer = TestHelper.mockObserver();
BiFunction<Integer, Integer, Integer> add = new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return t1 + t2;
}
};
<T> Function<Integer, Observable<T>> just(final Observable<T> observable) {
return new Function<Integer, Observable<T>>() {
@Override
public Observable<T> apply(Integer t1) {
return observable;
}
};
}
<T, R> Function<T, Observable<R>> just2(final Observable<R> observable) {
return new Function<T, Observable<R>>() {
@Override
public Observable<R> apply(T t1) {
return observable;
}
};
}
BiFunction<Integer, Observable<Integer>, Observable<Integer>> add2 = new BiFunction<Integer, Observable<Integer>, Observable<Integer>>() {
@Override
public Observable<Integer> apply(final Integer leftValue, Observable<Integer> rightValues) {
return rightValues.map(new Function<Integer, Integer>() {
@Override
public Integer apply(Integer rightValue) throws Throwable {
return add.apply(leftValue, rightValue);
}
});
}
};
@Before
public void before() {
MockitoAnnotations.openMocks(this);
}
@Test
public void behaveAsJoin() {
PublishSubject<Integer> source1 = PublishSubject.create();
PublishSubject<Integer> source2 = PublishSubject.create();
Observable<Integer> m = Observable.merge(source1.groupJoin(source2,
just(Observable.never()),
just(Observable.never()), add2));
m.subscribe(observer);
source1.onNext(1);
source1.onNext(2);
source1.onNext(4);
source2.onNext(16);
source2.onNext(32);
source2.onNext(64);
source1.onComplete();
source2.onComplete();
verify(observer, times(1)).onNext(17);
verify(observer, times(1)).onNext(18);
verify(observer, times(1)).onNext(20);
verify(observer, times(1)).onNext(33);
verify(observer, times(1)).onNext(34);
verify(observer, times(1)).onNext(36);
verify(observer, times(1)).onNext(65);
verify(observer, times(1)).onNext(66);
verify(observer, times(1)).onNext(68);
verify(observer, times(1)).onComplete(); //Never emitted?
verify(observer, never()).onError(any(Throwable.class));
}
| ObservableGroupJoinTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/metadata/ReservedStateHandlerMetadata.java | {
"start": 1224,
"end": 3972
} | class ____ hold a set of reserved keys in the cluster state, set by each {@link ReservedClusterStateHandler}.
*
* <p>
* Since we hold reserved metadata state for multiple namespaces, the same handler can appear in
* multiple namespaces. See {@link ReservedStateMetadata} and {@link Metadata}.
*/
public record ReservedStateHandlerMetadata(String name, Set<String> keys)
implements
SimpleDiffable<ReservedStateHandlerMetadata>,
ToXContentFragment {
static final ParseField KEYS = new ParseField("keys");
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeStringCollection(keys);
}
/**
* Reads an {@link ReservedStateHandlerMetadata} from a {@link StreamInput}
*
* @param in the {@link StreamInput} to read from
* @return {@link ReservedStateHandlerMetadata}
* @throws IOException
*/
public static ReservedStateHandlerMetadata readFrom(StreamInput in) throws IOException {
return new ReservedStateHandlerMetadata(in.readString(), in.readCollectionAsSet(StreamInput::readString));
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name());
builder.array(KEYS.getPreferredName(), keys().stream().sorted().toArray(String[]::new)); // ordered keys for output consistency
builder.endObject();
return builder;
}
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<ReservedStateHandlerMetadata, String> PARSER = new ConstructingObjectParser<>(
"reserved_state_handler_metadata",
false,
(a, name) -> new ReservedStateHandlerMetadata(name, Set.copyOf((List<String>) a[0]))
);
static {
PARSER.declareStringArray(optionalConstructorArg(), KEYS);
}
/**
* Reads an {@link ReservedStateHandlerMetadata} from xContent
*
* @param parser {@link XContentParser}
* @return {@link ReservedStateHandlerMetadata}
* @throws IOException
*/
public static ReservedStateHandlerMetadata fromXContent(XContentParser parser, String name) throws IOException {
return PARSER.apply(parser, name);
}
/**
* Reads an {@link ReservedStateHandlerMetadata} {@link Diff} from {@link StreamInput}
*
* @param in the {@link StreamInput} to read the diff from
* @return a {@link Diff} of {@link ReservedStateHandlerMetadata}
* @throws IOException
*/
public static Diff<ReservedStateHandlerMetadata> readDiffFrom(StreamInput in) throws IOException {
return SimpleDiffable.readDiffFrom(ReservedStateHandlerMetadata::readFrom, in);
}
}
| to |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit4/rules/ProgrammaticTxMgmtSpringRuleTests.java | {
"start": 2434,
"end": 2777
} | class ____ a copy of
* {@link org.springframework.test.context.transaction.programmatic.ProgrammaticTxMgmtTests}
* that has been modified to use {@link SpringClassRule} and {@link SpringMethodRule}.
*
* @author Sam Brannen
* @since 4.2
*/
@RunWith(JUnit4.class)
@ContextConfiguration
@Transactional
@SuppressWarnings("deprecation")
public | is |
java | elastic__elasticsearch | x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/DocValuesWhitelistExtension.java | {
"start": 1061,
"end": 2216
} | class ____ implements PainlessExtension {
private static final Whitelist WHITELIST = WhitelistLoader.loadFromResourceFiles(
DocValuesWhitelistExtension.class,
"org.elasticsearch.xpack.unsignedlong.txt"
);
@Override
public Map<ScriptContext<?>, List<Whitelist>> getContextWhitelists() {
List<Whitelist> whitelist = singletonList(WHITELIST);
return Map.ofEntries(
entry(FieldScript.CONTEXT, whitelist),
entry(ScoreScript.CONTEXT, whitelist),
entry(FilterScript.CONTEXT, whitelist),
entry(AggregationScript.CONTEXT, whitelist),
entry(NumberSortScript.CONTEXT, whitelist),
entry(StringSortScript.CONTEXT, whitelist),
entry(BucketAggregationSelectorScript.CONTEXT, whitelist),
entry(ScriptedMetricAggContexts.InitScript.CONTEXT, whitelist),
entry(ScriptedMetricAggContexts.MapScript.CONTEXT, whitelist),
entry(ScriptedMetricAggContexts.CombineScript.CONTEXT, whitelist),
entry(ScriptedMetricAggContexts.ReduceScript.CONTEXT, whitelist)
);
}
}
| DocValuesWhitelistExtension |
java | quarkusio__quarkus | extensions/hibernate-validator/deployment/src/test/java/io/quarkus/hibernate/validator/test/CustomConfigurationViaBeansTest.java | {
"start": 5509,
"end": 5646
} | class ____ extends DefaultGetterPropertySelectionStrategy {
}
@ApplicationScoped
public static | MyGetterPropertySelectionStrategy |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/util/IterableUtil.java | {
"start": 1030,
"end": 4915
} | class ____ {
/**
* Indicates whether the given {@link Iterable} is {@code null} or empty.
*
* @param iterable the given {@code Iterable} to check.
* @return {@code true} if the given {@code Iterable} is {@code null} or empty, otherwise {@code false}.
*/
public static boolean isNullOrEmpty(Iterable<?> iterable) {
if (iterable == null) return true;
if (iterable instanceof Collection<?> collection && collection.isEmpty()) return true;
return !iterable.iterator().hasNext();
}
/**
* Returns the size of the given {@link Iterable}.
*
* @param iterable the {@link Iterable} to get size.
* @return the size of the given {@link Iterable}.
* @throws NullPointerException if given {@link Iterable} is null.
*/
public static int sizeOf(Iterable<?> iterable) {
requireNonNull(iterable, "Iterable must not be null");
if (iterable instanceof Collection<?> collection) return collection.size();
return Math.toIntExact(Streams.stream(iterable).count());
}
/**
* Returns all the non-{@code null} elements in the given {@link Iterable}.
*
* @param <T> the type of elements of the {@code Iterable}.
* @param i the given {@code Iterable}.
* @return all the non-{@code null} elements in the given {@code Iterable}. An empty list is returned if the given
* {@code Iterable} is {@code null}.
*/
public static <T> List<T> nonNullElementsIn(Iterable<? extends T> i) {
if (isNullOrEmpty(i)) return emptyList();
return Streams.stream(i).filter(Objects::nonNull).collect(toList());
}
/**
* Create an array from an {@link Iterable}.
* <p>
* Note: this method will return Object[]. If you require a typed array please use {@link #toArray(Iterable, Class)}.
* It's main usage is to keep the generic type for chaining call like in:
* <pre><code class='java'> public S containsOnlyElementsOf(Iterable<? extends T> iterable) {
* return containsOnly(toArray(iterable));
* }</code></pre>
*
* @param iterable an {@link Iterable} to translate in an array.
* @param <T> the type of elements of the {@code Iterable}.
* @return all the elements from the given {@link Iterable} in an array. {@code null} if given {@link Iterable} is
* null.
*/
@SuppressWarnings("unchecked")
public static <T> T[] toArray(Iterable<? extends T> iterable) {
if (iterable == null) return null;
return (T[]) newArrayList(iterable).toArray();
}
/**
* Create an typed array from an {@link Iterable}.
*
* @param iterable an {@link Iterable} to translate in an array.
* @param type the type of the resulting array.
* @param <T> the type of elements of the {@code Iterable}.
* @return all the elements from the given {@link Iterable} in an array. {@code null} if given {@link Iterable} is
* null.
*/
public static <T> T[] toArray(Iterable<? extends T> iterable, Class<T> type) {
if (iterable == null) return null;
Collection<? extends T> collection = toCollection(iterable);
T[] array = newArray(type, collection.size());
return collection.toArray(array);
}
public static <T> Collection<T> toCollection(Iterable<T> iterable) {
return iterable instanceof Collection ? (Collection<T>) iterable : newArrayList(iterable);
}
@SafeVarargs
public static <T> Iterable<T> iterable(T... elements) {
if (elements == null) return null;
ArrayList<T> list = newArrayList();
java.util.Collections.addAll(list, elements);
return list;
}
@SafeVarargs
public static <T> Iterator<T> iterator(T... elements) {
if (elements == null) return null;
return iterable(elements).iterator();
}
@SuppressWarnings("unchecked")
private static <T> T[] newArray(Class<T> type, int length) {
return (T[]) Array.newInstance(type, length);
}
private IterableUtil() {}
}
| IterableUtil |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/associations/OneToManyEagerDiscriminatorTest.java | {
"start": 1582,
"end": 2647
} | class ____ {
@BeforeAll
public void setUp(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final User user = new User();
final UserValueBase value = new UserValueBase();
value.setData( "value_1" );
value.setEntity( user );
session.persist( user );
session.persist( value );
} );
}
@AfterAll
public void tearDown(SessionFactoryScope scope) {
scope.inTransaction( session -> session.createMutationQuery( "delete from ValueBase" ).executeUpdate() );
scope.inTransaction( session -> session.createMutationQuery( "delete from User" ).executeUpdate() );
}
@Test
public void test(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final User user = session.find( User.class, 1L );
assertNotNull( user );
assertTrue( Hibernate.isInitialized( user.getProperties() ) );
assertEquals( 1, user.getProperties().size() );
assertEquals( "value_1", user.getProperties().iterator().next().getData() );
} );
}
@Entity(name = "User")
@Table(name = "Users")
public static | OneToManyEagerDiscriminatorTest |
java | google__guice | extensions/assistedinject/src/com/google/inject/assistedinject/FactoryProvider.java | {
"start": 6530,
"end": 7671
} | class ____ the old @AssistedInject implementation that manually matches constructors
* to factory methods. The new child injector implementation lives in FactoryProvider2.
*/
private Injector injector;
private final TypeLiteral<F> factoryType;
private final TypeLiteral<?> implementationType;
private final Map<Method, AssistedConstructor<?>> factoryMethodToConstructor;
public static <F> Provider<F> newFactory(Class<F> factoryType, Class<?> implementationType) {
return newFactory(TypeLiteral.get(factoryType), TypeLiteral.get(implementationType));
}
public static <F> Provider<F> newFactory(
TypeLiteral<F> factoryType, TypeLiteral<?> implementationType) {
Map<Method, AssistedConstructor<?>> factoryMethodToConstructor =
createMethodMapping(factoryType, implementationType);
if (!factoryMethodToConstructor.isEmpty()) {
return new FactoryProvider<F>(factoryType, implementationType, factoryMethodToConstructor);
} else {
BindingCollector collector = new BindingCollector();
// Preserving backwards-compatibility: Map all return types in a factory
// | implements |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/aggregator/AggregateSimplePredicateTest.java | {
"start": 1045,
"end": 2379
} | class ____ extends ContextTestSupport {
@Test
public void testAggregateSimplePredicate() throws Exception {
getMockEndpoint("mock:aggregated").expectedBodiesReceived("A+B+C");
template.sendBodyAndHeader("direct:start", "A", "id", 123);
template.sendBodyAndHeader("direct:start", "B", "id", 123);
template.sendBodyAndHeader("direct:start", "C", "id", 123);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// START SNIPPET: e1
from("direct:start")
// aggregate all exchanges correlated by the id header.
// Aggregate them using the BodyInAggregatingStrategy
// strategy which
// and when the aggregated body contains A+B+C then complete
// the aggregation
// and send it to mock:aggregated
.aggregate(header("id"), new BodyInAggregatingStrategy()).completionPredicate(body().contains("A+B+C"))
.to("mock:aggregated");
// END SNIPPET: e1
}
};
}
}
| AggregateSimplePredicateTest |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/subclassmapping/abstractsuperclass/AbstractSuperClassTest.java | {
"start": 925,
"end": 2206
} | class ____ {
@ProcessorTest
@WithClasses( SubclassWithAbstractSuperClassMapper.class )
void downcastMappingInCollection() {
VehicleCollection vehicles = new VehicleCollection();
vehicles.getVehicles().add( new Car() );
vehicles.getVehicles().add( new Bike() );
VehicleCollectionDto result = SubclassWithAbstractSuperClassMapper.INSTANCE.map( vehicles );
assertThat( result.getVehicles() ).doesNotContainNull();
assertThat( result.getVehicles() ) // remove generic so that test works.
.extracting( vehicle -> (Class) vehicle.getClass() )
.containsExactly( CarDto.class, BikeDto.class );
}
@ProcessorTest
@WithClasses( SubclassWithAbstractSuperClassMapper.class )
void mappingOfUnknownChildThrowsIllegalArgumentException() {
VehicleCollection vehicles = new VehicleCollection();
vehicles.getVehicles().add( new Car() );
vehicles.getVehicles().add( new Motorcycle() );
assertThatThrownBy( () -> SubclassWithAbstractSuperClassMapper.INSTANCE.map( vehicles ) )
.isInstanceOf( IllegalArgumentException.class )
.hasMessage( "Not all subclasses are supported for this mapping. "
+ "Missing for | AbstractSuperClassTest |
java | quarkusio__quarkus | independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/buildfile/BuildFile.java | {
"start": 841,
"end": 6844
} | class ____ implements ExtensionManager {
private final Path projectDirPath;
private final ExtensionCatalog catalog;
public BuildFile(final Path projectDirPath, ExtensionCatalog catalog) {
this.projectDirPath = requireNonNull(projectDirPath, "projectPath is required");
this.catalog = requireNonNull(catalog, "catalog is required");
}
@Override
public final InstallResult install(Collection<ArtifactCoords> coords) throws IOException {
final ExtensionInstallPlan.Builder builder = ExtensionInstallPlan.builder();
for (ArtifactCoords coord : coords) {
if ("pom".equals(coord.getType())) {
builder.addPlatform(coord);
} else if (coord.getVersion() == null) {
builder.addManagedExtension(coord);
} else {
builder.addIndependentExtension(coord);
}
}
return install(builder.build());
}
@Override
public InstallResult install(ExtensionInstallPlan plan) throws IOException {
this.refreshData();
List<ArtifactCoords> installedManagedExtensions = new ArrayList<>();
List<ArtifactCoords> installedIndependentExtensions = new ArrayList<>();
List<ArtifactCoords> installedPlatforms = new ArrayList<>();
final Set<ArtifactKey> alreadyInstalled = alreadyInstalled(plan.toCollection());
for (ArtifactCoords platform : withoutAlreadyInstalled(alreadyInstalled, plan.getPlatforms())) {
if (importBom(platform)) {
installedPlatforms.add(platform);
}
}
for (ArtifactCoords managedExtension : withoutAlreadyInstalled(alreadyInstalled, plan.getManagedExtensions())) {
if (addDependency(managedExtension, true)) {
installedManagedExtensions.add(managedExtension);
}
}
for (ArtifactCoords independentExtension : withoutAlreadyInstalled(alreadyInstalled, plan.getIndependentExtensions())) {
if (addDependency(independentExtension, false)) {
installedIndependentExtensions.add(independentExtension);
}
}
writeToDisk();
return new InstallResult(installedPlatforms, installedManagedExtensions, installedIndependentExtensions,
alreadyInstalled);
}
@Override
public final Collection<ArtifactCoords> getInstalled() throws IOException {
if (catalog == null) {
return Collections.emptyList();
}
this.refreshData();
final Set<ArtifactKey> catalogKeys = catalog.getExtensions().stream().map(e -> e.getArtifact().getKey())
.collect(Collectors.toSet());
return getDependencies().stream()
.filter(d -> catalogKeys.contains(d.getKey()))
.collect(toList());
}
@Override
public final UninstallResult uninstall(Collection<ArtifactKey> keys) throws IOException {
this.refreshData();
final Set<ArtifactKey> existingKeys = getDependenciesKeys();
final List<ArtifactKey> uninstalled = keys.stream()
.distinct()
.filter(existingKeys::contains)
.filter(k -> {
try {
removeDependency(k);
return true;
} catch (IOException ex) {
throw new UncheckedIOException(ex);
}
}).collect(toList());
this.writeToDisk();
return new UninstallResult(uninstalled);
}
private Set<ArtifactKey> alreadyInstalled(Collection<ArtifactCoords> extensions) throws IOException {
final Set<ArtifactKey> existingKeys = getDependenciesKeys();
return extensions.stream()
.distinct()
.filter(a -> existingKeys.contains(a.getKey()))
.map(ArtifactCoords::getKey)
.collect(Collectors.toSet());
}
private Collection<ArtifactCoords> withoutAlreadyInstalled(Set<ArtifactKey> existingKeys,
Collection<ArtifactCoords> extensions) {
return extensions.stream()
.distinct()
.filter(a -> !existingKeys.contains(a.getKey()))
.collect(toList());
}
protected abstract boolean importBom(ArtifactCoords coords);
protected abstract boolean addDependency(ArtifactCoords coords, boolean managed);
protected abstract void removeDependency(ArtifactKey key) throws IOException;
protected abstract List<ArtifactCoords> getDependencies() throws IOException;
protected abstract void writeToDisk() throws IOException;
protected abstract String getProperty(String propertyName) throws IOException;
protected abstract void refreshData();
protected Path getProjectDirPath() {
return projectDirPath;
}
protected boolean hasProjectFile(final String fileName) throws IOException {
final Path filePath = projectDirPath.resolve(fileName);
return Files.exists(filePath);
}
protected byte[] readProjectFile(final String fileName) throws IOException {
final Path filePath = projectDirPath.resolve(fileName);
return Files.readAllBytes(filePath);
}
protected void writeToProjectFile(final String fileName, final byte[] content) throws IOException {
Files.write(projectDirPath.resolve(fileName), content);
}
protected void writeToProjectFile(final String fileName, final String content) throws IOException {
Files.writeString(projectDirPath.resolve(fileName), content);
}
private Set<ArtifactKey> getDependenciesKeys() throws IOException {
return getDependencies().stream().map(ArtifactCoords::getKey).collect(Collectors.toSet());
}
public static boolean isDefinedInRegistry(Collection<Extension> registry, final ArtifactKey key) {
return Extensions.findInList(registry, key).isPresent();
}
}
| BuildFile |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/streams/WriteStreamTest.java | {
"start": 749,
"end": 805
} | class ____ extends AsyncTestBase {
static | WriteStreamTest |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/interval/Intervals.java | {
"start": 2447,
"end": 8088
} | enum ____ {
YEAR,
MONTH,
DAY,
HOUR,
MINUTE,
SECOND,
MILLISECOND;
}
private Intervals() {}
public static long inMillis(Literal literal) {
Object fold = Foldables.valueOf(literal);
Check.isTrue(fold instanceof Interval, "Expected interval, received [{}]", fold);
TemporalAmount interval = ((Interval<?>) fold).interval();
long millis = 0;
if (interval instanceof Period p) {
millis = p.toTotalMonths() * 30 * 24 * 60 * 60 * 1000;
} else {
Duration d = (Duration) interval;
millis = d.toMillis();
}
return millis;
}
public static TemporalAmount of(Source source, long duration, TimeUnit unit) {
// Cannot use Period.of since it accepts int so use plus which accepts long
// Further more Period and Duration have inconsistent addition methods but plus is there
try {
return switch (unit) {
case YEAR -> Period.ZERO.plusYears(duration);
case MONTH -> Period.ZERO.plusMonths(duration);
case DAY -> Duration.ZERO.plusDays(duration);
case HOUR -> Duration.ZERO.plusHours(duration);
case MINUTE -> Duration.ZERO.plusMinutes(duration);
case SECOND -> Duration.ZERO.plusSeconds(duration);
case MILLISECOND -> Duration.ZERO.plusMillis(duration);
};
} catch (ArithmeticException ae) {
throw new ParsingException(source, "Value [{}] cannot be used as it is too large to convert into [{}]s", duration, unit);
}
}
public static DataType intervalType(Source source, TimeUnit leading, TimeUnit trailing) {
if (trailing == null) {
return switch (leading) {
case YEAR -> INTERVAL_YEAR;
case MONTH -> INTERVAL_MONTH;
case DAY -> INTERVAL_DAY;
case HOUR -> INTERVAL_HOUR;
case MINUTE -> INTERVAL_MINUTE;
case SECOND -> INTERVAL_SECOND;
default -> throw new ParsingException(source, "Cannot determine datatype for [{}]", leading);
};
} else {
if (leading == TimeUnit.YEAR && trailing == TimeUnit.MONTH) {
return INTERVAL_YEAR_TO_MONTH;
}
if (leading == TimeUnit.DAY && trailing == TimeUnit.HOUR) {
return INTERVAL_DAY_TO_HOUR;
}
if (leading == TimeUnit.DAY && trailing == TimeUnit.MINUTE) {
return INTERVAL_DAY_TO_MINUTE;
}
if (leading == TimeUnit.DAY && trailing == TimeUnit.SECOND) {
return INTERVAL_DAY_TO_SECOND;
}
if (leading == TimeUnit.HOUR && trailing == TimeUnit.MINUTE) {
return INTERVAL_HOUR_TO_MINUTE;
}
if (leading == TimeUnit.HOUR && trailing == TimeUnit.SECOND) {
return INTERVAL_HOUR_TO_SECOND;
}
if (leading == TimeUnit.MINUTE && trailing == TimeUnit.SECOND) {
return INTERVAL_MINUTE_TO_SECOND;
}
throw new ParsingException(source, "Cannot determine datatype for combination [{}] [{}]", leading, trailing);
}
}
// return the compatible interval between the two - it is assumed the types are intervals
// YEAR and MONTH -> YEAR_TO_MONTH
// DAY... SECOND -> DAY_TIME
// YEAR_MONTH and DAY_SECOND are NOT compatible
public static DataType compatibleInterval(DataType left, DataType right) {
if (left == right) {
return left;
}
if (isYearMonthInterval(left) && isYearMonthInterval(right)) {
// no need to look at YEAR/YEAR or MONTH/MONTH as these are equal and already handled
return INTERVAL_YEAR_TO_MONTH;
}
if (isDayTimeInterval(left) && isDayTimeInterval(right)) {
int PREFIX = "INTERVAL_".length();
// to avoid specifying the combinations, extract the leading and trailing unit from the name
// D > H > S > M which is also the alphabetical order
String lName = left.typeName().toUpperCase(Locale.ROOT).substring(PREFIX);
String rName = right.typeName().toUpperCase(Locale.ROOT).substring(PREFIX);
char leading = lName.charAt(0);
if (rName.charAt(0) < leading) {
leading = rName.charAt(0);
}
// look at the trailing unit
if (lName.length() > 6) {
int indexOf = lName.indexOf("_TO_");
lName = lName.substring(indexOf + 4);
}
if (rName.length() > 6) {
int indexOf = rName.indexOf("_TO_");
rName = rName.substring(indexOf + 4);
}
char trailing = lName.charAt(0);
if (rName.charAt(0) > trailing) {
trailing = rName.charAt(0);
}
return fromTypeName("INTERVAL_" + intervalUnit(leading) + "_TO_" + intervalUnit(trailing));
}
return null;
}
private static String intervalUnit(char unitChar) {
return switch (unitChar) {
case 'D' -> "DAY";
case 'H' -> "HOUR";
case 'M' -> "MINUTE";
case 'S' -> "SECOND";
default -> throw new QlIllegalArgumentException("Unknown unit {}", unitChar);
};
}
//
// String parsers
//
// For consistency and validation, each pattern has its own parser
private static | TimeUnit |
java | elastic__elasticsearch | x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlPartialEnablementDfaOnlyIT.java | {
"start": 1257,
"end": 2935
} | class ____ extends MlSingleNodeTestCase {
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(
LocalStateMachineLearningDfaOnly.class,
DataStreamsPlugin.class,
ReindexPlugin.class,
IngestCommonPlugin.class,
MockPainlessScriptEngine.TestPlugin.class,
// ILM is required for .ml-state template index settings
IndexLifecycle.class,
// Needed for scaled_float and wildcard fields
MapperExtrasPlugin.class,
Wildcard.class
);
}
/**
* The objective here is to detect if one of these very basic actions relies on some other action that is not available.
* We don't expect them to return anything, but if they are unexpectedly calling an action that has been disabled then
* an exception will be thrown which will fail the test.
*/
public void testBasicInfoCalls() {
client().execute(MlInfoAction.INSTANCE, new MlInfoAction.Request()).actionGet();
client().execute(MlMemoryAction.INSTANCE, new MlMemoryAction.Request("*")).actionGet();
client().execute(GetDataFrameAnalyticsAction.INSTANCE, new GetDataFrameAnalyticsAction.Request("*")).actionGet();
client().execute(GetDataFrameAnalyticsStatsAction.INSTANCE, new GetDataFrameAnalyticsStatsAction.Request("*")).actionGet();
client().execute(GetTrainedModelsAction.INSTANCE, new GetTrainedModelsAction.Request("*")).actionGet();
client().execute(GetTrainedModelsStatsAction.INSTANCE, new GetTrainedModelsStatsAction.Request("*")).actionGet();
}
}
| MlPartialEnablementDfaOnlyIT |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/handler/predicate/VersionRoutePredicateFactoryIntegrationTests.java | {
"start": 5686,
"end": 6518
} | class ____ implements WebFluxConfigurer {
@Value("${test.uri}")
String uri;
@Bean
ApiVersionResolver customApiVersionResolver() {
return exchange -> exchange.getRequest().getQueryParams().getFirst("customApiVersionParam");
}
@Bean
public RouteLocator testRouteLocator(RouteLocatorBuilder builder) {
return builder.routes()
.route("version11plus_dsl",
r -> r.path("/anything/version11plus")
.and()
.version("1.1+")
.filters(f -> f.prefixPath("/httpbin").setResponseHeader("X-Matched-Version", "1.1+"))
.uri(uri))
.route("version20plus_dsl",
r -> r.path("/anything/version20plus")
.and()
.version("2.0+")
.filters(f -> f.prefixPath("/httpbin").setResponseHeader("X-Matched-Version", "2.0+"))
.uri(uri))
.build();
}
}
}
| TestConfig |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ConfigurationClassAndBeanMethodTests.java | {
"start": 6855,
"end": 7057
} | class ____ {
@Bean
String bean0() {
return "";
}
@Bean
String bean1(String text) {
return "";
}
@Bean
String bean2(String text, Integer num) {
return "";
}
}
static | Config1 |
java | apache__kafka | clients/src/main/java/org/apache/kafka/server/authorizer/AclCreateResult.java | {
"start": 933,
"end": 1473
} | class ____ {
public static final AclCreateResult SUCCESS = new AclCreateResult();
private final ApiException exception;
private AclCreateResult() {
this(null);
}
public AclCreateResult(ApiException exception) {
this.exception = exception;
}
/**
* Returns any exception during create. If exception is empty, the request has succeeded.
*/
public Optional<ApiException> exception() {
return exception == null ? Optional.empty() : Optional.of(exception);
}
}
| AclCreateResult |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/stereotype/DoubleScopedBeanTestCase.java | {
"start": 1052,
"end": 1139
} | interface ____ {
}
@SessionStereotype
@RequestStereotype
| RequestStereotype |
java | spring-projects__spring-framework | spring-core/src/testFixtures/java/org/springframework/core/testfixture/nullness/CustomNullableProcessor.java | {
"start": 689,
"end": 889
} | interface ____ {
@org.springframework.core.testfixture.nullness.custom.Nullable
String process(@org.springframework.core.testfixture.nullness.custom.Nullable String nullable);
}
| CustomNullableProcessor |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryAssignmentTest.java | {
"start": 8095,
"end": 8446
} | class ____ {
@Inject boolean myFoo;
void sin() {}
}
""")
.doTest();
}
@Test
public void inject_assignedElsewhereButOptional_noFinding() {
refactoringHelper
.addInputLines(
"Test.java",
"""
import com.google.inject.Inject;
| Test |
java | apache__camel | components/camel-leveldb/src/main/java/org/apache/camel/component/leveldb/serializer/jackson/HolderBodyMixin.java | {
"start": 1360,
"end": 1646
} | class ____ {
@JsonSerialize(using = BodySerializer.class)
@JsonDeserialize(using = BodyDeserializer.class)
private Object inBody;
@JsonSerialize(using = BodySerializer.class)
@JsonDeserialize(using = BodyDeserializer.class)
private Object outBody;
}
| HolderBodyMixin |
java | spring-projects__spring-framework | spring-core-test/src/main/java/org/springframework/aot/agent/InvocationsRecorderClassTransformer.java | {
"start": 1266,
"end": 3885
} | class ____ implements ClassFileTransformer {
private static final String AGENT_PACKAGE = InvocationsRecorderClassTransformer.class.getPackageName().replace('.', '/');
private static final String AOT_DYNAMIC_CLASSLOADER = "org/springframework/aot/test/generate/compile/DynamicClassLoader";
private final String[] instrumentedPackages;
private final String[] ignoredPackages;
public InvocationsRecorderClassTransformer(String[] instrumentedPackages, String[] ignoredPackages) {
Assert.notNull(instrumentedPackages, "instrumentedPackages must not be null");
Assert.notNull(ignoredPackages, "ignoredPackages must not be null");
this.instrumentedPackages = rewriteToAsmFormat(instrumentedPackages);
this.ignoredPackages = rewriteToAsmFormat(ignoredPackages);
}
private String[] rewriteToAsmFormat(String[] packages) {
return Arrays.stream(packages).map(pack -> pack.replace('.', '/'))
.toArray(String[]::new);
}
@Override
public byte[] transform(@Nullable ClassLoader classLoader, String className, Class<?> classBeingRedefined,
ProtectionDomain protectionDomain, byte[] classfileBuffer) throws IllegalClassFormatException {
if (isTransformationCandidate(classLoader, className)) {
return attemptClassTransformation(classfileBuffer);
}
return classfileBuffer;
}
private boolean isTransformationCandidate(@Nullable ClassLoader classLoader, String className) {
// Ignore system classes
if (classLoader == null) {
return false;
}
// Ignore agent classes and spring-core-test DynamicClassLoader
else if (className.startsWith(AGENT_PACKAGE) || className.equals(AOT_DYNAMIC_CLASSLOADER)) {
return false;
}
// Do not instrument CGlib classes
else if (className.contains("$$")) {
return false;
}
// Only some packages are instrumented
else {
for (String ignoredPackage : this.ignoredPackages) {
if (className.startsWith(ignoredPackage)) {
return false;
}
}
for (String instrumentedPackage : this.instrumentedPackages) {
if (className.startsWith(instrumentedPackage)) {
return true;
}
}
}
return false;
}
private byte[] attemptClassTransformation(byte[] classfileBuffer) {
ClassReader fileReader = new ClassReader(classfileBuffer);
InvocationsRecorderClassVisitor classVisitor = new InvocationsRecorderClassVisitor();
try {
fileReader.accept(classVisitor, 0);
}
catch (Exception ex) {
ex.printStackTrace();
return classfileBuffer;
}
if (classVisitor.isTransformed()) {
return classVisitor.getTransformedClassBuffer();
}
return classfileBuffer;
}
}
| InvocationsRecorderClassTransformer |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/context/support/WebApplicationObjectSupportTests.java | {
"start": 2269,
"end": 2344
} | class ____ extends WebApplicationObjectSupport {
}
}
| TestWebApplicationObject |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/id/generators/UnnamedGeneratorTests.java | {
"start": 4391,
"end": 4520
} | class ____ {
@Id
@GeneratedValue
private Integer id;
private String name;
}
@Entity(name="Entity2")
public static | Entity1 |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/annotations/Any.java | {
"start": 1445,
"end": 3797
} | class ____ {
* ...
* @Any
* @AnyKeyJavaClass(UUID.class) //the foreign key type
* @JoinColumn(name="payment_id") //the foreign key column
* @Column(name="payment_type") //the discriminator column
* @AnyDiscriminatorValue(discriminator="CASH", entity=CashPayment.class)
* @AnyDiscriminatorValue(discriminator="CREDIT", entity=CreditCardPayment.class)
* Payment payment;
* ...
* }
* </pre>
* <p>
* In this example, {@code Payment} is <em>not</em> be declared as an entity type, and
* is not annotated {@link jakarta.persistence.Entity @Entity}. It might even be an
* interface, or at most just a {@linkplain jakarta.persistence.MappedSuperclass mapped
* superclass}, of {@code CashPayment} and {@code CreditCardPayment}. So in terms of the
* object/relational mappings, {@code CashPayment} and {@code CreditCardPayment} would
* <em>not</em> be considered to participate in the same entity inheritance hierarchy.
* On the other hand, {@code CashPayment} and {@code CreditCardPayment} must have the
* same identifier type.
* <p>
* It's reasonable to think of the "foreign key" in an {@code Any} mapping is really a
* composite value made up of the foreign key and discriminator taken together. Note,
* however, that this composite foreign key is only conceptual and cannot be declared
* as a physical constraint on the relational database table.
* <ul>
* <li>{@link AnyDiscriminator}, {@link JdbcType}, or {@link JdbcTypeCode} specifies
* the type of the discriminator.
* <li>{@link AnyDiscriminatorValue} specifies how discriminator values map to entity
* types.
* <li>{@link jakarta.persistence.Column} or {@link Formula} specifies the column or
* formula in which the discriminator value is stored.
* <li>{@link AnyKeyJavaType}, {@link AnyKeyJavaClass}, {@link AnyKeyJdbcType}, or
* {@link AnyKeyJdbcTypeCode} specifies the type of the foreign key.
* <li>{@link jakarta.persistence.JoinColumn} specifies the foreign key column.
* </ul>
* <p>
* Of course, {@code Any} mappings are disfavored, except in extremely special cases,
* since it's much more difficult to enforce referential integrity at the database
* level.
*
* @see ManyToAny
*/
@Target({METHOD, FIELD})
@Retention(RUNTIME)
public @ | Order |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/issues/MulticastPipelineAggregateIssueTest.java | {
"start": 1049,
"end": 2653
} | class ____ extends ContextTestSupport {
@Test
public void testMulticastPipelineAggregateIssue() throws Exception {
getMockEndpoint("mock:a").expectedBodiesReceived(8);
getMockEndpoint("mock:b").expectedBodiesReceived(8);
getMockEndpoint("mock:c").expectedBodiesReceived(8);
template.sendBody("direct:a", 1);
template.sendBody("direct:b", 1);
template.sendBody("direct:c", 1);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:a").multicast(new SumAggregateBean()).pipeline().bean(IncreaseOne.class).bean(new IncreaseTwo())
.end().pipeline().bean(IncreaseOne.class)
.bean(new IncreaseTwo()).end().end().to("mock:a");
from("direct:b").multicast(new SumAggregateBean()).pipeline().transform(method(IncreaseOne.class))
.bean(new IncreaseTwo()).end().pipeline()
.transform(method(IncreaseOne.class)).bean(new IncreaseTwo()).end().end().to("mock:b");
from("direct:c").multicast(new SumAggregateBean()).pipeline().transform(method(IncreaseOne.class))
.transform(method(new IncreaseTwo())).end().pipeline()
.transform(method(IncreaseOne.class)).transform(method(new IncreaseTwo())).end().end().to("mock:c");
}
};
}
public static | MulticastPipelineAggregateIssueTest |
java | apache__camel | components/camel-hl7/src/main/java/org/apache/camel/component/hl7/HL7MLLPDecoder.java | {
"start": 1372,
"end": 7835
} | class ____ extends CumulativeProtocolDecoder {
private static final Logger LOG = LoggerFactory.getLogger(HL7MLLPDecoder.class);
private static final String DECODER_STATE = HL7MLLPDecoder.class.getName() + ".STATE";
private static final String CHARSET_DECODER = HL7MLLPDecoder.class.getName() + ".charsetdecoder";
private HL7MLLPConfig config;
HL7MLLPDecoder(HL7MLLPConfig config) {
this.config = config;
}
@Override
protected boolean doDecode(IoSession session, IoBuffer in, ProtocolDecoderOutput out) throws Exception {
// Get the state of the current message and
// Skip what we have already scanned before
DecoderState state = decoderState(session);
in.position(state.current());
LOG.debug("Received data, checking from position {} to {}", in.position(), in.limit());
boolean messageDecoded = false;
while (in.hasRemaining()) {
int previousPosition = in.position();
byte current = in.get();
// Check if we are at the end of an HL7 message
if (current == config.getEndByte2() && state.previous() == config.getEndByte1()) {
if (state.isStarted()) {
// Save the current buffer pointers and reset them to surround the identifier message
int currentPosition = in.position();
int currentLimit = in.limit();
LOG.debug("Message ends at position {} with length {}", previousPosition,
previousPosition - state.start() + 1);
in.position(state.start());
in.limit(currentPosition);
LOG.debug("Set start to position {} and limit to {}", in.position(), in.limit());
// Now create string or byte[] from this part of the buffer and restore the buffer pointers
try {
out.write(config.isProduceString()
? parseMessageToString(in.slice(), charsetDecoder(session))
: parseMessageToByteArray(in.slice()));
messageDecoded = true;
} finally {
LOG.debug("Resetting to position {} and limit to {}", currentPosition, currentLimit);
in.position(currentPosition);
in.limit(currentLimit);
state.reset();
}
} else {
LOG.warn("Ignoring message end at position {} until start byte has been seen.", previousPosition);
}
} else {
// Check if we are at the start of an HL7 message
if (current == config.getStartByte()) {
state.markStart(previousPosition);
} else {
// Remember previous byte in state object because the buffer could
// be theoretically exhausted right between the two end bytes
state.markPrevious(current);
}
messageDecoded = false;
}
}
if (!messageDecoded) {
// Could not find a complete message in the buffer.
// Reset to the initial position (just as nothing had been read yet)
// and return false so that this method is called again with more data.
LOG.debug("No complete message yet at position {}", in.position());
state.markCurrent(in.position());
in.position(0);
}
return messageDecoded;
}
// Make a defensive byte copy (the buffer will be reused)
// and omit the start and the two end bytes of the MLLP message
// returning a byte array
private Object parseMessageToByteArray(IoBuffer buf) {
int len = buf.limit() - 3;
LOG.debug("Making byte array of length {}", len);
byte[] dst = new byte[len];
buf.skip(1); // skip start byte
buf.get(dst, 0, len);
buf.skip(2); // skip end bytes
// Only do this if conversion is enabled
if (config.isConvertLFtoCR()) {
LOG.debug("Replacing LF by CR");
for (int i = 0; i < dst.length; i++) {
if (dst[i] == (byte) '\n') {
dst[i] = (byte) '\r';
}
}
}
return dst;
}
// Make a defensive byte copy (the buffer will be reused)
// and omit the start and the two end bytes of the MLLP message
// returning a String
private Object parseMessageToString(IoBuffer buf, CharsetDecoder decoder) throws CharacterCodingException {
int len = buf.limit() - 3;
LOG.debug("Making string of length {} using charset {}", len, decoder.charset());
buf.skip(1); // skip start byte
String message = buf.getString(len, decoder);
buf.skip(2); // skip end bytes
// Only do this if conversion is enabled
if (config.isConvertLFtoCR()) {
LOG.debug("Replacing LF by CR");
message = message.replace('\n', '\r');
}
return message;
}
@Override
public void dispose(IoSession session) throws Exception {
session.removeAttribute(DECODER_STATE);
session.removeAttribute(CHARSET_DECODER);
}
private CharsetDecoder charsetDecoder(IoSession session) {
synchronized (session) {
CharsetDecoder decoder = (CharsetDecoder) session.getAttribute(CHARSET_DECODER);
if (decoder == null) {
decoder = config.getCharset().newDecoder()
.onMalformedInput(config.getMalformedInputErrorAction())
.onUnmappableCharacter(config.getUnmappableCharacterErrorAction());
session.setAttribute(CHARSET_DECODER, decoder);
}
return decoder;
}
}
private DecoderState decoderState(IoSession session) {
synchronized (session) {
DecoderState decoderState = (DecoderState) session.getAttribute(DECODER_STATE);
if (decoderState == null) {
decoderState = new DecoderState();
session.setAttribute(DECODER_STATE, decoderState);
}
return decoderState;
}
}
/**
* Holds the state of the decoding process
*/
private static | HL7MLLPDecoder |
java | spring-projects__spring-security | webauthn/src/main/java/org/springframework/security/web/webauthn/jackson/AuthenticationExtensionsClientInputsSerializer.java | {
"start": 1216,
"end": 1795
} | class ____ extends StdSerializer<AuthenticationExtensionsClientInputs> {
/**
* Creates a new instance.
*/
AuthenticationExtensionsClientInputsSerializer() {
super(AuthenticationExtensionsClientInputs.class);
}
@Override
public void serialize(AuthenticationExtensionsClientInputs inputs, JsonGenerator jgen, SerializationContext ctxt)
throws JacksonException {
jgen.writeStartObject();
for (AuthenticationExtensionsClientInput input : inputs.getInputs()) {
jgen.writePOJO(input);
}
jgen.writeEndObject();
}
}
| AuthenticationExtensionsClientInputsSerializer |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/Assertions_assertThatNoException_Test.java | {
"start": 1034,
"end": 2459
} | class ____ {
@Test
void should_fail_when_asserting_no_exception_raised_but_exception_occurs() {
// GIVEN
Exception exception = new Exception("boom");
ThrowingCallable boom = raisingException(exception);
// WHEN
var assertionError = expectAssertionError(() -> assertThatNoException().isThrownBy(boom));
// THEN
then(assertionError).hasMessage(shouldNotHaveThrown(exception).create());
}
@Test
void can_use_description_in_error_message() {
// GIVEN
ThrowingCallable boom = raisingException(new Exception("boom"));
// WHEN
var assertionError = expectAssertionError(() -> assertThatNoException().as("Test").isThrownBy(boom));
// THEN
then(assertionError).hasMessageStartingWith("[Test]");
}
@Test
void error_message_contains_stacktrace() {
// GIVEN
ThrowingCallable boom = raisingException(new Exception("boom"));
// WHEN
var assertionError = expectAssertionError(() -> assertThatNoException().isThrownBy(boom));
// THEN
then(assertionError).hasMessageContainingAll("java.lang.Exception: boom",
"at org.assertj.tests.core/org.assertj.tests.core.api.Assertions_assertThatNoException_Test.error_message_contains_stacktrace");
}
private ThrowingCallable raisingException(Exception exception) {
return () -> {
throw exception;
};
}
}
| Assertions_assertThatNoException_Test |
java | apache__hadoop | hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/SimulatedDataNodes.java | {
"start": 3498,
"end": 7731
} | class ____ more detail.";
static void printUsageExit(String err) {
System.out.println(err);
System.out.println(USAGE);
throw new RuntimeException(err);
}
public static void main(String[] args) throws Exception {
SimulatedDataNodes datanodes = new SimulatedDataNodes();
ToolRunner.run(new HdfsConfiguration(), datanodes, args);
}
public int run(String[] args) throws Exception {
if (args.length < 2) {
printUsageExit("Not enough arguments");
}
String bpid = args[0];
List<Path> blockListFiles = new ArrayList<>();
for (int i = 1; i < args.length; i++) {
blockListFiles.add(new Path(args[i]));
}
URI defaultFS = FileSystem.getDefaultUri(getConf());
if (!HdfsConstants.HDFS_URI_SCHEME.equals(defaultFS.getScheme())) {
printUsageExit(
"Must specify an HDFS-based default FS! Got <" + defaultFS + ">");
}
String nameNodeAdr = defaultFS.getAuthority();
if (nameNodeAdr == null) {
printUsageExit("No NameNode address and port in config");
}
System.out.println("DataNodes will connect to NameNode at " + nameNodeAdr);
String loc = DataNode.getStorageLocations(getConf()).get(0).toString();
loc = loc.substring(loc.indexOf("]") + 1); // delete storage type
String path = new URI(loc).getPath();
System.setProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA, path);
SimulatedFSDataset.setFactory(getConf());
getConf().setLong(SimulatedFSDataset.CONFIG_PROPERTY_CAPACITY,
STORAGE_CAPACITY);
UserGroupInformation.setConfiguration(getConf());
MiniDFSCluster mc = new MiniDFSCluster();
try {
mc.formatDataNodeDirs();
} catch (IOException e) {
System.out.println("Error formatting DataNode dirs: " + e);
throw new RuntimeException("Error formatting DataNode dirs", e);
}
try {
System.out.println("Found " + blockListFiles.size()
+ " block listing files; launching DataNodes accordingly.");
mc.startDataNodes(getConf(), blockListFiles.size(), null, false,
StartupOption.REGULAR, null, null, null, null, false, true, true,
null, null, null);
long startTime = Time.monotonicNow();
System.out.println("Waiting for DataNodes to connect to NameNode and "
+ "init storage directories.");
Set<DataNode> datanodesWithoutFSDataset = new HashSet<>(
mc.getDataNodes());
while (!datanodesWithoutFSDataset.isEmpty()) {
datanodesWithoutFSDataset
.removeIf((dn) -> DataNodeTestUtils.getFSDataset(dn) != null);
Thread.sleep(100);
}
System.out.println("Waited " + (Time.monotonicNow() - startTime)
+ " ms for DataNode FSDatasets to be ready");
for (int dnIndex = 0; dnIndex < blockListFiles.size(); dnIndex++) {
Path blockListFile = blockListFiles.get(dnIndex);
try (FSDataInputStream fsdis =
blockListFile.getFileSystem(getConf()).open(blockListFile);
BufferedReader reader = new BufferedReader(
new InputStreamReader(fsdis, StandardCharsets.UTF_8))) {
List<Block> blockList = new ArrayList<>();
int cnt = 0;
for (String line = reader.readLine(); line != null; line = reader
.readLine()) {
// Format of the listing files is blockID,blockGenStamp,blockSize
String[] blockInfo = line.split(",");
blockList.add(new Block(Long.parseLong(blockInfo[0]),
Long.parseLong(blockInfo[2]), Long.parseLong(blockInfo[1])));
cnt++;
}
try {
mc.injectBlocks(dnIndex, blockList, bpid);
} catch (IOException ioe) {
System.out.printf("Error injecting blocks into DataNode %d for "
+ "block pool %s: %s%n", dnIndex, bpid,
ExceptionUtils.getStackTrace(ioe));
}
System.out.printf(
"Injected %d blocks into DataNode %d for block pool %s%n",
cnt, dnIndex, bpid);
}
}
} catch (IOException e) {
System.out.println(
"Error creating DataNodes: " + ExceptionUtils.getStackTrace(e));
return 1;
}
return 0;
}
}
| for |
java | netty__netty | microbench/src/main/java/io/netty/microbench/http/HttpFragmentedRequestDecoderBenchmark.java | {
"start": 2359,
"end": 5110
} | class ____ extends AbstractMicrobenchmark {
@Param({ "64", "128" })
public int headerFragmentBytes;
@Param({ "false", "true" })
public boolean direct;
@Param({ "false", "true" })
public boolean pooled;
@Param({ "true", "false"})
public boolean validateHeaders;
private EmbeddedChannel channel;
private ByteBuf[] fragmentedRequest;
private static ByteBuf[] stepsBuffers(ByteBufAllocator alloc, byte[] content, int fragmentSize, boolean direct) {
// allocate a single big buffer and just slice it
final int headerLength = content.length - CONTENT_LENGTH;
final ArrayList<ByteBuf> bufs = new ArrayList<ByteBuf>();
for (int a = 0; a < headerLength;) {
int amount = fragmentSize;
if (a + amount > headerLength) {
amount = headerLength - a;
}
final ByteBuf buf = direct? alloc.directBuffer(amount, amount) : alloc.heapBuffer(amount, amount);
buf.writeBytes(content, a, amount);
bufs.add(buf);
a += amount;
}
// don't split the content
// Should produce HttpContent
final ByteBuf buf = direct?
alloc.directBuffer(CONTENT_LENGTH, CONTENT_LENGTH) :
alloc.heapBuffer(CONTENT_LENGTH, CONTENT_LENGTH);
buf.writeBytes(content, content.length - CONTENT_LENGTH, CONTENT_LENGTH);
bufs.add(buf);
return bufs.toArray(new ByteBuf[0]);
}
@Setup
public void initPipeline() {
final ByteBufAllocator allocator = pooled? PooledByteBufAllocator.DEFAULT : UnpooledByteBufAllocator.DEFAULT;
fragmentedRequest = stepsBuffers(allocator, CONTENT_MIXED_DELIMITERS, headerFragmentBytes, direct);
channel = new EmbeddedChannel(
new HttpRequestDecoder(DEFAULT_MAX_INITIAL_LINE_LENGTH, DEFAULT_MAX_HEADER_SIZE, DEFAULT_MAX_CHUNK_SIZE,
validateHeaders, DEFAULT_INITIAL_BUFFER_SIZE));
}
@TearDown
public void releaseStepBuffers() {
for (ByteBuf buf : fragmentedRequest) {
buf.release();
}
}
@Benchmark
@CompilerControl(Mode.DONT_INLINE)
public void testDecodeWholeRequestInMultipleStepsMixedDelimiters() {
final EmbeddedChannel channel = this.channel;
for (ByteBuf buf : this.fragmentedRequest) {
buf.resetReaderIndex();
buf.retain();
channel.writeInbound(buf);
final Queue<Object> decoded = channel.inboundMessages();
Object o;
while ((o = decoded.poll()) != null) {
ReferenceCountUtil.release(o);
}
}
}
}
| HttpFragmentedRequestDecoderBenchmark |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/sortedset/Range.java | {
"start": 55,
"end": 1360
} | class ____<V> {
private final V min;
private final V max;
private final boolean inclusiveMin;
private final boolean inclusiveMax;
public static final Range<?> UNBOUNDED = new Range<>(null, null);
@SuppressWarnings("unchecked")
public static <V> Range<V> unbounded() {
return (Range<V>) UNBOUNDED;
}
public Range(V min, V max) {
this.min = min;
this.max = max;
this.inclusiveMin = true;
this.inclusiveMax = true;
}
public Range(V min, boolean inclusiveMin, V max, boolean inclusiveMax) {
this.min = min;
this.max = max;
this.inclusiveMin = inclusiveMin;
this.inclusiveMax = inclusiveMax;
}
public boolean isUnbounded() {
return this == UNBOUNDED;
}
public String getLowerBound() {
if (isUnbounded() || min == null || min.equals("-")) {
return "-";
}
if (!inclusiveMin) {
return "(" + min;
} else {
return "[" + min;
}
}
public String getUpperBound() {
if (isUnbounded() || max == null || max.equals("+")) {
return "+";
}
if (!inclusiveMax) {
return "(" + max;
} else {
return "[" + max;
}
}
}
| Range |
java | mockito__mockito | mockito-integration-tests/extensions-tests/src/test/java/org/mockitousage/plugins/donotmockenforcer/DoNotmockEnforcerTest.java | {
"start": 3440,
"end": 3518
} | class ____ extends StaticallyNotMockable {}
static | StaticallyNotMockableChild |
java | elastic__elasticsearch | modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java | {
"start": 42885,
"end": 49807
} | class ____ implements HttpHandler {
@Override
public void handle(HttpExchange exchange) throws IOException {
numberOfReads.incrementAndGet();
exchange.sendResponseHeaders(HttpStatus.SC_NOT_FOUND, -1);
exchange.close();
}
}
httpServer.createContext(downloadStorageEndpoint(blobContainer, "read_blob_not_found"), new NotFoundReadHandler());
expectThrows(NoSuchFileException.class, () -> {
try (
InputStream inputStream = randomBoolean()
? blobContainer.readBlob(randomRetryingPurpose(), "read_blob_not_found")
: blobContainer.readBlob(randomRetryingPurpose(), "read_blob_not_found", between(0, 100), between(1, 100))
) {
Streams.readFully(inputStream);
}
});
assertThat(numberOfReads.get(), equalTo(1));
assertThat(getRetryStartedMeasurements(), empty());
assertThat(getRetryCompletedMeasurements(), empty());
assertThat(getRetryHistogramMeasurements(), empty());
}
public void testSnapshotDeletesRetryOnThrottlingError() throws IOException {
final BlobContainer blobContainer = blobContainerBuilder()
// disable AWS-client retries
.maxRetries(0)
.disableChunkedEncoding(true)
.build();
int numBlobsToDelete = randomIntBetween(500, 3000);
List<String> blobsToDelete = new ArrayList<>();
for (int i = 0; i < numBlobsToDelete; i++) {
blobsToDelete.add(randomIdentifier());
}
int throttleTimesBeforeSuccess = randomIntBetween(1, MAX_NUMBER_SNAPSHOT_DELETE_RETRIES);
logger.info("--> Throttling {} times before success", throttleTimesBeforeSuccess);
ThrottlingDeleteHandler handler = new ThrottlingDeleteHandler(throttleTimesBeforeSuccess, attempt -> {});
httpServer.createContext("/", handler);
blobContainer.deleteBlobsIgnoringIfNotExists(randomFrom(operationPurposesThatRetryOnDelete()), blobsToDelete.iterator());
int expectedNumberOfBatches = expectedNumberOfBatches(numBlobsToDelete);
assertThat(handler.numberOfDeleteAttempts.get(), equalTo(throttleTimesBeforeSuccess + expectedNumberOfBatches));
assertThat(handler.numberOfSuccessfulDeletes.get(), equalTo(expectedNumberOfBatches));
}
public void testSnapshotDeletesAbortRetriesWhenThreadIsInterrupted() {
final BlobContainer blobContainer = blobContainerBuilder()
// disable AWS-client retries
.maxRetries(0)
.disableChunkedEncoding(true)
.build();
int numBlobsToDelete = randomIntBetween(500, 3000);
List<String> blobsToDelete = new ArrayList<>();
for (int i = 0; i < numBlobsToDelete; i++) {
blobsToDelete.add(randomIdentifier());
}
final Thread clientThread = Thread.currentThread();
int interruptBeforeAttempt = randomIntBetween(0, randomIntBetween(1, 10));
logger.info("--> Deleting {} blobs, interrupting before attempt {}", numBlobsToDelete, interruptBeforeAttempt);
ThrottlingDeleteHandler handler = new ThrottlingDeleteHandler(Integer.MAX_VALUE, attempt -> {
if (attempt == interruptBeforeAttempt) {
clientThread.interrupt();
}
});
httpServer.createContext("/", handler);
try {
IOException exception = assertThrows(
IOException.class,
() -> blobContainer.deleteBlobsIgnoringIfNotExists(
randomFrom(operationPurposesThatRetryOnDelete()),
blobsToDelete.iterator()
)
);
assertThat(exception.getCause(), instanceOf(SdkException.class));
assertThat(handler.numberOfDeleteAttempts.get(), equalTo(interruptBeforeAttempt + 1));
assertThat(handler.numberOfSuccessfulDeletes.get(), equalTo(0));
} finally {
// interrupt should be preserved, clear it to prevent it leaking between tests
assertTrue(Thread.interrupted());
}
}
public void testNonSnapshotDeletesAreNotRetried() {
final BlobContainer blobContainer = blobContainerBuilder()
// disable AWS-client retries
.maxRetries(0)
.disableChunkedEncoding(true)
.build();
int numBlobsToDelete = randomIntBetween(500, 3000);
List<String> blobsToDelete = new ArrayList<>();
for (int i = 0; i < numBlobsToDelete; i++) {
blobsToDelete.add(randomIdentifier());
}
ThrottlingDeleteHandler handler = new ThrottlingDeleteHandler(Integer.MAX_VALUE, attempt -> {});
httpServer.createContext("/", handler);
IOException exception = assertThrows(
IOException.class,
() -> blobContainer.deleteBlobsIgnoringIfNotExists(
randomValueOtherThanMany(
op -> operationPurposesThatRetryOnDelete().contains(op),
() -> randomFrom(OperationPurpose.values())
),
blobsToDelete.iterator()
)
);
assertEquals(
ThrottlingDeleteHandler.THROTTLING_ERROR_CODE,
asInstanceOf(S3Exception.class, exception.getCause()).awsErrorDetails().errorCode()
);
assertThat(handler.numberOfDeleteAttempts.get(), equalTo(expectedNumberOfBatches(numBlobsToDelete)));
assertThat(handler.numberOfSuccessfulDeletes.get(), equalTo(0));
}
public void testNonThrottlingErrorsAreNotRetried() {
final BlobContainer blobContainer = blobContainerBuilder()
// disable AWS-client retries
.maxRetries(0)
.disableChunkedEncoding(true)
.build();
int numBlobsToDelete = randomIntBetween(500, 3000);
List<String> blobsToDelete = new ArrayList<>();
for (int i = 0; i < numBlobsToDelete; i++) {
blobsToDelete.add(randomIdentifier());
}
ThrottlingDeleteHandler handler = new ThrottlingDeleteHandler(Integer.MAX_VALUE, attempt -> {}, "NotThrottling");
httpServer.createContext("/", handler);
assertThrows(
IOException.class,
() -> blobContainer.deleteBlobsIgnoringIfNotExists(randomFrom(operationPurposesThatRetryOnDelete()), blobsToDelete.iterator())
);
assertThat(handler.numberOfDeleteAttempts.get(), equalTo(expectedNumberOfBatches(numBlobsToDelete)));
assertThat(handler.numberOfSuccessfulDeletes.get(), equalTo(0));
}
private int expectedNumberOfBatches(int blobsToDelete) {
return (blobsToDelete / 1_000) + (blobsToDelete % 1_000 == 0 ? 0 : 1);
}
@SuppressForbidden(reason = "use a http server")
private static | NotFoundReadHandler |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/sample/SampleEvaluationContextExtension.java | {
"start": 922,
"end": 1291
} | class ____ implements EvaluationContextExtension {
@Override
public String getExtensionId() {
return "security";
}
@Override
public Map<String, Object> getProperties() {
return Collections.singletonMap("principal", SampleSecurityContextHolder.getCurrent().getPrincipal());
}
/**
* @author Thomas Darimont
*/
public static | SampleEvaluationContextExtension |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ThreadPriorityCheckTest.java | {
"start": 981,
"end": 1291
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(ThreadPriorityCheck.class, getClass());
@Test
public void yieldThread() {
compilationHelper
.addSourceLines(
"Test.java",
"""
| ThreadPriorityCheckTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/App.java | {
"start": 1037,
"end": 1382
} | class ____ {
final AppContext context;
private Job job;
private Task task;
@Inject
public App(AppContext ctx) {
context = ctx;
}
    // Records the job this holder is currently scoped to (package-private mutator).
    void setJob(Job job) {
        this.job = job;
    }
    // Returns the job previously recorded via setJob, or null if none was set.
    public Job getJob() {
        return job;
    }
    // Records the task this holder is currently scoped to (package-private mutator).
    void setTask(Task task) {
        this.task = task;
    }
    // Returns the task previously recorded via setTask, or null if none was set.
    public Task getTask() {
        return task;
    }
}
| App |
java | grpc__grpc-java | core/src/test/java/io/grpc/internal/MaxConnectionIdleManagerTest.java | {
"start": 1013,
"end": 3013
} | class ____ {
@Rule public final MockitoRule mocks = MockitoJUnit.rule();
private final FakeClock fakeClock = new FakeClock();
private final MaxConnectionIdleManager.Ticker ticker = new MaxConnectionIdleManager.Ticker() {
@Override
public long nanoTime() {
return fakeClock.getTicker().read();
}
};
@Mock
private Runnable closure;
@Test
public void maxIdleReached() {
MaxConnectionIdleManager maxConnectionIdleManager =
new MaxConnectionIdleManager(123L, ticker);
maxConnectionIdleManager.start(closure, fakeClock.getScheduledExecutorService());
maxConnectionIdleManager.onTransportIdle();
fakeClock.forwardNanos(123L);
verify(closure).run();
}
@Test
public void maxIdleNotReachedAndReached() {
MaxConnectionIdleManager maxConnectionIdleManager =
new MaxConnectionIdleManager(123L, ticker);
maxConnectionIdleManager.start(closure, fakeClock.getScheduledExecutorService());
maxConnectionIdleManager.onTransportIdle();
fakeClock.forwardNanos(100L);
// max idle not reached
maxConnectionIdleManager.onTransportActive();
maxConnectionIdleManager.onTransportIdle();
fakeClock.forwardNanos(100L);
// max idle not reached although accumulative idle time exceeds max idle time
maxConnectionIdleManager.onTransportActive();
fakeClock.forwardNanos(100L);
verify(closure, never()).run();
// max idle reached
maxConnectionIdleManager.onTransportIdle();
fakeClock.forwardNanos(123L);
verify(closure).run();
}
@Test
public void shutdownThenMaxIdleReached() {
MaxConnectionIdleManager maxConnectionIdleManager =
new MaxConnectionIdleManager(123L, ticker);
maxConnectionIdleManager.start(closure, fakeClock.getScheduledExecutorService());
maxConnectionIdleManager.onTransportIdle();
maxConnectionIdleManager.onTransportTermination();
fakeClock.forwardNanos(123L);
verify(closure, never()).run();
}
}
| MaxConnectionIdleManagerTest |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-kafka/src/main/java/smoketest/kafka/SampleMessage.java | {
"start": 767,
"end": 1235
} | class ____ {
private final Integer id;
private final String message;
	// Jackson creation entry point: binds the "id" and "message" JSON properties.
	// No null checks are performed; either argument may be null.
	@JsonCreator
	public SampleMessage(@JsonProperty("id") Integer id, @JsonProperty("message") String message) {
		this.id = id;
		this.message = message;
	}
	// Returns the id supplied at construction (nothing enforces non-null).
	public Integer getId() {
		return this.id;
	}
	// Returns the message text supplied at construction (nothing enforces non-null).
	public String getMessage() {
		return this.message;
	}
@Override
public String toString() {
return "SampleMessage{id=" + this.id + ", message='" + this.message + "'}";
}
}
| SampleMessage |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/builder/CompareToBuilderTest.java | {
"start": 2892,
"end": 47086
} | class ____ extends TestObject {
@SuppressWarnings("unused")
private final transient int t;
    // t is stored in a transient field: it should only influence reflectionCompare
    // when the comparison is configured to include transients.
    TestTransientSubObject(final int a, final int t) {
        super(a);
        this.t = t;
    }
}
/**
* See "Effective Java" under "Consider Implementing Comparable".
*
* @param x an object to compare
* @param y an object to compare
* @param z an object to compare
* @param testTransients Whether to include transients in the comparison
* @param excludeFields fields to exclude
*/
private void assertReflectionCompareContract(final Object x, final Object y, final Object z, final boolean testTransients, final String[] excludeFields) {
// signum
assertEquals(reflectionCompareSignum(x, y, testTransients, excludeFields), -reflectionCompareSignum(y, x, testTransients, excludeFields));
// transitive
if (CompareToBuilder.reflectionCompare(x, y, testTransients, null, excludeFields) > 0
&& CompareToBuilder.reflectionCompare(y, z, testTransients, null, excludeFields) > 0) {
assertTrue(CompareToBuilder.reflectionCompare(x, z, testTransients, null, excludeFields) > 0);
}
// un-named
if (CompareToBuilder.reflectionCompare(x, y, testTransients, null, excludeFields) == 0) {
assertEquals(reflectionCompareSignum(x, z, testTransients, excludeFields), -reflectionCompareSignum(y, z, testTransients, excludeFields));
}
// strongly recommended but not strictly required
assertTrue(CompareToBuilder.reflectionCompare(x, y, testTransients) == 0 == EqualsBuilder.reflectionEquals(x, y, testTransients));
}
    // Asserts the strict total order x < y < z (plus reflexive equality of each value
    // with itself) under reflectionCompare with the given transients/exclusions settings.
    private void assertXYZCompareOrder(final Object x, final Object y, final Object z, final boolean testTransients, final String[] excludeFields) {
        assertEquals(0, CompareToBuilder.reflectionCompare(x, x, testTransients, null, excludeFields));
        assertEquals(0, CompareToBuilder.reflectionCompare(y, y, testTransients, null, excludeFields));
        assertEquals(0, CompareToBuilder.reflectionCompare(z, z, testTransients, null, excludeFields));
        assertTrue(0 > CompareToBuilder.reflectionCompare(x, y, testTransients, null, excludeFields));
        assertTrue(0 > CompareToBuilder.reflectionCompare(x, z, testTransients, null, excludeFields));
        assertTrue(0 > CompareToBuilder.reflectionCompare(y, z, testTransients, null, excludeFields));
        assertTrue(0 < CompareToBuilder.reflectionCompare(y, x, testTransients, null, excludeFields));
        assertTrue(0 < CompareToBuilder.reflectionCompare(z, x, testTransients, null, excludeFields));
        assertTrue(0 < CompareToBuilder.reflectionCompare(z, y, testTransients, null, excludeFields));
    }
/**
* Returns the signum of the result of comparing x and y with
* {@code CompareToBuilder.reflectionCompare}
*
* @param lhs The "left-hand-side" of the comparison.
* @param rhs The "right-hand-side" of the comparison.
* @param testTransients Whether to include transients in the comparison
* @param excludeFields fields to exclude
* @return int The signum
*/
private int reflectionCompareSignum(final Object lhs, final Object rhs, final boolean testTransients, final String[] excludeFields) {
return BigInteger.valueOf(CompareToBuilder.reflectionCompare(lhs, rhs, testTransients)).signum();
}
    // A non-zero appendSuper result dominates the final comparison, while a zero
    // result defers to the values appended afterwards.
    @Test
    void testAppendSuper() {
        final TestObject o1 = new TestObject(4);
        final TestObject o2 = new TestObject(5);
        assertEquals(0, new CompareToBuilder().appendSuper(0).append(o1, o1).toComparison());
        assertTrue(new CompareToBuilder().appendSuper(0).append(o1, o2).toComparison() < 0);
        assertTrue(new CompareToBuilder().appendSuper(0).append(o2, o1).toComparison() > 0);
        assertTrue(new CompareToBuilder().appendSuper(-1).append(o1, o1).toComparison() < 0);
        assertTrue(new CompareToBuilder().appendSuper(-1).append(o1, o2).toComparison() < 0);
        assertTrue(new CompareToBuilder().appendSuper(1).append(o1, o1).toComparison() > 0);
        assertTrue(new CompareToBuilder().appendSuper(1).append(o1, o2).toComparison() > 0);
    }
@Test
void testBoolean() {
final boolean o1 = true;
final boolean o2 = false;
assertEquals(0, new CompareToBuilder().append(o1, o1).toComparison());
assertEquals(0, new CompareToBuilder().append(o2, o2).toComparison());
assertTrue(new CompareToBuilder().append(o1, o2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(o2, o1).toComparison() < 0);
}
@Test
void testBooleanArray() {
final boolean[] obj1 = new boolean[2];
obj1[0] = true;
obj1[1] = false;
final boolean[] obj2 = new boolean[2];
obj2[0] = true;
obj2[1] = false;
final boolean[] obj3 = new boolean[3];
obj3[0] = true;
obj3[1] = false;
obj3[2] = true;
assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
obj1[1] = true;
assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj1, null).toComparison() > 0);
assertEquals(0, new CompareToBuilder().append((boolean[]) null, null).toComparison());
assertTrue(new CompareToBuilder().append(null, obj1).toComparison() < 0);
}
@Test
void testBooleanArrayHiddenByObject() {
final boolean[] array1 = new boolean[2];
array1[0] = true;
array1[1] = false;
final boolean[] array2 = new boolean[2];
array2[0] = true;
array2[1] = false;
final boolean[] array3 = new boolean[3];
array3[0] = true;
array3[1] = false;
array3[2] = true;
final Object obj1 = array1;
final Object obj2 = array2;
final Object obj3 = array3;
assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
array1[1] = true;
assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
}
@Test
void testByte() {
final byte o1 = 1;
final byte o2 = 2;
assertEquals(0, new CompareToBuilder().append(o1, o1).toComparison());
assertTrue(new CompareToBuilder().append(o1, o2).toComparison() < 0);
assertTrue(new CompareToBuilder().append(o2, o1).toComparison() > 0);
assertTrue(new CompareToBuilder().append(o1, Byte.MAX_VALUE).toComparison() < 0);
assertTrue(new CompareToBuilder().append(Byte.MAX_VALUE, o1).toComparison() > 0);
assertTrue(new CompareToBuilder().append(o1, Byte.MIN_VALUE).toComparison() > 0);
assertTrue(new CompareToBuilder().append(Byte.MIN_VALUE, o1).toComparison() < 0);
}
@Test
void testByteArray() {
final byte[] obj1 = new byte[2];
obj1[0] = 5;
obj1[1] = 6;
final byte[] obj2 = new byte[2];
obj2[0] = 5;
obj2[1] = 6;
final byte[] obj3 = new byte[3];
obj3[0] = 5;
obj3[1] = 6;
obj3[2] = 7;
assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
obj1[1] = 7;
assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj1, null).toComparison() > 0);
assertEquals(0, new CompareToBuilder().append((byte[]) null, null).toComparison());
assertTrue(new CompareToBuilder().append(null, obj1).toComparison() < 0);
}
@Test
void testByteArrayHiddenByObject() {
final byte[] array1 = new byte[2];
array1[0] = 5;
array1[1] = 6;
final byte[] array2 = new byte[2];
array2[0] = 5;
array2[1] = 6;
final byte[] array3 = new byte[3];
array3[0] = 5;
array3[1] = 6;
array3[2] = 7;
final Object obj1 = array1;
final Object obj2 = array2;
final Object obj3 = array3;
assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
array1[1] = 7;
assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
}
@Test
void testChar() {
final char o1 = 1;
final char o2 = 2;
assertEquals(0, new CompareToBuilder().append(o1, o1).toComparison());
assertTrue(new CompareToBuilder().append(o1, o2).toComparison() < 0);
assertTrue(new CompareToBuilder().append(o2, o1).toComparison() > 0);
assertTrue(new CompareToBuilder().append(o1, Character.MAX_VALUE).toComparison() < 0);
assertTrue(new CompareToBuilder().append(Character.MAX_VALUE, o1).toComparison() > 0);
assertTrue(new CompareToBuilder().append(o1, Character.MIN_VALUE).toComparison() > 0);
assertTrue(new CompareToBuilder().append(Character.MIN_VALUE, o1).toComparison() < 0);
}
@Test
void testCharArray() {
final char[] obj1 = new char[2];
obj1[0] = 5;
obj1[1] = 6;
final char[] obj2 = new char[2];
obj2[0] = 5;
obj2[1] = 6;
final char[] obj3 = new char[3];
obj3[0] = 5;
obj3[1] = 6;
obj3[2] = 7;
assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
obj1[1] = 7;
assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj1, null).toComparison() > 0);
assertEquals(0, new CompareToBuilder().append((char[]) null, null).toComparison());
assertTrue(new CompareToBuilder().append(null, obj1).toComparison() < 0);
}
@Test
void testCharArrayHiddenByObject() {
final char[] array1 = new char[2];
array1[0] = 5;
array1[1] = 6;
final char[] array2 = new char[2];
array2[0] = 5;
array2[1] = 6;
final char[] array3 = new char[3];
array3[0] = 5;
array3[1] = 6;
array3[2] = 7;
final Object obj1 = array1;
final Object obj2 = array2;
final Object obj3 = array3;
assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
array1[1] = 7;
assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
}
@Test
void testDouble() {
final double o1 = 1;
final double o2 = 2;
assertEquals(0, new CompareToBuilder().append(o1, o1).toComparison());
assertTrue(new CompareToBuilder().append(o1, o2).toComparison() < 0);
assertTrue(new CompareToBuilder().append(o2, o1).toComparison() > 0);
assertTrue(new CompareToBuilder().append(o1, Double.MAX_VALUE).toComparison() < 0);
assertTrue(new CompareToBuilder().append(Double.MAX_VALUE, o1).toComparison() > 0);
assertTrue(new CompareToBuilder().append(o1, Double.MIN_VALUE).toComparison() > 0);
assertTrue(new CompareToBuilder().append(Double.MIN_VALUE, o1).toComparison() < 0);
assertEquals(0, new CompareToBuilder().append(Double.NaN, Double.NaN).toComparison());
assertTrue(new CompareToBuilder().append(Double.NaN, Double.MAX_VALUE).toComparison() > 0);
assertTrue(new CompareToBuilder().append(Double.POSITIVE_INFINITY, Double.MAX_VALUE).toComparison() > 0);
assertTrue(new CompareToBuilder().append(Double.NEGATIVE_INFINITY, Double.MIN_VALUE).toComparison() < 0);
assertTrue(new CompareToBuilder().append(o1, Double.NaN).toComparison() < 0);
assertTrue(new CompareToBuilder().append(Double.NaN, o1).toComparison() > 0);
assertTrue(new CompareToBuilder().append(-0.0, 0.0).toComparison() < 0);
assertTrue(new CompareToBuilder().append(0.0, -0.0).toComparison() > 0);
}
@Test
void testDoubleArray() {
final double[] obj1 = new double[2];
obj1[0] = 5;
obj1[1] = 6;
final double[] obj2 = new double[2];
obj2[0] = 5;
obj2[1] = 6;
final double[] obj3 = new double[3];
obj3[0] = 5;
obj3[1] = 6;
obj3[2] = 7;
assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
obj1[1] = 7;
assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj1, null).toComparison() > 0);
assertEquals(0, new CompareToBuilder().append((double[]) null, null).toComparison());
assertTrue(new CompareToBuilder().append(null, obj1).toComparison() < 0);
}
@Test
void testDoubleArrayHiddenByObject() {
final double[] array1 = new double[2];
array1[0] = 5;
array1[1] = 6;
final double[] array2 = new double[2];
array2[0] = 5;
array2[1] = 6;
final double[] array3 = new double[3];
array3[0] = 5;
array3[1] = 6;
array3[2] = 7;
final Object obj1 = array1;
final Object obj2 = array2;
final Object obj3 = array3;
assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
array1[1] = 7;
assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
}
@Test
void testFloat() {
final float o1 = 1;
final float o2 = 2;
assertEquals(0, new CompareToBuilder().append(o1, o1).toComparison());
assertTrue(new CompareToBuilder().append(o1, o2).toComparison() < 0);
assertTrue(new CompareToBuilder().append(o2, o1).toComparison() > 0);
assertTrue(new CompareToBuilder().append(o1, Float.MAX_VALUE).toComparison() < 0);
assertTrue(new CompareToBuilder().append(Float.MAX_VALUE, o1).toComparison() > 0);
assertTrue(new CompareToBuilder().append(o1, Float.MIN_VALUE).toComparison() > 0);
assertTrue(new CompareToBuilder().append(Float.MIN_VALUE, o1).toComparison() < 0);
assertEquals(0, new CompareToBuilder().append(Float.NaN, Float.NaN).toComparison());
assertTrue(new CompareToBuilder().append(Float.NaN, Float.MAX_VALUE).toComparison() > 0);
assertTrue(new CompareToBuilder().append(Float.POSITIVE_INFINITY, Float.MAX_VALUE).toComparison() > 0);
assertTrue(new CompareToBuilder().append(Float.NEGATIVE_INFINITY, Float.MIN_VALUE).toComparison() < 0);
assertTrue(new CompareToBuilder().append(o1, Float.NaN).toComparison() < 0);
assertTrue(new CompareToBuilder().append(Float.NaN, o1).toComparison() > 0);
assertTrue(new CompareToBuilder().append(-0.0, 0.0).toComparison() < 0);
assertTrue(new CompareToBuilder().append(0.0, -0.0).toComparison() > 0);
}
@Test
void testFloatArray() {
final float[] obj1 = new float[2];
obj1[0] = 5;
obj1[1] = 6;
final float[] obj2 = new float[2];
obj2[0] = 5;
obj2[1] = 6;
final float[] obj3 = new float[3];
obj3[0] = 5;
obj3[1] = 6;
obj3[2] = 7;
assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
obj1[1] = 7;
assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj1, null).toComparison() > 0);
assertEquals(0, new CompareToBuilder().append((float[]) null, null).toComparison());
assertTrue(new CompareToBuilder().append(null, obj1).toComparison() < 0);
}
@Test
void testFloatArrayHiddenByObject() {
final float[] array1 = new float[2];
array1[0] = 5;
array1[1] = 6;
final float[] array2 = new float[2];
array2[0] = 5;
array2[1] = 6;
final float[] array3 = new float[3];
array3[0] = 5;
array3[1] = 6;
array3[2] = 7;
final Object obj1 = array1;
final Object obj2 = array2;
final Object obj3 = array3;
assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
array1[1] = 7;
assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
}
@Test
void testInt() {
final int o1 = 1;
final int o2 = 2;
assertEquals(0, new CompareToBuilder().append(o1, o1).toComparison());
assertTrue(new CompareToBuilder().append(o1, o2).toComparison() < 0);
assertTrue(new CompareToBuilder().append(o2, o1).toComparison() > 0);
assertTrue(new CompareToBuilder().append(o1, Integer.MAX_VALUE).toComparison() < 0);
assertTrue(new CompareToBuilder().append(Integer.MAX_VALUE, o1).toComparison() > 0);
assertTrue(new CompareToBuilder().append(o1, Integer.MIN_VALUE).toComparison() > 0);
assertTrue(new CompareToBuilder().append(Integer.MIN_VALUE, o1).toComparison() < 0);
}
@Test
void testIntArray() {
final int[] obj1 = new int[2];
obj1[0] = 5;
obj1[1] = 6;
final int[] obj2 = new int[2];
obj2[0] = 5;
obj2[1] = 6;
final int[] obj3 = new int[3];
obj3[0] = 5;
obj3[1] = 6;
obj3[2] = 7;
assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
obj1[1] = 7;
assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj1, null).toComparison() > 0);
assertEquals(0, new CompareToBuilder().append((int[]) null, null).toComparison());
assertTrue(new CompareToBuilder().append(null, obj1).toComparison() < 0);
}
@Test
void testIntArrayHiddenByObject() {
final int[] array1 = new int[2];
array1[0] = 5;
array1[1] = 6;
final int[] array2 = new int[2];
array2[0] = 5;
array2[1] = 6;
final int[] array3 = new int[3];
array3[0] = 5;
array3[1] = 6;
array3[2] = 7;
final Object obj1 = array1;
final Object obj2 = array2;
final Object obj3 = array3;
assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
array1[1] = 7;
assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
}
@Test
void testLong() {
final long o1 = 1L;
final long o2 = 2L;
assertEquals(0, new CompareToBuilder().append(o1, o1).toComparison());
assertTrue(new CompareToBuilder().append(o1, o2).toComparison() < 0);
assertTrue(new CompareToBuilder().append(o2, o1).toComparison() > 0);
assertTrue(new CompareToBuilder().append(o1, Long.MAX_VALUE).toComparison() < 0);
assertTrue(new CompareToBuilder().append(Long.MAX_VALUE, o1).toComparison() > 0);
assertTrue(new CompareToBuilder().append(o1, Long.MIN_VALUE).toComparison() > 0);
assertTrue(new CompareToBuilder().append(Long.MIN_VALUE, o1).toComparison() < 0);
}
@Test
void testLongArray() {
final long[] obj1 = new long[2];
obj1[0] = 5L;
obj1[1] = 6L;
final long[] obj2 = new long[2];
obj2[0] = 5L;
obj2[1] = 6L;
final long[] obj3 = new long[3];
obj3[0] = 5L;
obj3[1] = 6L;
obj3[2] = 7L;
assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
obj1[1] = 7;
assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj1, null).toComparison() > 0);
assertEquals(0, new CompareToBuilder().append((long[]) null, null).toComparison());
assertTrue(new CompareToBuilder().append(null, obj1).toComparison() < 0);
}
@Test
void testLongArrayHiddenByObject() {
final long[] array1 = new long[2];
array1[0] = 5L;
array1[1] = 6L;
final long[] array2 = new long[2];
array2[0] = 5L;
array2[1] = 6L;
final long[] array3 = new long[3];
array3[0] = 5L;
array3[1] = 6L;
array3[2] = 7L;
final Object obj1 = array1;
final Object obj2 = array2;
final Object obj3 = array3;
assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
array1[1] = 7;
assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
}
@Test
void testMixedArray() {
final Object[] array1 = new Object[2];
final Object[] array2 = new Object[2];
final Object[] array3 = new Object[2];
for (int i = 0; i < array1.length; ++i) {
array1[i] = new long[2];
array2[i] = new long[2];
array3[i] = new long[3];
for (int j = 0; j < 2; ++j) {
((long[]) array1[i])[j] = (i + 1) * (j + 1);
((long[]) array2[i])[j] = (i + 1) * (j + 1);
((long[]) array3[i])[j] = (i + 1) * (j + 1);
}
}
((long[]) array3[0])[2] = 1;
((long[]) array3[1])[2] = 1;
assertEquals(0, new CompareToBuilder().append(array1, array1).toComparison());
assertEquals(0, new CompareToBuilder().append(array1, array2).toComparison());
assertTrue(new CompareToBuilder().append(array1, array3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(array3, array1).toComparison() > 0);
((long[]) array1[1])[1] = 200;
assertTrue(new CompareToBuilder().append(array1, array2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(array2, array1).toComparison() < 0);
}
@Test
void testMultiBooleanArray() {
final boolean[][] array1 = new boolean[2][2];
final boolean[][] array2 = new boolean[2][2];
final boolean[][] array3 = new boolean[2][3];
for (int i = 0; i < array1.length; ++i) {
for (int j = 0; j < array1[0].length; j++) {
array1[i][j] = i == 1 ^ j == 1;
array2[i][j] = i == 1 ^ j == 1;
array3[i][j] = i == 1 ^ j == 1;
}
}
array3[1][2] = false;
array3[1][2] = false;
assertEquals(0, new CompareToBuilder().append(array1, array1).toComparison());
assertEquals(0, new CompareToBuilder().append(array1, array2).toComparison());
assertTrue(new CompareToBuilder().append(array1, array3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(array3, array1).toComparison() > 0);
array1[1][1] = true;
assertTrue(new CompareToBuilder().append(array1, array2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(array2, array1).toComparison() < 0);
}
@Test
void testMultiByteArray() {
final byte[][] array1 = new byte[2][2];
final byte[][] array2 = new byte[2][2];
final byte[][] array3 = new byte[2][3];
for (byte i = 0; i < array1.length; ++i) {
for (byte j = 0; j < array1[0].length; j++) {
array1[i][j] = (byte) ((i + 1) * (j + 1));
array2[i][j] = (byte) ((i + 1) * (j + 1));
array3[i][j] = (byte) ((i + 1) * (j + 1));
}
}
array3[1][2] = 100;
array3[1][2] = 100;
assertEquals(0, new CompareToBuilder().append(array1, array1).toComparison());
assertEquals(0, new CompareToBuilder().append(array1, array2).toComparison());
assertTrue(new CompareToBuilder().append(array1, array3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(array3, array1).toComparison() > 0);
array1[1][1] = 127;
assertTrue(new CompareToBuilder().append(array1, array2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(array2, array1).toComparison() < 0);
}
@Test
void testMultiCharArray() {
final char[][] array1 = new char[2][2];
final char[][] array2 = new char[2][2];
final char[][] array3 = new char[2][3];
for (short i = 0; i < array1.length; ++i) {
for (short j = 0; j < array1[0].length; j++) {
array1[i][j] = (char) ((i + 1) * (j + 1));
array2[i][j] = (char) ((i + 1) * (j + 1));
array3[i][j] = (char) ((i + 1) * (j + 1));
}
}
array3[1][2] = 100;
array3[1][2] = 100;
assertEquals(0, new CompareToBuilder().append(array1, array1).toComparison());
assertEquals(0, new CompareToBuilder().append(array1, array2).toComparison());
assertTrue(new CompareToBuilder().append(array1, array3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(array3, array1).toComparison() > 0);
array1[1][1] = 200;
assertTrue(new CompareToBuilder().append(array1, array2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(array2, array1).toComparison() < 0);
}
@Test
void testMultiDoubleArray() {
final double[][] array1 = new double[2][2];
final double[][] array2 = new double[2][2];
final double[][] array3 = new double[2][3];
for (int i = 0; i < array1.length; ++i) {
for (int j = 0; j < array1[0].length; j++) {
array1[i][j] = (i + 1) * (j + 1);
array2[i][j] = (i + 1) * (j + 1);
array3[i][j] = (i + 1) * (j + 1);
}
}
array3[1][2] = 100;
array3[1][2] = 100;
assertEquals(0, new CompareToBuilder().append(array1, array1).toComparison());
assertEquals(0, new CompareToBuilder().append(array1, array2).toComparison());
assertTrue(new CompareToBuilder().append(array1, array3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(array3, array1).toComparison() > 0);
array1[1][1] = 127;
assertTrue(new CompareToBuilder().append(array1, array2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(array2, array1).toComparison() < 0);
}
@Test
void testMultiFloatArray() {
final float[][] array1 = new float[2][2];
final float[][] array2 = new float[2][2];
final float[][] array3 = new float[2][3];
for (int i = 0; i < array1.length; ++i) {
for (int j = 0; j < array1[0].length; j++) {
array1[i][j] = (i + 1) * (j + 1);
array2[i][j] = (i + 1) * (j + 1);
array3[i][j] = (i + 1) * (j + 1);
}
}
array3[1][2] = 100;
array3[1][2] = 100;
assertEquals(0, new CompareToBuilder().append(array1, array1).toComparison());
assertEquals(0, new CompareToBuilder().append(array1, array2).toComparison());
assertTrue(new CompareToBuilder().append(array1, array3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(array3, array1).toComparison() > 0);
array1[1][1] = 127;
assertTrue(new CompareToBuilder().append(array1, array2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(array2, array1).toComparison() < 0);
}
@Test
void testMultiIntArray() {
final int[][] array1 = new int[2][2];
final int[][] array2 = new int[2][2];
final int[][] array3 = new int[2][3];
for (int i = 0; i < array1.length; ++i) {
for (int j = 0; j < array1[0].length; j++) {
array1[i][j] = (i + 1) * (j + 1);
array2[i][j] = (i + 1) * (j + 1);
array3[i][j] = (i + 1) * (j + 1);
}
}
array3[1][2] = 100;
array3[1][2] = 100;
assertEquals(0, new CompareToBuilder().append(array1, array1).toComparison());
assertEquals(0, new CompareToBuilder().append(array1, array2).toComparison());
assertTrue(new CompareToBuilder().append(array1, array3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(array3, array1).toComparison() > 0);
array1[1][1] = 200;
assertTrue(new CompareToBuilder().append(array1, array2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(array2, array1).toComparison() < 0);
}
@Test
void testMultiLongArray() {
final long[][] array1 = new long[2][2];
final long[][] array2 = new long[2][2];
final long[][] array3 = new long[2][3];
for (int i = 0; i < array1.length; ++i) {
for (int j = 0; j < array1[0].length; j++) {
array1[i][j] = (i + 1) * (j + 1);
array2[i][j] = (i + 1) * (j + 1);
array3[i][j] = (i + 1) * (j + 1);
}
}
array3[1][2] = 100;
array3[1][2] = 100;
assertEquals(0, new CompareToBuilder().append(array1, array1).toComparison());
assertEquals(0, new CompareToBuilder().append(array1, array2).toComparison());
assertTrue(new CompareToBuilder().append(array1, array3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(array3, array1).toComparison() > 0);
array1[1][1] = 200;
assertTrue(new CompareToBuilder().append(array1, array2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(array2, array1).toComparison() < 0);
}
@Test
void testMultiShortArray() {
final short[][] array1 = new short[2][2];
final short[][] array2 = new short[2][2];
final short[][] array3 = new short[2][3];
for (short i = 0; i < array1.length; ++i) {
for (short j = 0; j < array1[0].length; j++) {
array1[i][j] = (short) ((i + 1) * (j + 1));
array2[i][j] = (short) ((i + 1) * (j + 1));
array3[i][j] = (short) ((i + 1) * (j + 1));
}
}
array3[1][2] = 100;
array3[1][2] = 100;
assertEquals(0, new CompareToBuilder().append(array1, array1).toComparison());
assertEquals(0, new CompareToBuilder().append(array1, array2).toComparison());
assertTrue(new CompareToBuilder().append(array1, array3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(array3, array1).toComparison() > 0);
array1[1][1] = 200;
assertTrue(new CompareToBuilder().append(array1, array2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(array2, array1).toComparison() < 0);
}
@Test
void testObject() {
final TestObject o1 = new TestObject(4);
final TestObject o2 = new TestObject(4);
assertEquals(0, new CompareToBuilder().append(o1, o1).toComparison());
assertEquals(0, new CompareToBuilder().append(o1, o2).toComparison());
o2.setA(5);
assertTrue(new CompareToBuilder().append(o1, o2).toComparison() < 0);
assertTrue(new CompareToBuilder().append(o2, o1).toComparison() > 0);
assertTrue(new CompareToBuilder().append(o1, null).toComparison() > 0);
assertEquals(0, new CompareToBuilder().append((Object) null, null).toComparison());
assertTrue(new CompareToBuilder().append(null, o1).toComparison() < 0);
}
@Test
void testObjectArray() {
final TestObject[] obj1 = new TestObject[2];
obj1[0] = new TestObject(4);
obj1[1] = new TestObject(5);
final TestObject[] obj2 = new TestObject[2];
obj2[0] = new TestObject(4);
obj2[1] = new TestObject(5);
final TestObject[] obj3 = new TestObject[3];
obj3[0] = new TestObject(4);
obj3[1] = new TestObject(5);
obj3[2] = new TestObject(6);
assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
obj1[1] = new TestObject(7);
assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
assertTrue(new CompareToBuilder().append(obj1, null).toComparison() > 0);
assertEquals(0, new CompareToBuilder().append((Object[]) null, null).toComparison());
assertTrue(new CompareToBuilder().append(null, obj1).toComparison() < 0);
}
    @Test
    void testObjectArrayHiddenByObject() {
        // Arrays passed through plain Object references must still be
        // compared element-by-element, not by reference identity.
        final TestObject[] array1 = new TestObject[2];
        array1[0] = new TestObject(4);
        array1[1] = new TestObject(5);
        final TestObject[] array2 = new TestObject[2];
        array2[0] = new TestObject(4);
        array2[1] = new TestObject(5);
        final TestObject[] array3 = new TestObject[3];
        array3[0] = new TestObject(4);
        array3[1] = new TestObject(5);
        array3[2] = new TestObject(6);
        // Hide the arrays behind Object-typed variables.
        final Object obj1 = array1;
        final Object obj2 = array2;
        final Object obj3 = array3;
        assertEquals(0, new CompareToBuilder().append(obj1, obj1).toComparison());
        assertEquals(0, new CompareToBuilder().append(obj1, obj2).toComparison());
        // The longer array compares greater when the shared prefix is equal.
        assertTrue(new CompareToBuilder().append(obj1, obj3).toComparison() < 0);
        assertTrue(new CompareToBuilder().append(obj3, obj1).toComparison() > 0);
        // Mutating the underlying array is visible through the Object alias.
        array1[1] = new TestObject(7);
        assertTrue(new CompareToBuilder().append(obj1, obj2).toComparison() > 0);
        assertTrue(new CompareToBuilder().append(obj2, obj1).toComparison() < 0);
    }
@Test
void testObjectBuild() {
final TestObject o1 = new TestObject(4);
final TestObject o2 = new TestObject(4);
assertEquals(Integer.valueOf(0), new CompareToBuilder().append(o1, o1).build());
assertEquals(Integer.valueOf(0), new CompareToBuilder().append(o1, o2).build());
o2.setA(5);
assertTrue(new CompareToBuilder().append(o1, o2).build().intValue() < 0);
assertTrue(new CompareToBuilder().append(o2, o1).build().intValue() > 0);
assertTrue(new CompareToBuilder().append(o1, null).build().intValue() > 0);
assertEquals(Integer.valueOf(0), new CompareToBuilder().append((Object) null, null).build());
assertTrue(new CompareToBuilder().append(null, o1).build().intValue() < 0);
}
@Test
void testObjectComparator() {
final String o1 = "Fred";
String o2 = "Fred";
assertEquals(0, new CompareToBuilder().append(o1, o1, String.CASE_INSENSITIVE_ORDER).toComparison());
assertEquals(0, new CompareToBuilder().append(o1, o2, String.CASE_INSENSITIVE_ORDER).toComparison());
o2 = "FRED";
assertEquals(0, new CompareToBuilder().append(o1, o2, String.CASE_INSENSITIVE_ORDER).toComparison());
assertEquals(0, new CompareToBuilder().append(o2, o1, String.CASE_INSENSITIVE_ORDER).toComparison());
o2 = "FREDA";
assertTrue(new CompareToBuilder().append(o1, o2, String.CASE_INSENSITIVE_ORDER).toComparison() < 0);
assertTrue(new CompareToBuilder().append(o2, o1, String.CASE_INSENSITIVE_ORDER).toComparison() > 0);
assertTrue(new CompareToBuilder().append(o1, null, String.CASE_INSENSITIVE_ORDER).toComparison() > 0);
assertEquals(0, new CompareToBuilder().append(null, null, String.CASE_INSENSITIVE_ORDER).toComparison());
assertTrue(new CompareToBuilder().append(null, o1, String.CASE_INSENSITIVE_ORDER).toComparison() < 0);
}
    @Test
    void testObjectComparatorNull() {
        // A null comparator falls back to the operands' natural ordering.
        final String o1 = "Fred";
        String o2 = "Fred";
        assertEquals(0, new CompareToBuilder().append(o1, o1, null).toComparison());
        assertEquals(0, new CompareToBuilder().append(o1, o2, null).toComparison());
        o2 = "Zebra";
        // Natural String ordering: "Fred" < "Zebra".
        assertTrue(new CompareToBuilder().append(o1, o2, null).toComparison() < 0);
        assertTrue(new CompareToBuilder().append(o2, o1, null).toComparison() > 0);
        // null operands: non-null > null, null == null.
        assertTrue(new CompareToBuilder().append(o1, null, null).toComparison() > 0);
        assertEquals(0, new CompareToBuilder().append(null, null, null).toComparison());
        assertTrue(new CompareToBuilder().append(null, o1, null).toComparison() < 0);
    }
@Test
void testObjectEx2() {
final TestObject o1 = new TestObject(4);
final Object o2 = new Object();
assertThrows(ClassCastException.class, () -> new CompareToBuilder().append(o1, o2));
}
@Test
void testRaggedArray() {
final long[][] array1 = new long[2][];
final long[][] array2 = new long[2][];
final long[][] array3 = new long[3][];
for (int i = 0; i < array1.length; ++i) {
array1[i] = new long[2];
array2[i] = new long[2];
array3[i] = new long[3];
for (int j = 0; j < array1[i].length; ++j) {
array1[i][j] = (i + 1) * (j + 1);
array2[i][j] = (i + 1) * (j + 1);
array3[i][j] = (i + 1) * (j + 1);
}
}
array3[1][2] = 100;
array3[1][2] = 100;
assertEquals(0, new CompareToBuilder().append(array1, array1).toComparison());
assertEquals(0, new CompareToBuilder().append(array1, array2).toComparison());
assertTrue(new CompareToBuilder().append(array1, array3).toComparison() < 0);
assertTrue(new CompareToBuilder().append(array3, array1).toComparison() > 0);
array1[1][1] = 200;
assertTrue(new CompareToBuilder().append(array1, array2).toComparison() > 0);
assertTrue(new CompareToBuilder().append(array2, array1).toComparison() < 0);
}
@Test
void testReflectionCompare() {
final TestObject o1 = new TestObject(4);
final TestObject o2 = new TestObject(4);
assertEquals(0, CompareToBuilder.reflectionCompare(o1, o1));
assertEquals(0, CompareToBuilder.reflectionCompare(o1, o2));
o2.setA(5);
assertTrue(CompareToBuilder.reflectionCompare(o1, o2) < 0);
assertTrue(CompareToBuilder.reflectionCompare(o2, o1) > 0);
}
@Test
void testReflectionCompareEx1() {
final TestObject o1 = new TestObject(4);
assertNullPointerException(() -> CompareToBuilder.reflectionCompare(o1, null));
}
@Test
void testReflectionCompareEx2() {
final TestObject o1 = new TestObject(4);
final Object o2 = new Object();
assertThrows(ClassCastException.class, () -> CompareToBuilder.reflectionCompare(o1, o2));
}
    @Test
    void testReflectionHierarchyCompare() {
        // Exercise the hierarchy contract without transients and with no excluded fields.
        testReflectionHierarchyCompare(false, null);
    }
private void testReflectionHierarchyCompare(final boolean testTransients, final String[] excludeFields) {
final TestObject to1 = new TestObject(1);
final TestObject to2 = new TestObject(2);
final TestObject to3 = new TestObject(3);
final TestSubObject tso1 = new TestSubObject(1, 1);
final TestSubObject tso2 = new TestSubObject(2, 2);
final TestSubObject tso3 = new TestSubObject(3, 3);
assertReflectionCompareContract(to1, to1, to1, false, excludeFields);
assertReflectionCompareContract(to1, to2, to3, false, excludeFields);
assertReflectionCompareContract(tso1, tso1, tso1, false, excludeFields);
assertReflectionCompareContract(tso1, tso2, tso3, false, excludeFields);
assertReflectionCompareContract("1", "2", "3", false, excludeFields);
assertTrue(0 != CompareToBuilder.reflectionCompare(tso1, new TestSubObject(1, 0), testTransients));
assertTrue(0 != CompareToBuilder.reflectionCompare(tso1, new TestSubObject(0, 1), testTransients));
// root | TestTransientSubObject |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/internal/EmptyInterceptor.java | {
"start": 592,
"end": 1616
} | class ____ implements Interceptor, Serializable {
public static final Interceptor INSTANCE = new EmptyInterceptor();
private EmptyInterceptor() {
}
@Override
public boolean onLoad(Object entity, Object id, Object[] state, String[] propertyNames, Type[] types) {
return false;
}
@Override
public boolean onFlushDirty(
Object entity,
Object id,
Object[] currentState,
Object[] previousState,
String[] propertyNames,
Type[] types) {
return false;
}
@Override
public boolean onSave(Object entity, Object id, Object[] state, String[] propertyNames, Type[] types) {
return false;
}
@Override
public void onDelete(Object entity, Object id, Object[] state, String[] propertyNames, Type[] types) {
}
@Override
public int[] findDirty(
Object entity,
Object id,
Object[] currentState,
Object[] previousState,
String[] propertyNames,
Type[] types) {
return null;
}
@Override
public Object getEntity(String entityName, Object id) {
return null;
}
}
| EmptyInterceptor |
java | grpc__grpc-java | okhttp/src/main/java/io/grpc/okhttp/SslSocketFactoryChannelCredentials.java | {
"start": 1216,
"end": 1637
} | class ____ extends io.grpc.ChannelCredentials {
private final SSLSocketFactory factory;
private ChannelCredentials(SSLSocketFactory factory) {
this.factory = Preconditions.checkNotNull(factory, "factory");
}
public SSLSocketFactory getFactory() {
return factory;
}
@Override
public io.grpc.ChannelCredentials withoutBearerTokens() {
return this;
}
}
}
| ChannelCredentials |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/BindableRuntimeHintsRegistrarTests.java | {
"start": 20527,
"end": 20704
} | class ____<T> {
private final T value;
GenericObject(T value) {
this.value = value;
}
public T getValue() {
return this.value;
}
}
public static | GenericObject |
java | apache__flink | flink-test-utils-parent/flink-test-utils/src/main/java/org/apache/flink/test/util/MultipleProgramsTestBaseJUnit4.java | {
"start": 1069,
"end": 2014
} | class ____ unit tests that run multiple tests and want to reuse the same Flink cluster. This
* saves a significant amount of time, since the startup and shutdown of the Flink clusters
* (including actor systems, etc) usually dominates the execution of the actual tests.
*
* <p>To write a unit test against this test base, simply extend it and add one or more regular test
* methods and retrieve the ExecutionEnvironment from the context:
*
* <pre>{@code
* {@literal @}Test
* public void someTest() {
* StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
* // test code
* env.execute();
* }
*
* {@literal @}Test
* public void anotherTest() {
* StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
* // test code
* env.execute();
* }
*
* }</pre>
*
* @deprecated Use {@link MultipleProgramsTestBase} instead.
*/
@Deprecated
public | for |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/IrqHandler.java | {
"start": 3947,
"end": 4165
} | interface ____ {
/**
* Handle an interrupt.
* @param interruptData data
*/
void interrupted(InterruptData interruptData);
}
/**
* Interrupt data to pass on.
*/
public static | Interrupted |
java | hibernate__hibernate-orm | documentation/src/main/asciidoc/integrationguide/chapters/services/extras/override/LatestAndGreatestConnectionProviderImpl.java | {
"start": 7,
"end": 1391
} | class ____
implements ConnectionProvider, Startable, Stoppable, Configurable {
private LatestAndGreatestPoolBuilder lagPoolBuilder;
private LatestAndGreatestPool lagPool;
private boolean available = false;
@Override
public void configure(Map<String, Object> configurationValues) {
// extract our config from the settings map
lagPoolBuilder = buildBuilder( configurationValues );
}
@Override
public void start() {
// start the underlying pool
lagPool = lagPoolBuilder.buildPool();
available = true;
}
@Override
public void stop() {
available = false;
// stop the underlying pool
lagPool.shutdown();
}
@Override
public Connection getConnection() throws SQLException {
if ( !available ) {
throw new HibernateException(
"LatestAndGreatest ConnectionProvider not available for use" )
}
return lagPool.borrowConnection();
}
@Override
public void closeConnection(Connection conn) throws SQLException {
if ( !available ) {
warn(
"LatestAndGreatestConnectionProvider not available for use" )
}
if ( conn == null ) {
return;
}
lagPool.releaseConnection( conn );
}
...
} | LatestAndGreatestConnectionProviderImpl |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/streaming/runtime/PartitionerITCase.java | {
"start": 9976,
"end": 10654
} | class ____
extends RichMapFunction<Tuple1<String>, Tuple2<Integer, String>> {
private static final long serialVersionUID = 1L;
private int indexOfSubtask;
@Override
public void open(OpenContext openContext) throws Exception {
super.open(openContext);
RuntimeContext runtimeContext = getRuntimeContext();
indexOfSubtask = runtimeContext.getTaskInfo().getIndexOfThisSubtask();
}
@Override
public Tuple2<Integer, String> map(Tuple1<String> value) throws Exception {
return new Tuple2<Integer, String>(indexOfSubtask, value.f0);
}
}
}
| SubtaskIndexAssigner |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/metadata/FlinkRelMetadataQuery.java | {
"start": 4005,
"end": 13796
} | class ____ {
private FlinkMetadata.ColumnInterval.Handler columnIntervalHandler =
initialHandler(FlinkMetadata.ColumnInterval.Handler.class);
private FlinkMetadata.FilteredColumnInterval.Handler filteredColumnInterval =
initialHandler(FlinkMetadata.FilteredColumnInterval.Handler.class);
private FlinkMetadata.ColumnNullCount.Handler columnNullCountHandler =
initialHandler(FlinkMetadata.ColumnNullCount.Handler.class);
private FlinkMetadata.ColumnOriginNullCount.Handler columnOriginNullCountHandler =
initialHandler(FlinkMetadata.ColumnOriginNullCount.Handler.class);
private FlinkMetadata.UniqueGroups.Handler uniqueGroupsHandler =
initialHandler(FlinkMetadata.UniqueGroups.Handler.class);
private FlinkMetadata.FlinkDistribution.Handler distributionHandler =
initialHandler(FlinkMetadata.FlinkDistribution.Handler.class);
private FlinkMetadata.ModifiedMonotonicity.Handler modifiedMonotonicityHandler =
initialHandler(FlinkMetadata.ModifiedMonotonicity.Handler.class);
private FlinkMetadata.WindowProperties.Handler windowPropertiesHandler =
initialHandler(FlinkMetadata.WindowProperties.Handler.class);
private FlinkMetadata.UpsertKeys.Handler upsertKeysHandler =
initialHandler(FlinkMetadata.UpsertKeys.Handler.class);
}
/**
* Returns the {@link FlinkMetadata.ColumnInterval} statistic.
*
* @param rel the relational expression
* @param index the index of the given column
* @return the interval of the given column of a specified relational expression. Returns null
* if interval cannot be estimated, Returns {@link
* org.apache.flink.table.planner.plan.stats.EmptyValueInterval} if column values does not
* contains any value except for null.
*/
public ValueInterval getColumnInterval(RelNode rel, int index) {
for (; ; ) {
try {
return columnIntervalHandler.getColumnInterval(rel, this, index);
} catch (JaninoRelMetadataProvider.NoHandler e) {
columnIntervalHandler = revise(e.relClass, FlinkMetadata.ColumnInterval.DEF);
}
}
}
/**
* Returns the {@link FlinkMetadata.ColumnInterval} of the given column under the given filter
* argument.
*
* @param rel the relational expression
* @param columnIndex the index of the given column
* @param filterArg the index of the filter argument
* @return the interval of the given column of a specified relational expression. Returns null
* if interval cannot be estimated, Returns {@link
* org.apache.flink.table.planner.plan.stats.EmptyValueInterval} if column values does not
* contains any value except for null.
*/
public ValueInterval getFilteredColumnInterval(RelNode rel, int columnIndex, int filterArg) {
for (; ; ) {
try {
return filteredColumnInterval.getFilteredColumnInterval(
rel, this, columnIndex, filterArg);
} catch (JaninoRelMetadataProvider.NoHandler e) {
filteredColumnInterval =
revise(e.relClass, FlinkMetadata.FilteredColumnInterval.DEF);
}
}
}
/**
* Returns the null count of the given column.
*
* @param rel the relational expression
* @param index the index of the given column
* @return the null count of the given column if can be estimated, else return null.
*/
public Double getColumnNullCount(RelNode rel, int index) {
for (; ; ) {
try {
return columnNullCountHandler.getColumnNullCount(rel, this, index);
} catch (JaninoRelMetadataProvider.NoHandler e) {
columnNullCountHandler = revise(e.relClass, FlinkMetadata.ColumnNullCount.DEF);
}
}
}
/**
* Returns origin null count of the given column.
*
* @param rel the relational expression
* @param index the index of the given column
* @return the null count of the given column if can be estimated, else return null.
*/
public Double getColumnOriginNullCount(RelNode rel, int index) {
for (; ; ) {
try {
return columnOriginNullCountHandler.getColumnOriginNullCount(rel, this, index);
} catch (JaninoRelMetadataProvider.NoHandler e) {
columnOriginNullCountHandler =
revise(e.relClass, FlinkMetadata.ColumnOriginNullCount.DEF);
}
}
}
/**
* Returns the (minimum) unique groups of the given columns.
*
* @param rel the relational expression
* @param columns the given columns in a specified relational expression. The given columns
* should not be null.
* @return the (minimum) unique columns which should be a sub-collection of the given columns,
* and should not be null or empty. If none unique columns can be found, return the given
* columns.
*/
public ImmutableBitSet getUniqueGroups(RelNode rel, ImmutableBitSet columns) {
for (; ; ) {
try {
Preconditions.checkArgument(columns != null);
if (columns.isEmpty()) {
return columns;
}
ImmutableBitSet uniqueGroups =
uniqueGroupsHandler.getUniqueGroups(rel, this, columns);
Preconditions.checkArgument(uniqueGroups != null && !uniqueGroups.isEmpty());
Preconditions.checkArgument(columns.contains(uniqueGroups));
return uniqueGroups;
} catch (JaninoRelMetadataProvider.NoHandler e) {
uniqueGroupsHandler = revise(e.relClass, FlinkMetadata.UniqueGroups.DEF);
}
}
}
/**
* Returns the {@link FlinkRelDistribution} statistic.
*
* @param rel the relational expression
* @return description of how the rows in the relational expression are physically distributed
*/
public FlinkRelDistribution flinkDistribution(RelNode rel) {
for (; ; ) {
try {
return distributionHandler.flinkDistribution(rel, this);
} catch (JaninoRelMetadataProvider.NoHandler e) {
distributionHandler = revise(e.relClass, FlinkMetadata.FlinkDistribution.DEF);
}
}
}
/**
* Returns the {@link RelModifiedMonotonicity} statistic.
*
* @param rel the relational expression
* @return the monotonicity for the corresponding RelNode
*/
public RelModifiedMonotonicity getRelModifiedMonotonicity(RelNode rel) {
for (; ; ) {
try {
return modifiedMonotonicityHandler.getRelModifiedMonotonicity(rel, this);
} catch (JaninoRelMetadataProvider.NoHandler e) {
modifiedMonotonicityHandler =
revise(e.relClass, FlinkMetadata.ModifiedMonotonicity.DEF);
}
}
}
/**
* Returns the {@link RelWindowProperties} statistic.
*
* @param rel the relational expression
* @return the window properties for the corresponding RelNode
*/
public RelWindowProperties getRelWindowProperties(RelNode rel) {
for (; ; ) {
try {
return windowPropertiesHandler.getWindowProperties(rel, this);
} catch (JaninoRelMetadataProvider.NoHandler e) {
windowPropertiesHandler = revise(e.relClass, FlinkMetadata.WindowProperties.DEF);
}
}
}
/**
* Determines the set of upsert minimal keys for this expression. A key is represented as an
* {@link org.apache.calcite.util.ImmutableBitSet}, where each bit position represents a 0-based
* output column ordinal.
*
* <p>Different from the unique keys: In distributed streaming computing, one record may be
* divided into RowKind.UPDATE_BEFORE and RowKind.UPDATE_AFTER. If a key changing join is
* connected downstream, the two records will be divided into different tasks, resulting in
* disorder. In this case, the downstream cannot rely on the order of the original key. So in
* this case, it has unique keys in the traditional sense, but it doesn't have upsert keys.
*
* @return set of keys, or null if this information cannot be determined (whereas empty set
* indicates definitely no keys at all)
*/
public Set<ImmutableBitSet> getUpsertKeys(RelNode rel) {
for (; ; ) {
try {
return upsertKeysHandler.getUpsertKeys(rel, this);
} catch (JaninoRelMetadataProvider.NoHandler e) {
upsertKeysHandler = revise(e.relClass, FlinkMetadata.UpsertKeys.DEF);
}
}
}
/**
* Determines the set of upsert minimal keys in a single key group range, which means can ignore
* exchange by partition keys.
*
* <p>Some optimizations can rely on this ability to do upsert in a single key group range.
*/
public Set<ImmutableBitSet> getUpsertKeysInKeyGroupRange(RelNode rel, int[] partitionKeys) {
if (rel instanceof Exchange) {
Exchange exchange = (Exchange) rel;
if (Arrays.equals(
exchange.getDistribution().getKeys().stream()
.mapToInt(Integer::intValue)
.toArray(),
partitionKeys)) {
rel = exchange.getInput();
}
}
return getUpsertKeys(rel);
}
}
| Handlers |
java | apache__flink | flink-filesystems/flink-oss-fs-hadoop/src/test/java/org/apache/flink/fs/osshadoop/HadoopOSSRecoverableWriterExceptionITCase.java | {
"start": 1501,
"end": 2843
} | class ____
extends AbstractHadoopRecoverableWriterExceptionITCase {
// ----------------------- OSS general configuration -----------------------
private static final int MAX_CONCURRENT_UPLOADS_VALUE = 2;
@BeforeAll
static void checkCredentialsAndSetup() throws IOException {
// check whether credentials exist
OSSTestCredentials.assumeCredentialsAvailable();
basePath = new Path(OSSTestCredentials.getTestBucketUri() + "tests-" + UUID.randomUUID());
// initialize configuration with valid credentials
final Configuration conf = new Configuration();
conf.setString("fs.oss.endpoint", OSSTestCredentials.getOSSEndpoint());
conf.setString("fs.oss.accessKeyId", OSSTestCredentials.getOSSAccessKey());
conf.setString("fs.oss.accessKeySecret", OSSTestCredentials.getOSSSecretKey());
conf.set(MAX_CONCURRENT_UPLOADS, MAX_CONCURRENT_UPLOADS_VALUE);
final String defaultTmpDir = tempFolder.getAbsolutePath() + "/oss_tmp_dir";
conf.set(CoreOptions.TMP_DIRS, defaultTmpDir);
FileSystem.initialize(conf);
skipped = false;
}
@Override
protected String getLocalTmpDir() throws Exception {
return ((FlinkOSSFileSystem) getFileSystem()).getLocalTmpDir();
}
}
| HadoopOSSRecoverableWriterExceptionITCase |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/AvroTypeException.java | {
"start": 885,
"end": 1105
} | class ____ extends AvroRuntimeException {
public AvroTypeException(String message) {
super(message);
}
public AvroTypeException(String message, Throwable cause) {
super(message, cause);
}
}
| AvroTypeException |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/pagination/TopLimitHandler.java | {
"start": 461,
"end": 1349
} | class ____ extends AbstractNoOffsetLimitHandler {
public static TopLimitHandler INSTANCE = new TopLimitHandler(true);
public TopLimitHandler(boolean variableLimit) {
super(variableLimit);
}
@Override
protected String limitClause() {
return " top ? ";
}
@Override
protected String limitClause(int jdbcParameterCount, ParameterMarkerStrategy parameterMarkerStrategy) {
return " top " + parameterMarkerStrategy.createMarker( 1, null ) + " rows only";
}
@Override
protected String insert(String limitClause, String sql) {
return insertAfterDistinct( limitClause, sql );
}
@Override
public boolean bindLimitParametersFirst() {
return true;
}
@Override
public boolean processSqlMutatesState() {
return false;
}
@Override
public int getParameterPositionStart(Limit limit) {
return hasMaxRows( limit ) && supportsVariableLimit() ? 2 : 1;
}
}
| TopLimitHandler |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/BeanProcessorSpecializedMessageTest.java | {
"start": 2894,
"end": 3181
} | class ____ extends DefaultMessage {
public MyMessage(CamelContext camelContext) {
super(camelContext);
}
@Override
public MyMessage newInstance() {
return new MyMessage(getCamelContext());
}
}
public static | MyMessage |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/dialect/functional/SQLServerDialectTempTableCollationTest.java | {
"start": 1467,
"end": 6180
} | class ____ extends BaseSessionFactoryFunctionalTest {
private String originalDBCollation;
private final String changedDBCollation = "SQL_Latin1_General_CP437_BIN";
private boolean collationChanged;
@Override
protected void applySettings(StandardServiceRegistryBuilder builder) {
builder.applySetting( AvailableSettings.KEYWORD_AUTO_QUOTING_ENABLED, Boolean.TRUE );
}
@AfterEach
protected void releaseSessionFactory() {
if ( originalDBCollation != null && collationChanged && !changedDBCollation.equals( originalDBCollation ) ) {
StandardServiceRegistry ssr = ServiceRegistryUtil.serviceRegistryBuilder().build();
try {
TransactionUtil.doWithJDBC(
ssr,
connection -> {
try (Statement statement = connection.createStatement()) {
connection.setAutoCommit( true );
String dbName;
try ( ResultSet rs = statement.executeQuery( "SELECT DB_NAME()" ) ) {
rs.next();
dbName = rs.getString( 1 );
}
statement.execute( "USE master" );
statement.execute( "ALTER DATABASE " + dbName + " SET SINGLE_USER WITH ROLLBACK IMMEDIATE" );
statement.executeUpdate( "ALTER DATABASE " + dbName + " COLLATE " + originalDBCollation );
statement.execute( "ALTER DATABASE " + dbName + " SET MULTI_USER WITH ROLLBACK IMMEDIATE" );
statement.execute( "USE " + dbName );
}
}
);
}
catch (SQLException e) {
throw new RuntimeException( "Failed to revert back database collation to " + originalDBCollation, e );
}
finally {
ssr.close();
}
}
// The alter database calls could lead to issues with existing connections, so we reset the shared pool here
SharedDriverManagerConnectionProvider.getInstance().reset();
}
@Override
public SessionFactoryImplementor produceSessionFactory(MetadataImplementor model) {
StandardServiceRegistry ssr = ServiceRegistryUtil.serviceRegistryBuilder().build();
try {
try {
TransactionUtil.doWithJDBC(
ssr,
connection -> {
try (Statement statement = connection.createStatement()) {
connection.setAutoCommit( true );
try ( ResultSet rs = statement.executeQuery( "SELECT DATABASEPROPERTYEX(DB_NAME(),'collation')" ) ) {
rs.next();
String instanceCollation = rs.getString( 1 );
Assertions.assertNotEquals( instanceCollation, changedDBCollation );
}
}
}
);
}
catch (SQLException e) {
fail( e );
}
try {
TransactionUtil.doWithJDBC(
ssr,
connection -> {
try (Statement statement = connection.createStatement()) {
connection.setAutoCommit( true );
try ( ResultSet rs = statement.executeQuery( "SELECT CONVERT (varchar(256), DATABASEPROPERTYEX(DB_NAME(),'collation'))" ) ) {
rs.next();
originalDBCollation = rs.getString( 1 );
}
}
}
);
}
catch (SQLException e) {
fail( e );
}
TransactionUtil.doWithJDBC(
ssr,
connection -> {
try (Statement statement = connection.createStatement()) {
connection.setAutoCommit( true );
String dbName;
try ( ResultSet rs = statement.executeQuery( "SELECT DB_NAME()" ) ) {
rs.next();
dbName = rs.getString( 1 );
}
statement.execute( "USE master" );
statement.execute( "ALTER DATABASE " + dbName + " SET SINGLE_USER WITH ROLLBACK IMMEDIATE" );
statement.executeUpdate( "ALTER DATABASE " + dbName + " COLLATE " + changedDBCollation );
statement.execute( "ALTER DATABASE " + dbName + " SET MULTI_USER WITH ROLLBACK IMMEDIATE" );
statement.execute( "USE " + dbName );
collationChanged = true;
}
}
);
}
catch ( SQLException e ) {
throw new RuntimeException( e );
}
finally {
ssr.close();
}
return super.produceSessionFactory(model);
}
@Test
public void testTemporaryTableCreateWithoutCollationConflict() {
// without fixing "HHH-3326", the following exception will be thrown:
// Cannot resolve the collation conflict between "SQL_Latin1_General_CP1_CI_AS" and "SQL_Latin1_General_CP437_BIN" in the equal to operation.
doInHibernate( this::sessionFactory, session -> {
session.createQuery( "update Woman w set w.description = :description where w.age > :age" )
.setParameter( "description", "your are old" )
.setParameter( "age", 30 )
.executeUpdate();
} );
}
@Override
protected Class<?>[] getAnnotatedClasses() {
return new Class<?>[] {
Human.class,
Woman.class
};
}
@Entity(name = "Human")
@Table(name = "Human")
@Inheritance(strategy = InheritanceType.JOINED)
public static abstract | SQLServerDialectTempTableCollationTest |
java | spring-projects__spring-framework | spring-websocket/src/main/java/org/springframework/web/socket/config/annotation/EnableWebSocket.java | {
"start": 1180,
"end": 1415
} | class ____ {
*
* }
* </pre>
*
* <p>Customize the imported configuration by implementing the
* {@link WebSocketConfigurer} interface:
*
* <pre class="code">
* @Configuration
* @EnableWebSocket
* public | MyWebSocketConfig |
java | apache__hadoop | hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Histogram.java | {
"start": 1161,
"end": 4806
} | class ____ implements Iterable<Map.Entry<Long, Long>> {
private TreeMap<Long, Long> content = new TreeMap<Long, Long>();
private String name;
private long totalCount;
public Histogram() {
this("(anonymous)");
}
public Histogram(String name) {
super();
this.name = name;
totalCount = 0L;
}
public void dump(PrintStream stream) {
stream.print("dumping Histogram " + name + ":\n");
Iterator<Map.Entry<Long, Long>> iter = iterator();
while (iter.hasNext()) {
Map.Entry<Long, Long> ent = iter.next();
stream.print("val/count pair: " + (long) ent.getKey() + ", "
+ (long) ent.getValue() + "\n");
}
stream.print("*** end *** \n");
}
public Iterator<Map.Entry<Long, Long>> iterator() {
return content.entrySet().iterator();
}
public long get(long key) {
Long result = content.get(key);
return result == null ? 0 : result;
}
public long getTotalCount() {
return totalCount;
}
public void enter(long value) {
Long existingValue = content.get(value);
if (existingValue == null) {
content.put(value, 1L);
} else {
content.put(value, existingValue + 1L);
}
++totalCount;
}
/**
* Produces a discrete approximation of the CDF. The user provides the points
* on the {@code Y} axis he wants, and we give the corresponding points on the
* {@code X} axis, plus the minimum and maximum from the data.
*
* @param scale
* the denominator applied to every element of buckets. For example,
* if {@code scale} is {@code 1000}, a {@code buckets} element of 500
* will specify the median in that output slot.
* @param buckets
* an array of int, all less than scale and each strictly greater
* than its predecessor if any. We don't check these requirements.
* @return a {@code long[]}, with two more elements than {@code buckets} has.
* The first resp. last element is the minimum resp. maximum value
* that was ever {@code enter}ed. The rest of the elements correspond
* to the elements of {@code buckets} and carry the first element
* whose rank is no less than {@code #content elements * scale /
* bucket}.
*
*/
public long[] getCDF(int scale, int[] buckets) {
if (totalCount == 0) {
return null;
}
long[] result = new long[buckets.length + 2];
// fill in the min and the max
result[0] = content.firstEntry().getKey();
result[buckets.length + 1] = content.lastEntry().getKey();
Iterator<Map.Entry<Long, Long>> iter = content.entrySet().iterator();
long cumulativeCount = 0;
int bucketCursor = 0;
// Loop invariant: the item at buckets[bucketCursor] can still be reached
// from iter, and the number of logged elements no longer available from
// iter is cumulativeCount.
//
// cumulativeCount/totalCount is therefore strictly less than
// buckets[bucketCursor]/scale .
while (iter.hasNext()) {
long targetCumulativeCount = buckets[bucketCursor] * totalCount / scale;
Map.Entry<Long, Long> elt = iter.next();
cumulativeCount += elt.getValue();
while (cumulativeCount >= targetCumulativeCount) {
result[bucketCursor + 1] = elt.getKey();
++bucketCursor;
if (bucketCursor < buckets.length) {
targetCumulativeCount = buckets[bucketCursor] * totalCount / scale;
} else {
break;
}
}
if (bucketCursor == buckets.length) {
break;
}
}
return result;
}
}
| Histogram |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.