language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
google__guava
android/guava/src/com/google/common/collect/Synchronized.java
{ "start": 33610, "end": 35219 }
class ____<K extends @Nullable Object, V extends @Nullable Object> extends SynchronizedMap<K, Collection<V>> { transient @Nullable Set<Map.Entry<K, Collection<V>>> asMapEntrySet; transient @Nullable Collection<Collection<V>> asMapValues; SynchronizedAsMap(Map<K, Collection<V>> delegate, @Nullable Object mutex) { super(delegate, mutex); } @Override public @Nullable Collection<V> get(@Nullable Object key) { synchronized (mutex) { Collection<V> collection = super.get(key); return (collection == null) ? null : typePreservingCollection(collection, mutex); } } @Override public Set<Map.Entry<K, Collection<V>>> entrySet() { synchronized (mutex) { if (asMapEntrySet == null) { asMapEntrySet = new SynchronizedAsMapEntries<>(delegate().entrySet(), mutex); } return asMapEntrySet; } } @Override public Collection<Collection<V>> values() { synchronized (mutex) { if (asMapValues == null) { asMapValues = new SynchronizedAsMapValues<V>(delegate().values(), mutex); } return asMapValues; } } @Override // A forwarding implementation can't do any better than the underlying object. @SuppressWarnings("CollectionUndefinedEquality") public boolean containsValue(@Nullable Object o) { // values() and its contains() method are both synchronized. return values().contains(o); } @GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0; } static final
SynchronizedAsMap
java
elastic__elasticsearch
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequestBuilder.java
{ "start": 686, "end": 1445 }
class ____ extends ActionRequestBuilder<SetEnabledRequest, ActionResponse.Empty> implements WriteRequestBuilder<SetEnabledRequestBuilder> { public SetEnabledRequestBuilder(ElasticsearchClient client) { super(client, TransportSetEnabledAction.TYPE, new SetEnabledRequest()); } /** * Set the username of the user that should enabled or disabled. Must not be {@code null} */ public SetEnabledRequestBuilder username(String username) { request.username(username); return this; } /** * Set whether the user should be enabled or not */ public SetEnabledRequestBuilder enabled(boolean enabled) { request.enabled(enabled); return this; } }
SetEnabledRequestBuilder
java
apache__camel
components/camel-cxf/camel-cxf-spring-soap/src/test/java/org/apache/camel/component/cxf/CxfDispatchMessageTest.java
{ "start": 1616, "end": 4516 }
class ____ extends CxfDispatchTestSupport { @Override protected AbstractApplicationContext createApplicationContext() { return new ClassPathXmlApplicationContext("org/apache/camel/component/cxf/CxfDispatchMessageBeans.xml"); } @Test public void testDipatchMessage() throws Exception { final String name = "Tila"; Exchange exchange = sendJaxWsDispatchMessage(name, false); assertEquals(false, exchange.isFailed(), "The request should be handled sucessfully"); org.apache.camel.Message response = exchange.getMessage(); assertNotNull(response, "The response message must not be null"); String value = decodeResponseFromMessage(response.getBody(InputStream.class), exchange); assertTrue(value.endsWith(name), "The response body must match the request"); } @Test public void testDipatchMessageOneway() throws Exception { final String name = "Tila"; Exchange exchange = sendJaxWsDispatchMessage(name, true); assertEquals(false, exchange.isFailed(), "The request should be handled sucessfully"); org.apache.camel.Message response = exchange.getOut(); assertNotNull(response, "The response message must not be null"); assertNull(response.getBody(), "The response body must be null"); } protected Exchange sendJaxWsDispatchMessage(final String name, final boolean oneway) { Exchange exchange = template.send("direct:producer", new Processor() { public void process(final Exchange exchange) { InputStream request = encodeRequestInMessage(oneway ? 
MESSAGE_ONEWAY_TEMPLATE : MESSAGE_TEMPLATE, name, exchange); exchange.getIn().setBody(request, InputStream.class); // set the operation for oneway; otherwise use the default operation if (oneway) { exchange.getIn().setHeader(CxfConstants.OPERATION_NAME, INVOKE_ONEWAY_NAME); } } }); return exchange; } private static InputStream encodeRequestInMessage(String form, String name, Exchange exchange) { String payloadstr = String.format(form, name); InputStream message = null; try { message = new ByteArrayInputStream(payloadstr.getBytes(StandardCharsets.UTF_8)); } catch (Exception e) { // ignore and let it fail } return message; } private String decodeResponseFromMessage(InputStream message, Exchange exchange) { String value = null; try { Document doc = getDocumentBuilderFactory().newDocumentBuilder().parse(message); value = getResponseType(doc.getDocumentElement()); } catch (Exception e) { // ignore and let it fail } return value; } }
CxfDispatchMessageTest
java
apache__maven
api/maven-api-core/src/main/java/org/apache/maven/api/services/ExtensibleEnumRegistry.java
{ "start": 1313, "end": 1423 }
enum ____ that have been registered through SPI providers. * * @param <T> the specific type of extensible
values
java
redisson__redisson
redisson-hibernate/redisson-hibernate-5/src/main/java/org/redisson/hibernate/strategy/NonStrictReadWriteNaturalIdRegionAccessStrategy.java
{ "start": 1225, "end": 3701 }
class ____ extends BaseRegionAccessStrategy implements NaturalIdRegionAccessStrategy { public NonStrictReadWriteNaturalIdRegionAccessStrategy(Settings settings, GeneralDataRegion region) { super(settings, region); } @Override public Object get(SessionImplementor session, Object key, long txTimestamp) throws CacheException { return region.get(session, key); } @Override public boolean putFromLoad(SessionImplementor session, Object key, Object value, long txTimestamp, Object version, boolean minimalPutOverride) throws CacheException { if (minimalPutOverride && region.contains(key)) { return false; } region.put(session, key, value); return true; } @Override public SoftLock lockItem(SessionImplementor session, Object key, Object version) throws CacheException { return null; } @Override public void unlockItem(SessionImplementor session, Object key, SoftLock lock) throws CacheException { evict(key); } @Override public NaturalIdRegion getRegion() { return (NaturalIdRegion) region; } @Override public boolean insert(SessionImplementor session, Object key, Object value) throws CacheException { return false; } @Override public boolean afterInsert(SessionImplementor session, Object key, Object value) throws CacheException { return false; } @Override public boolean update(SessionImplementor session, Object key, Object value) throws CacheException { remove(session, key); return false; } @Override public boolean afterUpdate(SessionImplementor session, Object key, Object value, SoftLock lock) throws CacheException { unlockItem(session, key, lock); return false; } @Override public void remove(SessionImplementor session, Object key) throws CacheException { region.evict(key); } @Override public Object generateCacheKey(Object[] naturalIdValues, EntityPersister persister, SessionImplementor session) { return ((RedissonNaturalIdRegion)region).getCacheKeysFactory().createNaturalIdKey(naturalIdValues, persister, session); } @Override public Object[] getNaturalIdValues(Object cacheKey) { 
return ((RedissonNaturalIdRegion)region).getCacheKeysFactory().getNaturalIdValues(cacheKey); } }
NonStrictReadWriteNaturalIdRegionAccessStrategy
java
google__gson
gson/src/test/java/com/google/gson/ObjectTypeAdapterTest.java
{ "start": 952, "end": 3792 }
class ____ { private final Gson gson = new GsonBuilder().create(); private final TypeAdapter<Object> adapter = gson.getAdapter(Object.class); @Test public void testDeserialize() throws Exception { Map<?, ?> map = (Map<?, ?>) adapter.fromJson("{\"a\":5,\"b\":[1,2,null],\"c\":{\"x\":\"y\"}}"); assertThat(map.get("a")).isEqualTo(5.0); assertThat(map.get("b")).isEqualTo(Arrays.asList(1.0, 2.0, null)); assertThat(map.get("c")).isEqualTo(Collections.singletonMap("x", "y")); assertThat(map).hasSize(3); } @Test public void testSerialize() { Object object = new RuntimeType(); assertThat(adapter.toJson(object).replace("\"", "'")).isEqualTo("{'a':5,'b':[1,2,null]}"); } @Test public void testSerializeNullValue() { Map<String, Object> map = new LinkedHashMap<>(); map.put("a", null); assertThat(adapter.toJson(map).replace('"', '\'')).isEqualTo("{'a':null}"); } @Test public void testDeserializeNullValue() throws Exception { Map<String, Object> map = new LinkedHashMap<>(); map.put("a", null); assertThat(adapter.fromJson("{\"a\":null}")).isEqualTo(map); } @Test public void testSerializeObject() { assertThat(adapter.toJson(new Object())).isEqualTo("{}"); } /** Deeply nested JSON arrays should not cause {@link StackOverflowError} */ @SuppressWarnings("unchecked") @Test public void testDeserializeDeeplyNestedArrays() throws IOException { int times = 10000; // [[[ ... 
]]] String json = "[".repeat(times) + "]".repeat(times); JsonReader jsonReader = new JsonReader(new StringReader(json)); jsonReader.setNestingLimit(Integer.MAX_VALUE); int actualTimes = 0; List<List<?>> current = (List<List<?>>) adapter.read(jsonReader); while (true) { actualTimes++; if (current.isEmpty()) { break; } assertThat(current).hasSize(1); current = (List<List<?>>) current.get(0); } assertThat(actualTimes).isEqualTo(times); } /** Deeply nested JSON objects should not cause {@link StackOverflowError} */ @SuppressWarnings("unchecked") @Test public void testDeserializeDeeplyNestedObjects() throws IOException { int times = 10000; // {"a":{"a": ... {"a":null} ... }} String json = "{\"a\":".repeat(times) + "null" + "}".repeat(times); JsonReader jsonReader = new JsonReader(new StringReader(json)); jsonReader.setNestingLimit(Integer.MAX_VALUE); int actualTimes = 0; Map<String, Map<?, ?>> current = (Map<String, Map<?, ?>>) adapter.read(jsonReader); while (current != null) { assertThat(current).hasSize(1); actualTimes++; current = (Map<String, Map<?, ?>>) current.get("a"); } assertThat(actualTimes).isEqualTo(times); } @SuppressWarnings({"unused", "ClassCanBeStatic"}) private
ObjectTypeAdapterTest
java
spring-projects__spring-security
core/src/test/java/org/springframework/security/core/context/ReactiveSecurityContextHolderThreadLocalAccessorTests.java
{ "start": 1276, "end": 4187 }
class ____ { private ReactiveSecurityContextHolderThreadLocalAccessor threadLocalAccessor; @BeforeEach public void setUp() { this.threadLocalAccessor = new ReactiveSecurityContextHolderThreadLocalAccessor(); } @AfterEach public void tearDown() { this.threadLocalAccessor.setValue(); } @Test public void keyAlwaysReturnsSecurityContextClass() { assertThat(this.threadLocalAccessor.key()).isEqualTo(SecurityContext.class); } @Test public void getValueWhenThreadLocalNotSetThenReturnsNull() { assertThat(this.threadLocalAccessor.getValue()).isNull(); } @Test public void getValueWhenThreadLocalSetThenReturnsSecurityContextMono() { SecurityContext securityContext = SecurityContextHolder.createEmptyContext(); securityContext.setAuthentication(new TestingAuthenticationToken("user", "password")); Mono<SecurityContext> mono = Mono.just(securityContext); this.threadLocalAccessor.setValue(mono); assertThat(this.threadLocalAccessor.getValue()).isSameAs(mono); } @Test public void getValueWhenThreadLocalSetOnAnotherThreadThenReturnsNull() throws InterruptedException { CountDownLatch threadLocalSet = new CountDownLatch(1); CountDownLatch threadLocalRead = new CountDownLatch(1); CountDownLatch threadLocalCleared = new CountDownLatch(1); Runnable task = () -> { SecurityContext securityContext = SecurityContextHolder.createEmptyContext(); securityContext.setAuthentication(new TestingAuthenticationToken("user", "password")); Mono<SecurityContext> mono = Mono.just(securityContext); this.threadLocalAccessor.setValue(mono); threadLocalSet.countDown(); try { threadLocalRead.await(); } catch (InterruptedException ignored) { } finally { this.threadLocalAccessor.setValue(); threadLocalCleared.countDown(); } }; try (SimpleAsyncTaskExecutor taskExecutor = new SimpleAsyncTaskExecutor()) { taskExecutor.execute(task); threadLocalSet.await(); assertThat(this.threadLocalAccessor.getValue()).isNull(); threadLocalRead.countDown(); threadLocalCleared.await(); } } @Test public void 
setValueWhenNullThenThrowsIllegalArgumentException() { // @formatter:off assertThatIllegalArgumentException() .isThrownBy(() -> this.threadLocalAccessor.setValue(null)) .withMessage("securityContext cannot be null"); // @formatter:on } @Test public void setValueWhenThreadLocalSetThenClearsThreadLocal() { SecurityContext securityContext = SecurityContextHolder.createEmptyContext(); securityContext.setAuthentication(new TestingAuthenticationToken("user", "password")); Mono<SecurityContext> mono = Mono.just(securityContext); this.threadLocalAccessor.setValue(mono); assertThat(this.threadLocalAccessor.getValue()).isSameAs(mono); this.threadLocalAccessor.setValue(); assertThat(this.threadLocalAccessor.getValue()).isNull(); } }
ReactiveSecurityContextHolderThreadLocalAccessorTests
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/TestParametersNotInitializedTest.java
{ "start": 2130, "end": 2537 }
class ____ { @TestParameter public boolean foo; } """) .doTest(); } @Test public void onlyFlagsJunit4Runner() { refactoringHelper .addInputLines( "MyRunner.java", """ import org.junit.runners.BlockJUnit4ClassRunner; import org.junit.runners.model.InitializationError; public final
Test
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/source/expressions/java/SourceList.java
{ "start": 271, "end": 469 }
class ____ { private List<String> list; public List<String> getList() { return list; } public void setList( List<String> list ) { this.list = list; } }
SourceList
java
dropwizard__dropwizard
dropwizard-jetty/src/main/java/io/dropwizard/jetty/HttpConnectorFactory.java
{ "start": 21555, "end": 21791 }
class ____ extends StringMethodSerializer<CookieCompliance> { public CookieComplianceSerializer() { super(CookieCompliance.class, CookieCompliance::getName); } } private static
CookieComplianceSerializer
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumn.java
{ "start": 1587, "end": 3884 }
enum ____ implements Column<FlowRunTable> { /** * When the flow was started. This is the minimum of currently known * application start times. */ MIN_START_TIME(FlowRunColumnFamily.INFO, "min_start_time", AggregationOperation.GLOBAL_MIN, new LongConverter()), /** * When the flow ended. This is the maximum of currently known application end * times. */ MAX_END_TIME(FlowRunColumnFamily.INFO, "max_end_time", AggregationOperation.GLOBAL_MAX, new LongConverter()), /** * The version of the flow that this flow belongs to. */ FLOW_VERSION(FlowRunColumnFamily.INFO, "flow_version", null); private final ColumnFamily<FlowRunTable> columnFamily; private final String columnQualifier; private final byte[] columnQualifierBytes; private final AggregationOperation aggOp; private final ValueConverter valueConverter; private FlowRunColumn(ColumnFamily<FlowRunTable> columnFamily, String columnQualifier, AggregationOperation aggOp) { this(columnFamily, columnQualifier, aggOp, GenericConverter.getInstance()); } private FlowRunColumn(ColumnFamily<FlowRunTable> columnFamily, String columnQualifier, AggregationOperation aggOp, ValueConverter converter) { this.columnFamily = columnFamily; this.columnQualifier = columnQualifier; this.aggOp = aggOp; // Future-proof by ensuring the right column prefix hygiene. this.columnQualifierBytes = Bytes.toBytes(Separator.SPACE .encode(columnQualifier)); this.valueConverter = converter; } /** * @return the column name value */ private String getColumnQualifier() { return columnQualifier; } @Override public byte[] getColumnQualifierBytes() { return columnQualifierBytes.clone(); } @Override public byte[] getColumnFamilyBytes() { return columnFamily.getBytes(); } public AggregationOperation getAggregationOperation() { return aggOp; } @Override public ValueConverter getValueConverter() { return valueConverter; } @Override public Attribute[] getCombinedAttrsWithAggr(Attribute... 
attributes) { return HBaseTimelineSchemaUtils.combineAttributes(attributes, aggOp); } @Override public boolean supplementCellTimestamp() { return true; } }
FlowRunColumn
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/jsontype/ext/ExternalTypeIdTest.java
{ "start": 7256, "end": 7550 }
class ____ { Object _payload; public Envelope928(@JsonProperty("payload") @JsonTypeInfo(use=JsonTypeInfo.Id.CLASS, include=JsonTypeInfo.As.EXTERNAL_PROPERTY, property="class") Object payload) { _payload = payload; } } static
Envelope928
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/component/bean/BeanWithHeaderAnnotation.java
{ "start": 915, "end": 1046 }
class ____ { public String hello(String body, @Header("foo") Document doc) { return body; } }
BeanWithHeaderAnnotation
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/rest/messages/json/SerializedValueSerializerTest.java
{ "start": 2931, "end": 3106 }
class ____ implements Serializable { private static final long serialVersionUID = 1L; private String foo = "baz"; private int bar = 1; } }
TestClass
java
apache__maven
impl/maven-core/src/main/java/org/apache/maven/plugin/ExtensionRealmCache.java
{ "start": 1520, "end": 2942 }
class ____ { private final ClassRealm realm; private final ExtensionDescriptor descriptor; private final List<Artifact> artifacts; CacheRecord(ClassRealm realm, ExtensionDescriptor descriptor, List<Artifact> artifacts) { this.realm = realm; this.descriptor = descriptor; this.artifacts = artifacts; } public ClassRealm getRealm() { return realm; } public ExtensionDescriptor getDescriptor() { return descriptor; } public List<Artifact> getArtifacts() { return artifacts; } } Key createKey(List<Artifact> extensionArtifacts); CacheRecord get(Key key); CacheRecord put( Key key, ClassRealm extensionRealm, ExtensionDescriptor extensionDescriptor, List<Artifact> artifacts); void flush(); /** * Registers the specified cache record for usage with the given project. Integrators can use the information * collected from this method in combination with a custom cache implementation to dispose unused records from the * cache. * * @param project The project that employs the plugin realm, must not be {@code null}. * @param record The cache record being used for the project, must not be {@code null}. */ void register(MavenProject project, Key key, CacheRecord record); }
CacheRecord
java
apache__kafka
tools/src/main/java/org/apache/kafka/tools/consumer/LoggingMessageFormatter.java
{ "start": 1152, "end": 2207 }
class ____ implements MessageFormatter { private static final Logger LOG = LoggerFactory.getLogger(LoggingMessageFormatter.class); private final DefaultMessageFormatter defaultWriter = new DefaultMessageFormatter(); @Override public void configure(Map<String, ?> configs) { defaultWriter.configure(configs); } @Override public void writeTo(ConsumerRecord<byte[], byte[]> consumerRecord, PrintStream output) { defaultWriter.writeTo(consumerRecord, output); String timestamp = consumerRecord.timestampType() != TimestampType.NO_TIMESTAMP_TYPE ? consumerRecord.timestampType() + ":" + consumerRecord.timestamp() + ", " : ""; String key = "key:" + (consumerRecord.key() == null ? "null " : new String(consumerRecord.key(), StandardCharsets.UTF_8) + ", "); String value = "value:" + (consumerRecord.value() == null ? "null" : new String(consumerRecord.value(), StandardCharsets.UTF_8)); LOG.info(timestamp + key + value); } }
LoggingMessageFormatter
java
spring-projects__spring-framework
spring-test/src/main/java/org/springframework/test/annotation/DirtiesContext.java
{ "start": 5758, "end": 6234 }
enum ____ { /** * The associated {@code ApplicationContext} will be marked as * <em>dirty</em> before the corresponding test method. */ BEFORE_METHOD, /** * The associated {@code ApplicationContext} will be marked as * <em>dirty</em> after the corresponding test method. */ AFTER_METHOD } /** * Defines <i>modes</i> which determine how {@code @DirtiesContext} is * interpreted when used to annotate a test class. * @since 3.0 */
MethodMode
java
elastic__elasticsearch
test/framework/src/main/java/org/elasticsearch/cluster/coordination/CoordinationStateTestCluster.java
{ "start": 1808, "end": 4206 }
class ____ { public static ClusterState clusterState( long term, long version, DiscoveryNode localNode, CoordinationMetadata.VotingConfiguration lastCommittedConfig, CoordinationMetadata.VotingConfiguration lastAcceptedConfig, long value ) { return clusterState( term, version, DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).build(), lastCommittedConfig, lastAcceptedConfig, value ); } public static ClusterState clusterState( long term, long version, DiscoveryNodes discoveryNodes, CoordinationMetadata.VotingConfiguration lastCommittedConfig, CoordinationMetadata.VotingConfiguration lastAcceptedConfig, long value ) { return setValue( ClusterState.builder(ClusterName.DEFAULT) .version(version) .nodes(discoveryNodes) .metadata( Metadata.builder() .clusterUUID(UUIDs.randomBase64UUID(random())) // generate cluster UUID deterministically for repeatable tests .coordinationMetadata( CoordinationMetadata.builder() .term(term) .lastCommittedConfiguration(lastCommittedConfig) .lastAcceptedConfiguration(lastAcceptedConfig) .build() ) ) .stateUUID(UUIDs.randomBase64UUID(random())) // generate cluster state UUID deterministically for repeatable tests .build(), value ); } public static ClusterState setValue(ClusterState clusterState, long value) { return ClusterState.builder(clusterState) .metadata( Metadata.builder(clusterState.metadata()) .persistentSettings(Settings.builder().put(clusterState.metadata().persistentSettings()).put("value", value).build()) .build() ) .build(); } public static long value(ClusterState clusterState) { return clusterState.metadata().persistentSettings().getAsLong("value", 0L); } static
CoordinationStateTestCluster
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/SampleIntAggregator.java
{ "start": 1886, "end": 5161 }
class ____ { private static final DefaultUnsortableTopNEncoder ENCODER = new DefaultUnsortableTopNEncoder(); public static SingleState initSingle(BigArrays bigArrays, int limit) { return new SingleState(bigArrays, limit); } public static void combine(SingleState state, int value) { state.add(value); } public static void combineIntermediate(SingleState state, BytesRefBlock values) { int start = values.getFirstValueIndex(0); int end = start + values.getValueCount(0); BytesRef scratch = new BytesRef(); for (int i = start; i < end; i++) { state.internalState.sort.collect(values.getBytesRef(i, scratch), 0); } } public static Block evaluateFinal(SingleState state, DriverContext driverContext) { return stripWeights(driverContext, state.toBlock(driverContext.blockFactory())); } public static GroupingState initGrouping(BigArrays bigArrays, int limit) { return new GroupingState(bigArrays, limit); } public static void combine(GroupingState state, int groupId, int value) { state.add(groupId, value); } public static void combineIntermediate(GroupingState state, int groupId, BytesRefBlock values, int valuesPosition) { int start = values.getFirstValueIndex(valuesPosition); int end = start + values.getValueCount(valuesPosition); BytesRef scratch = new BytesRef(); for (int i = start; i < end; i++) { state.sort.collect(values.getBytesRef(i, scratch), groupId); } } public static Block evaluateFinal(GroupingState state, IntVector selected, GroupingAggregatorEvaluationContext ctx) { return stripWeights(ctx.driverContext(), state.toBlock(ctx.blockFactory(), selected)); } private static Block stripWeights(DriverContext driverContext, Block block) { if (block.areAllValuesNull()) { return block; } try ( BytesRefBlock bytesRefBlock = (BytesRefBlock) block; IntBlock.Builder intBlock = driverContext.blockFactory().newIntBlockBuilder(bytesRefBlock.getPositionCount()) ) { BytesRef scratch = new BytesRef(); for (int position = 0; position < block.getPositionCount(); position++) { if 
(bytesRefBlock.isNull(position)) { intBlock.appendNull(); } else { int valueCount = bytesRefBlock.getValueCount(position); if (valueCount > 1) { intBlock.beginPositionEntry(); } int start = bytesRefBlock.getFirstValueIndex(position); int end = start + valueCount; for (int i = start; i < end; i++) { BytesRef value = bytesRefBlock.getBytesRef(i, scratch).clone(); ENCODER.decodeLong(value); intBlock.appendInt(ENCODER.decodeInt(value)); } if (valueCount > 1) { intBlock.endPositionEntry(); } } } return intBlock.build(); } } public static
SampleIntAggregator
java
elastic__elasticsearch
x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/StringPattern.java
{ "start": 324, "end": 799 }
interface ____ { /** * Returns the pattern in (Java) regex format. */ String asJavaRegex(); /** * Hint method on whether this pattern matches everything or not. */ default boolean matchesAll() { return false; } /** * Returns the match if this pattern is exact, that is has no wildcard * or other patterns inside. * If the pattern is not exact, null is returned. */ String exactMatch(); }
StringPattern
java
hibernate__hibernate-orm
tooling/hibernate-ant/src/main/java/org/hibernate/tool/hbm2ddl/SchemaExport.java
{ "start": 2906, "end": 14174 }
enum ____ { /** * None - duh :P */ NONE, /** * Create only */ CREATE, /** * Drop only */ DROP, /** * Drop and then create */ BOTH; public boolean doCreate() { return this == BOTH || this == CREATE; } public boolean doDrop() { return this == BOTH || this == DROP; } private static Action interpret(boolean justDrop, boolean justCreate) { if ( justDrop ) { return Action.DROP; } else if ( justCreate ) { return Action.CREATE; } else { return Action.BOTH; } } public static Action parseCommandLineOption(String actionText) { if ( actionText.equalsIgnoreCase( "create" ) ) { return CREATE; } else if ( actionText.equalsIgnoreCase( "drop" ) ) { return DROP; } else if ( actionText.equalsIgnoreCase( "drop-and-create" ) ) { return BOTH; } else { return NONE; } } } boolean append = true; boolean haltOnError = false; boolean format = false; boolean manageNamespaces = false; String delimiter = null; String outputFile = null; private String importFiles; private final List<Exception> exceptions = new ArrayList<>(); /** * For generating a export script file, this is the file which will be written. * * @param filename The name of the file to which to write the export script. * * @return this */ public SchemaExport setOutputFile(String filename) { outputFile = filename; return this; } /** * For generating a export script file, by default the content will be appended at the begin or end of the file. * * The sql will be written at the beginning of the file rather append to the end. * * @return this */ public SchemaExport setOverrideOutputFileContent() { append = false; return this; } /** * Comma-separated list of resource names to use for database init commands on create. 
* * @param importFiles The comma-separated list of init file resources names * * @return this */ public SchemaExport setImportFiles(String importFiles) { this.importFiles = importFiles; return this; } /** * Set the end of statement delimiter * * @param delimiter The delimiter * * @return this */ public SchemaExport setDelimiter(String delimiter) { this.delimiter = delimiter; return this; } /** * Should we format the sql strings? * * @param format Should we format SQL strings * * @return this */ public SchemaExport setFormat(boolean format) { this.format = format; return this; } /** * Should we stop once an error occurs? * * @param haltOnError True if export should stop after error. * * @return this */ public SchemaExport setHaltOnError(boolean haltOnError) { this.haltOnError = haltOnError; return this; } public SchemaExport setManageNamespaces(boolean manageNamespaces) { this.manageNamespaces = manageNamespaces; return this; } public void drop(EnumSet<TargetType> targetTypes, Metadata metadata) { execute( targetTypes, Action.DROP, metadata ); } public void create(EnumSet<TargetType> targetTypes, Metadata metadata) { execute( targetTypes, Action.BOTH, metadata ); } public void createOnly(EnumSet<TargetType> targetTypes, Metadata metadata) { execute( targetTypes, Action.CREATE, metadata ); } public void execute(EnumSet<TargetType> targetTypes, Action action, Metadata metadata) { execute( targetTypes, action, metadata, ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry() ); } public void execute(EnumSet<TargetType> targetTypes, Action action, Metadata metadata, ServiceRegistry serviceRegistry) { if ( action == Action.NONE ) { CORE_LOGGER.debug( "Skipping SchemaExport as Action.NONE was passed" ); return; } if ( targetTypes.isEmpty() ) { CORE_LOGGER.debug( "Skipping SchemaExport as no targets were specified" ); return; } exceptions.clear(); CORE_LOGGER.runningHbm2ddlSchemaExport(); final TargetDescriptor targetDescriptor = 
buildTargetDescriptor( targetTypes, outputFile, append, serviceRegistry ); doExecution( action, needsJdbcConnection( targetTypes ), metadata, serviceRegistry, targetDescriptor ); } public void doExecution( Action action, boolean needsJdbc, Metadata metadata, ServiceRegistry serviceRegistry, TargetDescriptor targetDescriptor) { Map<String,Object> config = new HashMap<>( serviceRegistry.requireService( ConfigurationService.class ).getSettings() ); config.put( AvailableSettings.HBM2DDL_DELIMITER, delimiter ); config.put( AvailableSettings.FORMAT_SQL, format ); config.put( AvailableSettings.HBM2DDL_IMPORT_FILES, importFiles ); final SchemaManagementTool tool = serviceRegistry.requireService( SchemaManagementTool.class ); final ExceptionHandler exceptionHandler = haltOnError ? ExceptionHandlerHaltImpl.INSTANCE : new ExceptionHandlerCollectingImpl(); final ExecutionOptions executionOptions = SchemaManagementToolCoordinator.buildExecutionOptions( config, exceptionHandler ); final SourceDescriptor sourceDescriptor = new SourceDescriptor() { @Override public SourceType getSourceType() { return SourceType.METADATA; } @Override public ScriptSourceInput getScriptSourceInput() { return null; } }; try { if ( action.doDrop() ) { tool.getSchemaDropper( config ).doDrop( metadata, executionOptions, ContributableMatcher.ALL, sourceDescriptor, targetDescriptor ); } if ( action.doCreate() ) { tool.getSchemaCreator( config ).doCreation( metadata, executionOptions, ContributableMatcher.ALL, sourceDescriptor, targetDescriptor ); } } finally { if ( exceptionHandler instanceof ExceptionHandlerCollectingImpl handler ) { exceptions.addAll( handler.getExceptions() ); } } } private boolean needsJdbcConnection(EnumSet<TargetType> targetTypes) { return targetTypes.contains( TargetType.DATABASE ); } public static TargetDescriptor buildTargetDescriptor( EnumSet<TargetType> targetTypes, String outputFile, ServiceRegistry serviceRegistry) { return buildTargetDescriptor( targetTypes, outputFile, true, 
serviceRegistry ); } public static TargetDescriptor buildTargetDescriptor( EnumSet<TargetType> targetTypes, String outputFile, boolean append, ServiceRegistry serviceRegistry) { final ScriptTargetOutput scriptTarget; if ( targetTypes.contains( TargetType.SCRIPT ) ) { if ( outputFile == null ) { throw new SchemaManagementException( "Writing to script was requested, but no script file was specified" ); } scriptTarget = Helper.interpretScriptTargetSetting( outputFile, serviceRegistry.getService( ClassLoaderService.class ), (String) serviceRegistry.requireService( ConfigurationService.class ) .getSettings().get( AvailableSettings.HBM2DDL_CHARSET_NAME ), append ); } else { scriptTarget = null; } return new TargetDescriptorImpl( targetTypes, scriptTarget ); } /** * For testing use */ public void perform(Action action, Metadata metadata, ScriptTargetOutput target) { doExecution( action, false, metadata, ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry(), new TargetDescriptorImpl( EnumSet.of( TargetType.SCRIPT ), target ) ); } public static void main(String[] args) { try { final CommandLineArgs commandLineArgs = CommandLineArgs.parseCommandLineArgs( args ); execute( commandLineArgs ); } catch (Exception e) { CORE_LOGGER.unableToCreateSchema( e ); } } public static void execute(CommandLineArgs commandLineArgs) throws Exception { StandardServiceRegistry serviceRegistry = buildStandardServiceRegistry( commandLineArgs ); try { final MetadataImplementor metadata = buildMetadata( commandLineArgs, serviceRegistry ); metadata.orderColumns( false ); metadata.validate(); new SchemaExport() .setHaltOnError( commandLineArgs.halt ) .setOutputFile( commandLineArgs.outputFile ) .setDelimiter( commandLineArgs.delimiter ) .setFormat( commandLineArgs.format ) .setManageNamespaces( commandLineArgs.manageNamespaces ) .setImportFiles( commandLineArgs.importFile ) .execute( commandLineArgs.targetTypes, commandLineArgs.action, metadata, serviceRegistry ); } 
finally { StandardServiceRegistryBuilder.destroy( serviceRegistry ); } } private static StandardServiceRegistry buildStandardServiceRegistry(CommandLineArgs commandLineArgs) throws Exception { final BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder().build(); final StandardServiceRegistryBuilder ssrBuilder = new StandardServiceRegistryBuilder( bsr ); if ( commandLineArgs.cfgXmlFile != null ) { ssrBuilder.configure( commandLineArgs.cfgXmlFile ); } Properties properties = new Properties(); if ( commandLineArgs.propertiesFile != null ) { try ( final FileInputStream fis = new FileInputStream( commandLineArgs.propertiesFile ) ) { properties.load( fis ); } } ssrBuilder.applySettings( properties ); return ssrBuilder.build(); } private static MetadataImplementor buildMetadata( CommandLineArgs parsedArgs, StandardServiceRegistry serviceRegistry) { final MetadataSources metadataSources = new MetadataSources( serviceRegistry ); for ( String filename : parsedArgs.hbmXmlFiles ) { metadataSources.addFile( filename ); } for ( String filename : parsedArgs.jarFiles ) { metadataSources.addJar( new File( filename ) ); } final MetadataBuilder metadataBuilder = metadataSources.getMetadataBuilder(); final StrategySelector strategySelector = serviceRegistry.requireService( StrategySelector.class ); if ( parsedArgs.implicitNamingStrategyImplName != null ) { metadataBuilder.applyImplicitNamingStrategy( strategySelector.resolveStrategy( ImplicitNamingStrategy.class, parsedArgs.implicitNamingStrategyImplName ) ); } if ( parsedArgs.physicalNamingStrategyImplName != null ) { metadataBuilder.applyPhysicalNamingStrategy( strategySelector.resolveStrategy( PhysicalNamingStrategy.class, parsedArgs.physicalNamingStrategyImplName ) ); } return (MetadataImplementor) metadataBuilder.build(); } /** * Intended for test usage only. 
Builds a Metadata using the same algorithm as * {@link #main} * * @param args The "command line args" * * @return The built Metadata * * @throws Exception Problems building the Metadata */ public static MetadataImplementor buildMetadataFromMainArgs(String[] args) throws Exception { final CommandLineArgs commandLineArgs = CommandLineArgs.parseCommandLineArgs( args ); StandardServiceRegistry serviceRegistry = buildStandardServiceRegistry( commandLineArgs ); try { return buildMetadata( commandLineArgs, serviceRegistry ); } finally { StandardServiceRegistryBuilder.destroy( serviceRegistry ); } } /** * Returns a List of all Exceptions which occurred during the export. * * @return A List containing the Exceptions occurred during the export */ public List getExceptions() { return exceptions; } private static
Action
java
apache__rocketmq
store/src/main/java/org/apache/rocketmq/store/ha/HAConnectionStateNotificationService.java
{ "start": 1290, "end": 6134 }
class ____ extends ServiceThread { private static final Logger LOGGER = LoggerFactory.getLogger(LoggerName.STORE_LOGGER_NAME); private static final long CONNECTION_ESTABLISH_TIMEOUT = 10 * 1000; private volatile HAConnectionStateNotificationRequest request; private volatile long lastCheckTimeStamp = -1; private HAService haService; private DefaultMessageStore defaultMessageStore; public HAConnectionStateNotificationService(HAService haService, DefaultMessageStore defaultMessageStore) { this.haService = haService; this.defaultMessageStore = defaultMessageStore; } @Override public String getServiceName() { if (defaultMessageStore != null && defaultMessageStore.getBrokerConfig().isInBrokerContainer()) { return defaultMessageStore.getBrokerIdentity().getIdentifier() + HAConnectionStateNotificationService.class.getSimpleName(); } return HAConnectionStateNotificationService.class.getSimpleName(); } public synchronized void setRequest(HAConnectionStateNotificationRequest request) { if (this.request != null) { this.request.getRequestFuture().cancel(true); } this.request = request; lastCheckTimeStamp = System.currentTimeMillis(); } private synchronized void doWaitConnectionState() { if (this.request == null || this.request.getRequestFuture().isDone()) { return; } if (this.defaultMessageStore.getMessageStoreConfig().getBrokerRole() == BrokerRole.SLAVE) { if (haService.getHAClient().getCurrentState() == this.request.getExpectState()) { this.request.getRequestFuture().complete(true); this.request = null; } else if (haService.getHAClient().getCurrentState() == HAConnectionState.READY) { if ((System.currentTimeMillis() - lastCheckTimeStamp) > CONNECTION_ESTABLISH_TIMEOUT) { LOGGER.error("Wait HA connection establish with {} timeout", this.request.getRemoteAddr()); this.request.getRequestFuture().complete(false); this.request = null; } } else { lastCheckTimeStamp = System.currentTimeMillis(); } } else { boolean connectionFound = false; for (HAConnection connection : 
haService.getConnectionList()) { if (checkConnectionStateAndNotify(connection)) { connectionFound = true; } } if (connectionFound) { lastCheckTimeStamp = System.currentTimeMillis(); } if (!connectionFound && (System.currentTimeMillis() - lastCheckTimeStamp) > CONNECTION_ESTABLISH_TIMEOUT) { LOGGER.error("Wait HA connection establish with {} timeout", this.request.getRemoteAddr()); this.request.getRequestFuture().complete(false); this.request = null; } } } /** * Check if connection matched and notify request. * * @param connection connection to check. * @return if connection remote address match request. */ public synchronized boolean checkConnectionStateAndNotify(HAConnection connection) { if (this.request == null || connection == null) { return false; } String remoteAddress; try { remoteAddress = ((InetSocketAddress) connection.getSocketChannel().getRemoteAddress()) .getAddress().getHostAddress(); if (remoteAddress.equals(request.getRemoteAddr())) { HAConnectionState connState = connection.getCurrentState(); if (connState == this.request.getExpectState()) { this.request.getRequestFuture().complete(true); this.request = null; } else if (this.request.isNotifyWhenShutdown() && connState == HAConnectionState.SHUTDOWN) { this.request.getRequestFuture().complete(false); this.request = null; } return true; } } catch (Exception e) { LOGGER.error("Check connection address exception: {}", e); } return false; } @Override public void run() { LOGGER.info(this.getServiceName() + " service started"); while (!this.isStopped()) { try { this.waitForRunning(1000); this.doWaitConnectionState(); } catch (Exception e) { LOGGER.warn(this.getServiceName() + " service has exception. ", e); } } LOGGER.info(this.getServiceName() + " service end"); } }
HAConnectionStateNotificationService
java
apache__camel
components/camel-http-common/src/main/java/org/apache/camel/http/common/HttpHelper.java
{ "start": 3807, "end": 4605 }
class ____ found * @throws IOException can be thrown * @deprecated Camel 3.0 Please use the one which has the parameter of camel context */ @Deprecated public static Object deserializeJavaObjectFromStream(InputStream is) throws ClassNotFoundException, IOException { return deserializeJavaObjectFromStream(is, null); } /** * Deserializes the input stream to a Java object * * @param is input stream for the Java object * @param context the camel context which could help us to apply the customer classloader * @return the java object, or <tt>null</tt> if input stream was <tt>null</tt> * @throws ClassNotFoundException is thrown if
not
java
apache__camel
test-infra/camel-test-infra-ftp/src/test/java/org/apache/camel/test/infra/ftp/services/FtpService.java
{ "start": 1154, "end": 1278 }
interface ____ extends ContainerTestService, FtpInfraService, TestService, BeforeEachCallback, AfterEachCallback { }
FtpService
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/query/sqm/tree/predicate/SqmNegatablePredicate.java
{ "start": 307, "end": 957 }
interface ____ extends SqmPredicate { /** * Is this predicate (currently) negated? * * @return {@code true} if we have a negated form currently */ boolean isNegated(); /** * Apply an external negation. Called when we encounter a {@code NOT} * grouping. * <p> * For example, for {@code not(x is null)} we build the * {@link SqmNullnessPredicate} and then call its negate method which results * in {@code x is not null}. * <p> * Can be applied nested as well. For example, {@code not(not(x is null))} * becomes {@code x is null} because the double-negative cancel each other out. */ void negate(); }
SqmNegatablePredicate
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/SerializeUsingJDKTest.java
{ "start": 873, "end": 951 }
class ____ { @JsonPropertyOrder({ "x", "y" }) static
SerializeUsingJDKTest
java
spring-projects__spring-framework
spring-test/src/main/java/org/springframework/test/context/TestContextBootstrapper.java
{ "start": 1195, "end": 2087 }
class ____) via {@link BootstrapWith @BootstrapWith}, either * directly or as a meta-annotation. * * <p>If a bootstrapper is not explicitly configured via {@code @BootstrapWith}, * either the {@link org.springframework.test.context.support.DefaultTestContextBootstrapper * DefaultTestContextBootstrapper} or the * {@link org.springframework.test.context.web.WebTestContextBootstrapper * WebTestContextBootstrapper} will be used, depending on the presence of * {@link org.springframework.test.context.web.WebAppConfiguration @WebAppConfiguration}. * * <h3>Implementation Notes</h3> * * <p>Concrete implementations must provide a {@code public} no-args constructor. * * <p><strong>WARNING</strong>: this SPI will likely change in the future in * order to accommodate new requirements. Implementers are therefore strongly encouraged * <strong>not</strong> to implement this
hierarchy
java
mybatis__mybatis-3
src/main/java/org/apache/ibatis/plugin/Plugin.java
{ "start": 983, "end": 3614 }
class ____ implements InvocationHandler { private final Object target; private final Interceptor interceptor; private final Map<Class<?>, Set<Method>> signatureMap; private Plugin(Object target, Interceptor interceptor, Map<Class<?>, Set<Method>> signatureMap) { this.target = target; this.interceptor = interceptor; this.signatureMap = signatureMap; } public static Object wrap(Object target, Interceptor interceptor) { Map<Class<?>, Set<Method>> signatureMap = getSignatureMap(interceptor); Class<?> type = target.getClass(); Class<?>[] interfaces = getAllInterfaces(type, signatureMap); if (interfaces.length > 0) { return Proxy.newProxyInstance(type.getClassLoader(), interfaces, new Plugin(target, interceptor, signatureMap)); } return target; } @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { try { Set<Method> methods = signatureMap.get(method.getDeclaringClass()); if (methods != null && methods.contains(method)) { return interceptor.intercept(new Invocation(target, method, args)); } return method.invoke(target, args); } catch (Exception e) { throw ExceptionUtil.unwrapThrowable(e); } } private static Map<Class<?>, Set<Method>> getSignatureMap(Interceptor interceptor) { Intercepts interceptsAnnotation = interceptor.getClass().getAnnotation(Intercepts.class); // issue #251 if (interceptsAnnotation == null) { throw new PluginException( "No @Intercepts annotation was found in interceptor " + interceptor.getClass().getName()); } Signature[] sigs = interceptsAnnotation.value(); Map<Class<?>, Set<Method>> signatureMap = new HashMap<>(); for (Signature sig : sigs) { Set<Method> methods = signatureMap.computeIfAbsent(sig.type(), k -> new HashSet<>()); try { Method method = sig.type().getMethod(sig.method(), sig.args()); methods.add(method); } catch (NoSuchMethodException e) { throw new PluginException("Could not find method on " + sig.type() + " named " + sig.method() + ". 
Cause: " + e, e); } } return signatureMap; } private static Class<?>[] getAllInterfaces(Class<?> type, Map<Class<?>, Set<Method>> signatureMap) { Set<Class<?>> interfaces = new HashSet<>(); while (type != null) { for (Class<?> c : type.getInterfaces()) { if (signatureMap.containsKey(c)) { interfaces.add(c); } } type = type.getSuperclass(); } return interfaces.toArray(new Class<?>[0]); } }
Plugin
java
redisson__redisson
redisson/src/main/java/org/redisson/api/RAtomicDoubleReactive.java
{ "start": 689, "end": 768 }
interface ____ AtomicDouble object * * @author Nikita Koksharov * */ public
for
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/channel/ChannelStateWriteRequest.java
{ "start": 11434, "end": 11548 }
enum ____ { NEW, EXECUTING, COMPLETED, FAILED, CANCELLED } final
CheckpointInProgressRequestState
java
ReactiveX__RxJava
src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableMergeTest.java
{ "start": 13932, "end": 14938 }
class ____ implements Publisher<String> { Thread t; final CountDownLatch onNextBeingSent = new CountDownLatch(1); @Override public void subscribe(final Subscriber<? super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); t = new Thread(new Runnable() { @Override public void run() { onNextBeingSent.countDown(); try { subscriber.onNext("hello"); // I can't use a countDownLatch to prove we are actually sending 'onNext' // since it will block if synchronized and I'll deadlock subscriber.onComplete(); } catch (Exception e) { subscriber.onError(e); } } }, "TestASynchronousFlowable"); t.start(); } } private static
TestASynchronousFlowable
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/Diff.java
{ "start": 3274, "end": 3399 }
interface ____<K> extends Comparable<K> { /** @return the key of this object. */ public K getKey(); } /** An
Element
java
apache__commons-lang
src/main/java/org/apache/commons/lang3/ClassUtils.java
{ "start": 71748, "end": 72296 }
class ____ not a wrapper class. * {@code null} if null input. Empty array if an empty array passed in. * @see #wrapperToPrimitive(Class) * @since 2.4 */ public static Class<?>[] wrappersToPrimitives(final Class<?>... classes) { if (classes == null) { return null; } if (classes.length == 0) { return classes; } return ArrayUtils.setAll(new Class[classes.length], i -> wrapperToPrimitive(classes[i])); } /** * Converts the specified wrapper
is
java
hibernate__hibernate-orm
tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/collectionbasictype/ConcreteLike.java
{ "start": 227, "end": 381 }
class ____ extends Like<ConcreteLike.Target> { @Override public Reference<Target> getObject() { return new Reference<>(); } public static
ConcreteLike
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/ChecksAnnotation.java
{ "start": 716, "end": 1710 }
class ____ implements Checks, RepeatableContainer<Check> { private org.hibernate.annotations.Check[] value; /** * Used in creating dynamic annotation instances (e.g. from XML) */ public ChecksAnnotation(ModelsContext modelContext) { } /** * Used in creating annotation instances from JDK variant */ public ChecksAnnotation(Checks annotation, ModelsContext modelContext) { this.value = extractJdkValue( annotation, HibernateAnnotations.CHECKS, "value", modelContext ); } /** * Used in creating annotation instances from Jandex variant */ public ChecksAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) { this.value = (Check[]) attributeValues.get( "value" ); } @Override public Class<? extends Annotation> annotationType() { return Checks.class; } @Override public org.hibernate.annotations.Check[] value() { return value; } public void value(org.hibernate.annotations.Check[] value) { this.value = value; } }
ChecksAnnotation
java
apache__flink
flink-table/flink-table-code-splitter/src/test/resources/block/expected/TestIfMultipleSingleLineStatementRewrite.java
{ "start": 7, "end": 1121 }
class ____ { public void myFun1(int[] a, int[] b) throws RuntimeException { if (a[0] == 0) { myFun1_0_0_rewriteGroup2(a, b); myFun1_0_0(a, b); } else { myFun1_0_4(a, b); } } void myFun1_0_0_1_2(int[] a, int[] b) throws RuntimeException { a[21] = 1; a[22] = 1; } void myFun1_0_4(int[] a, int[] b) throws RuntimeException { a[0] = b[0]; a[1] = b[1]; a[2] = b[2]; } void myFun1_0_0_1_3(int[] a, int[] b) throws RuntimeException { a[23] = b[2]; a[24] = b[2]; } void myFun1_0_0_1(int[] a, int[] b) throws RuntimeException { a[11] = b[0]; a[12] = b[0]; } void myFun1_0_0(int[] a, int[] b) throws RuntimeException { a[13] = b[0]; a[14] = b[0]; } void myFun1_0_0_rewriteGroup2(int[] a, int[] b) throws RuntimeException { myFun1_0_0_1(a, b); if (a[2] == 0) { myFun1_0_0_1_2(a, b); } else { myFun1_0_0_1_3(a, b); } } }
TestIfMultipleSingleLineStatementRewrite
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/EnumFieldTest.java
{ "start": 2460, "end": 2556 }
class ____ { public Type value; public Type value1; } public static
Model
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/boot/models/xml/internal/ResourceStreamLocatorImpl.java
{ "start": 437, "end": 939 }
class ____ implements ResourceStreamLocator { private final ClassLoading classLoadingAccess; public ResourceStreamLocatorImpl(ClassLoading classLoadingAccess) { this.classLoadingAccess = classLoadingAccess; } @Override public InputStream locateResourceStream(String resourceName) { final URL resource = classLoadingAccess.locateResource( resourceName ); try { return resource.openStream(); } catch (IOException e) { throw new RuntimeException( e ); } } }
ResourceStreamLocatorImpl
java
apache__camel
components/camel-avro-rpc/camel-avro-rpc-component/src/main/java/org/apache/camel/component/avro/AvroNettyProducer.java
{ "start": 1014, "end": 1429 }
class ____ extends AvroProducer { public AvroNettyProducer(Endpoint endpoint) { super(endpoint); } @Override public Transceiver createTransceiver() throws Exception { AvroConfiguration configuration = getEndpoint().getConfiguration(); return transceiver = new NettyTransceiver(new InetSocketAddress(configuration.getHost(), configuration.getPort())); } }
AvroNettyProducer
java
quarkusio__quarkus
core/deployment/src/main/java/io/quarkus/deployment/builditem/ConfigMappingBuildItem.java
{ "start": 1576, "end": 2361 }
class ____ annotated with {@link StaticInitSafe}, false otherwise */ public boolean isStaticInitSafe() { return configClass.isAnnotationPresent(StaticInitSafe.class); } public ConfigClass toConfigClass() { return ConfigClass.configClass(configClass, prefix); } @Override public boolean equals(final Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } final ConfigMappingBuildItem that = (ConfigMappingBuildItem) o; return configClass.equals(that.configClass) && prefix.equals(that.prefix); } @Override public int hashCode() { return Objects.hash(configClass, prefix); } }
is
java
micronaut-projects__micronaut-core
management/src/main/java/io/micronaut/management/health/indicator/threads/DeadlockedThreadsHealthIndicator.java
{ "start": 1929, "end": 6232 }
class ____ extends AbstractHealthIndicator { private static final String NAME = "deadlockedThreads"; private static final String KEY_THREAD_ID = "threadId"; private static final String KEY_THREAD_NAME = "threadName"; private static final String KEY_THREAD_STATE = "threadState"; private static final String KEY_DAEMON = "daemon"; private static final String KEY_PRIORITY = "priority"; private static final String KEY_SUSPENDED = "suspended"; private static final String KEY_IN_NATIVE = "inNative"; private static final String KEY_LOCK_NAME = "lockName"; private static final String KEY_LOCK_OWNER_NAME = "lockOwnerName"; private static final String KEY_LOCK_OWNER_ID = "lockOwnerId"; private static final String KEY_LOCKED_SYNCHRONIZERS = "lockedSynchronizers"; private static final String KEY_STACK_TRACE = "stackTrace"; @Override protected Object getHealthInformation() { ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean(); long[] deadlockedThreads = null; if (threadMXBean.isSynchronizerUsageSupported()) { deadlockedThreads = threadMXBean.findDeadlockedThreads(); } else if (threadMXBean.isObjectMonitorUsageSupported()) { deadlockedThreads = threadMXBean.findMonitorDeadlockedThreads(); } if (deadlockedThreads == null) { this.healthStatus = HealthStatus.UP; return null; } this.healthStatus = HealthStatus.DOWN; return Arrays.stream(threadMXBean.getThreadInfo(deadlockedThreads, true, true, Integer.MAX_VALUE)) .map(DeadlockedThreadsHealthIndicator::getDetails) .toList(); } @Override public String getName() { return NAME; } private static Map<String, Object> getDetails(ThreadInfo threadInfo) { Map<String, Object> details = new LinkedHashMap<>(); details.put(KEY_THREAD_ID, String.valueOf(threadInfo.getThreadId())); details.put(KEY_THREAD_NAME, threadInfo.getThreadName()); details.put(KEY_THREAD_STATE, threadInfo.getThreadState().name()); details.put(KEY_DAEMON, String.valueOf(threadInfo.isDaemon())); details.put(KEY_PRIORITY, String.valueOf(threadInfo.getPriority())); 
details.put(KEY_SUSPENDED, String.valueOf(threadInfo.isSuspended())); details.put(KEY_IN_NATIVE, String.valueOf(threadInfo.isInNative())); details.put(KEY_LOCK_NAME, threadInfo.getLockName()); details.put(KEY_LOCK_OWNER_NAME, threadInfo.getLockOwnerName()); details.put(KEY_LOCK_OWNER_ID, String.valueOf(threadInfo.getLockOwnerId())); details.put(KEY_LOCKED_SYNCHRONIZERS, Arrays.stream(threadInfo.getLockedSynchronizers()).map(String::valueOf).toList()); details.put(KEY_STACK_TRACE, formatStackTrace(threadInfo)); return details; } private static String formatStackTrace(ThreadInfo threadInfo) { StringBuilder sb = new StringBuilder(); int i = 0; StackTraceElement[] stackTrace = threadInfo.getStackTrace(); for (; i < stackTrace.length; i++) { StackTraceElement ste = stackTrace[i]; sb.append(ste.toString()); sb.append('\n'); if (i == 0 && threadInfo.getLockInfo() != null) { switch (threadInfo.getThreadState()) { case BLOCKED: sb.append("- blocked on "); sb.append(threadInfo.getLockInfo()); sb.append('\n'); break; case WAITING, TIMED_WAITING: sb.append("- waiting on "); sb.append(threadInfo.getLockInfo()); sb.append('\n'); break; default: } } for (MonitorInfo mi : threadInfo.getLockedMonitors()) { if (mi.getLockedStackDepth() == i) { sb.append("- locked "); sb.append(mi); sb.append('\n'); } } } return sb.toString(); } }
DeadlockedThreadsHealthIndicator
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/ExecutingTest.java
{ "start": 33554, "end": 36453 }
class ____ { private ExecutionGraph executionGraph = TestingDefaultExecutionGraphBuilder.newBuilder() .build(EXECUTOR_EXTENSION.getExecutor()); private OperatorCoordinatorHandler operatorCoordinatorHandler; private Function<StateTransitionManager.Context, StateTransitionManager> stateTransitionManagerFactory = context -> TestingStateTransitionManager.withNoOp(); private int rescaleOnFailedCheckpointCount = 1; private ExecutingStateBuilder() throws JobException, JobExecutionException { operatorCoordinatorHandler = new TestingOperatorCoordinatorHandler(); } public ExecutingStateBuilder setExecutionGraph(ExecutionGraph executionGraph) { this.executionGraph = executionGraph; return this; } public ExecutingStateBuilder setOperatorCoordinatorHandler( OperatorCoordinatorHandler operatorCoordinatorHandler) { this.operatorCoordinatorHandler = operatorCoordinatorHandler; return this; } public ExecutingStateBuilder setStateTransitionManagerFactory( Function<StateTransitionManager.Context, StateTransitionManager> stateTransitionManagerFactory) { this.stateTransitionManagerFactory = stateTransitionManagerFactory; return this; } public ExecutingStateBuilder setRescaleOnFailedCheckpointCount( int rescaleOnFailedCheckpointCount) { this.rescaleOnFailedCheckpointCount = rescaleOnFailedCheckpointCount; return this; } private Executing build(MockExecutingContext ctx) { executionGraph.transitionToRunning(); try { return new Executing( executionGraph, getExecutionGraphHandler(executionGraph, ctx.getMainThreadExecutor()), operatorCoordinatorHandler, log, ctx, ClassLoader.getSystemClassLoader(), new ArrayList<>(), stateTransitionManagerFactory::apply, rescaleOnFailedCheckpointCount); } finally { Preconditions.checkState( !ctx.hadStateTransition, "State construction is an on-going state transition, during which no further transitions are allowed."); } } } private ExecutionGraphHandler getExecutionGraphHandler( ExecutionGraph executionGraph, ComponentMainThreadExecutor mainThreadExecutor) { 
return new ExecutionGraphHandler( executionGraph, log, mainThreadExecutor, mainThreadExecutor); } private static
ExecutingStateBuilder
java
apache__camel
components/camel-shiro/src/main/java/org/apache/camel/component/shiro/security/ShiroSecurityTokenInjector.java
{ "start": 1087, "end": 3063 }
class ____ implements Processor { private byte[] passPhrase; private ShiroSecurityToken securityToken; private CipherService cipherService; private boolean base64; public ShiroSecurityTokenInjector() { // Set up AES encryption based cipher service, by default cipherService = new AesCipherService(); } public ShiroSecurityTokenInjector(ShiroSecurityToken securityToken, byte[] passPhrase) { this(); this.setSecurityToken(securityToken); this.setPassPhrase(passPhrase); } public ShiroSecurityTokenInjector(ShiroSecurityToken securityToken, byte[] passPhrase, CipherService cipherService) { this(securityToken, passPhrase); this.cipherService = cipherService; } public ByteSource encrypt() { return ShiroSecurityHelper.encrypt(securityToken, passPhrase, cipherService); } @Override public void process(Exchange exchange) throws Exception { ByteSource bytes = encrypt(); Object token; if (isBase64()) { token = bytes.toBase64(); } else { token = bytes; } exchange.getIn().setHeader(ShiroSecurityConstants.SHIRO_SECURITY_TOKEN, token); } public byte[] getPassPhrase() { return passPhrase; } public void setPassPhrase(byte[] passPhrase) { this.passPhrase = passPhrase; } public void setSecurityToken(ShiroSecurityToken securityToken) { this.securityToken = securityToken; } public ShiroSecurityToken getSecurityToken() { return securityToken; } public CipherService getCipherService() { return cipherService; } public void setCipherService(CipherService cipherService) { this.cipherService = cipherService; } public boolean isBase64() { return base64; } public void setBase64(boolean base64) { this.base64 = base64; } }
ShiroSecurityTokenInjector
java
grpc__grpc-java
services/src/generated/main/grpc/io/grpc/health/v1/HealthGrpc.java
{ "start": 7944, "end": 8273 }
class ____ implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return HealthGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service Health. */ public static final
HealthImplBase
java
apache__flink
flink-filesystems/flink-hadoop-fs/src/test/java/org/apache/flink/runtime/fs/hdfs/HdfsKindTest.java
{ "start": 1271, "end": 1930 }
class ____ { @Test void testS3fileSystemSchemes() { assertThat(HadoopFileSystem.getKindForScheme("s3")).isEqualTo(FileSystemKind.OBJECT_STORE); assertThat(HadoopFileSystem.getKindForScheme("s3n")).isEqualTo(FileSystemKind.OBJECT_STORE); assertThat(HadoopFileSystem.getKindForScheme("s3a")).isEqualTo(FileSystemKind.OBJECT_STORE); assertThat(HadoopFileSystem.getKindForScheme("EMRFS")) .isEqualTo(FileSystemKind.OBJECT_STORE); } @Test void testViewFs() { assertThat(HadoopFileSystem.getKindForScheme("viewfs")) .isEqualTo(FileSystemKind.FILE_SYSTEM); } }
HdfsKindTest
java
apache__hadoop
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractSafeModeTest.java
{ "start": 1062, "end": 2435 }
class ____ extends AbstractFSContractTestBase { @Test public void testSafeMode() throws Throwable { final FileSystem fs = getFileSystem(); SafeMode fsWithSafeMode = verifyAndGetSafeModeInstance(fs); Assertions.assertThat(fsWithSafeMode.setSafeMode(SafeModeAction.GET)) .describedAs("Getting the status of safe mode before entering should be off.").isFalse(); Assertions.assertThat(fsWithSafeMode.setSafeMode(SafeModeAction.ENTER)) .describedAs("Entering Safe mode and safe mode turns on.").isTrue(); Assertions.assertThat(fsWithSafeMode.setSafeMode(SafeModeAction.GET)) .describedAs("Getting the status of safe mode after entering, safe mode should be on.") .isTrue(); Assertions.assertThat(fsWithSafeMode.setSafeMode(SafeModeAction.LEAVE)) .describedAs("Leaving safe mode, and safe mode switches off.").isFalse(); Assertions.assertThat(fsWithSafeMode.setSafeMode(SafeModeAction.FORCE_EXIT)) .describedAs("Force exist safe mode at any time, safe mode should always switches off.") .isFalse(); } private SafeMode verifyAndGetSafeModeInstance(FileSystem fs) { Assertions.assertThat(fs) .describedAs("File system %s must be an instance of %s", fs, SafeMode.class.getClass()) .isInstanceOf(SafeMode.class); return (SafeMode) fs; } }
AbstractContractSafeModeTest
java
spring-projects__spring-framework
spring-messaging/src/test/java/org/springframework/messaging/rsocket/service/RSocketServiceArgumentResolverTestSupport.java
{ "start": 978, "end": 2010 }
class ____ { private @Nullable RSocketServiceArgumentResolver resolver; private final RSocketRequestValues.Builder requestValuesBuilder = RSocketRequestValues.builder(null); private @Nullable RSocketRequestValues requestValues; protected RSocketServiceArgumentResolverTestSupport() { this.resolver = initResolver(); } protected abstract RSocketServiceArgumentResolver initResolver(); protected static MethodParameter initMethodParameter(Class<?> serviceClass, String methodName, int index) { Method method = ClassUtils.getMethod(serviceClass, methodName, (Class<?>[]) null); return new MethodParameter(method, index); } protected boolean execute(Object payload, MethodParameter parameter) { return this.resolver.resolve(payload, parameter, this.requestValuesBuilder); } protected RSocketRequestValues getRequestValues() { this.requestValues = (this.requestValues != null ? this.requestValues : this.requestValuesBuilder.build()); return this.requestValues; } }
RSocketServiceArgumentResolverTestSupport
java
quarkusio__quarkus
independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/FilePart.java
{ "start": 146, "end": 1601 }
class ____ { /** * The file to send */ public final File file; /** * The starting byte of the file */ public final long offset; /** * The number of bytes to send */ public final long count; /** * Create a new partial {@link File} object. * * @param file The file to send * @param offset The starting byte of the file (must be >= 0) * @param count The number of bytes to send (must be >= 0 and offset+count <= file size) */ public FilePart(File file, long offset, long count) { if (!file.exists()) throw new IllegalArgumentException("File does not exist: " + file); if (!file.isFile()) throw new IllegalArgumentException("File is not a regular file: " + file); if (!file.canRead()) throw new IllegalArgumentException("File cannot be read: " + file); if (offset < 0) throw new IllegalArgumentException("Offset (" + offset + ") must be >= 0: " + file); if (count < 0) throw new IllegalArgumentException("Count (" + count + ") must be >= 0: " + file); if ((offset + count) > file.length()) throw new IllegalArgumentException( "Offset + count (" + (offset + count) + ") larger than file size (" + file.length() + "): " + file); this.file = file; this.offset = offset; this.count = count; } }
FilePart
java
quarkusio__quarkus
extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/serialization/JsonArraySerde.java
{ "start": 153, "end": 324 }
class ____ extends Serdes.WrapperSerde<JsonArray> { public JsonArraySerde() { super(new JsonArraySerializer(), new JsonArrayDeserializer()); } }
JsonArraySerde
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/ser/enums/EnumAsMapKeySerializationTest.java
{ "start": 2480, "end": 2541 }
enum ____ { FIRST, SECOND; } static
Type
java
quarkusio__quarkus
integration-tests/hibernate-search-standalone-elasticsearch/src/test/java/io/quarkus/it/hibernate/search/standalone/elasticsearch/SearchWithLoadingTest.java
{ "start": 241, "end": 1258 }
class ____ { @Test public void testSearch() { RestAssured.when().put("/test/search-with-loading/init-data").then() .statusCode(204); RestAssured.when().get("/test/search-with-loading/search").then() .statusCode(200) .body(is("OK")); RestAssured.when().put("/test/search-with-loading/purge").then() .statusCode(200) .body(is("OK")); RestAssured.when().put("/test/search-with-loading/refresh").then() .statusCode(200) .body(is("OK")); RestAssured.when().get("/test/search-with-loading/search-empty").then() .statusCode(200); RestAssured.when().put("/test/search-with-loading/mass-indexer").then() .statusCode(200) .body(is("OK")); RestAssured.when().get("/test/search-with-loading/search").then() .statusCode(200) .body(is("OK")); } }
SearchWithLoadingTest
java
spring-projects__spring-framework
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/RequestScopedControllerAdviceIntegrationTests.java
{ "start": 2579, "end": 2699 }
class ____ implements Ordered { @Override public int getOrder() { return 99; } } }
RequestScopedControllerAdvice
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/issue_3200/Issue3281.java
{ "start": 1528, "end": 1644 }
class ____ { private Double max; private Double min; private Boolean recording; } }
TGigest
java
apache__dubbo
dubbo-metadata/dubbo-metadata-processor/src/main/java/org/apache/dubbo/metadata/annotation/processing/util/MemberUtils.java
{ "start": 1767, "end": 3491 }
interface ____ { static boolean matches(Element member, ElementKind kind) { return member == null || kind == null ? false : kind.equals(member.getKind()); } static boolean isPublicNonStatic(Element member) { return hasModifiers(member, PUBLIC) && !hasModifiers(member, STATIC); } static boolean hasModifiers(Element member, Modifier... modifiers) { if (member == null || modifiers == null) { return false; } Set<Modifier> actualModifiers = member.getModifiers(); for (Modifier modifier : modifiers) { if (!actualModifiers.contains(modifier)) { return false; } } return true; } static List<? extends Element> getDeclaredMembers(TypeMirror type) { TypeElement element = ofTypeElement(type); return element == null ? emptyList() : element.getEnclosedElements(); } static List<? extends Element> getAllDeclaredMembers(TypeMirror type) { return getHierarchicalTypes(type).stream() .map(MemberUtils::getDeclaredMembers) .flatMap(Collection::stream) .collect(Collectors.toList()); } static boolean matchParameterTypes(List<? extends VariableElement> parameters, CharSequence... parameterTypes) { int size = parameters.size(); if (size != parameterTypes.length) { return false; } for (int i = 0; i < size; i++) { VariableElement parameter = parameters.get(i); if (!Objects.equals(parameter.asType().toString(), parameterTypes[i])) { return false; } } return true; } }
MemberUtils
java
apache__flink
flink-examples/flink-examples-table/src/main/java/org/apache/flink/table/examples/java/basics/TemporalJoinSQLExample.java
{ "start": 1923, "end": 6199 }
class ____ { public static void main(String[] args) throws Exception { // set up the Java DataStream API final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); // set up the Java Table API final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env); // Create a changelog stream of currency rate final DataStream<Row> currencyRate = env.fromData( Row.ofKind(RowKind.INSERT, Instant.ofEpochMilli(1000), "USD", 0.8), Row.ofKind(RowKind.UPDATE_AFTER, Instant.ofEpochMilli(4000), "USD", 0.9), Row.ofKind(RowKind.UPDATE_AFTER, Instant.ofEpochMilli(3000), "USD", 1.0), Row.ofKind(RowKind.UPDATE_AFTER, Instant.ofEpochMilli(6000), "USD", 1.1)); // Create a table from change log stream Table rateTable = tableEnv.fromChangelogStream( currencyRate, Schema.newBuilder() .column("f0", DataTypes.TIMESTAMP_LTZ(3)) .column("f1", DataTypes.STRING().notNull()) .column("f2", DataTypes.DOUBLE()) .watermark("f0", "f0 - INTERVAL '2' SECONDS") .primaryKey("f1") .build(), ChangelogMode.upsert()) .as("rate_time", "currency_code", "euro_rate"); // Register the table as a view, it will be accessible under a name tableEnv.createTemporaryView("currency_rate", rateTable); // Create a data stream of transaction final DataStream<Transaction> transaction = env.fromData( new Transaction("trx1", Instant.ofEpochMilli(1000), "USD", 1), new Transaction("trx2", Instant.ofEpochMilli(2000), "USD", 1), new Transaction("trx3", Instant.ofEpochMilli(3000), "USD", 1), new Transaction("trx4", Instant.ofEpochMilli(4000), "USD", 1)); // convert the Transaction DataStream and register it as a view, // it will be accessible under a name Table trxTable = tableEnv.fromDataStream( transaction, Schema.newBuilder() .column("id", DataTypes.STRING()) .column("trxTime", DataTypes.TIMESTAMP_LTZ(3)) .column("currencyCode", DataTypes.STRING()) .column("amount", DataTypes.DOUBLE()) .watermark("trxTime", "trxTime - INTERVAL '2' SECONDS") .build()) .as("id", "trx_time", 
"currency_code", "amount"); // Register the table as a view, it will be accessible under a name tableEnv.createTemporaryView("transaction", trxTable); // temporal join the two tables final Table result = tableEnv.sqlQuery( " SELECT\n" + " t.id,\n" + " t.trx_time,\n" + " c.currency_code,\n" + " t.amount,\n" + " t.amount * c.euro_rate AS total_euro\n" + " FROM transaction t\n" + " JOIN currency_rate FOR SYSTEM_TIME AS OF t.trx_time AS c\n" + " ON t.currency_code = c.currency_code; "); // convert the Table back to an insert-only DataStream of type `Order` tableEnv.toDataStream(result, EnrichedTransaction.class).print(); // after the table program is converted to a DataStream program, // we must use `env.execute()` to submit the job env.execute(); } /** A simple
TemporalJoinSQLExample
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java
{ "start": 1883, "end": 5876 }
class ____ extends TransportMasterNodeAction<UpdateDesiredNodesRequest, UpdateDesiredNodesResponse> { private static final Logger logger = LogManager.getLogger(TransportUpdateDesiredNodesAction.class); private final MasterServiceTaskQueue<UpdateDesiredNodesTask> taskQueue; @Inject public TransportUpdateDesiredNodesAction( TransportService transportService, ClusterService clusterService, RerouteService rerouteService, ThreadPool threadPool, ActionFilters actionFilters, AllocationService allocationService ) { super( UpdateDesiredNodesAction.NAME, false, transportService, clusterService, threadPool, actionFilters, UpdateDesiredNodesRequest::new, UpdateDesiredNodesResponse::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.taskQueue = clusterService.createTaskQueue( "update-desired-nodes", Priority.URGENT, new UpdateDesiredNodesExecutor(rerouteService, allocationService) ); } @Override protected ClusterBlockException checkBlock(UpdateDesiredNodesRequest request, ClusterState state) { return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); } @Override protected void masterOperation( Task task, UpdateDesiredNodesRequest request, ClusterState state, ActionListener<UpdateDesiredNodesResponse> responseListener ) throws Exception { ActionListener.run( responseListener, listener -> taskQueue.submitTask( "update-desired-nodes", new UpdateDesiredNodesTask(request, listener), request.masterNodeTimeout() ) ); } static ClusterState replaceDesiredNodes(ClusterState clusterState, DesiredNodes newDesiredNodes) { return clusterState.copyAndUpdateMetadata( metadata -> metadata.putCustom(DesiredNodesMetadata.TYPE, new DesiredNodesMetadata(newDesiredNodes)) ); } static DesiredNodes updateDesiredNodes(DesiredNodes latestDesiredNodes, UpdateDesiredNodesRequest request) { final DesiredNodes proposedDesiredNodes = DesiredNodes.createIncludingStatusFromPreviousVersion( request.getHistoryID(), request.getVersion(), request.getNodes(), latestDesiredNodes ); if 
(latestDesiredNodes != null) { if (latestDesiredNodes.equalsWithProcessorsCloseTo(proposedDesiredNodes)) { return latestDesiredNodes; } if (latestDesiredNodes.hasSameVersion(proposedDesiredNodes)) { throw new IllegalArgumentException( format( Locale.ROOT, "Desired nodes with history [%s] and version [%d] already exists with a different definition", latestDesiredNodes.historyID(), latestDesiredNodes.version() ) ); } if (latestDesiredNodes.isSupersededBy(proposedDesiredNodes) == false) { throw new VersionConflictException( "version [{}] has been superseded by version [{}] for history [{}]", proposedDesiredNodes.version(), latestDesiredNodes.version(), latestDesiredNodes.historyID() ); } } return proposedDesiredNodes; } private record UpdateDesiredNodesTask(UpdateDesiredNodesRequest request, ActionListener<UpdateDesiredNodesResponse> listener) implements ClusterStateTaskListener { @Override public void onFailure(Exception e) { listener.onFailure(e); } } private static
TransportUpdateDesiredNodesAction
java
alibaba__nacos
ai/src/main/java/com/alibaba/nacos/ai/utils/AgentEndpointUtil.java
{ "start": 1103, "end": 3129 }
class ____ { /** * Transfer a collection of AgentEndpoint to a list of Instance. * * @param endpoints the collection of AgentEndpoint to transfer * @return the list of Instance transferred from AgentEndpoint * @throws NacosApiException if any validation failed during the transfer process */ public static List<Instance> transferToInstances(Collection<AgentEndpoint> endpoints) throws NacosApiException { List<Instance> result = new LinkedList<>(); for (AgentEndpoint endpoint : endpoints) { result.add(transferToInstance(endpoint)); } return result; } /** * Transfer a single AgentEndpoint to an Instance. * * @param endpoint the AgentEndpoint to transfer * @return the Instance transferred from AgentEndpoint * @throws NacosApiException if any validation failed during the transfer process */ public static Instance transferToInstance(AgentEndpoint endpoint) throws NacosApiException { Instance instance = new Instance(); instance.setIp(endpoint.getAddress()); instance.setPort(endpoint.getPort()); String path = StringUtils.isBlank(endpoint.getPath()) ? StringUtils.EMPTY : endpoint.getPath(); String protocol = StringUtils.isBlank(endpoint.getProtocol()) ? StringUtils.EMPTY : endpoint.getProtocol(); String query = StringUtils.isBlank(endpoint.getQuery()) ? StringUtils.EMPTY : endpoint.getQuery(); Map<String, String> metadata = Map.of(Constants.A2A.AGENT_ENDPOINT_PATH_KEY, path, Constants.A2A.AGENT_ENDPOINT_TRANSPORT_KEY, endpoint.getTransport(), Constants.A2A.NACOS_AGENT_ENDPOINT_SUPPORT_TLS, String.valueOf(endpoint.isSupportTls()), Constants.A2A.NACOS_AGENT_ENDPOINT_PROTOCOL_KEY, protocol, Constants.A2A.NACOS_AGENT_ENDPOINT_QUERY_KEY, query); instance.setMetadata(metadata); instance.validate(); return instance; } }
AgentEndpointUtil
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsOnlyOnce_with_Long_array_Test.java
{ "start": 1195, "end": 1910 }
class ____ extends LongArrayAssertBaseTest { @Test void should_fail_if_values_is_null() { // GIVEN Long[] values = null; // WHEN Throwable thrown = catchThrowable(() -> assertions.containsOnlyOnce(values)); // THEN then(thrown).isInstanceOf(NullPointerException.class) .hasMessage(shouldNotBeNull("values").create()); } @Override protected LongArrayAssert invoke_api_method() { return assertions.containsOnlyOnce(new Long[] { 6L, 8L }); } @Override protected void verify_internal_effects() { verify(arrays).assertContainsOnlyOnce(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L)); } }
LongArrayAssert_containsOnlyOnce_with_Long_array_Test
java
mybatis__mybatis-3
src/test/java/org/apache/ibatis/type/OffsetDateTimeTypeHandlerTest.java
{ "start": 997, "end": 3039 }
class ____ extends BaseTypeHandlerTest { private static final TypeHandler<OffsetDateTime> TYPE_HANDLER = new OffsetDateTimeTypeHandler(); private static final OffsetDateTime OFFSET_DATE_TIME = OffsetDateTime.now(); @Override @Test public void shouldSetParameter() throws Exception { TYPE_HANDLER.setParameter(ps, 1, OFFSET_DATE_TIME, null); verify(ps).setObject(1, OFFSET_DATE_TIME); } @Override @Test public void shouldGetResultFromResultSetByName() throws Exception { when(rs.getObject("column", OffsetDateTime.class)).thenReturn(OFFSET_DATE_TIME); assertEquals(OFFSET_DATE_TIME, TYPE_HANDLER.getResult(rs, "column")); verify(rs, never()).wasNull(); } @Override @Test public void shouldGetResultNullFromResultSetByName() throws Exception { when(rs.getObject("column", OffsetDateTime.class)).thenReturn(null); assertNull(TYPE_HANDLER.getResult(rs, "column")); verify(rs, never()).wasNull(); } @Override @Test public void shouldGetResultFromResultSetByPosition() throws Exception { when(rs.getObject(1, OffsetDateTime.class)).thenReturn(OFFSET_DATE_TIME); assertEquals(OFFSET_DATE_TIME, TYPE_HANDLER.getResult(rs, 1)); verify(rs, never()).wasNull(); } @Override @Test public void shouldGetResultNullFromResultSetByPosition() throws Exception { when(rs.getObject(1, OffsetDateTime.class)).thenReturn(null); assertNull(TYPE_HANDLER.getResult(rs, 1)); verify(rs, never()).wasNull(); } @Override @Test public void shouldGetResultFromCallableStatement() throws Exception { when(cs.getObject(1, OffsetDateTime.class)).thenReturn(OFFSET_DATE_TIME); assertEquals(OFFSET_DATE_TIME, TYPE_HANDLER.getResult(cs, 1)); verify(cs, never()).wasNull(); } @Override @Test public void shouldGetResultNullFromCallableStatement() throws Exception { when(cs.getObject(1, OffsetDateTime.class)).thenReturn(null); assertNull(TYPE_HANDLER.getResult(cs, 1)); verify(cs, never()).wasNull(); } }
OffsetDateTimeTypeHandlerTest
java
spring-projects__spring-framework
spring-beans/src/test/java/org/springframework/beans/factory/annotation/LookupAnnotationTests.java
{ "start": 1181, "end": 7353 }
class ____ { @Test void testWithoutConstructorArg() { DefaultListableBeanFactory beanFactory = configureBeanFactory(); AbstractBean bean = (AbstractBean) beanFactory.getBean("abstractBean"); Object expected = bean.get(); assertThat(expected.getClass()).isEqualTo(TestBean.class); assertThat(beanFactory.getBean(BeanConsumer.class).abstractBean).isSameAs(bean); } @Test void testWithOverloadedArg() { DefaultListableBeanFactory beanFactory = configureBeanFactory(); AbstractBean bean = (AbstractBean) beanFactory.getBean("abstractBean"); TestBean expected = bean.get("haha"); assertThat(expected.getClass()).isEqualTo(TestBean.class); assertThat(expected.getName()).isEqualTo("haha"); assertThat(beanFactory.getBean(BeanConsumer.class).abstractBean).isSameAs(bean); } @Test void testWithOneConstructorArg() { DefaultListableBeanFactory beanFactory = configureBeanFactory(); AbstractBean bean = (AbstractBean) beanFactory.getBean("abstractBean"); TestBean expected = bean.getOneArgument("haha"); assertThat(expected.getClass()).isEqualTo(TestBean.class); assertThat(expected.getName()).isEqualTo("haha"); assertThat(beanFactory.getBean(BeanConsumer.class).abstractBean).isSameAs(bean); } @Test void testWithTwoConstructorArg() { DefaultListableBeanFactory beanFactory = configureBeanFactory(); AbstractBean bean = (AbstractBean) beanFactory.getBean("abstractBean"); TestBean expected = bean.getTwoArguments("haha", 72); assertThat(expected.getClass()).isEqualTo(TestBean.class); assertThat(expected.getName()).isEqualTo("haha"); assertThat(expected.getAge()).isEqualTo(72); assertThat(beanFactory.getBean(BeanConsumer.class).abstractBean).isSameAs(bean); } @Test void testWithThreeArgsShouldFail() { DefaultListableBeanFactory beanFactory = configureBeanFactory(); AbstractBean bean = (AbstractBean) beanFactory.getBean("abstractBean"); assertThatExceptionOfType(AbstractMethodError.class).as("TestBean has no three arg constructor").isThrownBy(() -> bean.getThreeArguments("name", 1, 2)); 
assertThat(beanFactory.getBean(BeanConsumer.class).abstractBean).isSameAs(bean); } @Test void testWithEarlyInjection() { DefaultListableBeanFactory beanFactory = configureBeanFactory(); AbstractBean bean = beanFactory.getBean("beanConsumer", BeanConsumer.class).abstractBean; Object expected = bean.get(); assertThat(expected.getClass()).isEqualTo(TestBean.class); assertThat(beanFactory.getBean(BeanConsumer.class).abstractBean).isSameAs(bean); } @Test // gh-25806 public void testWithNullBean() { RootBeanDefinition tbd = new RootBeanDefinition(TestBean.class, () -> null); tbd.setScope(BeanDefinition.SCOPE_PROTOTYPE); DefaultListableBeanFactory beanFactory = configureBeanFactory(tbd); AbstractBean bean = beanFactory.getBean("beanConsumer", BeanConsumer.class).abstractBean; Object expected = bean.get(); assertThat(expected).isNull(); assertThat(beanFactory.getBean(BeanConsumer.class).abstractBean).isSameAs(bean); } @Test void testWithGenericBean() { DefaultListableBeanFactory beanFactory = configureBeanFactory(); beanFactory.registerBeanDefinition("numberBean", new RootBeanDefinition(NumberBean.class)); beanFactory.registerBeanDefinition("doubleStore", new RootBeanDefinition(DoubleStore.class)); beanFactory.registerBeanDefinition("floatStore", new RootBeanDefinition(FloatStore.class)); NumberBean bean = (NumberBean) beanFactory.getBean("numberBean"); assertThat(beanFactory.getBean(DoubleStore.class)).isSameAs(bean.getDoubleStore()); assertThat(beanFactory.getBean(FloatStore.class)).isSameAs(bean.getFloatStore()); } @Test void testSingletonWithoutMetadataCaching() { DefaultListableBeanFactory beanFactory = configureBeanFactory(); beanFactory.setCacheBeanMetadata(false); beanFactory.registerBeanDefinition("numberBean", new RootBeanDefinition(NumberBean.class)); beanFactory.registerBeanDefinition("doubleStore", new RootBeanDefinition(DoubleStore.class)); beanFactory.registerBeanDefinition("floatStore", new RootBeanDefinition(FloatStore.class)); NumberBean bean = 
(NumberBean) beanFactory.getBean("numberBean"); assertThat(beanFactory.getBean(DoubleStore.class)).isSameAs(bean.getDoubleStore()); assertThat(beanFactory.getBean(FloatStore.class)).isSameAs(bean.getFloatStore()); } @Test void testPrototypeWithoutMetadataCaching() { DefaultListableBeanFactory beanFactory = configureBeanFactory(); beanFactory.setCacheBeanMetadata(false); beanFactory.registerBeanDefinition("numberBean", new RootBeanDefinition(NumberBean.class, BeanDefinition.SCOPE_PROTOTYPE, null)); beanFactory.registerBeanDefinition("doubleStore", new RootBeanDefinition(DoubleStore.class)); beanFactory.registerBeanDefinition("floatStore", new RootBeanDefinition(FloatStore.class)); NumberBean bean = (NumberBean) beanFactory.getBean("numberBean"); assertThat(beanFactory.getBean(DoubleStore.class)).isSameAs(bean.getDoubleStore()); assertThat(beanFactory.getBean(FloatStore.class)).isSameAs(bean.getFloatStore()); bean = (NumberBean) beanFactory.getBean("numberBean"); assertThat(beanFactory.getBean(DoubleStore.class)).isSameAs(bean.getDoubleStore()); assertThat(beanFactory.getBean(FloatStore.class)).isSameAs(bean.getFloatStore()); } private DefaultListableBeanFactory configureBeanFactory(RootBeanDefinition tbd) { DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory(); AutowiredAnnotationBeanPostProcessor aabpp = new AutowiredAnnotationBeanPostProcessor(); aabpp.setBeanFactory(beanFactory); beanFactory.addBeanPostProcessor(aabpp); beanFactory.registerBeanDefinition("abstractBean", new RootBeanDefinition(AbstractBean.class)); beanFactory.registerBeanDefinition("beanConsumer", new RootBeanDefinition(BeanConsumer.class)); beanFactory.registerBeanDefinition("testBean", tbd); return beanFactory; } private DefaultListableBeanFactory configureBeanFactory() { RootBeanDefinition tbd = new RootBeanDefinition(TestBean.class); tbd.setScope(BeanDefinition.SCOPE_PROTOTYPE); return configureBeanFactory(tbd); } public abstract static
LookupAnnotationTests
java
mockito__mockito
mockito-core/src/main/java/org/mockito/MockedConstruction.java
{ "start": 1070, "end": 1533 }
interface ____ { int getCount(); /** * Get the constructor that is invoked during the mock creation. * * @return the constructor. */ Constructor<?> constructor(); /** * Get the arguments that were passed to the constructor. * * @return the arguments passed to the constructor, as a list. */ List<?> arguments(); } /** * Functional
Context
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-globalpolicygenerator/src/test/java/org/apache/hadoop/yarn/server/globalpolicygenerator/applicationcleaner/TestDefaultApplicationCleaner.java
{ "start": 6035, "end": 7841 }
class ____ extends DefaultApplicationCleaner { @Override public Set<ApplicationId> getAppsFromRouter() throws YarnRuntimeException { if (appIdToAddConcurrently != null) { SubClusterId scId = SubClusterId.newInstance("MySubClusterId"); try { ApplicationHomeSubCluster appHomeSubCluster = ApplicationHomeSubCluster.newInstance(appIdToAddConcurrently, scId); AddApplicationHomeSubClusterRequest request = AddApplicationHomeSubClusterRequest.newInstance(appHomeSubCluster); stateStore.addApplicationHomeSubCluster(request); } catch (YarnException e) { throw new YarnRuntimeException(e); } registryClient.writeAMRMTokenForUAM(appIdToAddConcurrently, scId.toString(), new Token<>()); } return routerAppIds; } } @Test public void testConcurrentNewApp() throws YarnException { appIdToAddConcurrently = ApplicationId.newInstance(1, 1); appCleaner.run(); // The concurrently added app should be still there GetApplicationsHomeSubClusterRequest appHomeSubClusterRequest = GetApplicationsHomeSubClusterRequest.newInstance(); GetApplicationsHomeSubClusterResponse applicationsHomeSubCluster = stateStore.getApplicationsHomeSubCluster(appHomeSubClusterRequest); Assertions.assertNotNull(applicationsHomeSubCluster); List<ApplicationHomeSubCluster> appsHomeSubClusters = applicationsHomeSubCluster.getAppsHomeSubClusters(); Assertions.assertNotNull(appsHomeSubClusters); Assertions.assertEquals(1, appsHomeSubClusters.size()); // The concurrently added app should be still there Assertions.assertEquals(1, registryClient.getAllApplications().size()); } }
TestableDefaultApplicationCleaner
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/cache/SharedRegionTest.java
{ "start": 1952, "end": 2001 }
class ____ { @Id public Integer id; } }
ZipCodes
java
google__dagger
hilt-compiler/main/java/dagger/hilt/android/processor/internal/bindvalue/BindValueMetadata.java
{ "start": 1811, "end": 3135 }
class ____ { static final ImmutableSet<ClassName> BIND_VALUE_ANNOTATIONS = ImmutableSet.of( ClassNames.ANDROID_BIND_VALUE); static final ImmutableSet<ClassName> BIND_VALUE_INTO_SET_ANNOTATIONS = ImmutableSet.of( ClassNames.ANDROID_BIND_VALUE_INTO_SET); static final ImmutableSet<ClassName> BIND_ELEMENTS_INTO_SET_ANNOTATIONS = ImmutableSet.of( ClassNames.ANDROID_BIND_ELEMENTS_INTO_SET); static final ImmutableSet<ClassName> BIND_VALUE_INTO_MAP_ANNOTATIONS = ImmutableSet.of( ClassNames.ANDROID_BIND_VALUE_INTO_MAP); /** * @return the {@code TestRoot} annotated class's name. */ abstract XTypeElement testElement(); /** @return a {@link ImmutableSet} of elements annotated with @BindValue. */ abstract ImmutableSet<BindValueElement> bindValueElements(); /** * @return a new BindValueMetadata instance. */ static BindValueMetadata create( XTypeElement testElement, Collection<XElement> bindValueElements) { ImmutableSet.Builder<BindValueElement> elements = ImmutableSet.builder(); for (XElement element : bindValueElements) { elements.add(BindValueElement.create(element)); } return new AutoValue_BindValueMetadata(testElement, elements.build()); } @AutoValue abstract static
BindValueMetadata
java
spring-projects__spring-framework
spring-webmvc/src/main/java/org/springframework/web/servlet/function/DefaultServerRequestBuilder.java
{ "start": 2566, "end": 6831 }
class ____ implements ServerRequest.Builder { private final HttpServletRequest servletRequest; private final List<HttpMessageConverter<?>> messageConverters; private final @Nullable ApiVersionStrategy versionStrategy; private HttpMethod method; private URI uri; private final HttpHeaders headers = new HttpHeaders(); private final MultiValueMap<String, Cookie> cookies = new LinkedMultiValueMap<>(); private final Map<String, Object> attributes = new LinkedHashMap<>(); private final MultiValueMap<String, String> params = new LinkedMultiValueMap<>(); private @Nullable InetSocketAddress remoteAddress; private byte[] body = new byte[0]; public DefaultServerRequestBuilder(ServerRequest other) { Assert.notNull(other, "ServerRequest must not be null"); this.servletRequest = other.servletRequest(); this.messageConverters = new ArrayList<>(other.messageConverters()); this.versionStrategy = other.apiVersionStrategy(); this.method = other.method(); this.uri = other.uri(); headers(headers -> headers.addAll(other.headers().asHttpHeaders())); cookies(cookies -> cookies.addAll(other.cookies())); attributes(attributes -> attributes.putAll(other.attributes())); params(params -> params.addAll(other.params())); this.remoteAddress = other.remoteAddress().orElse(null); } @Override public ServerRequest.Builder method(HttpMethod method) { Assert.notNull(method, "HttpMethod must not be null"); this.method = method; return this; } @Override public ServerRequest.Builder uri(URI uri) { Assert.notNull(uri, "URI must not be null"); this.uri = uri; return this; } @Override public ServerRequest.Builder header(String headerName, String... 
headerValues) { Assert.notNull(headerName, "Header name must not be null"); for (String headerValue : headerValues) { this.headers.add(headerName, headerValue); } return this; } @Override public ServerRequest.Builder headers(Consumer<HttpHeaders> headersConsumer) { Assert.notNull(headersConsumer, "Headers consumer must not be null"); headersConsumer.accept(this.headers); return this; } @Override public ServerRequest.Builder cookie(String name, String... values) { Assert.notNull(name, "Cookie name must not be null"); for (String value : values) { this.cookies.add(name, new Cookie(name, value)); } return this; } @Override public ServerRequest.Builder cookies(Consumer<MultiValueMap<String, Cookie>> cookiesConsumer) { Assert.notNull(cookiesConsumer, "Cookies consumer must not be null"); cookiesConsumer.accept(this.cookies); return this; } @Override public ServerRequest.Builder body(byte[] body) { Assert.notNull(body, "Body must not be null"); this.body = body; return this; } @Override public ServerRequest.Builder body(String body) { Assert.notNull(body, "Body must not be null"); return body(body.getBytes(StandardCharsets.UTF_8)); } @Override public ServerRequest.Builder attribute(String name, Object value) { Assert.notNull(name, "Name must not be null"); this.attributes.put(name, value); return this; } @Override public ServerRequest.Builder attributes(Consumer<Map<String, Object>> attributesConsumer) { Assert.notNull(attributesConsumer, "Attributes consumer must not be null"); attributesConsumer.accept(this.attributes); return this; } @Override public ServerRequest.Builder param(String name, String... 
values) { Assert.notNull(name, "Name must not be null"); for (String value : values) { this.params.add(name, value); } return this; } @Override public ServerRequest.Builder params(Consumer<MultiValueMap<String, String>> paramsConsumer) { Assert.notNull(paramsConsumer, "Parameters consumer must not be null"); paramsConsumer.accept(this.params); return this; } @Override public ServerRequest.Builder remoteAddress(@Nullable InetSocketAddress remoteAddress) { this.remoteAddress = remoteAddress; return this; } @Override public ServerRequest build() { return new BuiltServerRequest(this.servletRequest, this.method, this.uri, this.headers, this.cookies, this.attributes, this.params, this.remoteAddress, this.body, this.messageConverters, this.versionStrategy); } private static
DefaultServerRequestBuilder
java
quarkusio__quarkus
integration-tests/test-extension/tests/src/test/java/io/quarkus/it/extension/AbstractQuarkusTestMetaAnnotationTest.java
{ "start": 432, "end": 528 }
class ____ { public String foo() { return "foo"; } } }
MyTestBean
java
google__guice
core/test/com/google/inject/MembersInjectorTest.java
{ "start": 12618, "end": 13380 }
class ____ { boolean called = false; @Inject void callback(RecursiveMemberInjection recursiveMemberInjection) { if (called) { fail("Should not be called twice"); } called = true; } } /** Verifies that member injection injecting itself would get a non initialized instance. */ public void testRecursiveMemberInjector() throws Exception { final RecursiveMemberInjection rmi = new RecursiveMemberInjection(); Guice.createInjector( new AbstractModule() { @Override protected void configure() { bind(RecursiveMemberInjection.class).toInstance(rmi); } }); assertTrue("Member injection should happen", rmi.called); } static
RecursiveMemberInjection
java
mockito__mockito
mockito-core/src/main/java/org/mockito/Mockito.java
{ "start": 140482, "end": 171894 }
class ____ Singleton which depends on your mock.</li> * </ul> * * <b>Try to avoid this method at all costs. Only clear invocations if you are unable to efficiently test your program.</b> * @param <T> The type of the mocks * @param mocks The mocks to clear the invocations for */ public static <T> void clearInvocations(T... mocks) { MOCKITO_CORE.clearInvocations(mocks); } /** * Checks if any of given mocks has any unverified interaction. * <p> * You can use this method after you verified your mocks - to make sure that nothing * else was invoked on your mocks. * <p> * See also {@link Mockito#never()} - it is more explicit and communicates the intent well. * <p> * Stubbed invocations (if called) are also treated as interactions. * If you want stubbed invocations automatically verified, check out {@link Strictness#STRICT_STUBS} feature * introduced in Mockito 2.3.0. * If you want to ignore stubs for verification, see {@link #ignoreStubs(Object...)}. * <p> * A word of <b>warning</b>: * Some users who did a lot of classic, expect-run-verify mocking tend to use <code>verifyNoMoreInteractions()</code> very often, even in every test method. * <code>verifyNoMoreInteractions()</code> is not recommended to use in every test method. * <code>verifyNoMoreInteractions()</code> is a handy assertion from the interaction testing toolkit. Use it only when it's relevant. * Abusing it leads to over-specified, less maintainable tests. * <p> * This method will also detect unverified invocations that occurred before the test method, * for example: in <code>setUp()</code>, <code>&#064;Before</code> method or in constructor. * Consider writing nice code that makes interactions only in test methods. 
* * <p> * Example: * * <pre class="code"><code class="java"> * //interactions * mock.doSomething(); * mock.doSomethingUnexpected(); * * //verification * verify(mock).doSomething(); * * //following will fail because 'doSomethingUnexpected()' is unexpected * verifyNoMoreInteractions(mock); * * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param mocks to be verified */ public static void verifyNoMoreInteractions(Object... mocks) { MOCKITO_CORE.verifyNoMoreInteractions(mocks); } /** * Verifies that no interactions happened on given mocks. * <pre class="code"><code class="java"> * verifyNoInteractions(mockOne, mockTwo); * </code></pre> * This method will also detect invocations * that occurred before the test method, for example: in <code>setUp()</code>, <code>&#064;Before</code> method or in constructor. * Consider writing nice code that makes interactions only in test methods. * <p> * See also {@link Mockito#never()} - it is more explicit and communicates the intent well. * <p> * See examples in javadoc for {@link Mockito} class * * @param mocks to be verified * @since 3.0.1 */ public static void verifyNoInteractions(Object... mocks) { MOCKITO_CORE.verifyNoInteractions(mocks); } /** * Use <code>doThrow()</code> when you want to stub the void method with an exception. * <p> * Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler * does not like void methods inside brackets... * <p> * Example: * * <pre class="code"><code class="java"> * doThrow(new RuntimeException()).when(mock).someVoidMethod(); * </code></pre> * * @param toBeThrown to be thrown when the stubbed method is called * @return stubber - to select a method for stubbing */ public static Stubber doThrow(Throwable... toBeThrown) { return MOCKITO_CORE.stubber().doThrow(toBeThrown); } /** * Use <code>doThrow()</code> when you want to stub the void method with an exception. 
* <p> * A new exception instance will be created for each method invocation. * <p> * Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler * does not like void methods inside brackets... * <p> * Example: * * <pre class="code"><code class="java"> * doThrow(RuntimeException.class).when(mock).someVoidMethod(); * </code></pre> * * @param toBeThrown to be thrown when the stubbed method is called * @return stubber - to select a method for stubbing * @since 2.1.0 */ public static Stubber doThrow(Class<? extends Throwable> toBeThrown) { return MOCKITO_CORE.stubber().doThrow(toBeThrown); } /** * Same as {@link #doThrow(Class)} but sets consecutive exception classes to be thrown. Remember to use * <code>doThrow()</code> when you want to stub the void method to throw several exceptions * that are instances of the specified class. * <p> * A new exception instance will be created for each method invocation. * <p> * Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler * does not like void methods inside brackets... * <p> * Example: * * <pre class="code"><code class="java"> * doThrow(RuntimeException.class, BigFailure.class).when(mock).someVoidMethod(); * </code></pre> * * @param toBeThrown to be thrown when the stubbed method is called * @param toBeThrownNext next to be thrown when the stubbed method is called * @return stubber - to select a method for stubbing * @since 2.1.0 */ // Additional method helps users of JDK7+ to hide heap pollution / unchecked generics array // creation @SuppressWarnings({"unchecked", "varargs"}) public static Stubber doThrow( Class<? extends Throwable> toBeThrown, Class<? extends Throwable>... toBeThrownNext) { return MOCKITO_CORE.stubber().doThrow(toBeThrown, toBeThrownNext); } /** * Use <code>doCallRealMethod()</code> when you want to call the real implementation of a method. 
* <p> * As usual, you are going to read <b>the partial mock warning</b>: * Object oriented programming is more-or-less tackling complexity by dividing the complexity into separate, specific, SRPy objects. * How does partial mock fit into this paradigm? Well, it just doesn't... * Partial mock usually means that the complexity has been moved to a different method on the same object. * In most cases, this is not the way you want to design your application. * <p> * However, there are rare cases when partial mocks come handy: * dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.) * However, I wouldn't use partial mocks for new, test-driven and well-designed code. * <p> * See also javadoc {@link Mockito#spy(Object)} to find out more about partial mocks. * <b>Mockito.spy() is a recommended way of creating partial mocks.</b> * The reason is it guarantees real methods are called against correctly constructed object because you're responsible for constructing the object passed to spy() method. * <p> * Example: * <pre class="code"><code class="java"> * Foo mock = mock(Foo.class); * doCallRealMethod().when(mock).someVoidMethod(); * * // this will call the real implementation of Foo.someVoidMethod() * mock.someVoidMethod(); * </code></pre> * <p> * See examples in javadoc for {@link Mockito} class * * @return stubber - to select a method for stubbing * @since 1.9.5 */ public static Stubber doCallRealMethod() { return MOCKITO_CORE.stubber().doCallRealMethod(); } /** * Use <code>doAnswer()</code> when you want to stub a void method with generic {@link Answer}. * <p> * Stubbing voids requires different approach from {@link Mockito#when(Object)} because the compiler does not like void methods inside brackets... 
* <p> * Example: * * <pre class="code"><code class="java"> * doAnswer(new Answer() { * public Object answer(InvocationOnMock invocation) { * Object[] args = invocation.getArguments(); * Mock mock = invocation.getMock(); * return null; * }}) * .when(mock).someMethod(); * </code></pre> * <p> * See examples in javadoc for {@link Mockito} class * * @param answer to answer when the stubbed method is called * @return stubber - to select a method for stubbing */ public static Stubber doAnswer(Answer answer) { return MOCKITO_CORE.stubber().doAnswer(answer); } /** * Use <code>doNothing()</code> for setting void methods to do nothing. <b>Beware that void methods on mocks do nothing by default!</b> * However, there are rare situations when doNothing() comes handy: * <p> * <ol> * <li>Stubbing consecutive calls on a void method: * <pre class="code"><code class="java"> * doNothing(). * doThrow(new RuntimeException()) * .when(mock).someVoidMethod(); * * //does nothing the first time: * mock.someVoidMethod(); * * //throws RuntimeException the next time: * mock.someVoidMethod(); * </code></pre> * </li> * <li>When you spy real objects and you want the void method to do nothing: * <pre class="code"><code class="java"> * List list = new LinkedList(); * List spy = spy(list); * * //let's make clear() do nothing * doNothing().when(spy).clear(); * * spy.add("one"); * * //clear() does nothing, so the list still contains "one" * spy.clear(); * </code></pre> * </li> * </ol> * <p> * See examples in javadoc for {@link Mockito} class * * @return stubber - to select a method for stubbing */ public static Stubber doNothing() { return MOCKITO_CORE.stubber().doNothing(); } /** * Use <code>doReturn()</code> in those rare occasions when you cannot use {@link Mockito#when(Object)}. * <p> * <b>Beware that {@link Mockito#when(Object)} is always recommended for stubbing because it is argument type-safe * and more readable</b> (especially when stubbing consecutive calls). 
* <p> * Here are those rare occasions when doReturn() comes handy: * <p> * * <ol> * <li>When spying real objects and calling real methods on a spy brings side effects * * <pre class="code"><code class="java"> * List list = new LinkedList(); * List spy = spy(list); * * //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty) * when(spy.get(0)).thenReturn("foo"); * * //You have to use doReturn() for stubbing: * doReturn("foo").when(spy).get(0); * </code></pre> * </li> * * <li>Overriding a previous exception-stubbing: * <pre class="code"><code class="java"> * when(mock.foo()).thenThrow(new RuntimeException()); * * //Impossible: the exception-stubbed foo() method is called so RuntimeException is thrown. * when(mock.foo()).thenReturn("bar"); * * //You have to use doReturn() for stubbing: * doReturn("bar").when(mock).foo(); * </code></pre> * </li> * </ol> * * Above scenarios shows a tradeoff of Mockito's elegant syntax. Note that the scenarios are very rare, though. * Spying should be sporadic and overriding exception-stubbing is very rare. Not to mention that in general * overriding stubbing is a potential code smell that points out too much stubbing. * <p> * See examples in javadoc for {@link Mockito} class * * @param toBeReturned to be returned when the stubbed method is called * @return stubber - to select a method for stubbing */ public static Stubber doReturn(Object toBeReturned) { return MOCKITO_CORE.stubber().doReturn(toBeReturned); } /** * Same as {@link #doReturn(Object)} but sets consecutive values to be returned. Remember to use * <code>doReturn()</code> in those rare occasions when you cannot use {@link Mockito#when(Object)}. * <p> * <b>Beware that {@link Mockito#when(Object)} is always recommended for stubbing because it is argument type-safe * and more readable</b> (especially when stubbing consecutive calls). 
* <p> * Here are those rare occasions when doReturn() comes handy: * <p> * * <ol> * <li>When spying real objects and calling real methods on a spy brings side effects * * <pre class="code"><code class="java"> * List list = new LinkedList(); * List spy = spy(list); * * //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty) * when(spy.get(0)).thenReturn("foo", "bar", "qix"); * * //You have to use doReturn() for stubbing: * doReturn("foo", "bar", "qix").when(spy).get(0); * </code></pre> * </li> * * <li>Overriding a previous exception-stubbing: * <pre class="code"><code class="java"> * when(mock.foo()).thenThrow(new RuntimeException()); * * //Impossible: the exception-stubbed foo() method is called so RuntimeException is thrown. * when(mock.foo()).thenReturn("bar", "foo", "qix"); * * //You have to use doReturn() for stubbing: * doReturn("bar", "foo", "qix").when(mock).foo(); * </code></pre> * </li> * </ol> * * Above scenarios shows a trade-off of Mockito's elegant syntax. Note that the scenarios are very rare, though. * Spying should be sporadic and overriding exception-stubbing is very rare. Not to mention that in general * overriding stubbing is a potential code smell that points out too much stubbing. * <p> * See examples in javadoc for {@link Mockito} class * * @param toBeReturned to be returned when the stubbed method is called * @param toBeReturnedNext to be returned in consecutive calls when the stubbed method is called * @return stubber - to select a method for stubbing * @since 2.1.0 */ @SuppressWarnings({"unchecked", "varargs"}) public static Stubber doReturn(Object toBeReturned, Object... toBeReturnedNext) { return MOCKITO_CORE.stubber().doReturn(toBeReturned, toBeReturnedNext); } /** * Creates {@link org.mockito.InOrder} object that allows verifying mocks in order. 
* * <pre class="code"><code class="java"> * InOrder inOrder = inOrder(firstMock, secondMock); * * inOrder.verify(firstMock).add("was called first"); * inOrder.verify(secondMock).add("was called second"); * </code></pre> * * Verification in order is flexible - <b>you don't have to verify all interactions</b> one-by-one * but only those that you are interested in testing in order. * <p> * Also, you can create InOrder object passing only mocks that are relevant for in-order verification. * <p> * <code>InOrder</code> verification is 'greedy', but you will hardly ever notice it. * If you want to find out more, read * <a href="https://github.com/mockito/mockito/wiki/Greedy-algorithm-of-verification-InOrder">this wiki page</a>. * <p> * As of Mockito 1.8.4 you can verifyNoMoreInteractions() in order-sensitive way. Read more: {@link InOrder#verifyNoMoreInteractions()} * <p> * See examples in javadoc for {@link Mockito} class * * @param mocks to be verified in order * * @return InOrder object to be used to verify in order */ public static InOrder inOrder(Object... mocks) { return MOCKITO_CORE.inOrder(mocks); } /** * Ignores stubbed methods of given mocks for the sake of verification. * Please consider using {@link Strictness#STRICT_STUBS} feature which eliminates the need for <code>ignoreStubs()</code> * and provides other benefits. * <p> * <code>ignoreStubs()</code> is sometimes useful when coupled with <code>verifyNoMoreInteractions()</code> or verification <code>inOrder()</code>. * Helps to avoid redundant verification of stubbed calls - typically we're not interested in verifying stubs. 
* <p> * <b>Warning</b>, <code>ignoreStubs()</code> might lead to overuse of <code>verifyNoMoreInteractions(ignoreStubs(...));</code> * Bear in mind that Mockito does not recommend bombarding every test with <code>verifyNoMoreInteractions()</code> * for the reasons outlined in javadoc for {@link Mockito#verifyNoMoreInteractions(Object...)} * Other words: all <b>*stubbed*</b> methods of given mocks are marked <b>*verified*</b> so that they don't get in a way during verifyNoMoreInteractions(). * <p> * This method <b>changes the input mocks</b>! This method returns input mocks just for convenience. * <p> * Ignored stubs will also be ignored for verification inOrder, including {@link org.mockito.InOrder#verifyNoMoreInteractions()}. * See the second example. * <p> * Example: * <pre class="code"><code class="java"> * //mocking lists for the sake of the example (if you mock List in real you will burn in hell) * List mock1 = mock(List.class), mock2 = mock(List.class); * * //stubbing mocks: * when(mock1.get(0)).thenReturn(10); * when(mock2.get(0)).thenReturn(20); * * //using mocks by calling stubbed get(0) methods: * System.out.println(mock1.get(0)); //prints 10 * System.out.println(mock2.get(0)); //prints 20 * * //using mocks by calling clear() methods: * mock1.clear(); * mock2.clear(); * * //verification: * verify(mock1).clear(); * verify(mock2).clear(); * * //verifyNoMoreInteractions() fails because get() methods were not accounted for. * try { verifyNoMoreInteractions(mock1, mock2); } catch (NoInteractionsWanted e); * * //However, if we ignore stubbed methods then we can verifyNoMoreInteractions() * verifyNoMoreInteractions(ignoreStubs(mock1, mock2)); * * //Remember that ignoreStubs() <b>*changes*</b> the input mocks and returns them for convenience. 
* </code></pre> * Ignoring stubs can be used with <b>verification in order</b>: * <pre class="code"><code class="java"> * List list = mock(List.class); * when(list.get(0)).thenReturn("foo"); * * list.add(0); * list.clear(); * System.out.println(list.get(0)); //we don't want to verify this * * InOrder inOrder = inOrder(ignoreStubs(list)); * inOrder.verify(list).add(0); * inOrder.verify(list).clear(); * inOrder.verifyNoMoreInteractions(); * </code></pre> * Stubbed invocations are automatically verified with {@link Strictness#STRICT_STUBS} feature * and it eliminates the need for <code>ignoreStubs()</code>. Example below uses JUnit Rules: * <pre class="code"><code class="java"> * &#064;Rule public MockitoRule mockito = MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS); * * List list = mock(List.class); * when(list.get(0)).thenReturn("foo"); * * list.size(); * verify(list).size(); * * list.get(0); // Automatically verified by STRICT_STUBS * verifyNoMoreInteractions(list); // No need of ignoreStubs() * </code></pre> * * @since 1.9.0 * @param mocks input mocks that will be changed * @return the same mocks that were passed in as parameters */ public static Object[] ignoreStubs(Object... mocks) { return MOCKITO_CORE.ignoreStubs(mocks); } /** * Allows verifying exact number of invocations. E.g: * <pre class="code"><code class="java"> * verify(mock, times(2)).someMethod("some arg"); * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param wantedNumberOfInvocations wanted number of invocations * * @return verification mode */ public static VerificationMode times(int wantedNumberOfInvocations) { return VerificationModeFactory.times(wantedNumberOfInvocations); } /** * Alias to <code>times(0)</code>, see {@link Mockito#times(int)} * <p> * Verifies that interaction did not happen. 
E.g: * <pre class="code"><code class="java"> * verify(mock, never()).someMethod(); * </code></pre> * * <p> * If you want to verify there were NO interactions with the mock * check out {@link Mockito#verifyNoMoreInteractions(Object...)} * <p> * See examples in javadoc for {@link Mockito} class * * @return verification mode */ public static VerificationMode never() { return times(0); } /** * Allows at-least-once verification. E.g: * <pre class="code"><code class="java"> * verify(mock, atLeastOnce()).someMethod("some arg"); * </code></pre> * Alias to <code>atLeast(1)</code>. * <p> * See examples in javadoc for {@link Mockito} class * * @return verification mode */ public static VerificationMode atLeastOnce() { return VerificationModeFactory.atLeastOnce(); } /** * Allows at-least-x verification. E.g: * <pre class="code"><code class="java"> * verify(mock, atLeast(3)).someMethod("some arg"); * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param minNumberOfInvocations minimum number of invocations * * @return verification mode */ public static VerificationMode atLeast(int minNumberOfInvocations) { return VerificationModeFactory.atLeast(minNumberOfInvocations); } /** * Allows at-most-once verification. E.g: * <pre class="code"><code class="java"> * verify(mock, atMostOnce()).someMethod("some arg"); * </code></pre> * Alias to <code>atMost(1)</code>. * <p> * See examples in javadoc for {@link Mockito} class * * @return verification mode */ public static VerificationMode atMostOnce() { return VerificationModeFactory.atMostOnce(); } /** * Allows at-most-x verification. 
E.g: * <pre class="code"><code class="java"> * verify(mock, atMost(3)).someMethod("some arg"); * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param maxNumberOfInvocations max number of invocations * * @return verification mode */ public static VerificationMode atMost(int maxNumberOfInvocations) { return VerificationModeFactory.atMost(maxNumberOfInvocations); } /** * Allows non-greedy verification in order. For example * <pre class="code"><code class="java"> * inOrder.verify( mock, calls( 2 )).someMethod( "some arg" ); * </code></pre> * <ul> * <li>will not fail if the method is called 3 times, unlike times( 2 )</li> * <li>will not mark the third invocation as verified, unlike atLeast( 2 )</li> * </ul> * This verification mode can only be used with in order verification. * @param wantedNumberOfInvocations number of invocations to verify * @return verification mode */ public static VerificationMode calls(int wantedNumberOfInvocations) { return VerificationModeFactory.calls(wantedNumberOfInvocations); } /** * Allows checking if given method was the only one invoked. E.g: * <pre class="code"><code class="java"> * verify(mock, only()).someMethod(); * //above is a shorthand for following 2 lines of code: * verify(mock).someMethod(); * verifyNoMoreInteractions(mock); * </code></pre> * * <p> * See also {@link Mockito#verifyNoMoreInteractions(Object...)} * <p> * See examples in javadoc for {@link Mockito} class * * @return verification mode */ public static VerificationMode only() { return VerificationModeFactory.only(); } /** * Verification will be triggered over and over until the given amount of millis, allowing testing of async code. * Useful when interactions with the mock object did not happened yet. * Extensive use of {@code timeout()} method can be a code smell - there are better ways of testing concurrent code. * <p> * See also {@link #after(long)} method for testing async code. 
* Differences between {@code timeout()} and {@code after} are explained in Javadoc for {@link #after(long)}. * * <pre class="code"><code class="java"> * //passes when someMethod() is called no later than within 100 ms * //exits immediately when verification is satisfied (e.g. may not wait full 100 ms) * verify(mock, timeout(100)).someMethod(); * //above is an alias to: * verify(mock, timeout(100).times(1)).someMethod(); * * //passes as soon as someMethod() has been called 2 times under 100 ms * verify(mock, timeout(100).times(2)).someMethod(); * * //equivalent: this also passes as soon as someMethod() has been called 2 times under 100 ms * verify(mock, timeout(100).atLeast(2)).someMethod(); * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param millis - duration in milliseconds * * @return object that allows fluent specification of the verification (times(x), atLeast(y), etc.) */ public static VerificationWithTimeout timeout(long millis) { return new Timeout(millis, VerificationModeFactory.times(1)); } /** * Verification will be triggered after given amount of millis, allowing testing of async code. * Useful when interactions with the mock object have yet to occur. * Extensive use of {@code after()} method can be a code smell - there are better ways of testing concurrent code. * <p> * Not yet implemented to work with InOrder verification. * <p> * See also {@link #timeout(long)} method for testing async code. * Differences between {@code timeout()} and {@code after()} are explained below. * * <pre class="code"><code class="java"> * //passes after 100ms, if someMethod() has only been called once at that time. 
* verify(mock, after(100)).someMethod(); * //above is an alias to: * verify(mock, after(100).times(1)).someMethod(); * * //passes if someMethod() is called <b>*exactly*</b> 2 times, as tested after 100 millis * verify(mock, after(100).times(2)).someMethod(); * * //passes if someMethod() has not been called, as tested after 100 millis * verify(mock, after(100).never()).someMethod(); * * //verifies someMethod() after a given time span using given verification mode * //useful only if you have your own custom verification modes. * verify(mock, new After(100, yourOwnVerificationMode)).someMethod(); * </code></pre> * * <strong>timeout() vs. after()</strong> * <ul> * <li>timeout() exits immediately with success when verification passes</li> * <li>after() awaits full duration to check if verification passes</li> * </ul> * Examples: * <pre class="code"><code class="java"> * //1. * mock.foo(); * verify(mock, after(1000)).foo(); * //waits 1000 millis and succeeds * * //2. * mock.foo(); * verify(mock, timeout(1000)).foo(); * //succeeds immediately * </code></pre> * * See examples in javadoc for {@link Mockito} class * * @param millis - duration in milliseconds * * @return object that allows fluent specification of the verification */ public static VerificationAfterDelay after(long millis) { return new After(millis, VerificationModeFactory.times(1)); } /** * First of all, in case of any trouble, I encourage you to read the Mockito FAQ: <a href="https://github.com/mockito/mockito/wiki/FAQ">https://github.com/mockito/mockito/wiki/FAQ</a> * <p> * In case of questions you may also post to mockito mailing list: <a href="https://groups.google.com/group/mockito">https://groups.google.com/group/mockito</a> * <p> * <code>validateMockitoUsage()</code> <b>explicitly validates</b> the framework state to detect invalid use of Mockito. * However, this feature is optional <b>because Mockito validates the usage all the time...</b> but there is a gotcha so read on. 
* <p> * Examples of incorrect use: * <pre class="code"><code class="java"> * //Oops, thenReturn() part is missing: * when(mock.get()); * * //Oops, verified method call is inside verify() where it should be on the outside: * verify(mock.execute()); * * //Oops, missing method to verify: * verify(mock); * </code></pre> * * Mockito throws exceptions if you misuse it so that you know if your tests are written correctly. * The gotcha is that Mockito does the validation <b>next time</b> you use the framework (e.g. next time you verify, stub, call mock etc.). * But even though the exception might be thrown in the next test, * the exception <b>message contains a navigable stack trace element</b> with location of the defect. * Hence you can click and find the place where Mockito was misused. * <p> * Sometimes though, you might want to validate the framework usage explicitly. * For example, one of the users wanted to put <code>validateMockitoUsage()</code> in his <code>&#064;After</code> method * so that he knows immediately when he misused Mockito. * Without it, he would have known about it not sooner than <b>next time</b> he used the framework. * One more benefit of having <code>validateMockitoUsage()</code> in <code>&#064;After</code> is that jUnit runner and rule will always fail in the test method with defect * whereas ordinary 'next-time' validation might fail the <b>next</b> test method. * But even though JUnit might report next test as red, don't worry about it * and just click at navigable stack trace element in the exception message to instantly locate the place where you misused mockito. * <p> * <b>Both built-in runner: {@link MockitoJUnitRunner} and rule: {@link MockitoRule}</b> do validateMockitoUsage() after each test method. 
* <p> * Bear in mind that <b>usually you don't have to <code>validateMockitoUsage()</code></b> * and framework validation triggered on next-time basis should be just enough, * mainly because of enhanced exception message with clickable location of defect. * However, I would recommend validateMockitoUsage() if you already have sufficient test infrastructure * (like your own runner or base
is
java
redisson__redisson
redisson/src/main/java/org/redisson/api/RLiveObjectService.java
{ "start": 5109, "end": 5917 }
class ____ this object should have a field annotated with * RId, and the object should hold a non null value in that field. * * If this object is not in redis then a new hash key will be created to * store it. Otherwise overrides current object state in Redis with the given object state. * * @param <T> Entity type * @param detachedObject - not proxied object * @return proxied object * @throws IllegalArgumentException if the object is is a RLiveObject instance. */ <T> T merge(T detachedObject); /** * Returns proxied object for the detached object. Transfers all the * <b>NON NULL</b> field values to the redis server. It does not delete any * existing data in redis in case of the field value is null. * * The
representing
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/operators/ReduceTaskExternalITCase.java
{ "start": 1968, "end": 9423 }
class ____ extends DriverTestBase<GroupReduceFunction<Record, Record>> { private static final Logger LOG = LoggerFactory.getLogger(ReduceTaskExternalITCase.class); @SuppressWarnings("unchecked") private final RecordComparator comparator = new RecordComparator( new int[] {0}, (Class<? extends Value>[]) new Class[] {IntValue.class}); private final List<Record> outList = new ArrayList<>(); ReduceTaskExternalITCase(ExecutionConfig config) { super(config, 0, 1, 3 * 1024 * 1024); } @TestTemplate void testSingleLevelMergeReduceTask() { final int keyCnt = 8192; final int valCnt = 8; setNumFileHandlesForSort(2); addDriverComparator(this.comparator); setOutput(this.outList); getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE); try { addInputSorted( new UniformRecordGenerator(keyCnt, valCnt, false), this.comparator.duplicate()); GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>(); testDriver(testTask, MockReduceStub.class); } catch (Exception e) { LOG.info("Exception while running the test task.", e); fail("Exception in Test: " + e.getMessage()); } assertThat(this.outList) .withFailMessage("Resultset size was %d. 
Expected was %d", outList.size(), keyCnt) .hasSize(keyCnt); for (Record record : this.outList) { assertThat(record.getField(1, IntValue.class).getValue()) .withFailMessage("Incorrect result") .isEqualTo(valCnt - record.getField(0, IntValue.class).getValue()); } this.outList.clear(); } @TestTemplate void testMultiLevelMergeReduceTask() { final int keyCnt = 32768; final int valCnt = 8; setNumFileHandlesForSort(2); addDriverComparator(this.comparator); setOutput(this.outList); getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE); try { addInputSorted( new UniformRecordGenerator(keyCnt, valCnt, false), this.comparator.duplicate()); GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>(); testDriver(testTask, MockReduceStub.class); } catch (Exception e) { LOG.info("Exception while running the test task.", e); fail("Exception in Test: " + e.getMessage()); } assertThat(this.outList) .withFailMessage("Resultset size was %d. Expected was %d", outList.size(), keyCnt) .hasSize(keyCnt); for (Record record : this.outList) { assertThat(record.getField(1, IntValue.class).getValue()) .withFailMessage("Incorrect result") .isEqualTo(valCnt - record.getField(0, IntValue.class).getValue()); } this.outList.clear(); } @TestTemplate void testSingleLevelMergeCombiningReduceTask() throws IOException { final int keyCnt = 8192; final int valCnt = 8; addDriverComparator(this.comparator); setOutput(this.outList); getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE); Sorter<Record> sorter = null; try { sorter = ExternalSorter.newBuilder( getMemoryManager(), getContainingTask(), RecordSerializerFactory.get().getSerializer(), this.comparator.duplicate()) .maxNumFileHandles(2) .withCombiner(new MockCombiningReduceStub()) .enableSpilling(getIOManager(), 0.8f) .memoryFraction(this.perSortFractionMem) .objectReuse(true) .largeRecords(true) .build(new UniformRecordGenerator(keyCnt, valCnt, false)); addInput(sorter.getIterator()); 
GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>(); testDriver(testTask, MockCombiningReduceStub.class); } catch (Exception e) { LOG.info("Exception while running the test task.", e); fail("Invoke method caused exception: " + e.getMessage()); } finally { if (sorter != null) { sorter.close(); } } int expSum = 0; for (int i = 1; i < valCnt; i++) { expSum += i; } assertThat(this.outList) .withFailMessage("Resultset size was %d. Expected was %d", outList.size(), keyCnt) .hasSize(keyCnt); for (Record record : this.outList) { assertThat(record.getField(1, IntValue.class).getValue()) .withFailMessage("Incorrect result") .isEqualTo(expSum - record.getField(0, IntValue.class).getValue()); } this.outList.clear(); } @TestTemplate void testMultiLevelMergeCombiningReduceTask() throws IOException { int keyCnt = 32768; int valCnt = 8; addDriverComparator(this.comparator); setOutput(this.outList); getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE); Sorter<Record> sorter = null; try { sorter = ExternalSorter.newBuilder( getMemoryManager(), getContainingTask(), RecordSerializerFactory.get().getSerializer(), this.comparator.duplicate()) .maxNumFileHandles(2) .withCombiner(new MockCombiningReduceStub()) .enableSpilling(getIOManager(), 0.8f) .memoryFraction(this.perSortFractionMem) .objectReuse(false) .largeRecords(true) .build(new UniformRecordGenerator(keyCnt, valCnt, false)); addInput(sorter.getIterator()); GroupReduceDriver<Record, Record> testTask = new GroupReduceDriver<>(); testDriver(testTask, MockCombiningReduceStub.class); } catch (Exception e) { LOG.info("Exception while running the test task.", e); fail("Invoke method caused exception: " + e.getMessage()); } finally { if (sorter != null) { sorter.close(); } } int expSum = 0; for (int i = 1; i < valCnt; i++) { expSum += i; } assertThat(this.outList) .withFailMessage("Resultset size was %d. 
Expected was %d", outList.size(), keyCnt) .hasSize(keyCnt); for (Record record : this.outList) { assertThat(record.getField(1, IntValue.class).getValue()) .withFailMessage("Incorrect result") .isEqualTo(expSum - record.getField(0, IntValue.class).getValue()); } this.outList.clear(); } public static
ReduceTaskExternalITCase
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/search/fetch/FetchContext.java
{ "start": 1781, "end": 9885 }
class ____ { private final SearchContext searchContext; private final SourceLoader sourceLoader; private final FetchSourceContext fetchSourceContext; private final StoredFieldsContext storedFieldsContext; /** * Create a FetchContext based on a SearchContext */ public FetchContext(SearchContext searchContext, SourceLoader sourceLoader) { this.searchContext = searchContext; this.sourceLoader = sourceLoader; this.storedFieldsContext = buildStoredFieldsContext(searchContext); this.fetchSourceContext = buildFetchSourceContext(searchContext); } private static FetchSourceContext buildFetchSourceContext(SearchContext in) { FetchSourceContext fsc = in.fetchSourceContext(); StoredFieldsContext sfc = in.storedFieldsContext(); if (fsc == null) { boolean hasStoredFields = in.hasStoredFields(); boolean hasScriptFields = in.hasScriptFields(); // TODO it seems a bit odd that we disable implicit source loading if we've asked // for stored fields or script fields? But not eg doc_value fields or via // the `fields` API if (hasStoredFields == false && hasScriptFields == false) { fsc = FetchSourceContext.of(true); } } if (sfc != null && sfc.fetchFields()) { for (String field : sfc.fieldNames()) { if (SourceFieldMapper.NAME.equals(field)) { fsc = fsc == null ? 
FetchSourceContext.of(true) : FetchSourceContext.of(true, fsc.excludeVectors(), fsc.includes(), fsc.excludes()); } } } if (sfc != null && sfc.fetchFields() == false) { fsc = null; } return fsc; } private static StoredFieldsContext buildStoredFieldsContext(SearchContext in) { StoredFieldsContext sfc = in.storedFieldsContext(); if (sfc == null) { // if nothing is requested then we just do a standard metadata stored fields request sfc = StoredFieldsContext.metadataOnly(); } return sfc; } /** * The name of the index that documents are being fetched from */ public String getIndexName() { return searchContext.indexShard().shardId().getIndexName(); } /** * The point-in-time searcher the original query was executed against */ public ContextIndexSearcher searcher() { return searchContext.searcher(); } /** * The original query, not rewritten. */ public Query query() { return searchContext.query(); } /** * The original query in its rewritten form. */ public Query rewrittenQuery() { return searchContext.rewrittenQuery(); } /** * The original query with additional filters and named queries */ public ParsedQuery parsedQuery() { return searchContext.parsedQuery(); } /** * Any post-filters run as part of the search */ public ParsedQuery parsedPostFilter() { return searchContext.parsedPostFilter(); } /** * Configuration for fetching _source */ public FetchSourceContext fetchSourceContext() { return this.fetchSourceContext; } /** * Configuration for fetching stored fields */ public StoredFieldsContext storedFieldsContext() { return storedFieldsContext; } /** * Should the response include `explain` output */ public boolean explain() { return searchContext.explain() && searchContext.query() != null; } /** * The rescorers included in the original search, used for explain output */ public List<RescoreContext> rescore() { return searchContext.rescore(); } /** * The rank builder used in the original search */ public RankBuilder rankBuilder() { return searchContext.request().source() == 
null ? null : searchContext.request().source().rankBuilder(); } public List<String> queryNames() { return searchContext.request().source() == null ? Collections.emptyList() : searchContext.request().source().subSearches().stream().map(x -> x.getQueryBuilder().queryName()).toList(); } /** * Should the response include sequence number and primary term metadata */ public boolean seqNoAndPrimaryTerm() { return searchContext.seqNoAndPrimaryTerm(); } /** * Configuration for fetching docValues fields */ public FetchDocValuesContext docValuesContext() { FetchDocValuesContext dvContext = searchContext.docValuesContext(); if (searchContext.collapse() != null) { // retrieve the `doc_value` associated with the collapse field String name = searchContext.collapse().getFieldName(); if (dvContext == null) { return new FetchDocValuesContext( searchContext.getSearchExecutionContext(), Collections.singletonList(new FieldAndFormat(name, null)) ); } else if (searchContext.docValuesContext().fields().stream().map(ff -> ff.field).noneMatch(name::equals)) { dvContext.fields().add(new FieldAndFormat(name, null)); } } return dvContext; } /** * Configuration for highlighting */ public SearchHighlightContext highlight() { return searchContext.highlight(); } /** * Does the index analyzer for this field have token filters that may produce * backwards offsets in term vectors */ public boolean containsBrokenAnalysis(String field) { return getSearchExecutionContext().containsBrokenAnalysis(field); } /** * Should the response include scores, even if scores were not calculated in the original query */ public boolean fetchScores() { return searchContext.sort() != null && searchContext.trackScores(); } /** * Configuration for returning inner hits */ public InnerHitsContext innerHits() { return searchContext.innerHits(); } /** * Should the response include version metadata */ public boolean version() { return searchContext.version(); } /** * Configuration for the 'fields' response */ public 
FetchFieldsContext fetchFieldsContext() { return searchContext.fetchFieldsContext(); } /** * Configuration for script fields */ public ScriptFieldsContext scriptFields() { return searchContext.scriptFields(); } /** * Configuration for external fetch phase plugins */ public SearchExtBuilder getSearchExt(String name) { return searchContext.getSearchExt(name); } public SearchExecutionContext getSearchExecutionContext() { return searchContext.getSearchExecutionContext(); } /** * Loads source {@code _source} during a GET or {@code _search}. */ public SourceLoader sourceLoader() { return sourceLoader; } /** * For a hit document that's being processed, return the source lookup representing the * root document. This method is used to pass down the root source when processing this * document's nested inner hits. * * @param hitContext The context of the hit that's being processed. */ public Source getRootSource(FetchSubPhase.HitContext hitContext) { // Usually the root source simply belongs to the hit we're processing. But if // there are multiple layers of inner hits and we're in a nested context, then // the root source is found on the inner hits context. if (searchContext instanceof InnerHitSubContext innerHitsContext && hitContext.hit().getNestedIdentity() != null) { return innerHitsContext.getRootLookup(); } else { return hitContext.source(); } } }
FetchContext
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/asm/ClassVisitor.java
{ "start": 8290, "end": 8494 }
class ____ not enclosed in a method or constructor of its enclosing class (for example, if it is * enclosed in an instance initializer, static initializer, instance variable initializer, or *
is
java
elastic__elasticsearch
benchmarks/src/main/java/org/elasticsearch/benchmark/indices/breaker/MemoryStatsBenchmark.java
{ "start": 1413, "end": 2926 }
class ____ { private static final MemoryMXBean MEMORY_MX_BEAN = ManagementFactory.getMemoryMXBean(); @Param({ "0", "16", "256", "4096" }) private int tokens; @Benchmark public void baseline() { Blackhole.consumeCPU(tokens); } @Benchmark @Threads(1) public long getMemoryStats_01() { Blackhole.consumeCPU(tokens); return MEMORY_MX_BEAN.getHeapMemoryUsage().getUsed(); } @Benchmark @Threads(2) public long getMemoryStats_02() { Blackhole.consumeCPU(tokens); return MEMORY_MX_BEAN.getHeapMemoryUsage().getUsed(); } @Benchmark @Threads(4) public long getMemoryStats_04() { Blackhole.consumeCPU(tokens); return MEMORY_MX_BEAN.getHeapMemoryUsage().getUsed(); } @Benchmark @Threads(8) public long getMemoryStats_08() { Blackhole.consumeCPU(tokens); return MEMORY_MX_BEAN.getHeapMemoryUsage().getUsed(); } @Benchmark @Threads(16) public long getMemoryStats_16() { Blackhole.consumeCPU(tokens); return MEMORY_MX_BEAN.getHeapMemoryUsage().getUsed(); } @Benchmark @Threads(32) public long getMemoryStats_32() { Blackhole.consumeCPU(tokens); return MEMORY_MX_BEAN.getHeapMemoryUsage().getUsed(); } @Benchmark @Threads(64) public long getMemoryStats_64() { Blackhole.consumeCPU(tokens); return MEMORY_MX_BEAN.getHeapMemoryUsage().getUsed(); } }
MemoryStatsBenchmark
java
apache__camel
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/OpensearchComponentBuilderFactory.java
{ "start": 1384, "end": 1878 }
interface ____ { /** * OpenSearch (camel-opensearch) * Send requests to OpenSearch via Java Client API. * * Category: search,monitoring * Since: 4.0 * Maven coordinates: org.apache.camel:camel-opensearch * * @return the dsl builder */ static OpensearchComponentBuilder opensearch() { return new OpensearchComponentBuilderImpl(); } /** * Builder for the OpenSearch component. */
OpensearchComponentBuilderFactory
java
micronaut-projects__micronaut-core
core/src/main/java/io/micronaut/core/convert/TypeConverter.java
{ "start": 1546, "end": 3118 }
interface ____<S, T> { /** * Converts from the given source object type to the target type. * * @param object The object type * @param targetType The target type being converted to * @return The converted type or empty if the conversion is not possible */ default Optional<T> convert(S object, Class<T> targetType) { return convert(object, targetType, ConversionContext.DEFAULT); } /** * Converts from the given source object type to the target type. Implementers should take care to return {@link Optional#empty()} * in case the object is not convertible by catching any necessary exceptions and failing gracefully. * * @param object The object type * @param targetType The target type being converted to * @param context The {@link ConversionContext} * @return The converted type or empty if the conversion is not possible */ Optional<T> convert(S object, Class<T> targetType, ConversionContext context); /** * Creates a new {@link TypeConverter} for the give source type, target type and conversion function. * * @param sourceType The source type * @param targetType The target type * @param converter The converter function * @param <ST> The source generic type * @param <TT> The target generic type * @return The converter instance */ static <ST, TT> TypeConverter<ST, TT> of(Class<ST> sourceType, Class<TT> targetType, Function<ST, TT> converter) { // Keep the anonymous
TypeConverter
java
alibaba__nacos
client/src/test/java/com/alibaba/nacos/client/naming/utils/GenericPollerTest.java
{ "start": 809, "end": 1479 }
class ____ { @Test void testNext() { String item1 = "item1"; String item2 = "item2"; GenericPoller<String> poller = new GenericPoller<>(Arrays.asList(item1, item2)); assertEquals(item1, poller.next()); assertEquals(item2, poller.next()); assertEquals(item1, poller.next()); } @Test void testRefresh() { String item1 = "item1"; String item2 = "item2"; GenericPoller<String> poller = new GenericPoller<>(Arrays.asList(item1, item2)); Poller<String> poller1 = poller.refresh(Arrays.asList(item2)); assertEquals(item2, poller1.next()); } }
GenericPollerTest
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/query/internal/QueryInterpretationCacheStandardImpl.java
{ "start": 1170, "end": 7152 }
class ____ implements QueryInterpretationCache { private static final Logger LOG = QueryLogging.subLogger( "plan.cache" ); /** * the cache of the actual plans... */ private final InternalCache<Key, QueryPlan> queryPlanCache; private final ServiceRegistry serviceRegistry; private final InternalCache<Object, HqlInterpretation<?>> hqlInterpretationCache; private final InternalCache<String, ParameterInterpretation> nativeQueryParamCache; private StatisticsImplementor statistics; public QueryInterpretationCacheStandardImpl(int maxQueryPlanCount, ServiceRegistry serviceRegistry) { LOG.tracef( "Starting query interpretation cache (size %s)", maxQueryPlanCount ); final var cacheFactory = serviceRegistry.requireService( InternalCacheFactory.class ); this.queryPlanCache = cacheFactory.createInternalCache( maxQueryPlanCount ); this.hqlInterpretationCache = cacheFactory.createInternalCache( maxQueryPlanCount ); this.nativeQueryParamCache = cacheFactory.createInternalCache( maxQueryPlanCount ); this.serviceRegistry = serviceRegistry; } @Override public int getNumberOfCachedHqlInterpretations() { return hqlInterpretationCache.heldElementsEstimate(); } @Override public int getNumberOfCachedQueryPlans() { return queryPlanCache.heldElementsEstimate(); } private StatisticsImplementor getStatistics() { if ( statistics == null ) { statistics = serviceRegistry.requireService( StatisticsImplementor.class ); } return statistics; } @Override public <R> SelectQueryPlan<R> resolveSelectQueryPlan( Key key, Supplier<SelectQueryPlan<R>> creator) { return resolveSelectQueryPlan( key, k -> creator.get() ); } @Override public <K extends Key, R> SelectQueryPlan<R> resolveSelectQueryPlan( K key, Function<K, SelectQueryPlan<R>> creator) { LOG.tracef( "Resolving cached query plan for [%s]", key ); final var statistics = getStatistics(); final boolean statisticsEnabled = statistics.isStatisticsEnabled(); @SuppressWarnings("unchecked") final var cached = (SelectQueryPlan<R>) queryPlanCache.get( key ); 
if ( cached != null ) { if ( statisticsEnabled ) { statistics.queryPlanCacheHit( key.getQueryString() ); } return cached; } final var plan = creator.apply( key ); queryPlanCache.put( key.prepareForStore(), plan ); if ( statisticsEnabled ) { statistics.queryPlanCacheMiss( key.getQueryString() ); } return plan; } @Override public NonSelectQueryPlan getNonSelectQueryPlan(Key key) { return null; } @Override public void cacheNonSelectQueryPlan(Key key, NonSelectQueryPlan plan) { } @Override public <R> HqlInterpretation<R> resolveHqlInterpretation( String queryString, Class<R> expectedResultType, HqlTranslator translator) { LOG.tracef( "Resolving HQL interpretation for [%s]", queryString ); final var statistics = getStatistics(); final Object cacheKey = expectedResultType != null ? new HqlInterpretationCacheKey( queryString, expectedResultType ) : queryString; final var existing = hqlInterpretationCache.get( cacheKey ); if ( existing != null ) { if ( statistics.isStatisticsEnabled() ) { statistics.queryPlanCacheHit( queryString ); } //noinspection unchecked return (HqlInterpretation<R>) existing; } else if ( expectedResultType != null ) { final var existingQueryOnly = hqlInterpretationCache.get( queryString ); if ( existingQueryOnly != null ) { if ( statistics.isStatisticsEnabled() ) { statistics.queryPlanCacheHit( queryString ); } //noinspection unchecked return (HqlInterpretation<R>) existingQueryOnly; } } final var hqlInterpretation = createHqlInterpretation( queryString, expectedResultType, translator, statistics ); hqlInterpretationCache.put( cacheKey, hqlInterpretation ); return hqlInterpretation; } @Override public <R> void cacheHqlInterpretation(Object cacheKey, HqlInterpretation<R> hqlInterpretation) { hqlInterpretationCache.put( cacheKey, hqlInterpretation ); } protected static <R> HqlInterpretation<R> createHqlInterpretation( String queryString, Class<R> expectedResultType, HqlTranslator translator, StatisticsImplementor statistics) { final boolean 
statisticsEnabled = statistics.isStatisticsEnabled(); final long startTime = statisticsEnabled ? System.nanoTime() : 0L; final var sqmStatement = translator.translate( queryString, expectedResultType ); final ParameterMetadataImplementor parameterMetadata; final DomainParameterXref domainParameterXref; if ( sqmStatement.getSqmParameters().isEmpty() ) { domainParameterXref = DomainParameterXref.EMPTY; parameterMetadata = ParameterMetadataImpl.EMPTY; } else { domainParameterXref = DomainParameterXref.from( sqmStatement ); parameterMetadata = new ParameterMetadataImpl( domainParameterXref.getQueryParameters() ); } if ( statisticsEnabled ) { final long endTime = System.nanoTime(); final long microseconds = TimeUnit.MICROSECONDS.convert( endTime - startTime, TimeUnit.NANOSECONDS ); statistics.queryCompiled( queryString, microseconds ); } return new SimpleHqlInterpretationImpl<>( sqmStatement, parameterMetadata, domainParameterXref ); } @Override public ParameterInterpretation resolveNativeQueryParameters( String queryString, Function<String, ParameterInterpretation> creator) { LOG.tracef( "Resolving native query parameters for [%s]", queryString ); return nativeQueryParamCache.computeIfAbsent( queryString, creator ); } @Override public boolean isEnabled() { return true; } @Override public void close() { LOG.trace( "Destroying query interpretation cache" ); hqlInterpretationCache.clear(); nativeQueryParamCache.clear(); queryPlanCache.clear(); } /** * Interpretation-cache key used for HQL interpretations */ private record HqlInterpretationCacheKey(String queryString, Class<?> expectedResultType) { } }
QueryInterpretationCacheStandardImpl
java
ReactiveX__RxJava
src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableSingleMaybe.java
{ "start": 1234, "end": 2969 }
class ____<T> implements Observer<T>, Disposable { final MaybeObserver<? super T> downstream; Disposable upstream; T value; boolean done; SingleElementObserver(MaybeObserver<? super T> downstream) { this.downstream = downstream; } @Override public void onSubscribe(Disposable d) { if (DisposableHelper.validate(this.upstream, d)) { this.upstream = d; downstream.onSubscribe(this); } } @Override public void dispose() { upstream.dispose(); } @Override public boolean isDisposed() { return upstream.isDisposed(); } @Override public void onNext(T t) { if (done) { return; } if (value != null) { done = true; upstream.dispose(); downstream.onError(new IllegalArgumentException("Sequence contains more than one element!")); return; } value = t; } @Override public void onError(Throwable t) { if (done) { RxJavaPlugins.onError(t); return; } done = true; downstream.onError(t); } @Override public void onComplete() { if (done) { return; } done = true; T v = value; value = null; if (v == null) { downstream.onComplete(); } else { downstream.onSuccess(v); } } } }
SingleElementObserver
java
apache__kafka
clients/src/test/java/org/apache/kafka/common/security/oauthbearer/internals/secured/assertion/FileAssertionJwtTemplateTest.java
{ "start": 1460, "end": 4503 }
class ____ { @Test public void testBasicUsage() throws Exception { String expected = createTemplateJson( Map.of("typ", "JWT", "alg", "RS256"), Map.of("sub", "jdoe") ); File tmpFile = tempFile(expected); try (AssertionJwtTemplate template = new FileAssertionJwtTemplate(tmpFile)) { Map<String, Object> header = template.header(); assertNotNull(header); assertEquals("JWT", header.get("typ")); assertEquals("RS256", header.get("alg")); Map<String, Object> payload = template.payload(); assertNotNull(payload); assertEquals("jdoe", payload.get("sub")); } } @Test public void testHeaderOnly() throws Exception { String expected = toJson( Map.of( "header", Map.of("typ", "JWT", "alg", "RS256") ) ); File tmpFile = tempFile(expected); try (AssertionJwtTemplate template = new FileAssertionJwtTemplate(tmpFile)) { Map<String, Object> header = template.header(); assertNotNull(header); assertEquals("JWT", header.get("typ")); assertEquals("RS256", header.get("alg")); Map<String, Object> payload = template.payload(); assertNotNull(payload); assertTrue(payload.isEmpty()); } } @Test public void testPayloadOnly() throws Exception { String expected = toJson( Map.of( "payload", Map.of("sub", "jdoe") ) ); File tmpFile = tempFile(expected); try (AssertionJwtTemplate template = new FileAssertionJwtTemplate(tmpFile)) { Map<String, Object> header = template.header(); assertNotNull(header); assertTrue(header.isEmpty()); Map<String, Object> payload = template.payload(); assertNotNull(payload); assertEquals("jdoe", payload.get("sub")); } } @Test public void testMalformedFile() throws Exception { String expected = "{invalid-json}"; File tmpFile = tempFile(expected); assertThrows(KafkaException.class, () -> new FileAssertionJwtTemplate(tmpFile)); } @Test public void testMalformedFormat() throws Exception { String expected = toJson(Map.of("header", List.of("foo", "bar", "baz"))); File tmpFile = tempFile(expected); assertThrows(KafkaException.class, () -> new FileAssertionJwtTemplate(tmpFile)); } private 
String createTemplateJson(Map<String, Object> header, Map<String, Object> payload) { Map<String, Object> topLevel = Map.of("header", header, "payload", payload); return toJson(topLevel); } private String toJson(Map<String, Object> map) { ObjectMapper mapper = new ObjectMapper(); return assertDoesNotThrow(() -> mapper.writeValueAsString(map)); } }
FileAssertionJwtTemplateTest
java
spring-projects__spring-boot
module/spring-boot-quartz/src/test/java/org/springframework/boot/quartz/autoconfigure/QuartzAutoConfigurationTests.java
{ "start": 4103, "end": 19325 }
class ____ { private final ApplicationContextRunner contextRunner = new ApplicationContextRunner() .withPropertyValues("spring.datasource.generate-unique-name=true") .withConfiguration(AutoConfigurations.of(QuartzAutoConfiguration.class)); @Test void withNoDataSource() { this.contextRunner.run((context) -> { assertThat(context).hasSingleBean(Scheduler.class); Scheduler scheduler = context.getBean(Scheduler.class); assertThat(scheduler.getMetaData().getJobStoreClass()).isAssignableFrom(RAMJobStore.class); }); } @Test void withDataSourceUseMemoryByDefault() { this.contextRunner .withConfiguration(AutoConfigurations.of(DataSourceAutoConfiguration.class, DataSourceTransactionManagerAutoConfiguration.class)) .run((context) -> { assertThat(context).hasSingleBean(Scheduler.class); Scheduler scheduler = context.getBean(Scheduler.class); assertThat(scheduler.getMetaData().getJobStoreClass()).isAssignableFrom(RAMJobStore.class); }); } @Test void withDataSource() { this.contextRunner.withUserConfiguration(QuartzJobsConfiguration.class) .withConfiguration(AutoConfigurations.of(DataSourceAutoConfiguration.class, DataSourceTransactionManagerAutoConfiguration.class)) .withPropertyValues("spring.quartz.job-store-type=jdbc") .run(assertDataSourceInitializedByDataSourceDatabaseScriptInitializer("dataSource")); } @Test void withDataSourceAndInMemoryStoreDoesNotInitializeDataSource() { this.contextRunner.withUserConfiguration(QuartzJobsConfiguration.class) .withConfiguration(AutoConfigurations.of(DataSourceAutoConfiguration.class, DataSourceTransactionManagerAutoConfiguration.class)) .withPropertyValues("spring.quartz.job-store-type=memory") .run((context) -> { JdbcTemplate jdbcTemplate = new JdbcTemplate(context.getBean("dataSource", DataSource.class)); assertThat(jdbcTemplate.queryForList("SHOW TABLES") .stream() .map((table) -> (String) table.get("TABLE_NAME"))).noneMatch((name) -> name.startsWith("QRTZ")); }); } @Test void dataSourceInitializationBacksOffWithoutSpringBootJdbc() { 
this.contextRunner.withBean(DataSource.class, QuartzAutoConfigurationTests::createTestDataSource) .withBean(SchedulerFactoryBeanCustomizer.class, () -> (schedulerFactoryBean) -> { // Mock out the scheduler so that the context doesn't fail to start // due to missing tables that the JDBC job store requires. try { SchedulerFactory factory = mock(SchedulerFactory.class); given(factory.getScheduler()).willReturn(mock(Scheduler.class)); given(factory.getScheduler(anyString())).willReturn(mock(Scheduler.class)); schedulerFactoryBean.setSchedulerFactory(factory); } catch (SchedulerException ex) { throw new RuntimeException(ex); } }) .withClassLoader(new FilteredClassLoader("org.springframework.boot.jdbc", "org.springframework.boot.sql") { @Override public Enumeration<URL> getResources(String name) throws IOException { Enumeration<URL> resources = super.getResources(name); if (!name.equals("META-INF/spring.factories")) { return resources; } // Hide META-INF/spring.factories files with entries from // org.springframework.boot.jdbc return Collections.enumeration(Collections.list(resources).stream().filter((url) -> { try (InputStream input = url.openStream()) { String content = StreamUtils.copyToString(input, StandardCharsets.UTF_8); return !content.contains("org.springframework.boot.jdbc."); } catch (IOException ex) { return true; } }).toList()); } }) .withPropertyValues("spring.quartz.job-store-type=jdbc") .run((context) -> assertThat(context).doesNotHaveBean(QuartzDataSourceScriptDatabaseInitializer.class)); } @Test void withDataSourceNoTransactionManager() { this.contextRunner.withUserConfiguration(QuartzJobsConfiguration.class) .withConfiguration(AutoConfigurations.of(DataSourceAutoConfiguration.class)) .withPropertyValues("spring.quartz.job-store-type=jdbc") .run(assertDataSourceInitializedByDataSourceDatabaseScriptInitializer("dataSource")); } @Test void dataSourceWithQuartzDataSourceQualifierUsedWhenMultiplePresent() { 
this.contextRunner.withUserConfiguration(QuartzJobsConfiguration.class, MultipleDataSourceConfiguration.class) .withPropertyValues("spring.quartz.job-store-type=jdbc") .run(assertDataSourceInitializedByDataSourceDatabaseScriptInitializer("quartzDataSource")); } @Test void transactionManagerWithQuartzTransactionManagerUsedWhenMultiplePresent() { this.contextRunner .withUserConfiguration(QuartzJobsConfiguration.class, MultipleTransactionManagersConfiguration.class) .withPropertyValues("spring.quartz.job-store-type=jdbc") .run((context) -> { SchedulerFactoryBean schedulerFactoryBean = context.getBean(SchedulerFactoryBean.class); assertThat(schedulerFactoryBean).extracting("transactionManager") .isEqualTo(context.getBean("quartzTransactionManager")); }); } @Test void withTaskExecutor() { this.contextRunner.withUserConfiguration(MockExecutorConfiguration.class) .withPropertyValues("spring.quartz.properties.org.quartz.threadPool.threadCount=50") .run((context) -> { assertThat(context).hasSingleBean(Scheduler.class); Scheduler scheduler = context.getBean(Scheduler.class); assertThat(scheduler.getMetaData().getThreadPoolSize()).isEqualTo(50); Executor executor = context.getBean(Executor.class); then(executor).shouldHaveNoInteractions(); }); } @Test void withOverwriteExistingJobs() { this.contextRunner.withUserConfiguration(OverwriteTriggerConfiguration.class) .withPropertyValues("spring.quartz.overwrite-existing-jobs=true") .run((context) -> { assertThat(context).hasSingleBean(Scheduler.class); Scheduler scheduler = context.getBean(Scheduler.class); Trigger fooTrigger = scheduler.getTrigger(TriggerKey.triggerKey("fooTrigger")); assertThat(fooTrigger).isNotNull(); assertThat(((SimpleTrigger) fooTrigger).getRepeatInterval()).isEqualTo(30000); }); } @Test void withConfiguredJobAndTrigger(CapturedOutput output) { this.contextRunner.withUserConfiguration(QuartzFullConfiguration.class) .withPropertyValues("test-name=withConfiguredJobAndTrigger") .run((context) -> { 
assertThat(context).hasSingleBean(Scheduler.class); Scheduler scheduler = context.getBean(Scheduler.class); assertThat(scheduler.getJobDetail(JobKey.jobKey("fooJob"))).isNotNull(); assertThat(scheduler.getTrigger(TriggerKey.triggerKey("fooTrigger"))).isNotNull(); Awaitility.waitAtMost(Duration.ofSeconds(5)) .untilAsserted( () -> assertThat(output).contains("withConfiguredJobAndTrigger").contains("jobDataValue")); }); } @Test void withConfiguredCalendars() { this.contextRunner.withUserConfiguration(QuartzCalendarsConfiguration.class).run((context) -> { assertThat(context).hasSingleBean(Scheduler.class); Scheduler scheduler = context.getBean(Scheduler.class); assertThat(scheduler.getCalendar("weekly")).isNotNull(); assertThat(scheduler.getCalendar("monthly")).isNotNull(); }); } @Test void withQuartzProperties() { this.contextRunner.withPropertyValues("spring.quartz.properties.org.quartz.scheduler.instanceId=FOO") .run((context) -> { assertThat(context).hasSingleBean(Scheduler.class); Scheduler scheduler = context.getBean(Scheduler.class); assertThat(scheduler.getSchedulerInstanceId()).isEqualTo("FOO"); }); } @Test void withCustomizer() { this.contextRunner.withUserConfiguration(QuartzCustomConfiguration.class).run((context) -> { assertThat(context).hasSingleBean(Scheduler.class); Scheduler scheduler = context.getBean(Scheduler.class); assertThat(scheduler.getSchedulerName()).isEqualTo("fooScheduler"); }); } @Test void validateDefaultProperties() { this.contextRunner.withUserConfiguration(ManualSchedulerConfiguration.class).run((context) -> { assertThat(context).hasSingleBean(SchedulerFactoryBean.class); SchedulerFactoryBean schedulerFactory = context.getBean(SchedulerFactoryBean.class); QuartzProperties properties = new QuartzProperties(); assertThat(properties.isAutoStartup()).isEqualTo(schedulerFactory.isAutoStartup()); assertThat(schedulerFactory).hasFieldOrPropertyWithValue("startupDelay", (int) properties.getStartupDelay().getSeconds()); 
assertThat(schedulerFactory).hasFieldOrPropertyWithValue("waitForJobsToCompleteOnShutdown", properties.isWaitForJobsToCompleteOnShutdown()); assertThat(schedulerFactory).hasFieldOrPropertyWithValue("overwriteExistingJobs", properties.isOverwriteExistingJobs()); }); } @Test void withCustomConfiguration() { this.contextRunner .withPropertyValues("spring.quartz.auto-startup=false", "spring.quartz.startup-delay=1m", "spring.quartz.wait-for-jobs-to-complete-on-shutdown=true", "spring.quartz.overwrite-existing-jobs=true") .run((context) -> { assertThat(context).hasSingleBean(SchedulerFactoryBean.class); SchedulerFactoryBean schedulerFactory = context.getBean(SchedulerFactoryBean.class); assertThat(schedulerFactory.isAutoStartup()).isFalse(); assertThat(schedulerFactory).hasFieldOrPropertyWithValue("startupDelay", 60); assertThat(schedulerFactory).hasFieldOrPropertyWithValue("waitForJobsToCompleteOnShutdown", true); assertThat(schedulerFactory).hasFieldOrPropertyWithValue("overwriteExistingJobs", true); }); } @Test void withLiquibase() { this.contextRunner.withUserConfiguration(QuartzJobsConfiguration.class) .withConfiguration(AutoConfigurations.of(DataSourceAutoConfiguration.class, DataSourceTransactionManagerAutoConfiguration.class, LiquibaseAutoConfiguration.class)) .withPropertyValues("spring.quartz.job-store-type=jdbc", "spring.quartz.jdbc.initialize-schema=never", "spring.liquibase.change-log=classpath:org/quartz/impl/jdbcjobstore/liquibase.quartz.init.xml") .run(assertDataSourceInitialized("dataSource").andThen( (context) -> assertThat(context).doesNotHaveBean(QuartzDataSourceScriptDatabaseInitializer.class))); } @Test void withFlyway(@TempDir Path flywayLocation) throws Exception { ClassPathResource tablesResource = new ClassPathResource("org/quartz/impl/jdbcjobstore/tables_h2.sql"); try (InputStream stream = tablesResource.getInputStream()) { Files.copy(stream, flywayLocation.resolve("V2__quartz.sql")); } 
this.contextRunner.withUserConfiguration(QuartzJobsConfiguration.class) .withConfiguration(AutoConfigurations.of(DataSourceAutoConfiguration.class, DataSourceTransactionManagerAutoConfiguration.class, FlywayAutoConfiguration.class)) .withPropertyValues("spring.quartz.job-store-type=jdbc", "spring.quartz.jdbc.initialize-schema=never", "spring.flyway.locations=filesystem:" + flywayLocation, "spring.flyway.baseline-on-migrate=true") .run(assertDataSourceInitialized("dataSource").andThen( (context) -> assertThat(context).doesNotHaveBean(QuartzDataSourceScriptDatabaseInitializer.class))); } @Test void schedulerNameWithDedicatedProperty() { this.contextRunner.withPropertyValues("spring.quartz.scheduler-name=testScheduler") .run(assertSchedulerName("testScheduler")); } @Test void schedulerNameWithQuartzProperty() { this.contextRunner .withPropertyValues("spring.quartz.properties.org.quartz.scheduler.instanceName=testScheduler") .run(assertSchedulerName("testScheduler")); } @Test void schedulerNameWithDedicatedPropertyTakesPrecedence() { this.contextRunner .withPropertyValues("spring.quartz.scheduler-name=specificTestScheduler", "spring.quartz.properties.org.quartz.scheduler.instanceName=testScheduler") .run(assertSchedulerName("specificTestScheduler")); } @Test void schedulerNameUseBeanNameByDefault() { this.contextRunner.withPropertyValues().run(assertSchedulerName("quartzScheduler")); } @Test void whenTheUserDefinesTheirOwnQuartzDatabaseInitializerThenTheAutoConfiguredInitializerBacksOff() { this.contextRunner.withUserConfiguration(CustomQuartzDatabaseInitializerConfiguration.class) .withConfiguration(AutoConfigurations.of(DataSourceAutoConfiguration.class, DataSourceTransactionManagerAutoConfiguration.class)) .withPropertyValues("spring.quartz.job-store-type=jdbc") .run((context) -> assertThat(context).hasSingleBean(QuartzDataSourceScriptDatabaseInitializer.class) .doesNotHaveBean("quartzDataSourceScriptDatabaseInitializer") .hasBean("customInitializer")); } @Test void 
whenTheUserDefinesTheirOwnDatabaseInitializerThenTheAutoConfiguredQuartzInitializerRemains() { this.contextRunner.withUserConfiguration(CustomDatabaseInitializerConfiguration.class) .withConfiguration(AutoConfigurations.of(DataSourceAutoConfiguration.class, DataSourceTransactionManagerAutoConfiguration.class)) .withPropertyValues("spring.quartz.job-store-type=jdbc") .run((context) -> assertThat(context).hasSingleBean(QuartzDataSourceScriptDatabaseInitializer.class) .hasBean("customInitializer")); } private ContextConsumer<AssertableApplicationContext> assertDataSourceInitialized(String dataSourceName) { return (context) -> { assertThat(context).hasSingleBean(Scheduler.class); Scheduler scheduler = context.getBean(Scheduler.class); assertThat(scheduler.getMetaData().getJobStoreClass()).isAssignableFrom(LocalDataSourceJobStore.class); JdbcTemplate jdbcTemplate = new JdbcTemplate(context.getBean(dataSourceName, DataSource.class)); assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM QRTZ_JOB_DETAILS", Integer.class)) .isEqualTo(2); assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM QRTZ_SIMPLE_TRIGGERS", Integer.class)) .isZero(); }; } private ContextConsumer<AssertableApplicationContext> assertDataSourceInitializedByDataSourceDatabaseScriptInitializer( String dataSourceName) { return assertDataSourceInitialized(dataSourceName).andThen((context) -> { assertThat(context).hasSingleBean(QuartzDataSourceScriptDatabaseInitializer.class); QuartzDataSourceScriptDatabaseInitializer initializer = context .getBean(QuartzDataSourceScriptDatabaseInitializer.class); assertThat(initializer).hasFieldOrPropertyWithValue("dataSource", context.getBean(dataSourceName)); }); } private ContextConsumer<AssertableApplicationContext> assertSchedulerName(String schedulerName) { return (context) -> { assertThat(context).hasSingleBean(SchedulerFactoryBean.class); SchedulerFactoryBean schedulerFactory = context.getBean(SchedulerFactoryBean.class); 
assertThat(schedulerFactory).hasFieldOrPropertyWithValue("schedulerName", schedulerName); }; } private static DataSource createTestDataSource() { DataSourceProperties properties = new DataSourceProperties(); properties.setGenerateUniqueName(true); try { properties.afterPropertiesSet(); } catch (Exception ex) { throw new RuntimeException(ex); } return properties.initializeDataSourceBuilder().build(); } @Import(ComponentThatUsesScheduler.class) @Configuration(proxyBeanMethods = false) static
QuartzAutoConfigurationTests
java
apache__flink
flink-queryable-state/flink-queryable-state-client-java/src/main/java/org/apache/flink/queryablestate/network/ServerConnection.java
{ "start": 5385, "end": 9887 }
class ____<REQ extends MessageBody, RESP extends MessageBody> implements InternalConnection<REQ, RESP> { private final Function<Channel, EstablishedConnection<REQ, RESP>> connectionFactory; private final CompletableFuture<Void> closeFuture = new CompletableFuture<>(); /** Queue of requests while connecting. */ private final ArrayDeque<PendingConnection.PendingRequest<REQ, RESP>> queuedRequests = new ArrayDeque<>(); /** Failure cause if something goes wrong. */ @Nullable private Throwable failureCause = null; private boolean running = true; /** Creates a pending connection to the given server. */ private PendingConnection( Function<Channel, EstablishedConnection<REQ, RESP>> connectionFactory) { this.connectionFactory = connectionFactory; } /** * Returns a future holding the serialized request result. * * <p>Queues the request for when the channel is handed in. * * @param request the request to be sent. * @return Future holding the serialized result */ @Override public CompletableFuture<RESP> sendRequest(REQ request) { if (failureCause != null) { return FutureUtils.completedExceptionally(failureCause); } else if (!running) { return FutureUtils.completedExceptionally(new ClosedChannelException()); } else { // Queue this and handle when connected final PendingConnection.PendingRequest<REQ, RESP> pending = new PendingConnection.PendingRequest<>(request); queuedRequests.add(pending); return pending; } } @Override public InternalConnection<REQ, RESP> establishConnection(ChannelFuture future) { if (future.isSuccess()) { return createEstablishedConnection(future.channel()); } else { close(future.cause()); return this; } } @Override public boolean isEstablished() { return false; } @Override public CompletableFuture<Void> getCloseFuture() { return closeFuture; } /** * Creates an established connection from the given channel. 
* * @param channel Channel to create an established connection from */ private InternalConnection<REQ, RESP> createEstablishedConnection(Channel channel) { if (failureCause != null || !running) { // Close the channel and we are done. Any queued requests // are removed on the close/failure call and after that no // new ones can be enqueued. channel.close(); return this; } else { final EstablishedConnection<REQ, RESP> establishedConnection = connectionFactory.apply(channel); while (!queuedRequests.isEmpty()) { final PendingConnection.PendingRequest<REQ, RESP> pending = queuedRequests.poll(); FutureUtils.forward( establishedConnection.sendRequest(pending.getRequest()), pending); } return establishedConnection; } } /** Close the connecting channel with a ClosedChannelException. */ @Override public CompletableFuture<Void> close() { return close(new ClosedChannelException()); } /** * Close the connecting channel with an Exception (can be {@code null}) or forward to the * established channel. */ private CompletableFuture<Void> close(Throwable cause) { if (running) { running = false; failureCause = cause; for (PendingConnection.PendingRequest<REQ, RESP> pendingRequest : queuedRequests) { pendingRequest.completeExceptionally(cause); } queuedRequests.clear(); closeFuture.completeExceptionally(cause); } return closeFuture; } /** A pending request queued while the channel is connecting. */ private static final
PendingConnection
java
bumptech__glide
library/src/main/java/com/bumptech/glide/load/engine/cache/DiskLruCacheFactory.java
{ "start": 668, "end": 2278 }
/** Supplies the directory used for the disk cache; resolved lazily so I/O can stay off the UI thread. */
interface ____ {
  /** Returns the folder that will hold the disk cache. NOTE(review): callers appear to tolerate {@code null} — see {@code build()}. */
  File getCacheDirectory();
}

/** Creates a factory that places the cache directly in {@code diskCacheFolder}, limited to {@code diskCacheSize} bytes. */
public DiskLruCacheFactory(final String diskCacheFolder, long diskCacheSize) {
  this(
      new CacheDirectoryGetter() {
        @Override
        public File getCacheDirectory() {
          // Directory is only materialized when the cache is actually built.
          return new File(diskCacheFolder);
        }
      },
      diskCacheSize);
}

/** Creates a factory that places the cache in {@code diskCacheFolder/diskCacheName}, limited to {@code diskCacheSize} bytes. */
public DiskLruCacheFactory(
    final String diskCacheFolder, final String diskCacheName, long diskCacheSize) {
  this(
      new CacheDirectoryGetter() {
        @Override
        public File getCacheDirectory() {
          return new File(diskCacheFolder, diskCacheName);
        }
      },
      diskCacheSize);
}

/**
 * When using this constructor {@link CacheDirectoryGetter#getCacheDirectory()} will be called out
 * of UI thread, allowing to do I/O access without performance impacts.
 *
 * @param cacheDirectoryGetter Interface called out of UI thread to get the cache folder.
 * @param diskCacheSize Desired max bytes size for the LRU disk cache.
 */
// Public API.
@SuppressWarnings("WeakerAccess")
public DiskLruCacheFactory(CacheDirectoryGetter cacheDirectoryGetter, long diskCacheSize) {
  this.diskCacheSize = diskCacheSize;
  this.cacheDirectoryGetter = cacheDirectoryGetter;
}

/** Builds the disk cache, or returns {@code null} if no usable cache directory can be obtained. */
@Override
public DiskCache build() {
  File cacheDir = cacheDirectoryGetter.getCacheDirectory();

  // No directory was supplied: caching is effectively disabled.
  if (cacheDir == null) {
    return null;
  }

  // Use the directory if it already exists, otherwise try to create it
  // (including missing parents). Creation failure also disables caching.
  if (cacheDir.isDirectory() || cacheDir.mkdirs()) {
    return DiskLruCacheWrapper.create(cacheDir, diskCacheSize);
  }

  return null;
}
}
CacheDirectoryGetter
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java
{ "start": 3023, "end": 4467 }
class ____ implements ExchangeSource { private boolean finished; ExchangeSourceImpl() { outstandingSources.trackNewInstance(); } @Override public Page pollPage() { checkFailure(); return buffer.pollPage(); } @Override public boolean isFinished() { checkFailure(); return finished || buffer.isFinished(); } @Override public IsBlockedResult waitForReading() { return buffer.waitForReading(); } @Override public void finish() { if (finished == false) { finished = true; outstandingSources.finishInstance(); } } @Override public int bufferSize() { return buffer.size(); } } /** * Create a new {@link ExchangeSource} for exchanging data * * @see ExchangeSinkOperator */ public ExchangeSource createExchangeSource() { return new ExchangeSourceImpl(); } /** * If we continue fetching pages using the same thread, we risk encountering a StackOverflow error. * On the other hand, if we fork when receiving a reply on the same thread, we add unnecessary overhead * from thread scheduling and context switching. LoopControl can be used to avoid these issues. */ private static
ExchangeSourceImpl
java
spring-projects__spring-framework
spring-webflux/src/main/java/org/springframework/web/reactive/function/server/HandlerFunction.java
{ "start": 954, "end": 1158 }
/**
 * A function that handles a {@link ServerRequest} and produces a response of type
 * {@code T}, wrapped in a {@link Mono} to signal asynchronous completion.
 *
 * @param <T> the type of the response returned by this function
 */
interface ____<T extends ServerResponse> {

  /**
   * Handle the given request.
   * @param request the request to handle
   * @return the response, emitted by the returned {@code Mono} when handling completes
   */
  Mono<T> handle(ServerRequest request);
}
HandlerFunction
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/MisusedWeekYearTest.java
{ "start": 6792, "end": 7783 }
class ____ { void testLiteralPattern() { SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd"); simpleDateFormat = new SimpleDateFormat("MM-dd"); simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd", DateFormatSymbols.getInstance()); simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd", Locale.getDefault()); // Don't flag if the pattern contains "ww", the week-in-year specifier. simpleDateFormat = new SimpleDateFormat("YYYY-ww"); simpleDateFormat = new SimpleDateFormat("YY-ww"); simpleDateFormat = new SimpleDateFormat("Y-ww"); simpleDateFormat = new SimpleDateFormat("Yw"); } void testLiteralPatternWithFolding() { SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy" + "-MM-dd"); } private static final String WEEK_YEAR_PATTERN = "yyyy-MM-dd"; void testConstantPattern() { SimpleDateFormat simpleDateFormat = new SimpleDateFormat(WEEK_YEAR_PATTERN); } private static
MisusedWeekYearNegativeCases
java
grpc__grpc-java
xds/src/main/java/io/grpc/xds/orca/OrcaOobUtil.java
{ "start": 6238, "end": 6428 }
class ____ registered with a component, using methods in {@link OrcaPerRequestUtil}. * When an ORCA report is received, that object's {@code onLoadReport} method is invoked. */ public
is
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/metamodel/mapping/internal/SqlTypedMappingImpl.java
{ "start": 358, "end": 2365 }
class ____ implements SqlTypedMapping {

	private final @Nullable String columnDefinition;
	private final @Nullable Long length;
	private final @Nullable Integer arrayLength;
	private final @Nullable Integer precision;
	private final @Nullable Integer scale;
	private final @Nullable Integer temporalPrecision;
	private final JdbcMapping jdbcMapping;

	/**
	 * Creates a mapping with no explicit column metadata; every size, precision
	 * and definition attribute is left unspecified ({@code null}).
	 */
	public SqlTypedMappingImpl(JdbcMapping jdbcMapping) {
		this( null, null, null, null, null, null, jdbcMapping );
	}

	/**
	 * Creates a mapping without an array length; delegates to the canonical constructor.
	 */
	public SqlTypedMappingImpl(
			@Nullable String columnDefinition,
			@Nullable Long length,
			@Nullable Integer precision,
			@Nullable Integer scale,
			@Nullable Integer temporalPrecision,
			JdbcMapping jdbcMapping) {
		this( columnDefinition, length, null, precision, scale, temporalPrecision, jdbcMapping );
	}

	/**
	 * Canonical constructor capturing all column metadata.
	 */
	public SqlTypedMappingImpl(
			@Nullable String columnDefinition,
			@Nullable Long length,
			@Nullable Integer arrayLength,
			@Nullable Integer precision,
			@Nullable Integer scale,
			@Nullable Integer temporalPrecision,
			JdbcMapping jdbcMapping) {
		// Column definition strings tend to repeat heavily across a model, so
		// interning them lets duplicates share one instance and saves memory.
		this.columnDefinition = columnDefinition != null ? columnDefinition.intern() : null;
		this.length = length;
		this.arrayLength = arrayLength;
		this.precision = precision;
		this.scale = scale;
		this.temporalPrecision = temporalPrecision;
		this.jdbcMapping = jdbcMapping;
	}

	@Override
	public JdbcMapping getJdbcMapping() {
		return jdbcMapping;
	}

	@Override
	public @Nullable String getColumnDefinition() {
		return columnDefinition;
	}

	@Override
	public @Nullable Long getLength() {
		return length;
	}

	@Override
	public @Nullable Integer getArrayLength() {
		return arrayLength;
	}

	@Override
	public @Nullable Integer getPrecision() {
		return precision;
	}

	@Override
	public @Nullable Integer getScale() {
		return scale;
	}

	@Override
	public @Nullable Integer getTemporalPrecision() {
		return temporalPrecision;
	}
}
SqlTypedMappingImpl
java
google__guava
android/guava/src/com/google/common/util/concurrent/AbstractScheduledService.java
{ "start": 19174, "end": 25175 }
class ____ implements Callable<@Nullable Void> { /** The underlying task. */ private final Runnable wrappedRunnable; /** The executor on which this Callable will be scheduled. */ private final ScheduledExecutorService executor; /** * The service that is managing this callable. This is used so that failure can be reported * properly. */ /* * This reference is part of a reference cycle, which is typically something we want to avoid * under j2objc -- but it is not detected by our j2objc cycle test. The cycle: * * - CustomScheduler.service contains an instance of ServiceDelegate. (It needs it so that it * can call notifyFailed.) * * - ServiceDelegate.runningTask contains an instance of ReschedulableCallable (at least in * the case that the service is using CustomScheduler). (It needs it so that it can cancel * the task and detect whether it has been cancelled.) * * - ReschedulableCallable has a reference back to its enclosing CustomScheduler. (It needs it * so that it can call getNextSchedule). * * Maybe there is a way to avoid this cycle. But we think the cycle is safe enough to ignore: * Each task is retained for only as long as it is running -- so it's retained only as long as * it would already be retained by the underlying executor. * * If the cycle test starts reporting this cycle in the future, we should add an entry to * cycle_suppress_list.txt. */ private final AbstractService service; /** * This lock is used to ensure safe and correct cancellation, it ensures that a new task is * not scheduled while a cancel is ongoing. Also it protects the currentFuture variable to * ensure that it is assigned atomically with being scheduled. */ private final ReentrantLock lock = new ReentrantLock(); /** The future that represents the next execution of this task. 
*/ @GuardedBy("lock") private @Nullable SupplantableFuture cancellationDelegate; ReschedulableCallable( AbstractService service, ScheduledExecutorService executor, Runnable runnable) { this.wrappedRunnable = runnable; this.executor = executor; this.service = service; } @Override public @Nullable Void call() throws Exception { wrappedRunnable.run(); reschedule(); return null; } /** * Atomically reschedules this task and assigns the new future to {@link * #cancellationDelegate}. */ @CanIgnoreReturnValue Cancellable reschedule() { // invoke the callback outside the lock, prevents some shenanigans. Schedule schedule; try { schedule = CustomScheduler.this.getNextSchedule(); } catch (Throwable t) { restoreInterruptIfIsInterruptedException(t); service.notifyFailed(t); return new FutureAsCancellable(immediateCancelledFuture()); } // We reschedule ourselves with a lock held for two reasons. 1. we want to make sure that // cancel calls cancel on the correct future. 2. we want to make sure that the assignment // to currentFuture doesn't race with itself so that currentFuture is assigned in the // correct order. Throwable scheduleFailure = null; Cancellable toReturn; lock.lock(); try { toReturn = initializeOrUpdateCancellationDelegate(schedule); } catch (Throwable e) { // Any Exception is either a RuntimeException or sneaky checked exception. // // If an exception is thrown by the subclass then we need to make sure that the service // notices and transitions to the FAILED state. We do it by calling notifyFailed directly // because the service does not monitor the state of the future so if the exception is not // caught and forwarded to the service the task would stop executing but the service would // have no idea. // TODO(lukes): consider building everything in terms of ListenableScheduledFuture then // the AbstractService could monitor the future directly. Rescheduling is still hard... // but it would help with some of these lock ordering issues. 
scheduleFailure = e; toReturn = new FutureAsCancellable(immediateCancelledFuture()); } finally { lock.unlock(); } // Call notifyFailed outside the lock to avoid lock ordering issues. if (scheduleFailure != null) { service.notifyFailed(scheduleFailure); } return toReturn; } @GuardedBy("lock") /* * The GuardedBy checker warns us that we're not holding cancellationDelegate.lock. But in * fact we are holding it because it is the same as this.lock, which we know we are holding, * thanks to @GuardedBy above. (cancellationDelegate.lock is initialized to this.lock in the * call to `new SupplantableFuture` below.) */ @SuppressWarnings("GuardedBy") private Cancellable initializeOrUpdateCancellationDelegate(Schedule schedule) { if (cancellationDelegate == null) { return cancellationDelegate = new SupplantableFuture(lock, submitToExecutor(schedule)); } if (!cancellationDelegate.currentFuture.isCancelled()) { cancellationDelegate.currentFuture = submitToExecutor(schedule); } return cancellationDelegate; } private ScheduledFuture<@Nullable Void> submitToExecutor(Schedule schedule) { return executor.schedule(this, schedule.delay, schedule.unit); } } /** * Contains the most recently submitted {@code Future}, which may be cancelled or updated, * always under a lock. */ private static final
ReschedulableCallable
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/LocalTemporaryTableMutationStrategyNoDropTest.java
{ "start": 2493, "end": 3770 }
class ____ { private static final String MARKER = "$n"; @Test public void testGetSqlTruncateCommand(SessionFactoryScope scope) { scope.inTransaction( session -> { final TemporaryTableExporter temporaryTableExporter = scope.getSessionFactory().getJdbcServices().getDialect().getTemporaryTableExporter(); final TemporaryTable idTable = createTemporaryTable( scope ); final String sqlTruncateCommand = temporaryTableExporter.getSqlTruncateCommand( idTable, null, session ); assertThat( sqlTruncateCommand ) .contains( idTable.getSessionUidColumn().getColumnName() + " = " + MARKER ); } ); } private static TemporaryTable createTemporaryTable(SessionFactoryScope scope) { final SessionFactoryImplementor sessionFactory = scope.getSessionFactory(); final JdbcServices jdbcServices = sessionFactory.getJdbcServices(); return TemporaryTable.createIdTable( scope.getMetadataImplementor().getEntityBinding( TestEntity.class.getName() ), basename -> TemporaryTable.ID_TABLE_PREFIX + basename, TemporaryTableKind.PERSISTENT, jdbcServices.getDialect(), new ModelCreationContext( sessionFactory, scope, jdbcServices ) ); } public static
LocalTemporaryTableMutationStrategyNoDropTest
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/ResourceManagerProcessContext.java
{ "start": 1840, "end": 5208 }
class ____ { private final Configuration rmConfig; private final ResourceID resourceId; private final ResourceManagerRuntimeServicesConfiguration rmRuntimeServicesConfig; private final RpcService rpcService; private final HighAvailabilityServices highAvailabilityServices; private final HeartbeatServices heartbeatServices; private final DelegationTokenManager delegationTokenManager; private final FatalErrorHandler fatalErrorHandler; private final ClusterInformation clusterInformation; @Nullable private final String webInterfaceUrl; private final MetricRegistry metricRegistry; private final String hostname; private final Executor ioExecutor; public ResourceManagerProcessContext( Configuration rmConfig, ResourceID resourceId, ResourceManagerRuntimeServicesConfiguration rmRuntimeServicesConfig, RpcService rpcService, HighAvailabilityServices highAvailabilityServices, HeartbeatServices heartbeatServices, DelegationTokenManager delegationTokenManager, FatalErrorHandler fatalErrorHandler, ClusterInformation clusterInformation, @Nullable String webInterfaceUrl, MetricRegistry metricRegistry, String hostname, Executor ioExecutor) { this.rmConfig = checkNotNull(rmConfig); this.resourceId = checkNotNull(resourceId); this.rmRuntimeServicesConfig = checkNotNull(rmRuntimeServicesConfig); this.rpcService = checkNotNull(rpcService); this.highAvailabilityServices = checkNotNull(highAvailabilityServices); this.heartbeatServices = checkNotNull(heartbeatServices); this.delegationTokenManager = checkNotNull(delegationTokenManager); this.fatalErrorHandler = checkNotNull(fatalErrorHandler); this.clusterInformation = checkNotNull(clusterInformation); this.metricRegistry = checkNotNull(metricRegistry); this.hostname = checkNotNull(hostname); this.ioExecutor = checkNotNull(ioExecutor); this.webInterfaceUrl = webInterfaceUrl; } public Configuration getRmConfig() { return rmConfig; } public ResourceID getResourceId() { return resourceId; } public ResourceManagerRuntimeServicesConfiguration 
getRmRuntimeServicesConfig() { return rmRuntimeServicesConfig; } public RpcService getRpcService() { return rpcService; } public HighAvailabilityServices getHighAvailabilityServices() { return highAvailabilityServices; } public HeartbeatServices getHeartbeatServices() { return heartbeatServices; } public DelegationTokenManager getDelegationTokenManager() { return delegationTokenManager; } public FatalErrorHandler getFatalErrorHandler() { return fatalErrorHandler; } public ClusterInformation getClusterInformation() { return clusterInformation; } @Nullable public String getWebInterfaceUrl() { return webInterfaceUrl; } public MetricRegistry getMetricRegistry() { return metricRegistry; } public String getHostname() { return hostname; } public Executor getIoExecutor() { return ioExecutor; } }
ResourceManagerProcessContext
java
mybatis__mybatis-3
src/test/java/org/apache/ibatis/submitted/encoding/EncodingMapper.java
{ "start": 704, "end": 774 }
interface ____ { String select1(); String select2(); }
EncodingMapper
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/placement/CandidateNodeSetUtils.java
{ "start": 1033, "end": 1475 }
class ____ { private CandidateNodeSetUtils() { } /* * If the {@link CandidateNodeSet} only has one entry, return it. Otherwise, * return null. */ public static <N extends SchedulerNode> N getSingleNode( CandidateNodeSet<N> candidates) { N node = null; if (1 == candidates.getAllNodes().size()) { node = candidates.getAllNodes().values().iterator().next(); } return node; } }
CandidateNodeSetUtils
java
apache__kafka
streams/src/main/java/org/apache/kafka/streams/state/internals/TimestampedSegments.java
{ "start": 1185, "end": 3089 }
class ____ extends AbstractSegments<TimestampedSegment> { private final RocksDBMetricsRecorder metricsRecorder; TimestampedSegments(final String name, final String metricsScope, final long retentionPeriod, final long segmentInterval) { super(name, retentionPeriod, segmentInterval); metricsRecorder = new RocksDBMetricsRecorder(metricsScope, name); } @Override public TimestampedSegment getOrCreateSegment(final long segmentId, final StateStoreContext context) { if (segments.containsKey(segmentId)) { return segments.get(segmentId); } else { final TimestampedSegment newSegment = new TimestampedSegment(segmentName(segmentId), name, segmentId, position, metricsRecorder); if (segments.put(segmentId, newSegment) != null) { throw new IllegalStateException("TimestampedSegment already exists. Possible concurrent access."); } newSegment.openDB(context.appConfigs(), context.stateDir()); return newSegment; } } @Override public TimestampedSegment getOrCreateSegmentIfLive(final long segmentId, final StateStoreContext context, final long streamTime) { final TimestampedSegment segment = super.getOrCreateSegmentIfLive(segmentId, context, streamTime); cleanupExpiredSegments(streamTime); return segment; } @Override public void openExisting(final StateStoreContext context, final long streamTime) { metricsRecorder.init(ProcessorContextUtils.metricsImpl(context), context.taskId()); super.openExisting(context, streamTime); } }
TimestampedSegments
java
google__guava
android/guava/src/com/google/common/base/FinalizableReferenceQueue.java
{ "start": 13843, "end": 13972 }
class ____ with the given base URL as its classpath. */ URLClassLoader newLoader(URL base) { // We use the bootstrap
loader
java
apache__flink
flink-yarn/src/main/java/org/apache/flink/yarn/YarnApplicationFileUploader.java
{ "start": 14399, "end": 24310 }
class ____ with the file name */ List<String> registerProvidedLocalResources() { checkNotNull(localResources); final ArrayList<String> classPaths = new ArrayList<>(); final Set<String> resourcesJar = new HashSet<>(); final Set<String> resourcesDir = new HashSet<>(); providedSharedLibs.forEach( (fileName, fileStatus) -> { final Path filePath = fileStatus.getPath(); LOG.debug("Using remote file {} to register local resource", filePath); final YarnLocalResourceDescriptor descriptor = YarnLocalResourceDescriptor.fromFileStatus( fileName, fileStatus, LocalResourceVisibility.PUBLIC, LocalResourceType.FILE); localResources.put(fileName, descriptor.toLocalResource()); remotePaths.add(filePath); envShipResourceList.add(descriptor); if (!isFlinkDistJar(filePath.getName()) && !isPlugin(filePath)) { if (fileName.endsWith("jar")) { resourcesJar.add(fileName); } else { resourcesDir.add(new Path(fileName).getParent().toString()); } } else if (isFlinkDistJar(filePath.getName())) { flinkDist = descriptor; } }); // Construct classpath where resource directories go first followed // by resource files. Sort both resources and resource directories in // order to make classpath deterministic. resourcesDir.stream().sorted().forEach(classPaths::add); resourcesJar.stream().sorted().forEach(classPaths::add); return classPaths; } static YarnApplicationFileUploader from( final FileSystem fileSystem, final Path homeDirectory, final List<Path> providedLibDirs, final ApplicationId applicationId, final int fileReplication) throws IOException { return new YarnApplicationFileUploader( fileSystem, homeDirectory, providedLibDirs, applicationId, fileReplication); } private Path copyToRemoteApplicationDir( final Path localSrcPath, final String relativeDstPath, final int replicationFactor) throws IOException { final Path applicationDir = getApplicationDirPath(homeDir, applicationId); final String suffix = (relativeDstPath.isEmpty() ? 
"" : relativeDstPath + "/") + localSrcPath.getName(); final Path dst = new Path(applicationDir, suffix); final Path localSrcPathWithScheme; if (StringUtils.isNullOrWhitespaceOnly(localSrcPath.toUri().getScheme())) { localSrcPathWithScheme = new Path(URI.create("file:///").resolve(localSrcPath.toUri())); } else { localSrcPathWithScheme = localSrcPath; } LOG.debug( "Copying from {} to {} with replication factor {}", localSrcPathWithScheme, dst, replicationFactor); fileSystem.copyFromLocalFile(false, true, localSrcPathWithScheme, dst); fileSystem.setReplication(dst, (short) replicationFactor); return dst; } private FileStatus[] waitForTransferToComplete(Path dst) throws IOException { final int noOfRetries = 3; final int retryDelayMs = 100; int iter = 1; while (iter <= noOfRetries + 1) { try { return fileSystem.listStatus(dst); } catch (FileNotFoundException e) { LOG.debug( "Got FileNotFoundException while fetching uploaded remote resources at retry num {}", iter); try { LOG.debug("Sleeping for {}ms", retryDelayMs); TimeUnit.MILLISECONDS.sleep(retryDelayMs); } catch (InterruptedException ie) { LOG.warn( "Failed to sleep for {}ms at retry num {} while fetching uploaded remote resources", retryDelayMs, iter, ie); } iter++; } } return null; } private static boolean isFlinkDistJar(String fileName) { return fileName.startsWith("flink-dist") && fileName.endsWith("jar"); } private static boolean isPlugin(Path path) { Path parent = path.getParent(); while (parent != null) { if (ConfigConstants.DEFAULT_FLINK_PLUGINS_DIRS.equals(parent.getName())) { return true; } parent = parent.getParent(); } return false; } static Path getApplicationDirPath(final Path homeDir, final ApplicationId applicationId) { return new Path(checkNotNull(homeDir), ".flink/" + checkNotNull(applicationId) + '/'); } private Path getApplicationDir(final ApplicationId applicationId) throws IOException { final Path applicationDir = getApplicationDirPath(homeDir, applicationId); if 
(!fileSystem.exists(applicationDir)) { final FsPermission permission = new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE); fileSystem.mkdirs(applicationDir, permission); } return applicationDir; } private Map<String, FileStatus> getAllFilesInProvidedLibDirs(final List<Path> providedLibDirs) { final Map<String, FileStatus> allFiles = new HashMap<>(); checkNotNull(providedLibDirs) .forEach( FunctionUtils.uncheckedConsumer( path -> { if (!fileSystem.exists(path) || !fileSystem.isDirectory(path)) { LOG.warn( "Provided lib dir {} does not exist or is not a directory. Ignoring.", path); } else { final RemoteIterator<LocatedFileStatus> iterable = fileSystem.listFiles(path, true); while (iterable.hasNext()) { final LocatedFileStatus locatedFileStatus = iterable.next(); final String name = path.getParent() .toUri() .relativize( locatedFileStatus .getPath() .toUri()) .toString(); final FileStatus prevMapping = allFiles.put(name, locatedFileStatus); if (prevMapping != null) { throw new IOException( "Two files with the same filename exist in the shared libs: " + prevMapping.getPath() + " - " + locatedFileStatus.getPath() + ". Please deduplicate."); } } if (LOG.isDebugEnabled()) { LOG.debug( "The following files were found in the shared lib dir: {}", allFiles.values().stream() .map( fileStatus -> fileStatus .getPath() .toString()) .collect(Collectors.joining(", "))); } } })); return Collections.unmodifiableMap(allFiles); } private boolean isUsrLibDirIncludedInProvidedLib(final List<Path> providedLibDirs) throws IOException { for (Path path : providedLibDirs) { if (Utils.isUsrLibDirectory(fileSystem, path)) { return true; } } return false; } private void addToRemotePaths(boolean add, Path path) { if (add) { remotePaths.add(path); } } private void addToEnvShipResourceList(boolean add, YarnLocalResourceDescriptor descriptor) { if (add) { envShipResourceList.add(descriptor); } } }
paths
java
apache__camel
components/camel-digitalocean/src/main/java/org/apache/camel/component/digitalocean/producer/DigitalOceanFloatingIPsProducer.java
{ "start": 1598, "end": 7590 }
class ____ extends DigitalOceanProducer { public DigitalOceanFloatingIPsProducer(DigitalOceanEndpoint endpoint, DigitalOceanConfiguration configuration) { super(endpoint, configuration); } @Override public void process(Exchange exchange) throws Exception { switch (determineOperation(exchange)) { case list: getFloatingIPs(exchange); break; case create: createFloatingIp(exchange); break; case get: getFloatingIP(exchange); break; case delete: deleteFloatingIP(exchange); break; case assign: assignFloatingIPToDroplet(exchange); break; case unassign: unassignFloatingIP(exchange); break; case listActions: getFloatingIPActions(exchange); break; default: throw new IllegalArgumentException("Unsupported operation"); } } private void createFloatingIp(Exchange exchange) throws RequestUnsuccessfulException, DigitalOceanException { Integer dropletId = exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_ID, Integer.class); String region = exchange.getIn().getHeader(DigitalOceanHeaders.REGION, String.class); FloatingIP ip; if (ObjectHelper.isNotEmpty(dropletId)) { ip = getEndpoint().getDigitalOceanClient().createFloatingIP(dropletId); } else if (ObjectHelper.isNotEmpty(region)) { ip = getEndpoint().getDigitalOceanClient().createFloatingIP(region); } else { throw new IllegalArgumentException( DigitalOceanHeaders.DROPLET_ID + " or " + DigitalOceanHeaders.REGION + " must be specified"); } LOG.trace("FloatingIP [{}] ", ip); exchange.getMessage().setBody(ip); } private void getFloatingIPs(Exchange exchange) throws RequestUnsuccessfulException, DigitalOceanException { FloatingIPs ips = getEndpoint().getDigitalOceanClient().getAvailableFloatingIPs(configuration.getPage(), configuration.getPerPage()); LOG.trace("All Floating IPs : page {} / {} per page [{}] ", configuration.getPage(), configuration.getPerPage(), ips.getFloatingIPs()); exchange.getMessage().setBody(ips.getFloatingIPs()); } private void getFloatingIP(Exchange exchange) throws RequestUnsuccessfulException, 
DigitalOceanException { String ipAddress = exchange.getIn().getHeader(DigitalOceanHeaders.FLOATING_IP_ADDRESS, String.class); if (ObjectHelper.isEmpty(ipAddress)) { throw new IllegalArgumentException(DigitalOceanHeaders.FLOATING_IP_ADDRESS + " must be specified"); } FloatingIP ip = getEndpoint().getDigitalOceanClient().getFloatingIPInfo(ipAddress); LOG.trace("Floating IP {}", ip); exchange.getMessage().setBody(ip); } private void deleteFloatingIP(Exchange exchange) throws RequestUnsuccessfulException, DigitalOceanException { String ipAddress = exchange.getIn().getHeader(DigitalOceanHeaders.FLOATING_IP_ADDRESS, String.class); if (ObjectHelper.isEmpty(ipAddress)) { throw new IllegalArgumentException(DigitalOceanHeaders.FLOATING_IP_ADDRESS + " must be specified"); } Delete delete = getEndpoint().getDigitalOceanClient().deleteFloatingIP(ipAddress); LOG.trace("Delete Floating IP {}", delete); exchange.getMessage().setBody(delete); } private void assignFloatingIPToDroplet(Exchange exchange) throws RequestUnsuccessfulException, DigitalOceanException { Integer dropletId = exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_ID, Integer.class); if (ObjectHelper.isEmpty(dropletId)) { throw new IllegalArgumentException(DigitalOceanHeaders.DROPLET_ID + " must be specified"); } String ipAddress = exchange.getIn().getHeader(DigitalOceanHeaders.FLOATING_IP_ADDRESS, String.class); if (ObjectHelper.isEmpty(ipAddress)) { throw new IllegalArgumentException(DigitalOceanHeaders.FLOATING_IP_ADDRESS + " must be specified"); } Action action = getEndpoint().getDigitalOceanClient().assignFloatingIP(dropletId, ipAddress); LOG.trace("Assign Floating IP to Droplet {}", action); exchange.getMessage().setBody(action); } private void unassignFloatingIP(Exchange exchange) throws RequestUnsuccessfulException, DigitalOceanException { String ipAddress = exchange.getIn().getHeader(DigitalOceanHeaders.FLOATING_IP_ADDRESS, String.class); if (ObjectHelper.isEmpty(ipAddress)) { throw new 
IllegalArgumentException(DigitalOceanHeaders.FLOATING_IP_ADDRESS + " must be specified"); } Action action = getEndpoint().getDigitalOceanClient().unassignFloatingIP(ipAddress); LOG.trace("Unassign Floating IP {}", action); exchange.getMessage().setBody(action); } private void getFloatingIPActions(Exchange exchange) throws RequestUnsuccessfulException, DigitalOceanException { String ipAddress = exchange.getIn().getHeader(DigitalOceanHeaders.FLOATING_IP_ADDRESS, String.class); if (ObjectHelper.isEmpty(ipAddress)) { throw new IllegalArgumentException(DigitalOceanHeaders.FLOATING_IP_ADDRESS + " must be specified"); } Actions actions = getEndpoint().getDigitalOceanClient().getAvailableFloatingIPActions(ipAddress, configuration.getPage(), configuration.getPerPage()); LOG.trace("Actions for FloatingIP {} : page {} / {} per page [{}] ", ipAddress, configuration.getPage(), configuration.getPerPage(), actions.getActions()); exchange.getMessage().setBody(actions.getActions()); } }
DigitalOceanFloatingIPsProducer
java
apache__camel
components/camel-openstack/src/main/java/org/apache/camel/component/openstack/cinder/CinderEndpoint.java
{ "start": 1747, "end": 4910 }
class ____ extends AbstractOpenstackEndpoint { @UriParam(enums = "snapshots,volumes") @Metadata(required = true) String subsystem; @UriPath @Metadata(required = true) private String host; @UriParam(defaultValue = "default") private String domain = "default"; @UriParam @Metadata(required = true) private String project; @UriParam private String operation; @UriParam @Metadata(required = true, secret = true) private String username; @UriParam @Metadata(required = true, secret = true) private String password; @UriParam private Config config; @UriParam(defaultValue = V3, enums = "V2,V3") private String apiVersion = V3; public CinderEndpoint(String uri, CinderComponent component) { super(uri, component); } @Override public Producer createProducer() throws Exception { switch (getSubsystem()) { case CinderConstants.VOLUMES: return new VolumeProducer(this, createClient()); case CinderConstants.SNAPSHOTS: return new SnapshotProducer(this, createClient()); default: throw new IllegalArgumentException("Can't create producer with subsystem " + subsystem); } } public String getSubsystem() { return subsystem; } /** * OpenStack Cinder subsystem */ public void setSubsystem(String subsystem) { this.subsystem = subsystem; } @Override public String getDomain() { return domain; } /** * Authentication domain */ public void setDomain(String domain) { this.domain = domain; } @Override public String getProject() { return project; } /** * The project ID */ public void setProject(String project) { this.project = project; } @Override public String getOperation() { return operation; } /** * The operation to do */ public void setOperation(String operation) { this.operation = operation; } @Override public String getUsername() { return username; } /** * OpenStack username */ public void setUsername(String username) { this.username = username; } @Override public String getPassword() { return password; } /** * OpenStack password */ public void setPassword(String password) { this.password = password; 
} @Override public String getHost() { return host; } /** * OpenStack host url */ public void setHost(String host) { this.host = host; } @Override public Config getConfig() { return config; } /** * OpenStack configuration */ public void setConfig(Config config) { this.config = config; } @Override public String getApiVersion() { return apiVersion; } /** * OpenStack API version */ public void setApiVersion(String apiVersion) { this.apiVersion = apiVersion; } }
CinderEndpoint
java
quarkusio__quarkus
extensions/resteasy-reactive/rest-client-jaxrs/deployment/src/main/java/io/quarkus/jaxrs/client/reactive/deployment/MediaTypeWithPriority.java
{ "start": 61, "end": 148 }
interface ____ { int getPriority(); String getMediaType(); }
MediaTypeWithPriority
java
spring-projects__spring-framework
spring-beans/src/test/java/org/springframework/beans/BeanUtilsTests.java
{ "start": 27361, "end": 28639 }
class ____ { private boolean flag; private byte byteCount; private short shortCount; private int intCount; private long longCount; private float floatCount; private double doubleCount; private char character; private String text; @SuppressWarnings("unused") public BeanWithPrimitiveTypes(boolean flag, byte byteCount, short shortCount, int intCount, long longCount, float floatCount, double doubleCount, char character, String text) { this.flag = flag; this.byteCount = byteCount; this.shortCount = shortCount; this.intCount = intCount; this.longCount = longCount; this.floatCount = floatCount; this.doubleCount = doubleCount; this.character = character; this.text = text; } public boolean isFlag() { return flag; } public byte getByteCount() { return byteCount; } public short getShortCount() { return shortCount; } public int getIntCount() { return intCount; } public long getLongCount() { return longCount; } public float getFloatCount() { return floatCount; } public double getDoubleCount() { return doubleCount; } public char getCharacter() { return character; } public String getText() { return text; } } private static
BeanWithPrimitiveTypes
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java
{ "start": 1131, "end": 5193 }
class ____ implements EvalOperator.ExpressionEvaluator { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SubDoublesEvaluator.class); private final Source source; private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; private Warnings warnings; public SubDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.source = source; this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; } @Override public Block eval(Page page) { try (DoubleBlock lhsBlock = (DoubleBlock) lhs.eval(page)) { try (DoubleBlock rhsBlock = (DoubleBlock) rhs.eval(page)) { DoubleVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { return eval(page.getPositionCount(), lhsBlock, rhsBlock); } DoubleVector rhsVector = rhsBlock.asVector(); if (rhsVector == null) { return eval(page.getPositionCount(), lhsBlock, rhsBlock); } return eval(page.getPositionCount(), lhsVector, rhsVector); } } } @Override public long baseRamBytesUsed() { long baseRamBytesUsed = BASE_RAM_BYTES_USED; baseRamBytesUsed += lhs.baseRamBytesUsed(); baseRamBytesUsed += rhs.baseRamBytesUsed(); return baseRamBytesUsed; } public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { switch (lhsBlock.getValueCount(p)) { case 0: result.appendNull(); continue position; case 1: break; default: warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); result.appendNull(); continue position; } switch (rhsBlock.getValueCount(p)) { case 0: result.appendNull(); continue position; case 1: break; default: warnings().registerException(new IllegalArgumentException("single-value 
function encountered multi-value")); result.appendNull(); continue position; } double lhs = lhsBlock.getDouble(lhsBlock.getFirstValueIndex(p)); double rhs = rhsBlock.getDouble(rhsBlock.getFirstValueIndex(p)); try { result.appendDouble(Sub.processDoubles(lhs, rhs)); } catch (ArithmeticException e) { warnings().registerException(e); result.appendNull(); } } return result.build(); } } public DoubleBlock eval(int positionCount, DoubleVector lhsVector, DoubleVector rhsVector) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { double lhs = lhsVector.getDouble(p); double rhs = rhsVector.getDouble(p); try { result.appendDouble(Sub.processDoubles(lhs, rhs)); } catch (ArithmeticException e) { warnings().registerException(e); result.appendNull(); } } return result.build(); } } @Override public String toString() { return "SubDoublesEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; } @Override public void close() { Releasables.closeExpectNoException(lhs, rhs); } private Warnings warnings() { if (warnings == null) { this.warnings = Warnings.createWarnings( driverContext.warningsMode(), source.source().getLineNumber(), source.source().getColumnNumber(), source.text() ); } return warnings; } static
SubDoublesEvaluator