language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
apache__flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/SqlTumbleTableFunction.java
{ "start": 1676, "end": 3023 }
class ____ extends AbstractOperandMetadata { OperandMetadataImpl() { super(ImmutableList.of(PARAM_DATA, PARAM_TIMECOL, PARAM_SIZE, PARAM_OFFSET), 3); } @Override public boolean checkOperandTypes(SqlCallBinding callBinding, boolean throwOnFailure) { // There should only be three operands, and number of operands are checked before // this call. if (!SqlValidatorUtils.checkTableAndDescriptorOperands(callBinding, 1)) { return SqlValidatorUtils.throwValidationSignatureErrorOrReturnFalse( callBinding, throwOnFailure); } if (!checkIntervalOperands(callBinding, 2)) { return SqlValidatorUtils.throwValidationSignatureErrorOrReturnFalse( callBinding, throwOnFailure); } // check time attribute return SqlValidatorUtils.throwExceptionOrReturnFalse( checkTimeColumnDescriptorOperand(callBinding, 1), throwOnFailure); } @Override public String getAllowedSignatures(SqlOperator op, String opName) { return opName + "(TABLE table_name, DESCRIPTOR(timecol), datetime interval" + "[, datetime interval])"; } } }
OperandMetadataImpl
java
apache__dubbo
dubbo-rpc/dubbo-rpc-dubbo/src/test/java/org/apache/dubbo/rpc/protocol/dubbo/decode/MockHandler.java
{ "start": 1043, "end": 1485 }
class ____ extends ChannelDuplexHandler { private final Consumer consumer; private final ChannelHandler handler; public MockHandler(Consumer consumer, ChannelHandler handler) { this.consumer = consumer; this.handler = handler; } @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { this.handler.received(new MockChannel(consumer), msg); } }
MockHandler
java
apache__camel
components/camel-tracing/src/main/java/org/apache/camel/tracing/propagation/CamelHeadersExtractAdapter.java
{ "start": 1033, "end": 1732 }
class ____ implements ExtractAdapter { private final Map<String, Object> map = new CaseInsensitiveMap(); public CamelHeadersExtractAdapter(final Map<String, Object> map) { // Extract string valued map entries map.entrySet().stream().filter(e -> e.getValue() instanceof String) .forEach(e -> this.map.put(e.getKey(), e.getValue())); } @Override public Iterator<Map.Entry<String, Object>> iterator() { return map.entrySet().iterator(); } @Override public Object get(String key) { return this.map.get(key); } @Override public Set<String> keys() { return map.keySet(); } }
CamelHeadersExtractAdapter
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/bugs/_1594/Issue1594Mapper.java
{ "start": 751, "end": 984 }
class ____ { private Address address; public Address getAddress() { return address; } public void setAddress(Address address) { this.address = address; } }
Client
java
bumptech__glide
library/src/main/java/com/bumptech/glide/load/resource/drawable/DrawableTransitionOptions.java
{ "start": 435, "end": 3907 }
class ____ extends TransitionOptions<DrawableTransitionOptions, Drawable> { /** * Returns a {@link DrawableTransitionOptions} object that enables a cross fade animation. * * @see #crossFade() */ @NonNull public static DrawableTransitionOptions withCrossFade() { return new DrawableTransitionOptions().crossFade(); } /** * Returns a {@link DrawableTransitionOptions} object that enables a cross fade animation. * * @see #crossFade(int) */ @NonNull public static DrawableTransitionOptions withCrossFade(int duration) { return new DrawableTransitionOptions().crossFade(duration); } /** * Returns a {@link DrawableTransitionOptions} object that enables a cross fade animation. * * @see #crossFade(DrawableCrossFadeFactory) */ @NonNull public static DrawableTransitionOptions withCrossFade( @NonNull DrawableCrossFadeFactory drawableCrossFadeFactory) { return new DrawableTransitionOptions().crossFade(drawableCrossFadeFactory); } /** * Returns a {@link DrawableTransitionOptions} object that enables a cross fade animation. * * @see #crossFade(DrawableCrossFadeFactory.Builder) */ @NonNull public static DrawableTransitionOptions withCrossFade( @NonNull DrawableCrossFadeFactory.Builder builder) { return new DrawableTransitionOptions().crossFade(builder); } /** * Returns a {@link DrawableTransitionOptions} object that uses the given transition factory. * * @see com.bumptech.glide.GenericTransitionOptions#with(TransitionFactory) */ @NonNull public static DrawableTransitionOptions with( @NonNull TransitionFactory<Drawable> transitionFactory) { return new DrawableTransitionOptions().transition(transitionFactory); } /** * Enables a cross fade animation between both the placeholder and the first resource and between * subsequent resources (if thumbnails are used). 
*/ @NonNull public DrawableTransitionOptions crossFade() { return crossFade(new DrawableCrossFadeFactory.Builder()); } /** * Enables a cross fade animation between both the placeholder and the first resource and between * subsequent resources (if thumbnails are used). * * @param duration The duration of the animation, see {@code * DrawableCrossFadeFactory.Builder(int)} * @see com.bumptech.glide.request.transition.DrawableCrossFadeFactory.Builder */ @NonNull public DrawableTransitionOptions crossFade(int duration) { return crossFade(new DrawableCrossFadeFactory.Builder(duration)); } /** * Enables a cross fade animation between both the placeholder and the first resource and between * subsequent resources (if thumbnails are used). */ @NonNull public DrawableTransitionOptions crossFade( @NonNull DrawableCrossFadeFactory drawableCrossFadeFactory) { return transition(drawableCrossFadeFactory); } /** * Enables a cross fade animation between both the placeholder and the first resource and between * subsequent resources (if thumbnails are used). */ @NonNull public DrawableTransitionOptions crossFade(@NonNull DrawableCrossFadeFactory.Builder builder) { return crossFade(builder.build()); } // Make sure that we're not equal to any other concrete implementation of TransitionOptions. @Override public boolean equals(Object o) { return o instanceof DrawableTransitionOptions && super.equals(o); } // Our
DrawableTransitionOptions
java
spring-projects__spring-boot
module/spring-boot-flyway/src/main/java/org/springframework/boot/flyway/actuate/endpoint/FlywayEndpoint.java
{ "start": 2428, "end": 2909 }
class ____ implements OperationResponseBody { private final Map<@Nullable String, ContextFlywayBeansDescriptor> contexts; private FlywayBeansDescriptor(Map<@Nullable String, ContextFlywayBeansDescriptor> contexts) { this.contexts = contexts; } public Map<@Nullable String, ContextFlywayBeansDescriptor> getContexts() { return this.contexts; } } /** * Description of an application context's {@link Flyway} beans. */ public static final
FlywayBeansDescriptor
java
alibaba__druid
core/src/main/java/com/alibaba/druid/sql/dialect/databricks/parser/DatabricksExprParser.java
{ "start": 263, "end": 577 }
class ____ extends SparkExprParser { public DatabricksExprParser(String sql, SQLParserFeature... features) { this(new DatabricksLexer(sql, features)); this.lexer.nextToken(); } public DatabricksExprParser(Lexer lexer) { super(lexer, DbType.databricks); } }
DatabricksExprParser
java
quarkusio__quarkus
devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusUpdate.java
{ "start": 591, "end": 7307 }
class ____ extends QuarkusPlatformTask { private Boolean noRewrite; private Boolean rewrite; private boolean rewriteDryRun; private String targetStreamId; private String targetPlatformVersion; private String rewritePluginVersion = null; private String rewriteQuarkusUpdateRecipes = null; private String rewriteAdditionalUpdateRecipes = null; @Input @Optional @Deprecated public Boolean getNoRewrite() { return noRewrite; } @Option(description = "Disable the rewrite feature (deprecated use --rewrite=false instead).", option = "noRewrite") @Deprecated public QuarkusUpdate setNoRewrite(Boolean noRewrite) { this.noRewrite = noRewrite; return this; } @Input @Optional public Boolean getRewrite() { return rewrite; } @Option(description = "Run the suggested update recipe for this project.", option = "rewrite") public QuarkusUpdate setRewrite(Boolean rewrite) { this.rewrite = rewrite; return this; } @Input @Optional public Boolean getRewriteDryRun() { return rewriteDryRun; } @Option(description = "Do a dry run of the suggested update recipe for this project.", option = "rewriteDryRun") public QuarkusUpdate setRewriteDryRun(Boolean rewriteDryRun) { this.rewriteDryRun = rewriteDryRun; return this; } @Input public boolean getPerModule() { return false; } @Option(description = "This option was not used", option = "perModule") @Deprecated public void setPerModule(boolean perModule) { } @Input @Optional public String getRewritePluginVersion() { return rewritePluginVersion; } @Option(description = "The OpenRewrite plugin version", option = "rewritePluginVersion") public void setRewritePluginVersion(String rewritePluginVersion) { this.rewritePluginVersion = rewritePluginVersion; } @Input @Optional public String getRewriteQuarkusUpdateRecipes() { return rewriteQuarkusUpdateRecipes; } @Option(description = "Use a custom io.quarkus:quarkus-update-recipes:LATEST artifact (GAV) or just provide the version. 
This artifact should contain the base Quarkus update recipes to update a project.", option = "quarkusUpdateRecipes") public QuarkusUpdate setRewriteQuarkusUpdateRecipes(String rewriteQuarkusUpdateRecipes) { this.rewriteQuarkusUpdateRecipes = rewriteQuarkusUpdateRecipes; return this; } @Input @Optional public String getRewriteAdditionalUpdateRecipes() { return rewriteAdditionalUpdateRecipes; } @Option(description = "Specify a list of additional artifacts (GAV) containing rewrite recipes.", option = "additionalUpdateRecipes") public QuarkusUpdate setRewriteAdditionalUpdateRecipes(String rewriteAdditionalUpdateRecipes) { this.rewriteAdditionalUpdateRecipes = rewriteAdditionalUpdateRecipes; return this; } @Input @Optional public String getTargetStreamId() { return targetStreamId; } @Option(description = "A target stream, for example: 2.0", option = "stream") public void setStreamId(String targetStreamId) { this.targetStreamId = targetStreamId; } @Input @Optional public String getTargetPlatformVersion() { return targetPlatformVersion; } @Option(description = "A target platform version, for example: 2.0.0.Final", option = "platformVersion") public void setTargetPlatformVersion(String targetPlatformVersion) { this.targetPlatformVersion = targetPlatformVersion; } public QuarkusUpdate() { super("Log Quarkus-specific recommended project updates, such as the new Quarkus platform BOM versions, new versions of Quarkus extensions that aren't managed by the Quarkus BOMs, etc"); } @TaskAction public void logUpdates() { getLogger().warn(getName() + " is experimental, its options and output might change in future versions"); final QuarkusProject quarkusProject = getQuarkusProject(false); final ExtensionCatalog targetCatalog; try { if (targetPlatformVersion != null) { var targetPrimaryBom = getPrimaryBom(quarkusProject.getExtensionsCatalog()); targetPrimaryBom = ArtifactCoords.pom(targetPrimaryBom.getGroupId(), targetPrimaryBom.getArtifactId(), targetPlatformVersion); targetCatalog = 
getExtensionCatalogResolver(quarkusProject.log()) .resolveExtensionCatalog(List.of(targetPrimaryBom)); } else if (targetStreamId != null) { var platformStream = PlatformStreamCoords.fromString(targetStreamId); targetCatalog = getExtensionCatalogResolver(quarkusProject.log()).resolveExtensionCatalog(platformStream); targetPlatformVersion = getPrimaryBom(targetCatalog).getVersion(); } else { targetCatalog = getExtensionCatalogResolver(quarkusProject.log()).resolveExtensionCatalog(); targetPlatformVersion = getPrimaryBom(targetCatalog).getVersion(); } } catch (RegistryResolutionException e) { throw new RuntimeException( "Failed to resolve the recommended Quarkus extension catalog from the configured extension registries", e); } final UpdateProject invoker = new UpdateProject(quarkusProject); invoker.targetCatalog(targetCatalog); if (rewriteQuarkusUpdateRecipes != null) { invoker.rewriteQuarkusUpdateRecipes(rewriteQuarkusUpdateRecipes); } if (rewriteAdditionalUpdateRecipes != null) { invoker.rewriteAdditionalUpdateRecipes(rewriteAdditionalUpdateRecipes); } if (rewritePluginVersion != null) { invoker.rewritePluginVersion(rewritePluginVersion); } invoker.targetPlatformVersion(targetPlatformVersion); invoker.rewriteDryRun(rewriteDryRun); // backward compat if (noRewrite != null && noRewrite) { rewrite = false; } if (rewrite != null) { invoker.rewrite(rewrite); } invoker.appModel(extension().getApplicationModel()); try { invoker.execute(); } catch (Exception e) { throw new GradleException("Failed to apply recommended updates", e); } } private static ArtifactCoords getPrimaryBom(ExtensionCatalog c) { return c.getDerivedFrom().isEmpty() ? c.getBom() : c.getDerivedFrom().get(0).getBom(); } }
QuarkusUpdate
java
quarkusio__quarkus
extensions/grpc-common/runtime/src/main/java/io/quarkus/grpc/common/runtime/graal/GrpcNettySubstitutions.java
{ "start": 2884, "end": 3244 }
class ____ implements BooleanSupplier { @Override public boolean getAsBoolean() { try { this.getClass().getClassLoader().loadClass("io.netty.channel.unix.DomainSocketAddress"); return false; } catch (Exception ignored) { return true; } } } @SuppressWarnings("unused")
NoDomainSocketPredicate
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FirstIntByTimestampAggregatorFunctionTests.java
{ "start": 948, "end": 2958 }
class ____ extends AggregatorFunctionTestCase { @Override protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { FirstLongByTimestampGroupingAggregatorFunctionTests.TimestampGen tsgen = randomFrom( FirstLongByTimestampGroupingAggregatorFunctionTests.TimestampGen.values() ); return new ListRowsBlockSourceOperator( blockFactory, List.of(ElementType.INT, ElementType.LONG), IntStream.range(0, size).mapToObj(l -> List.of(randomInt(), tsgen.gen())).toList() ); } @Override protected AggregatorFunctionSupplier aggregatorFunction() { return new FirstIntByTimestampAggregatorFunctionSupplier(); } @Override protected int inputCount() { return 2; } @Override protected String expectedDescriptionOfAggregator() { return "first_int_by_timestamp"; } @Override public void assertSimpleOutput(List<Page> input, Block result) { ExpectedWork work = new ExpectedWork(true); for (Page page : input) { IntBlock values = page.getBlock(0); LongBlock timestamps = page.getBlock(1); for (int p = 0; p < page.getPositionCount(); p++) { int tsStart = timestamps.getFirstValueIndex(p); int tsEnd = tsStart + timestamps.getValueCount(p); for (int tsOffset = tsStart; tsOffset < tsEnd; tsOffset++) { long timestamp = timestamps.getLong(tsOffset); int vStart = values.getFirstValueIndex(p); int vEnd = vStart + values.getValueCount(p); for (int vOffset = vStart; vOffset < vEnd; vOffset++) { int value = values.getInt(vOffset); work.add(timestamp, value); } } } } work.check(BlockUtils.toJavaObject(result, 0)); } }
FirstIntByTimestampAggregatorFunctionTests
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/api/map/MapAssert_doesNotContainValue_Test.java
{ "start": 1139, "end": 1941 }
class ____ extends MapAssertBaseTest { @Override protected MapAssert<Object, Object> invoke_api_method() { return assertions.doesNotContainValue("value1"); } @Override protected void verify_internal_effects() { verify(maps).assertDoesNotContainValue(getInfo(assertions), getActual(assertions), "value1", null); } @Test void should_honor_custom_value_equals_when_comparing_entry_values() { // GIVEN var map = Map.of("key", "value"); // WHEN/THEN then(map).usingEqualsForValues(String::equalsIgnoreCase) .doesNotContainValue("otherValue"); expectAssertionError(() -> assertThat(map).usingEqualsForValues(String::equalsIgnoreCase) .doesNotContainValue("Value")); } }
MapAssert_doesNotContainValue_Test
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/spi/TreatedNavigablePath.java
{ "start": 431, "end": 1619 }
class ____ extends NavigablePath { public TreatedNavigablePath(NavigablePath parent, String entityTypeName) { this( parent, entityTypeName, null ); } public TreatedNavigablePath(NavigablePath parent, String entityTypeName, @Nullable String alias) { super( parent, "#" + entityTypeName, alias, "treat(" + parent + " as " + entityTypeName + ")", 1 ); assert !( parent instanceof TreatedNavigablePath ); } @Override public NavigablePath treatAs(String entityName) { return new TreatedNavigablePath( castNonNull( getRealParent() ), entityName ); } @Override public NavigablePath treatAs(String entityName, @Nullable String alias) { return new TreatedNavigablePath( castNonNull( getRealParent() ), entityName, alias ); } // @Override // public int hashCode() { // return getFullPath().hashCode(); // } // // @Override // public boolean equals(Object other) { // if ( other == null ) { // return false; // } // // if ( other == this ) { // return true; // } // // if ( ! ( other instanceof NavigablePath ) ) { // return false; // } // // return getFullPath().equals( ( (NavigablePath) other ).getFullPath() ); // } }
TreatedNavigablePath
java
apache__camel
core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/CamelInternalProcessor.java
{ "start": 41796, "end": 43062 }
class ____ implements CamelInternalProcessorAdvice<String> { private final String id; private final String label; private final String source; public NodeHistoryAdvice(NamedNode definition) { this.id = definition.getId(); this.label = definition.getLabel(); this.source = LoggerHelper.getLineNumberLoggerName(definition); } @Override public String before(Exchange exchange) throws Exception { exchange.getExchangeExtension().setHistoryNodeId(id); exchange.getExchangeExtension().setHistoryNodeLabel(label); exchange.getExchangeExtension().setHistoryNodeSource(source); return null; } @Override public void after(Exchange exchange, String data) throws Exception { exchange.getExchangeExtension().setHistoryNodeId(null); exchange.getExchangeExtension().setHistoryNodeLabel(null); exchange.getExchangeExtension().setHistoryNodeSource(null); } @Override public boolean hasState() { return false; } } /** * Advice for {@link org.apache.camel.spi.StreamCachingStrategy} */ public static
NodeHistoryAdvice
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/bootstrap/jpa/PersistenceUnitOverridesTests.java
{ "start": 1817, "end": 19337 }
class ____ { @Test public void testPassingIntegrationJpaJdbcOverrides() { // the integration overrides say to use the "db2" JPA connection settings (which should override the persistence unit values) final Properties integrationOverrides = ConnectionProviderBuilder.getJpaConnectionProviderProperties( "db2" ); try (final EntityManagerFactory emf = new HibernatePersistenceProvider().createContainerEntityManagerFactory( new PersistenceUnitInfoAdapter() { @Override public Properties getProperties() { // effectively, the `persistence.xml` defines `db1` as the connection settings return ConnectionProviderBuilder.getJpaConnectionProviderProperties( "db1" ); } }, integrationOverrides )) { final Map<String, Object> properties = emf.getProperties(); final Object hibernateJdbcDriver = properties.get( AvailableSettings.JAKARTA_JDBC_URL ); assertThat( hibernateJdbcDriver ).isNotNull(); final Object jpaJdbcDriver = properties.get( AvailableSettings.JAKARTA_JDBC_URL ); assertThat( (String) jpaJdbcDriver ).contains( "db2" ); } } @Test public void testPassingIntegrationJtaDataSourceOverrideForJpaJdbcSettings() { final PersistenceUnitInfoAdapter puInfo = new PersistenceUnitInfoAdapter( ConnectionProviderBuilder.getJpaConnectionProviderProperties( "db2" ) ); final DataSource integrationDataSource = new DataSourceStub( "integrationDataSource" ); final HibernatePersistenceProvider provider = new HibernatePersistenceProvider(); // todo (6.0) : fix for Oracle see HHH-13432 // puInfo.getProperties().setProperty( AvailableSettings.HQL_BULK_ID_STRATEGY, MultiTableBulkIdStrategyStub.class.getName() ); try (final EntityManagerFactory emf = provider.createContainerEntityManagerFactory( puInfo, Collections.singletonMap( AvailableSettings.JAKARTA_JTA_DATASOURCE, integrationDataSource ) )) { // first let's check the DataSource used in the EMF... 
final ConnectionProvider connectionProvider = emf.unwrap( SessionFactoryImplementor.class ) .getServiceRegistry() .getService( ConnectionProvider.class ); assertThat( connectionProvider ).isInstanceOf( DataSourceConnectionProvider.class ); final DataSourceConnectionProvider dsCp = (DataSourceConnectionProvider) connectionProvider; assertThat( dsCp ).isNotNull(); assertThat( dsCp.getDataSource() ).isEqualTo( integrationDataSource ); // now let's check that it is exposed via the EMF properties // - note : the spec does not indicate that this should work, but // it worked this way in previous versions final Object jtaDs = emf.getProperties().get( AvailableSettings.JPA_JTA_DATASOURCE ); assertThat( jtaDs ).isEqualTo( integrationDataSource ); // Additionally, we should have set Hibernate's DATASOURCE setting final Object hibDs = emf.getProperties().get( AvailableSettings.JPA_JTA_DATASOURCE ); assertThat( hibDs ).isEqualTo( integrationDataSource ); // Make sure the non-jta-data-source setting was cleared or otherwise null final Object nonJtaDs = emf.getProperties().get( AvailableSettings.JAKARTA_NON_JTA_DATASOURCE ); assertThat( nonJtaDs ).isNull(); } } @Test public void testPassingIntegrationJpaJdbcOverrideForJtaDataSourceProperty() { PersistenceProvider provider = new HibernatePersistenceProvider() { @Override public EntityManagerFactory createContainerEntityManagerFactory(PersistenceUnitInfo info, Map integrationOverrides) { return super.createContainerEntityManagerFactory( new DelegatingPersistenceUnitInfo( info ) { // inject a JPA JTA DataSource setting into the PU final DataSource puDataSource; final Properties puProperties; { puDataSource = new DataSourceStub( "puDataSource" ); puProperties = new Properties(); puProperties.putAll( info.getProperties() ); puProperties.put( AvailableSettings.JPA_JTA_DATASOURCE, puDataSource ); } @Override public DataSource getJtaDataSource() { return null; } @Override public DataSource getNonJtaDataSource() { return null; } 
@Override public Properties getProperties() { return puProperties; } }, integrationOverrides ); } }; try (final EntityManagerFactory emf = provider.createContainerEntityManagerFactory( new PersistenceUnitInfoAdapter(), // however, provide JPA connection settings as "integration settings", which according to JPA spec should override the persistence unit values. // - note that it is unclear in the spec whether JDBC value in the integration settings should override // a JTA DataSource (nor the reverse). However, that is a useful thing to support ConnectionProviderBuilder.getJpaConnectionProviderProperties( "db2" ) )) { final Map<String, Object> properties = emf.getProperties(); final Object hibernateJdbcDriver = properties.get( AvailableSettings.URL ); assertThat( hibernateJdbcDriver ).isNotNull(); final Object jpaJdbcDriver = properties.get( AvailableSettings.JPA_JDBC_URL ); assertThat( (String) jpaJdbcDriver ).contains( "db2" ); // see if the values had the affect to adjust the `ConnectionProvider` used final ConnectionProvider connectionProvider = emf.unwrap( SessionFactoryImplementor.class ) .getServiceRegistry() .getService( ConnectionProvider.class ); assertThat( connectionProvider ).isInstanceOf( DriverManagerConnectionProvider.class ); } } @Test // @FailureExpected( // jiraKey = "HHH-12858", // message = "Even though the JDBC settings override a DataSource *property*, it" + // " does not override a DataSource defined using the dedicated persistence.xml element" // ) public void testPassingIntegrationJpaJdbcOverridesForJtaDataSourceElement() { PersistenceProvider provider = new HibernatePersistenceProvider() { @Override public EntityManagerFactory createContainerEntityManagerFactory(PersistenceUnitInfo info, Map integrationOverrides) { return super.createContainerEntityManagerFactory( new DelegatingPersistenceUnitInfo( info ) { // inject a JPA JTA DataSource setting into the PU final DataSource puDataSource = new DataSourceStub( "puDataSource" ); @Override 
public DataSource getJtaDataSource() { return puDataSource; } }, integrationOverrides ); } }; try (final EntityManagerFactory emf = provider.createContainerEntityManagerFactory( new PersistenceUnitInfoAdapter(), // however, provide JPA connection settings as "integration settings", which according to JPA spec should override the persistence unit values. // - note that it is unclear in the spec whether JDBC value in the integration settings should override // a JTA DataSource (nor the reverse). However, that is a useful thing to support ConnectionProviderBuilder.getJpaConnectionProviderProperties( "db2" ) )) { final Map<String, Object> properties = emf.getProperties(); final Object hibernateJdbcDriver = properties.get( AvailableSettings.URL ); assertThat( hibernateJdbcDriver ).isNotNull(); final Object jpaJdbcDriver = properties.get( AvailableSettings.JPA_JDBC_URL ); assertThat( (String) jpaJdbcDriver ).contains( "db2" ); // see if the values had the affect to adjust the `ConnectionProvider` used final ConnectionProvider connectionProvider = emf.unwrap( SessionFactoryImplementor.class ) .getServiceRegistry() .getService( ConnectionProvider.class ); assertThat( connectionProvider ).isInstanceOf( DriverManagerConnectionProvider.class ); } } @Test // @FailureExpected( // jiraKey = "HHH-12858", // message = "So it appears any use of the persistence.xml `jta-data-source` or `non-jta-data-source` " + // "have precedence over integration settings, which is also incorrect" // ) public void testPassingIntegrationJpaDataSourceOverrideForJtaDataSourceElement() { final DataSource puDataSource = new DataSourceStub( "puDataSource" ); final DataSource integrationDataSource = new DataSourceStub( "integrationDataSource" ); PersistenceProvider provider = new HibernatePersistenceProvider() { @Override public EntityManagerFactory createContainerEntityManagerFactory(PersistenceUnitInfo info, Map integrationOverrides) { return super.createContainerEntityManagerFactory( new 
DelegatingPersistenceUnitInfo( info ) { @Override public DataSource getJtaDataSource() { // pretend the DataSource was defined using the `jta-data-source` element in persistence.xml // - as opposed using `javax.persistence.jtaDataSource` under the `properties` element return puDataSource; } }, integrationOverrides ); } }; final Map<String, Object> integrationOverrides = new HashMap<>(); //noinspection unchecked integrationOverrides.put( AvailableSettings.JPA_JTA_DATASOURCE, integrationDataSource ); // todo (6.0) : fix for Oracle see HHH-13432 // integrationOverrides.put( AvailableSettings.HQL_BULK_ID_STRATEGY, new MultiTableBulkIdStrategyStub() ); try (final EntityManagerFactory emf = provider.createContainerEntityManagerFactory( new PersistenceUnitInfoAdapter(), integrationOverrides )) { final Map<String, Object> properties = emf.getProperties(); final Object datasource = properties.get( AvailableSettings.JPA_JTA_DATASOURCE ); assertThat( datasource ).isEqualTo( integrationDataSource ); // see if the values had the affect to adjust the `ConnectionProvider` used final ConnectionProvider connectionProvider = emf.unwrap( SessionFactoryImplementor.class ) .getServiceRegistry() .getService( ConnectionProvider.class ); assertThat( connectionProvider ).isInstanceOf( DataSourceConnectionProvider.class ); final DataSourceConnectionProvider datasourceConnectionProvider = (DataSourceConnectionProvider) connectionProvider; assertThat( datasourceConnectionProvider ).isNotNull(); assertThat( datasourceConnectionProvider.getDataSource() ).isEqualTo( integrationDataSource ); } } @Test @JiraKey(value = "HHH-13640") public void testIntegrationOverridesOfPersistenceXmlDataSource() { // mimics a DataSource defined in the persistence.xml final DataSourceStub dataSource = new DataSourceStub( "puDataSource" ); final PersistenceUnitInfoAdapter info = new PersistenceUnitInfoAdapter() { @Override public DataSource getNonJtaDataSource() { return dataSource; } }; // Now create "integration 
Map" that overrides the DataSource to use final DataSource override = new DataSourceStub( "integrationDataSource" ); final Map<String, Object> integrationSettings = new HashMap<>(); integrationSettings.put( AvailableSettings.JPA_NON_JTA_DATASOURCE, override ); // todo (6.0) : fix for Oracle see HHH-13432 // integrationSettings.put( AvailableSettings.HQL_BULK_ID_STRATEGY, new MultiTableBulkIdStrategyStub() ); final PersistenceProvider provider = new HibernatePersistenceProvider(); try (final EntityManagerFactory emf = provider.createContainerEntityManagerFactory( info, integrationSettings )) { final Map<String, Object> properties = emf.getProperties(); assertThat( properties.get( AvailableSettings.JPA_NON_JTA_DATASOURCE ) ).isNotNull(); assertThat( properties.get( AvailableSettings.JPA_NON_JTA_DATASOURCE ) ).isEqualTo( override ); final SessionFactoryImplementor sessionFactory = emf.unwrap( SessionFactoryImplementor.class ); final ConnectionProvider connectionProvider = sessionFactory.getServiceRegistry() .getService( ConnectionProvider.class ); assertThat( connectionProvider ).isInstanceOf( DataSourceConnectionProvider.class ); final DataSourceConnectionProvider dsProvider = (DataSourceConnectionProvider) connectionProvider; assertThat( dsProvider.getDataSource() ).isEqualTo( override ); } } @Test @JiraKey(value = "HHH-13640") public void testIntegrationOverridesOfPersistenceXmlDataSourceWithDriverManagerInfo() { // mimics a DataSource defined in the persistence.xml final DataSourceStub dataSource = new DataSourceStub( "puDataSource" ); final PersistenceUnitInfoAdapter info = new PersistenceUnitInfoAdapter() { @Override public DataSource getNonJtaDataSource() { return dataSource; } }; final Map<String, Object> integrationSettings = ServiceRegistryUtil.createBaseSettings(); integrationSettings.put( AvailableSettings.JPA_JDBC_DRIVER, ConnectionProviderBuilder.DRIVER ); integrationSettings.put( AvailableSettings.JPA_JDBC_URL, ConnectionProviderBuilder.URL ); 
integrationSettings.put( AvailableSettings.JPA_JDBC_USER, ConnectionProviderBuilder.USER ); integrationSettings.put( AvailableSettings.JPA_JDBC_PASSWORD, ConnectionProviderBuilder.PASS ); integrationSettings.put( DriverManagerConnectionProvider.INIT_SQL, "" ); final PersistenceProvider provider = new HibernatePersistenceProvider(); try (final EntityManagerFactory emf = provider.createContainerEntityManagerFactory( info, integrationSettings )) { final SessionFactoryImplementor sessionFactory = emf.unwrap( SessionFactoryImplementor.class ); final ConnectionProvider connectionProvider = sessionFactory.getServiceRegistry().getService( ConnectionProvider.class ); assertThat( connectionProvider ).isInstanceOf( DriverManagerConnectionProvider.class ); } } @Test @JiraKey(value = "HHH-13640") public void testIntegrationOverridesOfPersistenceXmlDataSourceWithDriverManagerInfoUsingJakarta() { // mimics a DataSource defined in the persistence.xml final DataSourceStub dataSource = new DataSourceStub( "puDataSource" ); final PersistenceUnitInfoAdapter info = new PersistenceUnitInfoAdapter() { @Override public DataSource getNonJtaDataSource() { return dataSource; } }; final Map<String, Object> integrationSettings = ServiceRegistryUtil.createBaseSettings(); integrationSettings.put( AvailableSettings.JAKARTA_JDBC_DRIVER, ConnectionProviderBuilder.DRIVER ); integrationSettings.put( AvailableSettings.JAKARTA_JDBC_URL, ConnectionProviderBuilder.URL ); integrationSettings.put( AvailableSettings.JAKARTA_JDBC_USER, ConnectionProviderBuilder.USER ); integrationSettings.put( AvailableSettings.JAKARTA_JDBC_PASSWORD, ConnectionProviderBuilder.PASS ); integrationSettings.put( DriverManagerConnectionProvider.INIT_SQL, "" ); final PersistenceProvider provider = new HibernatePersistenceProvider(); try (final EntityManagerFactory emf = provider.createContainerEntityManagerFactory( info, integrationSettings )) { final SessionFactoryImplementor sessionFactory = emf.unwrap( 
SessionFactoryImplementor.class ); final ConnectionProvider connectionProvider = sessionFactory.getServiceRegistry().getService( ConnectionProvider.class ); assertThat( connectionProvider ).isInstanceOf( DriverManagerConnectionProvider.class ); } } @Test public void testCfgXmlBaseline() { final PersistenceUnitInfoAdapter info = new PersistenceUnitInfoAdapter() { private final Properties props = new Properties(); { props.put( AvailableSettings.CFG_XML_FILE, "org/hibernate/orm/test/bootstrap/jpa/hibernate.cfg.xml" ); } @Override public Properties getProperties() { return props; } }; final PersistenceProvider provider = new HibernatePersistenceProvider(); final Map<String, Object> integrationSettings = ServiceRegistryUtil.createBaseSettings(); try (final EntityManagerFactory emf = provider.createContainerEntityManagerFactory( info, integrationSettings )) { assertThat( emf.getProperties().get( AvailableSettings.DIALECT ) ) .isEqualTo( PersistenceUnitDialect.class.getName() ); assertThat( emf.unwrap( SessionFactoryImplementor.class ).getJdbcServices().getDialect() ) .isInstanceOf( PersistenceUnitDialect.class ); assertThat( emf.getMetamodel().entity( MappedEntity.class ) ) .isNotNull(); } } @Test public void testIntegrationOverridesOfCfgXml() { final PersistenceUnitInfoAdapter info = new PersistenceUnitInfoAdapter() { private final Properties props = new Properties(); { props.put( AvailableSettings.CFG_XML_FILE, "org/hibernate/orm/test/bootstrap/jpa/hibernate.cfg.xml" ); } @Override public Properties getProperties() { return props; } }; final PersistenceProvider provider = new HibernatePersistenceProvider(); final Map<String, Object> integrationSettings = ServiceRegistryUtil.createBaseSettings(); integrationSettings.put( AvailableSettings.DIALECT, IntegrationDialect.class.getName() ); try (final EntityManagerFactory emf = provider.createContainerEntityManagerFactory( info, integrationSettings )) { assertThat( emf.getProperties().get( AvailableSettings.DIALECT ) ) 
.isEqualTo( IntegrationDialect.class.getName() ); assertThat( emf.unwrap( SessionFactoryImplementor.class ).getJdbcServices().getDialect() ) .isInstanceOf( IntegrationDialect.class ); final EntityPersister entityMapping = emf.unwrap( SessionFactoryImplementor.class ) .getRuntimeMetamodels() .getMappingMetamodel() .getEntityDescriptor( MappedEntity.class ); assertThat( entityMapping ).isNotNull(); assertThat( entityMapping.getCacheAccessStrategy().getAccessType() ) .isEqualTo( AccessType.READ_ONLY ); } } public static
PersistenceUnitOverridesTests
java
elastic__elasticsearch
build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextualTests.java
{ "start": 942, "end": 1952 }
class ____ extends TransformTests { @Test public void testReplaceAll() throws Exception { String test_original = "/rest/transform/text/text_replace_original.yml"; List<ObjectNode> tests = getTests(test_original); String test_transformed = "/rest/transform/text/text_replace_transformed.yml"; List<ObjectNode> expectedTransformation = getTests(test_transformed); List<ObjectNode> transformedTests = transformTests( tests, List.of( new ReplaceTextual("key_to_replace", "value_to_replace", SerializableJsonNode.of("_replaced_value", TextNode.class), null), new ReplaceIsTrue("is_true_to_replace", SerializableJsonNode.of("is_true_replaced", TextNode.class)), new ReplaceIsFalse("is_false_to_replace", SerializableJsonNode.of("is_false_replaced", TextNode.class)) ) ); AssertObjectNodes.areEqual(transformedTests, expectedTransformation); } }
ReplaceTextualTests
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/dispatcher/runner/DispatcherRunnerFactory.java
{ "start": 1276, "end": 1690 }
interface ____ { DispatcherRunner createDispatcherRunner( LeaderElection leaderElection, FatalErrorHandler fatalErrorHandler, JobPersistenceComponentFactory jobPersistenceComponentFactory, Executor ioExecutor, RpcService rpcService, PartialDispatcherServices partialDispatcherServices) throws Exception; }
DispatcherRunnerFactory
java
micronaut-projects__micronaut-core
core-processor/src/main/java/io/micronaut/inject/writer/ClassWriterOutputVisitor.java
{ "start": 3586, "end": 6409 }
class ____ will be written to the {@code META-INF/services} file under the given type and class * name. * * @param type the fully qualified service name * @param classname the fully qualified classname * @param originatingElement The originating element * @since 3.5.0 */ void visitServiceDescriptor(String type, String classname, Element originatingElement); /** * Visit a file within the META-INF directory of the 'classes' directory. * * @param path The path to the file * @return An optional file it was possible to create it * @deprecated Visiting a file should supply the originating elements. Use {@link #visitMetaInfFile(String, Element...)} instead */ // this is still needed @Deprecated default Optional<GeneratedFile> visitMetaInfFile(String path) { return visitMetaInfFile(path, Element.EMPTY_ELEMENT_ARRAY); } /** * Visit a file within the META-INF directory of the 'classes' directory. * * @param path The path to the file * @param originatingElements The originating elements * @return An optional file it was possible to create it */ Optional<GeneratedFile> visitMetaInfFile(String path, Element... originatingElements); /** * Visit a file that will be generated within the generated 'sources' directory. * * @param path The path * @return The file if it was possible to create it * @deprecated Use {@link #visitGeneratedFile(String, Element...)} instead */ @Deprecated Optional<GeneratedFile> visitGeneratedFile(String path); /** * Visit a file that will be generated within the generated 'sources' directory. * * @param path The path * @param originatingElements the originating elements * @return The file if it was possible to create it * @since 4.0.0 */ Optional<GeneratedFile> visitGeneratedFile(String path, Element... originatingElements); /** * Visit a source file that will be generated within the generated 'sources' directory. 
* * @param packageName The package for the source file * @param fileNameWithoutExtension the name of the source file, without extension (determined automatically) * @param originatingElements the originating elements * @return The file if it was possible to create it * @since 4.2.0 */ default Optional<GeneratedFile> visitGeneratedSourceFile(String packageName, String fileNameWithoutExtension, io.micronaut.inject.ast.Element... originatingElements) { return Optional.empty(); } /** * Finish writing and flush any service entries to disk. */ void finish(); /** * The META-INF/services entries to write. * * @return A map of service to
that
java
google__guice
core/src/com/google/inject/internal/RealMultibinder.java
{ "start": 8983, "end": 12441 }
class ____<T> extends BaseFactory<T, Set<T>> implements ProviderWithExtensionVisitor<Set<T>>, MultibinderBinding<Set<T>> { List<Binding<T>> bindings; SingleParameterInjector<T>[] injectors; boolean permitDuplicates; RealMultibinderProvider(BindingSelection<T> bindingSelection) { super(bindingSelection); } @Override public ImmutableSet<Dependency<?>> getDependencies() { return bindingSelection.getDependencies(); } @Override protected void doInitialize() { bindings = bindingSelection.getBindings(); injectors = bindingSelection.getParameterInjectors(); permitDuplicates = bindingSelection.permitsDuplicates(); } @Override protected ImmutableSet<T> doProvision(InternalContext context, Dependency<?> dependency) throws InternalProvisionException { SingleParameterInjector<T>[] localInjectors = injectors; if (localInjectors == null) { // if localInjectors == null, then we have no bindings so return the empty set. return ImmutableSet.of(); } // If duplicates aren't permitted, we need to capture the original values in order to show a // meaningful error message to users (if duplicates were encountered). @SuppressWarnings("unchecked") T[] values = !permitDuplicates ? (T[]) new Object[localInjectors.length] : null; // Avoid ImmutableSet.copyOf(T[]), because it assumes there'll be duplicates in the input, but // in the usual case of permitDuplicates==false, we know the exact size must be // `localInjector.length` (otherwise we fail). This uses `builderWithExpectedSize` to avoid // the overhead of copyOf or an unknown builder size. If permitDuplicates==true, this will // assume a potentially larger size (but never a smaller size), and `build` will then reduce // as necessary. 
ImmutableSet.Builder<T> setBuilder = ImmutableSet.<T>builderWithExpectedSize(localInjectors.length); for (int i = 0; i < localInjectors.length; i++) { SingleParameterInjector<T> parameterInjector = localInjectors[i]; T newValue = parameterInjector.inject(context); if (newValue == null) { throw newNullEntryException(i); } if (!permitDuplicates) { values[i] = newValue; } setBuilder.add(newValue); } ImmutableSet<T> set = setBuilder.build(); // There are fewer items in the set than the array. Figure out which one got dropped. if (!permitDuplicates && set.size() < values.length) { throw newDuplicateValuesException(values); } return set; } @Override protected MethodHandle doGetHandle(LinkageContext context) { if (injectors == null) { return InternalMethodHandles.constantFactoryGetHandle(ImmutableSet.of()); } // null check each element List<MethodHandle> elementHandles = new ArrayList<>(injectors.length); for (int i = 0; i < injectors.length; i++) { var element = injectors[i].getInjectHandle(context); elementHandles.add( MethodHandles.filterReturnValue( element, MethodHandles.insertArguments( NULL_CHECK_RESULT_HANDLE, 1, bindings.get(i).getSource()))); } // At size one permitDuplicates is irrelevant and we can bind to the SingletonImmutableSet //
RealMultibinderProvider
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java
{ "start": 948, "end": 1417 }
class ____ extends Param<Short> { private int radix; public ShortParam(String name, Short defaultValue, int radix) { super(name, defaultValue); this.radix = radix; } public ShortParam(String name, Short defaultValue) { this(name, defaultValue, 10); } @Override protected Short parse(String str) throws Exception { return Short.parseShort(str, radix); } @Override protected String getDomain() { return "a short"; } }
ShortParam
java
apache__hadoop
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/SASTokenProviderException.java
{ "start": 1064, "end": 1332 }
class ____ extends AzureBlobFileSystemException { public SASTokenProviderException(String message) { super(message); } public SASTokenProviderException(String message, Throwable cause) { super(message); initCause(cause); } }
SASTokenProviderException
java
quarkusio__quarkus
extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/FragmentMethodsAdder.java
{ "start": 1896, "end": 4036 }
interface ____ implemented by forwarding them to the bean that implements them Object[] methodParameterTypes = new Object[methodToImplement.parametersCount()]; for (int i = 0; i < methodToImplement.parametersCount(); i++) { methodParameterTypes[i] = methodToImplement.parameterType(i).name().toString(); } String methodReturnType = methodToImplement.returnType().name().toString(); MethodDescriptor methodDescriptor = MethodDescriptor.ofMethod(generatedClassName, methodToImplement.name(), methodReturnType, methodParameterTypes); if (!classCreator.getExistingMethods().contains(methodDescriptor)) { try (MethodCreator methodCreator = classCreator.getMethodCreator(methodDescriptor)) { // obtain the bean from Arc ResultHandle bean = methodCreator.readInstanceField( customImplNameToHandle.get(customImplementationClassName), methodCreator.getThis()); ResultHandle[] methodParameterHandles = new ResultHandle[methodToImplement.parametersCount()]; for (int i = 0; i < methodToImplement.parametersCount(); i++) { methodParameterHandles[i] = methodCreator.getMethodParam(i); } // delegate call to bean ResultHandle result = methodCreator.invokeVirtualMethod( MethodDescriptor.ofMethod(customImplementationClassName, methodToImplement.name(), methodReturnType, methodParameterTypes), bean, methodParameterHandles); if (void.class.getName().equals(methodReturnType)) { methodCreator.returnValue(null); } else { methodCreator.returnValue(result); } } } } } } }
are
java
micronaut-projects__micronaut-core
inject-java/src/test/groovy/io/micronaut/inject/context/register/AbcFactory.java
{ "start": 138, "end": 227 }
class ____ { @Singleton Abc produce() { return new Abc(); } }
AbcFactory
java
google__gson
gson/src/test/java/com/google/gson/functional/EnumWithObfuscatedTest.java
{ "start": 1089, "end": 1634 }
enum ____ { @SerializedName("MAIL") MALE, @SerializedName("FEMAIL") FEMALE } @Test public void testEnumClassWithObfuscated() { for (Gender enumConstant : Gender.class.getEnumConstants()) { assertThrows( "Enum is not obfuscated", NoSuchFieldException.class, () -> Gender.class.getField(enumConstant.name())); } assertThat(gson.fromJson("\"MAIL\"", Gender.class)).isEqualTo(Gender.MALE); assertThat(gson.toJson(Gender.MALE, Gender.class)).isEqualTo("\"MAIL\""); } }
Gender
java
apache__rocketmq
auth/src/main/java/org/apache/rocketmq/auth/authentication/chain/DefaultAuthenticationHandler.java
{ "start": 1690, "end": 3611 }
class ____ implements Handler<DefaultAuthenticationContext, CompletableFuture<Void>> { private final AuthenticationMetadataProvider authenticationMetadataProvider; public DefaultAuthenticationHandler(AuthConfig config, Supplier<?> metadataService) { this.authenticationMetadataProvider = AuthenticationFactory.getMetadataProvider(config, metadataService); } @Override public CompletableFuture<Void> handle(DefaultAuthenticationContext context, HandlerChain<DefaultAuthenticationContext, CompletableFuture<Void>> chain) { return getUser(context).thenAccept(user -> doAuthenticate(context, user)); } protected CompletableFuture<User> getUser(DefaultAuthenticationContext context) { if (this.authenticationMetadataProvider == null) { throw new AuthenticationException("The authenticationMetadataProvider is not configured"); } if (StringUtils.isEmpty(context.getUsername())) { throw new AuthenticationException("username cannot be null."); } return this.authenticationMetadataProvider.getUser(context.getUsername()); } protected void doAuthenticate(DefaultAuthenticationContext context, User user) { if (user == null) { throw new AuthenticationException("User:{} is not found.", context.getUsername()); } if (user.getUserStatus() == UserStatus.DISABLE) { throw new AuthenticationException("User:{} is disabled.", context.getUsername()); } String signature = AclSigner.calSignature(context.getContent(), user.getPassword()); if (context.getSignature() == null || !MessageDigest.isEqual(signature.getBytes(AclSigner.DEFAULT_CHARSET), context.getSignature().getBytes(AclSigner.DEFAULT_CHARSET))) { throw new AuthenticationException("check signature failed."); } } }
DefaultAuthenticationHandler
java
elastic__elasticsearch
client/rest/src/main/java/org/elasticsearch/client/RestClient.java
{ "start": 31806, "end": 32144 }
class ____ { /** * Notifies that the node provided as argument has just failed */ public void onFailure(Node node) {} } /** * {@link NodeTuple} enables the {@linkplain Node}s and {@linkplain AuthCache} * to be set together in a thread safe, volatile way. */ static
FailureListener
java
spring-projects__spring-framework
spring-beans/src/main/java/org/springframework/beans/GenericTypeAwarePropertyDescriptor.java
{ "start": 1601, "end": 7693 }
class ____ extends PropertyDescriptor { private final Class<?> beanClass; private final @Nullable Method readMethod; private final @Nullable Method writeMethod; private @Nullable Set<Method> ambiguousWriteMethods; private volatile boolean ambiguousWriteMethodsLogged; private @Nullable MethodParameter writeMethodParameter; private volatile @Nullable ResolvableType writeMethodType; private @Nullable ResolvableType readMethodType; private volatile @Nullable TypeDescriptor typeDescriptor; private @Nullable Class<?> propertyType; private final @Nullable Class<?> propertyEditorClass; public GenericTypeAwarePropertyDescriptor(Class<?> beanClass, String propertyName, @Nullable Method readMethod, @Nullable Method writeMethod, @Nullable Class<?> propertyEditorClass) throws IntrospectionException { super(propertyName, null, null); this.beanClass = beanClass; Method readMethodToUse = (readMethod != null ? BridgeMethodResolver.findBridgedMethod(readMethod) : null); Method writeMethodToUse = (writeMethod != null ? BridgeMethodResolver.findBridgedMethod(writeMethod) : null); if (writeMethodToUse == null && readMethodToUse != null) { // Fallback: Original JavaBeans introspection might not have found matching setter // method due to lack of bridge method resolution, in case of the getter using a // covariant return type whereas the setter is defined for the concrete property type. Method candidate = ClassUtils.getMethodIfAvailable( this.beanClass, "set" + StringUtils.capitalize(getName()), (Class<?>[]) null); if (candidate != null && candidate.getParameterCount() == 1) { writeMethodToUse = candidate; } } this.readMethod = readMethodToUse; this.writeMethod = writeMethodToUse; if (this.writeMethod != null) { if (this.readMethod == null) { // Write method not matched against read method: potentially ambiguous through // several overloaded variants, in which case an arbitrary winner has been chosen // by the JDK's JavaBeans Introspector... 
Set<Method> ambiguousCandidates = new HashSet<>(); for (Method method : beanClass.getMethods()) { if (method.getName().equals(this.writeMethod.getName()) && !method.equals(this.writeMethod) && !method.isBridge() && method.getParameterCount() == this.writeMethod.getParameterCount()) { ambiguousCandidates.add(method); } } if (!ambiguousCandidates.isEmpty()) { this.ambiguousWriteMethods = ambiguousCandidates; } } this.writeMethodParameter = new MethodParameter(this.writeMethod, 0).withContainingClass(this.beanClass); } if (this.readMethod != null) { this.readMethodType = ResolvableType.forMethodReturnType(this.readMethod, this.beanClass); this.propertyType = this.readMethodType.resolve(this.readMethod.getReturnType()); } else if (this.writeMethodParameter != null) { this.propertyType = this.writeMethodParameter.getParameterType(); } this.propertyEditorClass = propertyEditorClass; } public Class<?> getBeanClass() { return this.beanClass; } @Override public @Nullable Method getReadMethod() { return this.readMethod; } @Override public @Nullable Method getWriteMethod() { return this.writeMethod; } public Method getWriteMethodForActualAccess() { Assert.state(this.writeMethod != null, "No write method available"); if (this.ambiguousWriteMethods != null && !this.ambiguousWriteMethodsLogged) { this.ambiguousWriteMethodsLogged = true; LogFactory.getLog(GenericTypeAwarePropertyDescriptor.class).debug("Non-unique JavaBean property '" + getName() + "' being accessed! Ambiguous write methods found next to actually used [" + this.writeMethod + "]: " + this.ambiguousWriteMethods); } return this.writeMethod; } public @Nullable Method getWriteMethodFallback(@Nullable Class<?> valueType) { if (this.ambiguousWriteMethods != null) { for (Method method : this.ambiguousWriteMethods) { Class<?> paramType = method.getParameterTypes()[0]; if (valueType != null ? 
paramType.isAssignableFrom(valueType) : !paramType.isPrimitive()) { return method; } } } return null; } public @Nullable Method getUniqueWriteMethodFallback() { if (this.ambiguousWriteMethods != null && this.ambiguousWriteMethods.size() == 1) { return this.ambiguousWriteMethods.iterator().next(); } return null; } public boolean hasUniqueWriteMethod() { return (this.writeMethod != null && this.ambiguousWriteMethods == null); } public MethodParameter getWriteMethodParameter() { Assert.state(this.writeMethodParameter != null, "No write method available"); return this.writeMethodParameter; } public ResolvableType getWriteMethodType() { ResolvableType writeMethodType = this.writeMethodType; if (writeMethodType == null) { writeMethodType = ResolvableType.forMethodParameter(getWriteMethodParameter()); this.writeMethodType = writeMethodType; } return writeMethodType; } public ResolvableType getReadMethodType() { Assert.state(this.readMethodType != null, "No read method available"); return this.readMethodType; } public TypeDescriptor getTypeDescriptor() { TypeDescriptor typeDescriptor = this.typeDescriptor; if (typeDescriptor == null) { Property property = new Property(getBeanClass(), getReadMethod(), getWriteMethod(), getName()); typeDescriptor = new TypeDescriptor(property); this.typeDescriptor = typeDescriptor; } return typeDescriptor; } @Override public @Nullable Class<?> getPropertyType() { return this.propertyType; } @Override public @Nullable Class<?> getPropertyEditorClass() { return this.propertyEditorClass; } @Override public boolean equals(@Nullable Object other) { return (this == other || (other instanceof GenericTypeAwarePropertyDescriptor that && getBeanClass().equals(that.getBeanClass()) && PropertyDescriptorUtils.equals(this, that))); } @Override public int hashCode() { return Objects.hash(getBeanClass(), getReadMethod(), getWriteMethod()); } }
GenericTypeAwarePropertyDescriptor
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/dynamic/DynMethods.java
{ "start": 10396, "end": 11832 }
class ____ check for an implementation * @param argClasses argument classes for the method * @return this Builder for method chaining */ public Builder impl(Class<?> targetClass, Class<?>... argClasses) { impl(targetClass, name, argClasses); return this; } public Builder ctorImpl(Class<?> targetClass, Class<?>... argClasses) { // don't do any work if an implementation has been found if (method != null) { return this; } try { this.method = new DynConstructors.Builder().impl(targetClass, argClasses).buildChecked(); } catch (NoSuchMethodException e) { // not the right implementation LOG.debug("failed to load constructor arity {} from class {}", argClasses.length, targetClass, e); } return this; } public Builder ctorImpl(String className, Class<?>... argClasses) { // don't do any work if an implementation has been found if (method != null) { return this; } try { this.method = new DynConstructors.Builder().impl(className, argClasses).buildChecked(); } catch (NoSuchMethodException e) { // not the right implementation LOG.debug("failed to load constructor arity {} from class {}", argClasses.length, className, e); } return this; } /** * Checks for an implementation, first finding the given
to
java
elastic__elasticsearch
x-pack/plugin/gpu/src/internalClusterTest/java/org/elasticsearch/xpack/gpu/GPUPluginInitializationWithGPUIT.java
{ "start": 1765, "end": 10726 }
class ____ extends GPUPlugin { public TestGPUPlugin() { super(); } @Override protected boolean isGpuIndexingFeatureAllowed() { return GPUPluginInitializationWithGPUIT.isGpuIndexingFeatureAllowed; } } @After public void reset() { isGpuIndexingFeatureAllowed = true; } @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return List.of(TestGPUPlugin.class); } public void testFFOff() { assumeFalse("GPU_FORMAT feature flag disabled", GPUPlugin.GPU_FORMAT.isEnabled()); GPUPlugin gpuPlugin = internalCluster().getInstance(TestGPUPlugin.class); VectorsFormatProvider vectorsFormatProvider = gpuPlugin.getVectorsFormatProvider(); var format = vectorsFormatProvider.getKnnVectorsFormat(null, null, null); assertNull(format); } public void testFFOffIndexSettingNotSupported() { assumeFalse("GPU_FORMAT feature flag disabled", GPUPlugin.GPU_FORMAT.isEnabled()); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, () -> createIndex( "index1", Settings.builder().put(GPUPlugin.VECTORS_INDEXING_USE_GPU_SETTING.getKey(), GPUPlugin.GpuMode.TRUE).build() ) ); assertThat(exception.getMessage(), containsString("unknown setting [index.vectors.indexing.use_gpu]")); } public void testFFOffGPUFormatNull() { assumeFalse("GPU_FORMAT feature flag disabled", GPUPlugin.GPU_FORMAT.isEnabled()); GPUPlugin gpuPlugin = internalCluster().getInstance(TestGPUPlugin.class); VectorsFormatProvider vectorsFormatProvider = gpuPlugin.getVectorsFormatProvider(); createIndex("index1", Settings.EMPTY); IndexSettings settings = getIndexSettings(); final var indexOptions = DenseVectorFieldTypeTests.randomGpuSupportedIndexOptions(); var format = vectorsFormatProvider.getKnnVectorsFormat( settings, indexOptions, randomGPUSupportedSimilarity(indexOptions.getType()) ); assertNull(format); } public void testIndexSettingOnIndexAllSupported() { assumeTrue("GPU_FORMAT feature flag enabled", GPUPlugin.GPU_FORMAT.isEnabled()); GPUPlugin gpuPlugin = 
internalCluster().getInstance(TestGPUPlugin.class); VectorsFormatProvider vectorsFormatProvider = gpuPlugin.getVectorsFormatProvider(); createIndex("index1", Settings.builder().put(GPUPlugin.VECTORS_INDEXING_USE_GPU_SETTING.getKey(), GPUPlugin.GpuMode.TRUE).build()); IndexSettings settings = getIndexSettings(); final var indexOptions = DenseVectorFieldTypeTests.randomGpuSupportedIndexOptions(); var format = vectorsFormatProvider.getKnnVectorsFormat( settings, indexOptions, randomGPUSupportedSimilarity(indexOptions.getType()) ); assertNotNull(format); } public void testIndexSettingOnIndexTypeNotSupportedThrows() { assumeTrue("GPU_FORMAT feature flag enabled", GPUPlugin.GPU_FORMAT.isEnabled()); GPUPlugin gpuPlugin = internalCluster().getInstance(TestGPUPlugin.class); VectorsFormatProvider vectorsFormatProvider = gpuPlugin.getVectorsFormatProvider(); createIndex("index1", Settings.builder().put(GPUPlugin.VECTORS_INDEXING_USE_GPU_SETTING.getKey(), GPUPlugin.GpuMode.TRUE).build()); IndexSettings settings = getIndexSettings(); final var indexOptions = DenseVectorFieldTypeTests.randomFlatIndexOptions(); var ex = expectThrows( IllegalArgumentException.class, () -> vectorsFormatProvider.getKnnVectorsFormat(settings, indexOptions, randomGPUSupportedSimilarity(indexOptions.getType())) ); assertThat(ex.getMessage(), startsWith("[index.vectors.indexing.use_gpu] doesn't support [index_options.type] of")); } public void testIndexSettingOnIndexLicenseNotSupportedThrows() { assumeTrue("GPU_FORMAT feature flag enabled", GPUPlugin.GPU_FORMAT.isEnabled()); isGpuIndexingFeatureAllowed = false; GPUPlugin gpuPlugin = internalCluster().getInstance(TestGPUPlugin.class); VectorsFormatProvider vectorsFormatProvider = gpuPlugin.getVectorsFormatProvider(); createIndex("index1", Settings.builder().put(GPUPlugin.VECTORS_INDEXING_USE_GPU_SETTING.getKey(), GPUPlugin.GpuMode.TRUE).build()); IndexSettings settings = getIndexSettings(); final var indexOptions = 
DenseVectorFieldTypeTests.randomGpuSupportedIndexOptions(); var ex = expectThrows( IllegalArgumentException.class, () -> vectorsFormatProvider.getKnnVectorsFormat(settings, indexOptions, randomGPUSupportedSimilarity(indexOptions.getType())) ); assertThat( ex.getMessage(), equalTo("[index.vectors.indexing.use_gpu] was set to [true], but GPU indexing is a [ENTERPRISE] level feature") ); } public void testIndexSettingAutoAllSupported() { assumeTrue("GPU_FORMAT feature flag enabled", GPUPlugin.GPU_FORMAT.isEnabled()); GPUPlugin gpuPlugin = internalCluster().getInstance(TestGPUPlugin.class); VectorsFormatProvider vectorsFormatProvider = gpuPlugin.getVectorsFormatProvider(); createIndex("index1", Settings.builder().put(GPUPlugin.VECTORS_INDEXING_USE_GPU_SETTING.getKey(), GPUPlugin.GpuMode.AUTO).build()); IndexSettings settings = getIndexSettings(); final var indexOptions = DenseVectorFieldTypeTests.randomGpuSupportedIndexOptions(); var format = vectorsFormatProvider.getKnnVectorsFormat( settings, indexOptions, randomGPUSupportedSimilarity(indexOptions.getType()) ); assertNotNull(format); } public void testIndexSettingAutoLicenseNotSupported() { assumeTrue("GPU_FORMAT feature flag enabled", GPUPlugin.GPU_FORMAT.isEnabled()); isGpuIndexingFeatureAllowed = false; GPUPlugin gpuPlugin = internalCluster().getInstance(TestGPUPlugin.class); VectorsFormatProvider vectorsFormatProvider = gpuPlugin.getVectorsFormatProvider(); createIndex("index1", Settings.builder().put(GPUPlugin.VECTORS_INDEXING_USE_GPU_SETTING.getKey(), GPUPlugin.GpuMode.AUTO).build()); IndexSettings settings = getIndexSettings(); final var indexOptions = DenseVectorFieldTypeTests.randomGpuSupportedIndexOptions(); var format = vectorsFormatProvider.getKnnVectorsFormat( settings, indexOptions, randomGPUSupportedSimilarity(indexOptions.getType()) ); assertNull(format); } public void testIndexSettingAutoIndexTypeNotSupported() { assumeTrue("GPU_FORMAT feature flag enabled", GPUPlugin.GPU_FORMAT.isEnabled()); 
GPUPlugin gpuPlugin = internalCluster().getInstance(TestGPUPlugin.class); VectorsFormatProvider vectorsFormatProvider = gpuPlugin.getVectorsFormatProvider(); createIndex("index1", Settings.builder().put(GPUPlugin.VECTORS_INDEXING_USE_GPU_SETTING.getKey(), GPUPlugin.GpuMode.AUTO).build()); IndexSettings settings = getIndexSettings(); final var indexOptions = DenseVectorFieldTypeTests.randomFlatIndexOptions(); var format = vectorsFormatProvider.getKnnVectorsFormat( settings, indexOptions, randomGPUSupportedSimilarity(indexOptions.getType()) ); assertNull(format); } public void testIndexSettingOff() { assumeTrue("GPU_FORMAT feature flag enabled", GPUPlugin.GPU_FORMAT.isEnabled()); GPUPlugin gpuPlugin = internalCluster().getInstance(TestGPUPlugin.class); VectorsFormatProvider vectorsFormatProvider = gpuPlugin.getVectorsFormatProvider(); createIndex("index1", Settings.builder().put(GPUPlugin.VECTORS_INDEXING_USE_GPU_SETTING.getKey(), GPUPlugin.GpuMode.FALSE).build()); IndexSettings settings = getIndexSettings(); final var indexOptions = DenseVectorFieldTypeTests.randomGpuSupportedIndexOptions(); var format = vectorsFormatProvider.getKnnVectorsFormat( settings, indexOptions, randomGPUSupportedSimilarity(indexOptions.getType()) ); assertNull(format); } private IndexSettings getIndexSettings() { ensureGreen("index1"); IndexSettings settings = null; for (IndicesService service : internalCluster().getInstances(IndicesService.class)) { IndexService indexService = service.indexService(resolveIndex("index1")); if (indexService != null) { settings = indexService.getIndexSettings(); break; } } assertNotNull(settings); return settings; } }
TestGPUPlugin
java
quarkusio__quarkus
extensions/redis-cache/deployment/src/test/java/io/quarkus/cache/redis/deployment/PojoAndMultipleKeysCacheTest.java
{ "start": 2932, "end": 3747 }
class ____ { private final String str; private final int num; @JsonCreator public Message(String str, int num) { this.str = str; this.num = num; } public String getStr() { return str; } public int getNum() { return num; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Message message = (Message) o; return num == message.num && str.equals(message.str); } @Override public int hashCode() { return Objects.hash(str, num); } } }
Message
java
apache__logging-log4j2
log4j-1.2-api/src/main/java/org/apache/log4j/CategoryKey.java
{ "start": 939, "end": 1509 }
class ____ { String name; int hashCache; CategoryKey(final String name) { this.name = name; this.hashCache = name.hashCode(); } @Override public final int hashCode() { return hashCache; } @Override public final boolean equals(final Object rArg) { if (this == rArg) { return true; } if (rArg != null && CategoryKey.class == rArg.getClass()) { return name.equals(((CategoryKey) rArg).name); } else { return false; } } }
CategoryKey
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/sps/FileCollector.java
{ "start": 1020, "end": 1184 }
interface ____ scanning the directory recursively and collect files * under the given directory. */ @InterfaceAudience.Private @InterfaceStability.Evolving public
for
java
apache__flink
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/aggfunctions/LastValueAggFunctionWithoutOrderTest.java
{ "start": 8016, "end": 9572 }
class ____ extends LastValueAggFunctionWithoutOrderTestBase<StringData> { @Override protected List<List<StringData>> getInputValueSets() { return Arrays.asList( Arrays.asList( StringData.fromString("abc"), StringData.fromString("def"), StringData.fromString("ghi"), null, StringData.fromString("jkl"), null, StringData.fromString("zzz")), Arrays.asList(null, null), Arrays.asList(null, StringData.fromString("a"), null), Arrays.asList(StringData.fromString("x"), null, StringData.fromString("e"))); } @Override protected List<StringData> getExpectedResults() { return Arrays.asList( StringData.fromString("zzz"), null, StringData.fromString("a"), StringData.fromString("e")); } @Override protected AggregateFunction<StringData, RowData> getAggregator() { return new LastValueAggFunction<>(DataTypes.STRING().getLogicalType()); } } // -------------------------------------------------------------------------------------------- // This section contain base classes that provide common inputs and declare the accumulator //
StringLastValueAggFunctionWithoutOrderTest
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AMRMTokenSelector.java
{ "start": 1275, "end": 2345 }
class ____ implements TokenSelector<AMRMTokenIdentifier> { private static final Logger LOG = LoggerFactory .getLogger(AMRMTokenSelector.class); @SuppressWarnings("unchecked") public Token<AMRMTokenIdentifier> selectToken(Text service, Collection<Token<? extends TokenIdentifier>> tokens) { if (service == null) { return null; } LOG.debug("Looking for a token with service {}", service); for (Token<? extends TokenIdentifier> token : tokens) { LOG.debug("Token kind is {} and the token's service name is {}", token.getKind(), token.getService()); if (AMRMTokenIdentifier.KIND_NAME.equals(token.getKind()) && checkService(service, token)) { return (Token<AMRMTokenIdentifier>) token; } } return null; } private boolean checkService(Text service, Token<? extends TokenIdentifier> token) { if (service == null || token.getService() == null) { return false; } return token.getService().toString().contains(service.toString()); } }
AMRMTokenSelector
java
spring-projects__spring-framework
spring-tx/src/test/java/org/springframework/transaction/annotation/EnableTransactionManagementTests.java
{ "start": 23352, "end": 23525 }
class ____ implements TransactionalTestInterface { @Override public void saveFoo() { } } @Configuration @EnableTransactionManagement static
TransactionalTestService
java
apache__camel
test-infra/camel-test-infra-neo4j/src/test/java/org/apache/camel/test/infra/neo4j/services/Neo4jServiceFactory.java
{ "start": 1016, "end": 1106 }
class ____ { private Neo4jServiceFactory() { } public static
Neo4jServiceFactory
java
hibernate__hibernate-orm
hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/CommunityDialectResolver.java
{ "start": 423, "end": 749 }
class ____ implements DialectResolver { @Override public Dialect resolveDialect(DialectResolutionInfo info) { for ( CommunityDatabase database : CommunityDatabase.values() ) { if ( database.matchesResolutionInfo( info ) ) { return database.createDialect( info ); } } return null; } }
CommunityDialectResolver
java
apache__hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/TestLoadManifestsStage.java
{ "start": 3118, "end": 8413 }
class ____ extends AbstractManifestCommitterTest { public static final int FILES_PER_TASK_ATTEMPT = 100; private int taskAttemptCount; private File entryFile; /** * How many task attempts to make? * Override point. * @return a number greater than 0. */ protected int numberOfTaskAttempts() { return ManifestCommitterTestSupport.NUMBER_OF_TASK_ATTEMPTS; } @BeforeEach @Override public void setup() throws Exception { super.setup(); taskAttemptCount = numberOfTaskAttempts(); Assertions.assertThat(taskAttemptCount) .describedAs("Task attempt count") .isGreaterThan(0); } @AfterEach @Override public void teardown() throws Exception { if (entryFile != null) { entryFile.delete(); } super.teardown(); } public long heapSize() { return Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory(); } /** * Build a large number of manifests, but without the real files * and directories. * Save the manifests under the job attempt dir, then load * them via the {@link LoadManifestsStage}. * The directory preparation process is then executed after this. * Because we know each task attempt creates the same number of directories, * they will all be merged and so only a limited number of output dirs * will be created. */ @Test public void testSaveThenLoadManyManifests() throws Throwable { describe("Creating many manifests with fake file/dir entries," + " load them and prepare the output dirs."); int filesPerTaskAttempt = FILES_PER_TASK_ATTEMPT; LOG.info("Number of task attempts: {}, files per task attempt {}", taskAttemptCount, filesPerTaskAttempt); final StageConfig stageConfig = createStageConfigForJob(JOB1, getDestDir()); setJobStageConfig(stageConfig); // set up the job. 
new SetupJobStage(stageConfig).apply(false); LOG.info("Creating manifest files for {}", taskAttemptCount); executeTaskAttempts(taskAttemptCount, filesPerTaskAttempt); IOStatisticsSnapshot heapInfo = new IOStatisticsSnapshot(); heapinfo(heapInfo, "initial"); LOG.info("Loading in the manifests"); // Load in the manifests LoadManifestsStage stage = new LoadManifestsStage( stageConfig); entryFile = File.createTempFile("entry", ".seq"); LoadManifestsStage.Arguments args = new LoadManifestsStage.Arguments( entryFile, DEFAULT_WRITER_QUEUE_CAPACITY); LoadManifestsStage.Result loadManifestsResult = stage.apply(args); LoadManifestsStage.SummaryInfo summary = loadManifestsResult.getSummary(); LOG.info("\nJob statistics after loading {}", ioStatisticsToPrettyString(getStageStatistics())); LOG.info("Heap size = {}", heapSize()); heapinfo(heapInfo, "load.manifests"); Assertions.assertThat(summary.getManifestCount()) .describedAs("Manifest count of %s", summary) .isEqualTo(taskAttemptCount); Assertions.assertThat(summary.getFileCount()) .describedAs("File count of %s", summary) .isEqualTo(taskAttemptCount * (long) filesPerTaskAttempt); Assertions.assertThat(summary.getTotalFileSize()) .describedAs("File Size of %s", summary) .isEqualTo(getTotalDataSize()); // now that manifest list. 
List<String> manifestTaskIds = summary.getTaskIDs(); Assertions.assertThat(getTaskIds()) .describedAs("Task IDs of all tasks") .containsExactlyInAnyOrderElementsOf(manifestTaskIds); // now let's see about aggregating a large set of directories Set<Path> createdDirectories = new CreateOutputDirectoriesStage( stageConfig) .apply(loadManifestsResult.getLoadedManifestData().getDirectories()) .getCreatedDirectories(); heapinfo(heapInfo, "create.directories"); // but after the merge process, only one per generated file output // dir exists Assertions.assertThat(createdDirectories) .describedAs("Directories created") .hasSize(filesPerTaskAttempt); // and skipping the rename stage (which is going to fail), // go straight to cleanup new CleanupJobStage(stageConfig).apply( new CleanupJobStage.Arguments("", true, true, false, false, 0)); heapinfo(heapInfo, "cleanup"); ManifestSuccessData success = createManifestOutcome(stageConfig, OP_STAGE_JOB_COMMIT); success.snapshotIOStatistics(getStageStatistics()); success.getIOStatistics().aggregate(heapInfo); Configuration conf = getConfiguration(); enableManifestCommitter(conf); String reportDir = conf.getTrimmed(OPT_SUMMARY_REPORT_DIR, ""); Path reportDirPath = new Path(reportDir); Path path = new Path(reportDirPath, createJobSummaryFilename("TestLoadManifestsStage")); final FileSystem summaryFS = path.getFileSystem(conf); success.save(summaryFS, path, true); LOG.info("Saved summary to {}", path); new ManifestPrinter().loadAndPrintManifest(summaryFS, path); } /** * Force a GC then add heap info. * @param stats stats to update * @param stage stage name */ private static void heapinfo(final IOStatisticsSnapshot stats, final String stage) { System.gc(); addHeapInformation(stats, stage); } }
TestLoadManifestsStage
java
apache__flink
flink-core/src/test/java/org/apache/flink/api/common/io/FileInputFormatTest.java
{ "start": 33869, "end": 34819 }
class ____ extends FileInputFormat<IntValue> { private static final long serialVersionUID = 1L; private boolean compressedRead = false; @Override public boolean reachedEnd() { return true; } @Override public IntValue nextRecord(IntValue record) { return null; } @Override public void open(FileInputSplit split) throws IOException { compressedRead = false; super.open(split); } @Override protected FSDataInputStream decorateInputStream( FSDataInputStream inputStream, FileInputSplit fileSplit) { compressedRead = getInflaterInputStreamFactory( extractFileExtension(fileSplit.getPath().getName())) != null; return inputStream; } } private
DummyFileInputFormat
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/engine/ThreadPoolMergeExecutorService.java
{ "start": 44437, "end": 45457 }
class ____ { private final AtomicLong ioRate; AtomicIORate(long initialIORate) { ioRate = new AtomicLong(initialIORate); } long get() { return ioRate.get(); } // Exactly like {@link AtomicLong#updateAndGet} but calls the consumer rather than return the new (updated) value. // The consumer receives both the previous and the updated values (which can be equal). void update(LongUnaryOperator updateFunction, AtomicIORate.UpdateConsumer updateConsumer) { long prev = ioRate.get(), next = 0L; for (boolean haveNext = false;;) { if (haveNext == false) next = updateFunction.applyAsLong(prev); if (ioRate.weakCompareAndSetVolatile(prev, next)) { updateConsumer.accept(prev, next); return; } haveNext = (prev == (prev = ioRate.get())); } } @FunctionalInterface
AtomicIORate
java
apache__commons-lang
src/main/java/org/apache/commons/lang3/ClassUtils.java
{ "start": 30767, "end": 31130 }
class ____ or {@code valueIfNull}. * @since 3.0 * @see Class#getName() */ public static String getName(final Object object, final String valueIfNull) { return object == null ? valueIfNull : object.getClass().getName(); } /** * Gets the package name from the canonical name of a {@link Class}. * * @param cls the
name
java
apache__flink
flink-tests/src/test/java/org/apache/flink/runtime/operators/lifecycle/PartiallyFinishedSourcesITCase.java
{ "start": 3828, "end": 10967 }
class ____ extends TestLogger { @ClassRule public static final TemporaryFolder TEMPORARY_FOLDER = new TemporaryFolder(); @Rule public final SharedObjects sharedObjects = SharedObjects.create(); @Rule public Timeout timeoutRule = new Timeout(10, TimeUnit.MINUTES); private MiniClusterWithClientResource miniClusterResource; @Before public void init() throws Exception { Configuration configuration = new Configuration(); // set failover strategy on the cluster level // choose it from the parameter because it may affect the test // - "region" is currently the default // - "full" is enforced by Adaptive/Reactive scheduler (even when parameterized) configuration.set(EXECUTION_FAILOVER_STRATEGY, failoverStrategy); // If changelog backend is enabled then this test might run too slow with in-memory // implementation - use fs-based instead. // The randomization currently happens on the job level (environment); while this factory // can only be set on the cluster level; so we do it unconditionally here. 
FsStateChangelogStorageFactory.configure( configuration, TEMPORARY_FOLDER.newFolder(), Duration.ofMinutes(1), 10); miniClusterResource = new MiniClusterWithClientResource( new MiniClusterResourceConfiguration.Builder() .setConfiguration(configuration) .setNumberTaskManagers(1) .setNumberSlotsPerTaskManager(4) .build()); miniClusterResource.before(); } @After public void tearDown() { if (miniClusterResource != null) { miniClusterResource.after(); } } @Parameter(0) public TestingGraphBuilder graphBuilder; @Parameter(1) public TestCommandScope subtaskScope; @Parameter(2) public boolean failover; @Parameter(3) public String failoverStrategy; @Test public void test() throws Exception { TestJobWithDescription testJob = buildJob(); // pick any source operator Iterator<String> iterator = testJob.sources.iterator(); String finishingOperatorID = iterator.next(); JobVertexID finishingVertexID = findJobVertexID(testJob, finishingOperatorID); TestJobExecutor executor = TestJobExecutor.execute(testJob, miniClusterResource) .waitForEvent(CheckpointCompletedEvent.class) .sendOperatorCommand(finishingOperatorID, FINISH_SOURCES, subtaskScope) .waitForSubtasksToFinish(finishingVertexID, subtaskScope) // wait for a checkpoint to complete with a finished subtask(s) // but skip one checkpoint that might be started before finishing .waitForEvent(CheckpointCompletedEvent.class) .waitForEvent(CheckpointCompletedEvent.class); if (failover) { // If requested, fail the source operator. Failing non-source operator might not work // because it can be idle if all its sources were finished before. // However, if all source subtasks were finished, we need another source to fail. // Otherwise, (if finished single source subtask), rely on terminal property of // FINISH_SOURCES command for choosing a different subtask to fail. executor.triggerFailover( subtaskScope == ALL_SUBTASKS ? 
iterator.next() : finishingOperatorID); } executor.sendBroadcastCommand(FINISH_SOURCES, ALL_SUBTASKS) .waitForTermination() .assertFinishedSuccessfully(); checkOperatorsLifecycle(testJob, new DrainingValidator(), new FinishingValidator()); checkDataFlow(testJob, true); } private TestJobWithDescription buildJob() throws Exception { return graphBuilder.build( sharedObjects, cfg -> {}, env -> { RestartStrategyUtils.configureFixedDelayRestartStrategy(env, 1, 0L); // checkpoints can hang (because of not yet fixed bugs and triggering // checkpoint while the source finishes), so we reduce the timeout to // avoid hanging for too long. env.getCheckpointConfig().setCheckpointTimeout(30000); // but don't fail the job env.getCheckpointConfig() .setTolerableCheckpointFailureNumber(Integer.MAX_VALUE); // explicitly set to one to ease avoiding race conditions env.getCheckpointConfig().setMaxConcurrentCheckpoints(1); // with unaligned checkpoints state size can grow beyond the default // limits of in-memory storage CheckpointStorageUtils.configureFileSystemCheckpointStorage( env, TEMPORARY_FOLDER.newFolder().toURI()); }); } private JobVertexID findJobVertexID( TestJobWithDescription testJob, String finishingOperatorID) { return stream(testJob.jobGraph.getVertices().spliterator(), false) .filter( v -> v.getOperatorIDs().stream() .anyMatch(idPair -> matches(idPair, finishingOperatorID))) .findAny() .orElseThrow(() -> new RuntimeException("Vertex not found: " + finishingOperatorID)) .getID(); } private boolean matches(OperatorIDPair idPair, String operatorID) { return idPair.getUserDefinedOperatorID() .orElse(idPair.getGeneratedOperatorID()) .toString() .equals(operatorID); } @Parameterized.Parameters(name = "{0} {1}, failover: {2}, strategy: {3}") public static List<Object[]> parameters() { List<String> failoverStrategies = asList("full", "region"); List<List<Object>> rest = asList( asList(SIMPLE_GRAPH_BUILDER, SINGLE_SUBTASK, true), asList(COMPLEX_GRAPH_BUILDER, SINGLE_SUBTASK, 
true), asList(COMPLEX_GRAPH_BUILDER, ALL_SUBTASKS, true), asList(SIMPLE_GRAPH_BUILDER, SINGLE_SUBTASK, false), asList(COMPLEX_GRAPH_BUILDER, SINGLE_SUBTASK, false), asList(COMPLEX_GRAPH_BUILDER, ALL_SUBTASKS, false)); List<Object[]> result = new ArrayList<>(); for (String failoverStrategy : failoverStrategies) { for (List<Object> otherParams : rest) { List<Object> fullList = new ArrayList<>(otherParams); fullList.add(failoverStrategy); result.add(fullList.toArray()); } } return result; } }
PartiallyFinishedSourcesITCase
java
elastic__elasticsearch
test/framework/src/main/java/org/elasticsearch/datageneration/fields/leaf/DateFieldDataGenerator.java
{ "start": 932, "end": 2197 }
class ____ implements FieldDataGenerator { private final DataSource dataSource; private final Supplier<Instant> instants; private final Supplier<String> strings; public DateFieldDataGenerator(DataSource dataSource) { this.dataSource = dataSource; this.instants = () -> dataSource.get(new DataSourceRequest.InstantGenerator()).generator().get(); this.strings = dataSource.get(new DataSourceRequest.StringGenerator()).generator(); } @Override public Object generateValue(Map<String, Object> fieldMapping) { Supplier<Object> supplier = () -> instants.get().toEpochMilli(); if (fieldMapping != null && fieldMapping.get("format") != null) { String format = (String) fieldMapping.get("format"); supplier = () -> DateTimeFormatter.ofPattern(format, Locale.ROOT).withZone(ZoneId.from(ZoneOffset.UTC)).format(instants.get()); } if (fieldMapping != null && (Boolean) fieldMapping.getOrDefault("ignore_malformed", false)) { supplier = Wrappers.defaultsWithMalformed(supplier, strings::get, dataSource); } else { supplier = Wrappers.defaults(supplier, dataSource); } return supplier.get(); } }
DateFieldDataGenerator
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/frommap/MapToBeanNonStringMapAsMultiSourceMapper.java
{ "start": 803, "end": 1198 }
class ____ { private final String value; private final Map<String, String> map; public Target(String value, Map<String, String> map) { this.value = value; this.map = map; } public String getValue() { return value; } public Map<String, String> getMap() { return map; } } }
Target
java
spring-projects__spring-boot
cli/spring-boot-cli/src/intTest/java/org/springframework/boot/cli/infrastructure/Versions.java
{ "start": 903, "end": 1272 }
class ____ { private Versions() { } static String getBootVersion() { Properties gradleProperties = new Properties(); try (FileInputStream input = new FileInputStream("../../gradle.properties")) { gradleProperties.load(input); return gradleProperties.getProperty("version"); } catch (IOException ex) { throw new RuntimeException(ex); } } }
Versions
java
apache__flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/sinks/TableSinkBase.java
{ "start": 1110, "end": 1222 }
class ____ {@link TableSink}. * * @param <T> The return type of the {@link TableSinkBase}. * @deprecated This
for
java
spring-projects__spring-framework
spring-web/src/test/java/org/springframework/web/ErrorResponseTests.java
{ "start": 874, "end": 1666 }
class ____ { @Test void createWithHttpHeader() { ErrorResponse response = ErrorResponse .builder(new IllegalStateException(), HttpStatus.BAD_REQUEST, "test") .header("header", "value") .build(); assertThat(response.getHeaders().containsHeaderValue("header", "value")).isTrue(); } @Test void createWithHttpHeadersConsumer() { ErrorResponse response = ErrorResponse.builder(new IllegalStateException(), HttpStatus.BAD_REQUEST, "test") .header("header", "value") .headers(headers -> { headers.add("header", "value2"); headers.add("another", "value3"); }).build(); assertThat(response.getHeaders().get("header")).containsExactly("value", "value2"); assertThat(response.getHeaders().get("another")).containsExactly("value3"); } }
ErrorResponseTests
java
apache__flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/extraction/FunctionMappingExtractor.java
{ "start": 6302, "end": 15903 }
class ____ higher priority. */ static ResultExtraction createOutputFromGenericInMethod( int paramPos, int genericPos, boolean allowDataTypeHint) { return (extractor, method) -> { if (allowDataTypeHint) { Optional<FunctionResultTemplate> hints = extractHints(extractor, method); if (hints.isPresent()) { return hints.get(); } } final DataType dataType = DataTypeExtractor.extractFromGenericMethodParameter( extractor.typeFactory, extractor.getFunctionClass(), method, paramPos, genericPos); return FunctionResultTemplate.ofOutput(dataType); }; } // -------------------------------------------------------------------------------------------- // Verification strategies // -------------------------------------------------------------------------------------------- /** Verification that checks a method by parameters (arguments only) and return type. */ static MethodVerification createParameterAndReturnTypeVerification() { return (method, state, arguments, result) -> { checkNoState(state); checkScalarArgumentsOnly(arguments); final Class<?>[] parameters = assembleParameters(null, arguments); assert result != null; final Class<?> resultClass = result.toClass(); final Class<?> returnType = method.getReturnType(); // Parameters should be validated using strict autoboxing. // For return types, we can be more flexible as the UDF should know what it declared. final boolean isValid = isInvokable(Autoboxing.STRICT, method, parameters) && isAssignable(resultClass, returnType, Autoboxing.JVM); if (!isValid) { throw createMethodNotFoundError(method.getName(), parameters, resultClass, ""); } }; } /** Verification that checks a method by parameters (arguments only or with accumulator). 
*/ static MethodVerification createParameterVerification(boolean requireAccumulator) { return (method, state, arguments, result) -> { if (requireAccumulator) { checkSingleState(state); } else { checkNoState(state); } checkScalarArgumentsOnly(arguments); final Class<?>[] parameters = assembleParameters(state, arguments); // Parameters should be validated using strict autoboxing. if (!isInvokable(Autoboxing.STRICT, method, parameters)) { throw createMethodNotFoundError( method.getName(), parameters, null, requireAccumulator ? "(<accumulator> [, <argument>]*)" : ""); } }; } /** * Verification that checks a method by parameters (arguments only) with mandatory {@link * CompletableFuture}. */ static MethodVerification createParameterAndCompletableFutureVerification( Class<?> baseClass, boolean verifyFutureContainsCollection) { return (method, state, arguments, result) -> { checkNoState(state); checkScalarArgumentsOnly(arguments); final Class<?>[] parameters = assembleParameters(null, arguments); final Class<?>[] parametersWithFuture = Stream.concat(Stream.of(CompletableFuture.class), Arrays.stream(parameters)) .toArray(Class<?>[]::new); assert result != null; final Class<?> resultClass = result.toClass(); Type genericType = method.getGenericParameterTypes()[0]; genericType = resolveVariableWithClassContext(baseClass, genericType); Optional<ParameterizedType> parameterized = getParameterizedType(genericType); if (!parameterized.isPresent()) { throw extractionError( "The method '%s' needs generic parameters for the CompletableFuture at position %d.", method.getName(), 0); } // If verifyFutureContainsCollection is given, it is assumed to be a generic parameters // of argumentClass, also at the position genericPos final Type returnType; if (verifyFutureContainsCollection) { Type nestedGenericType = parameterized.get().getActualTypeArguments()[0]; Optional<ParameterizedType> nestedParameterized = getParameterizedType(nestedGenericType); if (!nestedParameterized.isPresent() || 
!nestedParameterized.get().getRawType().equals(Collection.class)) { throw extractionError( "The method '%s' expects nested generic type CompletableFuture<Collection> for the %d arg.", method.getName(), 0); } returnType = nestedParameterized.get().getActualTypeArguments()[0]; } else { returnType = parameterized.get().getActualTypeArguments()[0]; } Class<?> returnTypeClass = getClassFromType(returnType); // Parameters should be validated using strict autoboxing. // For return types, we can be more flexible as the UDF should know what it declared. if (!(isInvokable(Autoboxing.STRICT, method, parametersWithFuture) && isAssignable(resultClass, returnTypeClass, Autoboxing.JVM))) { throw createMethodNotFoundError( method.getName(), parametersWithFuture, null, "(<completable future> [, <argument>]*)"); } }; } /** * Verification that checks a method by parameters (state and arguments) with optional context. */ static MethodVerification createParameterAndOptionalContextVerification( Class<?> context, boolean allowState) { return (method, state, arguments, result) -> { if (!allowState) { checkNoState(state); } final Class<?>[] parameters = assembleParameters(state, arguments); final Class<?>[] parametersWithContext = Stream.concat(Stream.of(context), Arrays.stream(parameters)) .toArray(Class<?>[]::new); // Parameters should be validated using strict autoboxing. if (!isInvokable(Autoboxing.STRICT, method, parameters) && !isInvokable(Autoboxing.STRICT, method, parametersWithContext)) { throw createMethodNotFoundError( method.getName(), parameters, null, allowState ? "(<context>? [, <state>]* [, <argument>]*)" : ""); } }; } // -------------------------------------------------------------------------------------------- // Methods from super class // -------------------------------------------------------------------------------------------- Class<? 
extends UserDefinedFunction> getFunction() { return function; } boolean supportsState() { return stateExtraction != null; } @Override protected Set<FunctionTemplate> extractGlobalFunctionTemplates() { return TemplateUtils.extractGlobalFunctionTemplates(typeFactory, function); } @Override protected Set<FunctionTemplate> extractLocalFunctionTemplates(Method method) { return TemplateUtils.extractLocalFunctionTemplates(typeFactory, method); } @Override protected List<Method> collectMethods(String methodName) { return ExtractionUtils.collectMethods(function, methodName); } @Override protected Class<?> getFunctionClass() { return function; } @Override protected String getHintType() { return "Function"; } // -------------------------------------------------------------------------------------------- // Helper methods // -------------------------------------------------------------------------------------------- /** Uses hints to extract functional template. */ private static Optional<FunctionResultTemplate> extractHints( BaseMappingExtractor extractor, Method method) { final Set<DataTypeHint> dataTypeHints = new HashSet<>(); dataTypeHints.addAll(collectAnnotationsOfMethod(DataTypeHint.class, method)); dataTypeHints.addAll( collectAnnotationsOfClass(DataTypeHint.class, extractor.getFunctionClass())); if (dataTypeHints.size() > 1) { throw extractionError( "More than one data type hint found for output of function. " + "Please use a function hint instead."); } if (dataTypeHints.size() == 1) { return Optional.ofNullable( FunctionTemplate.createOutputTemplate( extractor.typeFactory, dataTypeHints.iterator().next())); } // otherwise continue with regular extraction return Optional.empty(); } }
has
java
apache__camel
catalog/camel-route-parser/src/test/java/org/apache/camel/parser/java/MyRouteEmptyUriTest.java
{ "start": 1118, "end": 1535 }
class ____ extends CamelTestSupport { @Test void testFoo() { assertTrue(context.isStarted()); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { @Override public void configure() { from("direct:foo") .to(""); // is empty on purpose } }; } }
MyRouteEmptyUriTest
java
apache__flink
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/aggregate/BatchApproxCountDistinctAggFunctions.java
{ "start": 6770, "end": 7502 }
class ____ extends ApproxCountDistinctAggFunction<Double> { public DoubleApproxCountDistinctAggFunction() { super(new DoubleType()); } @Override long getHashcode(Double value) { return hashLong(Double.doubleToLongBits(normalizeDouble(value)), DEFAULT_SEED); } private Double normalizeDouble(Double value) { if (value.isNaN()) { return Double.NaN; } else if (value == -0.0d) { return 0.0d; } else { return value; } } } /** Built-in float APPROX_COUNT_DISTINCT aggregate function. */ public static
DoubleApproxCountDistinctAggFunction
java
micronaut-projects__micronaut-core
core-processor/src/main/java/io/micronaut/inject/writer/BeanDefinitionWriter.java
{ "start": 254250, "end": 254873 }
class ____ { private BuildMethodLifecycleDefinition postConstruct; private BuildMethodLifecycleDefinition preDestroy; abstract ParameterElement[] getParameters(); void postConstruct(boolean intercepted) { if (postConstruct == null) { postConstruct = new BuildMethodLifecycleDefinition(intercepted); } } void preDestroy(boolean intercepted) { if (preDestroy == null) { preDestroy = new BuildMethodLifecycleDefinition(intercepted); } } } private static final
BuildMethodDefinition
java
apache__flink
flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/IntSerializer.java
{ "start": 2865, "end": 3078 }
class ____ extends SimpleTypeSerializerSnapshot<Integer> { @SuppressWarnings("WeakerAccess") public IntSerializerSnapshot() { super(() -> INSTANCE); } } }
IntSerializerSnapshot
java
grpc__grpc-java
xds/src/main/java/io/grpc/xds/ClusterManagerLoadBalancerProvider.java
{ "start": 4027, "end": 4825 }
class ____ { final Map<String, Object> childPolicies; ClusterManagerConfig(Map<String, Object> childPolicies) { this.childPolicies = Collections.unmodifiableMap(childPolicies); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (!(o instanceof ClusterManagerConfig)) { return false; } ClusterManagerConfig config = (ClusterManagerConfig) o; return Objects.equals(childPolicies, config.childPolicies); } @Override public int hashCode() { return Objects.hash(childPolicies); } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("childPolicies", childPolicies) .toString(); } } }
ClusterManagerConfig
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/query/UnknownKvStateLocation.java
{ "start": 1056, "end": 1396 }
class ____ extends Exception { private static final long serialVersionUID = 1L; public UnknownKvStateLocation(String registrationName) { super( "No KvStateLocation found for KvState instance with name '" + registrationName + "'."); } }
UnknownKvStateLocation
java
apache__camel
core/camel-api/src/main/java/org/apache/camel/spi/Breakpoint.java
{ "start": 1826, "end": 3677 }
enum ____ { Active, Suspended } /** * Gets the state of this break * * @return the state */ State getState(); /** * Suspend this breakpoint */ void suspend(); /** * Activates this breakpoint */ void activate(); /** * Callback invoked when the breakpoint was hit and the {@link Exchange} is about to be processed (before). * * @param exchange the {@link Exchange} * @param processor the {@link Processor} about to be processed * @param definition the {@link NamedNode} definition of the processor */ void beforeProcess(Exchange exchange, Processor processor, NamedNode definition); /** * Callback invoked when the breakpoint was hit and the {@link Exchange} has been processed (after). * * @param exchange the {@link Exchange} * @param processor the {@link Processor} which was processed * @param definition the {@link NamedNode} definition of the processor * @param timeTaken time in millis it took to process the {@link Exchange} - time spend in breakpoint callbacks may * affect this time */ void afterProcess(Exchange exchange, Processor processor, NamedNode definition, long timeTaken); /** * Callback invoked when the breakpoint was hit and any of the {@link Exchange} {@link EventObject event}s occurred. * * @param exchange the {@link Exchange} * @param event the event (instance of {@link ExchangeEvent} * @param definition the {@link NamedNode} definition of the last processor executed, may be <tt>null</tt> if not * possible to resolve from tracing * @see ExchangeEvent */ void onEvent(Exchange exchange, ExchangeEvent event, NamedNode definition); }
State
java
assertj__assertj-core
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/fields/RecursiveComparisonAssert_isNotIn_Test.java
{ "start": 1272, "end": 2601 }
class ____ extends WithComparingFieldsIntrospectionStrategyBaseTest { @Test void should_use_recursive_comparator() { // GIVEN Person actual = new Person("jack"); Person other = new Person("john"); // WHEN RecursiveComparisonAssert<?> recursiveComparisonAssert = assertThat(actual).usingRecursiveComparison(recursiveComparisonConfiguration) .isNotIn(other); // THEN Object comparator = FieldSupport.extraction().fieldValue("objects.comparisonStrategy.comparator", Object.class, recursiveComparisonAssert); then(comparator).isInstanceOf(RecursiveComparator.class); Object configuration = FieldSupport.extraction().fieldValue("recursiveComparisonConfiguration", Object.class, comparator); then(configuration).isSameAs(recursiveComparisonConfiguration); } @Test void should_succeed() { // GIVEN Person actual = new Person("jack"); Person other1 = new Person("john"); Person other2 = new Person("jim"); // WHEN/THEN then(actual).usingRecursiveComparison(recursiveComparisonConfiguration) .isNotIn(other1, other2) .isNotIn(list(other1, other2)); } }
RecursiveComparisonAssert_isNotIn_Test
java
google__guice
extensions/assistedinject/test/com/google/inject/assistedinject/FactoryProviderTest.java
{ "start": 15826, "end": 16213 }
class ____ implements Car { @AssistedInject public MultipleConstructorDefectiveCar() throws ExplosionException { throw new ExplosionException(); } @AssistedInject public MultipleConstructorDefectiveCar(@SuppressWarnings("unused") @Assisted Color c) throws FireException { throw new FireException(); } } public
MultipleConstructorDefectiveCar
java
apache__flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlDropPartitions.java
{ "start": 3048, "end": 3173 }
class ____ { public boolean ifExists; public List<SqlNodeList> partSpecs; } }
AlterTableDropPartitionsContext
java
quarkusio__quarkus
independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/ServerSerialisers.java
{ "start": 3854, "end": 21615 }
class ____ extends Serialisers { private static final Consumer<ResteasyReactiveRequestContext> HEADER_FUNCTION = new Consumer<ResteasyReactiveRequestContext>() { @Override public void accept(ResteasyReactiveRequestContext context) { ServerSerialisers.encodeResponseHeaders(context); } }; private static final String CONTENT = "Content"; private static final String CONTENT_LOWER = "content"; private static final String LOCATION = "Location"; private static final String TYPE = "Type"; private static final String TYPE_LOWER = "type"; private static final String LENGTH = "Length"; private static final String LENGTH_LOWER = "length"; private static final String CONTENT_TYPE = CONTENT + "-" + TYPE; // use this instead of the Vert.x constant because the TCK expects upper case public static final String TRANSFER_ENCODING = "Transfer-Encoding"; public final static List<Serialisers.BuiltinReader> BUILTIN_READERS = List.of( new Serialisers.BuiltinReader(String.class, ServerStringMessageBodyHandler.class, MediaType.WILDCARD), new Serialisers.BuiltinReader(Boolean.class, ServerBooleanMessageBodyHandler.class, MediaType.TEXT_PLAIN), new Serialisers.BuiltinReader(Character.class, ServerCharacterMessageBodyHandler.class, MediaType.TEXT_PLAIN), new Serialisers.BuiltinReader(Number.class, ServerNumberMessageBodyHandler.class, MediaType.TEXT_PLAIN), new Serialisers.BuiltinReader(InputStream.class, ServerInputStreamMessageBodyHandler.class, MediaType.WILDCARD), new Serialisers.BuiltinReader(Reader.class, ServerReaderBodyHandler.class, MediaType.WILDCARD), new Serialisers.BuiltinReader(File.class, ServerFileBodyHandler.class, MediaType.WILDCARD), new Serialisers.BuiltinReader(byte[].class, ServerByteArrayMessageBodyHandler.class, MediaType.WILDCARD), new Serialisers.BuiltinReader(Object.class, ServerDefaultTextPlainBodyHandler.class, MediaType.TEXT_PLAIN, RuntimeType.SERVER)); public final static List<Serialisers.BuiltinWriter> BUILTIN_WRITERS = List.of( new 
Serialisers.BuiltinWriter(String.class, ServerStringMessageBodyHandler.class, MediaType.TEXT_PLAIN), new Serialisers.BuiltinWriter(Number.class, ServerStringMessageBodyHandler.class, MediaType.TEXT_PLAIN), new Serialisers.BuiltinWriter(Boolean.class, ServerStringMessageBodyHandler.class, MediaType.TEXT_PLAIN), new Serialisers.BuiltinWriter(Character.class, ServerStringMessageBodyHandler.class, MediaType.TEXT_PLAIN), new Serialisers.BuiltinWriter(Object.class, ServerStringMessageBodyHandler.class, MediaType.WILDCARD), new Serialisers.BuiltinWriter(char[].class, ServerCharArrayMessageBodyHandler.class, MediaType.TEXT_PLAIN), new Serialisers.BuiltinWriter(byte[].class, ServerByteArrayMessageBodyHandler.class, MediaType.WILDCARD), new Serialisers.BuiltinWriter(MultivaluedMap.class, ServerFormUrlEncodedProvider.class, MediaType.APPLICATION_FORM_URLENCODED), new Serialisers.BuiltinWriter(InputStream.class, ServerInputStreamMessageBodyHandler.class, MediaType.WILDCARD), new Serialisers.BuiltinWriter(StreamingOutput.class, StreamingOutputMessageBodyWriter.class, MediaType.WILDCARD), new Serialisers.BuiltinWriter(Reader.class, ServerReaderBodyHandler.class, MediaType.WILDCARD), new Serialisers.BuiltinWriter(File.class, ServerFileBodyHandler.class, MediaType.WILDCARD), new Serialisers.BuiltinWriter(FilePart.class, ServerFilePartBodyHandler.class, MediaType.WILDCARD), new Serialisers.BuiltinWriter(MultipartFormDataOutput.class, MultipartMessageBodyWriter.class, MediaType.MULTIPART_FORM_DATA), new Serialisers.BuiltinWriter(java.nio.file.Path.class, ServerPathBodyHandler.class, MediaType.WILDCARD), new Serialisers.BuiltinWriter(PathPart.class, ServerPathPartBodyHandler.class, MediaType.WILDCARD)); public static final MessageBodyWriter<?>[] NO_WRITER = new MessageBodyWriter[0]; public static final MessageBodyReader<?>[] NO_READER = new MessageBodyReader[0]; private final ConcurrentMap<Class<?>, List<ResourceWriter>> noMediaTypeClassCache = new ConcurrentHashMap<>(); private 
final Function<Class<?>, List<ResourceWriter>> mappingFunction = new Function<Class<?>, List<ResourceWriter>>() { @Override public List<ResourceWriter> apply(Class<?> aClass) { Class<?> c = aClass; List<ResourceWriter> writers = new ArrayList<>(); Set<Class<?>> seenInterfaces = new HashSet<>(); while (c != null) { //TODO: the spec doesn't seem to be totally clear about the sorting here // the way the writers are sorted here takes the distance from the requested type // first and foremost and then uses the rest of the criteria List<ResourceWriter> forClass = getWriters().get(c); if (forClass != null) { forClass = new ArrayList<>(forClass); forClass.sort(new ResourceWriter.ResourceWriterComparator()); writers.addAll(forClass); } Deque<Class<?>> interfaces = new ArrayDeque<>(Arrays.asList(c.getInterfaces())); while (!interfaces.isEmpty()) { Class<?> iface = interfaces.poll(); if (seenInterfaces.contains(iface)) { continue; } seenInterfaces.add(iface); forClass = getWriters().get(iface); if (forClass != null) { forClass = new ArrayList<>(forClass); forClass.sort(new ResourceWriter.ResourceWriterComparator()); writers.addAll(forClass); } interfaces.addAll(Arrays.asList(iface.getInterfaces())); } c = c.getSuperclass(); } return writers; } }; public static boolean invokeWriter(ResteasyReactiveRequestContext context, Object entity, MessageBodyWriter writer, ServerSerialisers serialisers) throws IOException { return invokeWriter(context, entity, writer, serialisers, null); } public static boolean invokeWriter(ResteasyReactiveRequestContext context, Object entity, MessageBodyWriter writer, ServerSerialisers serialisers, MediaType mediaType) throws IOException { //note that GenericEntity is not a factor here. 
It should have already been unwrapped WriterInterceptor[] writerInterceptors = context.getWriterInterceptors(); boolean outputStreamSet = context.getOutputStream() != null; context.serverResponse().setPreCommitListener(HEADER_FUNCTION); RuntimeResource target = context.getTarget(); Type genericType; if (context.hasGenericReturnType()) { // make sure that when a Response with a GenericEntity was returned, we use it genericType = context.getGenericReturnType(); } else { genericType = target == null ? null : target.getReturnType(); } try { if (writer instanceof ServerMessageBodyWriter && writerInterceptors == null && !outputStreamSet) { ServerMessageBodyWriter<Object> quarkusRestWriter = (ServerMessageBodyWriter<Object>) writer; Class<?> entityClass = entity.getClass(); if (quarkusRestWriter.isWriteable( entityClass, genericType, target == null ? null : target.getLazyMethod(), context.getResponseMediaType())) { if (mediaType != null) { context.setResponseContentType(mediaType); } quarkusRestWriter.writeResponse(entity, genericType, context); return true; } else { return false; } } else { if (writer.isWriteable(entity.getClass(), context.getGenericReturnType(), context.getAllAnnotations(), context.getResponseMediaType())) { Response response = context.getResponse().get(); if (mediaType != null) { context.setResponseContentType(mediaType); } if (writerInterceptors == null) { writer.writeTo(entity, entity.getClass(), genericType, context.getAllAnnotations(), context.getResponseMediaType(), response.getHeaders(), context.getOrCreateOutputStream()); context.getOrCreateOutputStream().close(); } else { runWriterInterceptors(context, entity, writer, response, writerInterceptors, serialisers); } return true; } else { return false; } } } catch (Throwable e) { //clear the pre-commit listener, as if this error is unrecoverable //the error handling will want to write out its own response //and the pre commit listener will interfere with that 
context.serverResponse().setPreCommitListener(null); // also clear the stream in order to try to avoid writing out any data that // might have been put on the stream before the exception occurred context.setOutputStream(null); if (e instanceof RuntimeException) { throw new PreserveTargetException(e); } else if (e instanceof IOException) { throw new PreserveTargetException(e); } else { throw new PreserveTargetException(new RuntimeException(e)); } } } public static void runWriterInterceptors(ResteasyReactiveRequestContext context, Object entity, MessageBodyWriter writer, Response response, WriterInterceptor[] writerInterceptor, ServerSerialisers serialisers) throws IOException { WriterInterceptorContextImpl wc = new WriterInterceptorContextImpl(context, writerInterceptor, writer, context.getAllAnnotations(), entity.getClass(), context.getGenericReturnType(), entity, response.getMediaType(), response.getHeaders(), serialisers); wc.proceed(); } public MultivaluedMap<Class<?>, ResourceWriter> getWriters() { return writers; } public MultivaluedMap<Class<?>, ResourceReader> getReaders() { return readers; } /** * Find the best matching writer based on the 'Accept' HTTP header * This is probably more complex than it needs to be, but some RESTEasy tests show that the response type * is influenced by the provider's weight of the media types */ public BestMatchingServerWriterResult findBestMatchingServerWriter(ConfigurationImpl configuration, Class<?> entityType, ServerHttpRequest request) { // TODO: refactor to have use common code from findWriters Class<?> klass = entityType; Deque<Class<?>> toProcess = new LinkedList<>(); QuarkusMultivaluedMap<Class<?>, ResourceWriter> writers; if (configuration != null && !configuration.getResourceWriters().isEmpty()) { writers = new QuarkusMultivaluedHashMap<>(); writers.putAll(this.writers); writers.addAll(configuration.getResourceWriters()); } else { writers = this.writers; } BestMatchingServerWriterResult result = new 
BestMatchingServerWriterResult(); do { if (klass == Object.class) { //spec extension, look for interfaces as well //we match interfaces before Object Set<Class<?>> seen = new HashSet<>(toProcess); while (!toProcess.isEmpty()) { Class<?> iface = toProcess.poll(); List<ResourceWriter> matchingWritersByType = writers.get(iface); serverResourceWriterLookup(request, matchingWritersByType, result); for (Class<?> i : iface.getInterfaces()) { if (!seen.contains(i)) { seen.add(i); toProcess.add(i); } } } } List<ResourceWriter> matchingWritersByType = writers.get(klass); serverResourceWriterLookup(request, matchingWritersByType, result); toProcess.addAll(Arrays.asList(klass.getInterfaces())); klass = klass.getSuperclass(); } while (klass != null); return result; } private void serverResourceWriterLookup(ServerHttpRequest request, List<ResourceWriter> candidates, BestMatchingServerWriterResult result) { if (candidates == null) { return; } Map.Entry<MediaType, MediaType> selectedMediaTypes = null; List<ResourceWriter> selectedResourceWriters = null; for (ResourceWriter resourceWriter : candidates) { if (!resourceWriter.matchesRuntimeType(RuntimeType.SERVER)) { continue; } Map.Entry<MediaType, MediaType> current = resourceWriter.serverMediaType() .negotiateProduces(request.getRequestHeader(HttpHeaders.ACCEPT), null); if (current.getValue() == null) { continue; } if (selectedMediaTypes == null) { selectedMediaTypes = current; selectedResourceWriters = new ArrayList<>(1); selectedResourceWriters.add(resourceWriter); } else { int compare = MediaTypeHelper.Q_COMPARATOR.compare(current.getValue(), selectedMediaTypes.getValue()); if (compare == 0) { selectedResourceWriters.add(resourceWriter); } else if (compare < 0) { selectedMediaTypes = current; selectedResourceWriters = new ArrayList<>(1); selectedResourceWriters.add(resourceWriter); } } } if (selectedMediaTypes != null) { for (ResourceWriter selectedResourceWriter : selectedResourceWriters) { 
result.add(selectedResourceWriter.instance(), selectedMediaTypes.getKey()); } } } public NoMediaTypeResult findWriterNoMediaType(ResteasyReactiveRequestContext requestContext, Object entity, ServerSerialisers serialisers, RuntimeType runtimeType) { List<ResourceWriter> resultForClass = noMediaTypeClassCache.computeIfAbsent(entity.getClass(), mappingFunction); List<ResourceWriter> constrainedResultsForClass = new ArrayList<>(resultForClass.size()); for (ResourceWriter writer : resultForClass) { if (!writer.matchesRuntimeType(runtimeType)) { continue; } constrainedResultsForClass.add(writer); } MediaType selected = null; for (ResourceWriter writer : constrainedResultsForClass) { selected = writer.serverMediaType() .negotiateProduces(requestContext.serverRequest().getRequestHeader(HttpHeaders.ACCEPT)).getKey(); if (selected != null) { break; } } if (selected == null) { Set<MediaType> acceptable = new HashSet<>(); for (ResourceWriter i : constrainedResultsForClass) { acceptable.addAll(i.mediaTypes()); } throw new WebApplicationException(Response .notAcceptable(Variant .mediaTypes( acceptable.toArray(new MediaType[0])) .build()) .build()); } if (selected.isWildcardType() || (selected.getType().equals("application") && selected.isWildcardSubtype())) { selected = MediaType.APPLICATION_OCTET_STREAM_TYPE; } List<MessageBodyWriter<?>> finalResult = new ArrayList<>(constrainedResultsForClass.size()); for (ResourceWriter i : constrainedResultsForClass) { // this part seems to be needed in order to pass com.sun.ts.tests.jaxrs.ee.resource.java2entity.JAXRSClient if (i.mediaTypes().isEmpty()) { finalResult.add(i.instance()); } else { for (MediaType mt : i.mediaTypes()) { if (mt.isCompatible(selected)) { finalResult.add(i.instance()); break; } } } } return new NoMediaTypeResult(finalResult.toArray(NO_WRITER), selected, serialisers); } public static
ServerSerialisers
java
spring-projects__spring-framework
spring-webmvc/src/test/java/org/springframework/web/servlet/tags/form/FormTagTests.java
{ "start": 1317, "end": 12460 }
class ____ extends AbstractHtmlElementTagTests { private static final String REQUEST_URI = "/my/form"; private static final String QUERY_STRING = "foo=bar"; private FormTag tag; private MockHttpServletRequest request; @Override @SuppressWarnings("serial") protected void onSetUp() { this.tag = new FormTag() { @Override protected TagWriter createTagWriter() { return new TagWriter(getWriter()); } }; this.tag.setPageContext(getPageContext()); } @Override protected void extendRequest(MockHttpServletRequest request) { request.setRequestURI(REQUEST_URI); request.setQueryString(QUERY_STRING); this.request = request; } @Test void writeForm() throws Exception { String commandName = "myCommand"; String name = "formName"; String action = "/form.html"; String method = "POST"; String target = "myTarget"; String enctype = "my/enctype"; String acceptCharset = "iso-8859-1"; String onsubmit = "onsubmit"; String onreset = "onreset"; String autocomplete = "off"; String cssClass = "myClass"; String cssStyle = "myStyle"; String dynamicAttribute1 = "attr1"; String dynamicAttribute2 = "attr2"; this.tag.setName(name); this.tag.setCssClass(cssClass); this.tag.setCssStyle(cssStyle); this.tag.setModelAttribute(commandName); this.tag.setAction(action); this.tag.setMethod(method); this.tag.setTarget(target); this.tag.setEnctype(enctype); this.tag.setAcceptCharset(acceptCharset); this.tag.setOnsubmit(onsubmit); this.tag.setOnreset(onreset); this.tag.setAutocomplete(autocomplete); this.tag.setDynamicAttribute(null, dynamicAttribute1, dynamicAttribute1); this.tag.setDynamicAttribute(null, dynamicAttribute2, dynamicAttribute2); int result = this.tag.doStartTag(); assertThat(result).isEqualTo(Tag.EVAL_BODY_INCLUDE); assertThat(getPageContext().getRequest().getAttribute(FormTag.MODEL_ATTRIBUTE_VARIABLE_NAME)).as("Form attribute not exposed").isEqualTo(commandName); result = this.tag.doEndTag(); assertThat(result).isEqualTo(Tag.EVAL_PAGE); this.tag.doFinally(); 
assertThat(getPageContext().getRequest().getAttribute(FormTag.MODEL_ATTRIBUTE_VARIABLE_NAME)).as("Form attribute not cleared after tag ends").isNull(); String output = getOutput(); assertFormTagOpened(output); assertFormTagClosed(output); assertContainsAttribute(output, "class", cssClass); assertContainsAttribute(output, "style", cssStyle); assertContainsAttribute(output, "action", action); assertContainsAttribute(output, "method", method); assertContainsAttribute(output, "target", target); assertContainsAttribute(output, "enctype", enctype); assertContainsAttribute(output, "accept-charset", acceptCharset); assertContainsAttribute(output, "onsubmit", onsubmit); assertContainsAttribute(output, "onreset", onreset); assertContainsAttribute(output, "autocomplete", autocomplete); assertContainsAttribute(output, "id", commandName); assertContainsAttribute(output, "name", name); assertContainsAttribute(output, dynamicAttribute1, dynamicAttribute1); assertContainsAttribute(output, dynamicAttribute2, dynamicAttribute2); } @Test void withActionFromRequest() throws Exception { String commandName = "myCommand"; String enctype = "my/enctype"; String method = "POST"; String onsubmit = "onsubmit"; String onreset = "onreset"; this.tag.setModelAttribute(commandName); this.tag.setMethod(method); this.tag.setEnctype(enctype); this.tag.setOnsubmit(onsubmit); this.tag.setOnreset(onreset); int result = this.tag.doStartTag(); assertThat(result).isEqualTo(Tag.EVAL_BODY_INCLUDE); assertThat(getPageContext().getAttribute(FormTag.MODEL_ATTRIBUTE_VARIABLE_NAME, PageContext.REQUEST_SCOPE)).as("Form attribute not exposed").isEqualTo(commandName); result = this.tag.doEndTag(); assertThat(result).isEqualTo(Tag.EVAL_PAGE); this.tag.doFinally(); assertThat(getPageContext().getAttribute(FormTag.MODEL_ATTRIBUTE_VARIABLE_NAME, PageContext.REQUEST_SCOPE)).as("Form attribute not cleared after tag ends").isNull(); String output = getOutput(); assertFormTagOpened(output); assertFormTagClosed(output); 
assertContainsAttribute(output, "action", REQUEST_URI + "?" + QUERY_STRING); assertContainsAttribute(output, "method", method); assertContainsAttribute(output, "enctype", enctype); assertContainsAttribute(output, "onsubmit", onsubmit); assertContainsAttribute(output, "onreset", onreset); assertAttributeNotPresent(output, "name"); } @Test void prependServletPath() throws Exception { this.request.setContextPath("/myApp"); this.request.setServletPath("/main"); this.request.setPathInfo("/index.html"); String commandName = "myCommand"; String action = "/form.html"; String enctype = "my/enctype"; String method = "POST"; String onsubmit = "onsubmit"; String onreset = "onreset"; this.tag.setModelAttribute(commandName); this.tag.setServletRelativeAction(action); this.tag.setMethod(method); this.tag.setEnctype(enctype); this.tag.setOnsubmit(onsubmit); this.tag.setOnreset(onreset); int result = this.tag.doStartTag(); assertThat(result).isEqualTo(Tag.EVAL_BODY_INCLUDE); assertThat(getPageContext().getAttribute(FormTag.MODEL_ATTRIBUTE_VARIABLE_NAME, PageContext.REQUEST_SCOPE)).as("Form attribute not exposed").isEqualTo(commandName); result = this.tag.doEndTag(); assertThat(result).isEqualTo(Tag.EVAL_PAGE); this.tag.doFinally(); assertThat(getPageContext().getAttribute(FormTag.MODEL_ATTRIBUTE_VARIABLE_NAME, PageContext.REQUEST_SCOPE)).as("Form attribute not cleared after tag ends").isNull(); String output = getOutput(); assertFormTagOpened(output); assertFormTagClosed(output); assertContainsAttribute(output, "action", "/myApp/main/form.html"); assertContainsAttribute(output, "method", method); assertContainsAttribute(output, "enctype", enctype); assertContainsAttribute(output, "onsubmit", onsubmit); assertContainsAttribute(output, "onreset", onreset); assertAttributeNotPresent(output, "name"); } @Test void withNullResolvedCommand() { tag.setModelAttribute(null); assertThatIllegalArgumentException().isThrownBy( tag::doStartTag); } @Test // SPR-2645 void 
xssExploitWhenActionIsResolvedFromQueryString() throws Exception { String xssQueryString = QUERY_STRING + "&stuff=\"><script>alert('XSS!')</script>"; request.setQueryString(xssQueryString); tag.doStartTag(); assertThat(getOutput()).isEqualTo(("<form id=\"command\" action=\"/my/form?foo=bar&amp;stuff=&quot;&gt;&lt;" + "script&gt;alert(&#39;XSS!&#39;)&lt;/script&gt;\" method=\"post\">")); } @Test void get() throws Exception { this.tag.setMethod("get"); this.tag.doStartTag(); this.tag.doEndTag(); this.tag.doFinally(); String output = getOutput(); String formOutput = getFormTag(output); String inputOutput = getInputTag(output); assertContainsAttribute(formOutput, "method", "get"); assertThat(inputOutput).isEmpty(); } @Test void post() throws Exception { this.tag.setMethod("post"); this.tag.doStartTag(); this.tag.doEndTag(); this.tag.doFinally(); String output = getOutput(); String formOutput = getFormTag(output); String inputOutput = getInputTag(output); assertContainsAttribute(formOutput, "method", "post"); assertThat(inputOutput).isEmpty(); } @Test void put() throws Exception { this.tag.setMethod("put"); this.tag.doStartTag(); this.tag.doEndTag(); this.tag.doFinally(); String output = getOutput(); String formOutput = getFormTag(output); String inputOutput = getInputTag(output); assertContainsAttribute(formOutput, "method", "post"); assertContainsAttribute(inputOutput, "name", "_method"); assertContainsAttribute(inputOutput, "value", "put"); assertContainsAttribute(inputOutput, "type", "hidden"); } @Test void delete() throws Exception { this.tag.setMethod("delete"); this.tag.doStartTag(); this.tag.doEndTag(); this.tag.doFinally(); String output = getOutput(); String formOutput = getFormTag(output); String inputOutput = getInputTag(output); assertContainsAttribute(formOutput, "method", "post"); assertContainsAttribute(inputOutput, "name", "_method"); assertContainsAttribute(inputOutput, "value", "delete"); assertContainsAttribute(inputOutput, "type", "hidden"); } @Test 
void customMethodParameter() throws Exception { this.tag.setMethod("put"); this.tag.setMethodParam("methodParameter"); this.tag.doStartTag(); this.tag.doEndTag(); this.tag.doFinally(); String output = getOutput(); String formOutput = getFormTag(output); String inputOutput = getInputTag(output); assertContainsAttribute(formOutput, "method", "post"); assertContainsAttribute(inputOutput, "name", "methodParameter"); assertContainsAttribute(inputOutput, "value", "put"); assertContainsAttribute(inputOutput, "type", "hidden"); } @Test void clearAttributesOnFinally() throws Exception { this.tag.setModelAttribute("model"); getPageContext().setAttribute("model", "foo bar"); assertThat(getPageContext().getAttribute(FormTag.MODEL_ATTRIBUTE_VARIABLE_NAME, PageContext.REQUEST_SCOPE)).isNull(); this.tag.doStartTag(); assertThat(getPageContext().getAttribute(FormTag.MODEL_ATTRIBUTE_VARIABLE_NAME, PageContext.REQUEST_SCOPE)).isNotNull(); this.tag.doFinally(); assertThat(getPageContext().getAttribute(FormTag.MODEL_ATTRIBUTE_VARIABLE_NAME, PageContext.REQUEST_SCOPE)).isNull(); } @Test void requestDataValueProcessorHooks() throws Exception { String action = "/my/form?foo=bar"; RequestDataValueProcessor processor = getMockRequestDataValueProcessor(); given(processor.processAction(this.request, action, "post")).willReturn(action); given(processor.getExtraHiddenFields(this.request)).willReturn(Collections.singletonMap("key", "value")); this.tag.doStartTag(); this.tag.doEndTag(); this.tag.doFinally(); String output = getOutput(); assertThat(getInputTag(output)).isEqualTo("<div>\n<input type=\"hidden\" name=\"key\" value=\"value\" />\n</div>"); assertFormTagOpened(output); assertFormTagClosed(output); } @Test void defaultActionEncoded() throws Exception { this.request.setRequestURI("/a b c"); request.setQueryString(""); this.tag.doStartTag(); this.tag.doEndTag(); this.tag.doFinally(); String output = getOutput(); String formOutput = getFormTag(output); assertContainsAttribute(formOutput, 
"action", "/a%20b%20c"); } private String getFormTag(String output) { int inputStart = output.indexOf("<", 1); int inputEnd = output.lastIndexOf(">", output.length() - 2); return output.substring(0, inputStart) + output.substring(inputEnd + 1); } private String getInputTag(String output) { int inputStart = output.indexOf("<", 1); int inputEnd = output.lastIndexOf(">", output.length() - 2); return output.substring(inputStart, inputEnd + 1); } private static void assertFormTagOpened(String output) { assertThat(output).startsWith("<form "); } private static void assertFormTagClosed(String output) { assertThat(output).endsWith("</form>"); } }
FormTagTests
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/type/NullType.java
{ "start": 400, "end": 864 }
class ____ extends JavaObjectType { /** * Singleton access */ public static final NullType INSTANCE = new NullType(); public NullType() { super( ObjectNullResolvingJdbcType.INSTANCE, ObjectJavaType.INSTANCE ); } public NullType(JdbcType jdbcType, JavaType<Object> javaType) { super( jdbcType, javaType ); } @Override public String getName() { return "null"; } @Override protected boolean registerUnderJavaType() { return false; } }
NullType
java
reactor__reactor-core
reactor-core/src/main/java/reactor/core/publisher/ContextPropagationSupport.java
{ "start": 946, "end": 4177 }
class ____ { static final Logger LOGGER = Loggers.getLogger(ContextPropagationSupport.class); // Note: If reflection is used for this field, then the name of the field should end with 'Available'. // The preprocessing for native-image support is in Spring Framework, and is a short term solution. // The field should end with 'Available'. See org.springframework.aot.nativex.feature.PreComputeFieldFeature. // Ultimately the long term solution should be provided by Reactor Core. static final boolean isContextPropagationOnClasspath; static final boolean isContextPropagation103OnClasspath; static final boolean isContextPropagation101OnClasspath; static boolean propagateContextToThreadLocals = false; static { boolean contextPropagation = false; boolean contextPropagation103 = false; boolean contextPropagation101 = false; try { Class.forName("io.micrometer.context.ContextRegistry"); contextPropagation = true; Class.forName("io.micrometer.context.ThreadLocalAccessor").getDeclaredMethod("restore", Object.class); contextPropagation101 = true; Class.forName("io.micrometer.context.ContextSnapshotFactory"); contextPropagation103 = true; } catch (ClassNotFoundException notFound) { } catch (NoSuchMethodException notFound) { } catch (LinkageError linkageErr) { } catch (Throwable err) { LOGGER.error("Unexpected exception while detecting ContextPropagation feature." + " The feature is considered disabled due to this:", err); } isContextPropagationOnClasspath = contextPropagation; isContextPropagation101OnClasspath = contextPropagation101; isContextPropagation103OnClasspath = contextPropagation103; if (isContextPropagationOnClasspath && !isContextPropagation103OnClasspath) { LOGGER.warn("context-propagation version below 1.0.3 can cause memory leaks" + " when working with scope-based ThreadLocalAccessors, please " + "upgrade!"); } } /** * Is Micrometer {@code context-propagation} API on the classpath? 
* * @return true if context-propagation is available at runtime, false otherwise */ static boolean isContextPropagationAvailable() { return isContextPropagationOnClasspath; } static boolean isContextPropagation101Available() { return isContextPropagation101OnClasspath; } static boolean isContextPropagation103Available() { return isContextPropagation103OnClasspath; } static boolean shouldPropagateContextToThreadLocals() { return isContextPropagationOnClasspath && propagateContextToThreadLocals; } static boolean shouldWrapPublisher(Publisher<?> publisher) { return shouldPropagateContextToThreadLocals() && !Scannable.from(publisher).scanOrDefault(InternalProducerAttr.INSTANCE, false); } static boolean shouldRestoreThreadLocalsInSomeOperators() { return isContextPropagationOnClasspath && !propagateContextToThreadLocals; } }
ContextPropagationSupport
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/webapp/AbstractRESTRequestInterceptor.java
{ "start": 1141, "end": 1277 }
class ____ provides common functionality which * can be used and/or extended by other concrete interceptor classes. */ public abstract
and
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/orphan/onetomany/EagerOneToManyOrphanWithIdentityIdTest.java
{ "start": 3406, "end": 3765 }
class ____ { @Id @GeneratedValue(strategy = GenerationType.IDENTITY) Long id; String name; @ManyToOne Parent parent; public Long getId() { return id; } public void setId(Long id) { this.id = id; } public Parent getParent() { return parent; } public void setParent(Parent parent) { this.parent = parent; } } }
Child
java
apache__flink
flink-core/src/main/java/org/apache/flink/api/common/typeinfo/PrimitiveArrayTypeInfo.java
{ "start": 9806, "end": 9965 }
class ____ no array."); } // basic type arrays return (PrimitiveArrayTypeInfo<X>) TYPES.get(type); } /** Static map from array
is
java
apache__hadoop
hadoop-cloud-storage-project/hadoop-huaweicloud/src/test/java/org/apache/hadoop/fs/obs/TestOBSContractMkdir.java
{ "start": 1064, "end": 1254 }
class ____ extends AbstractContractMkdirTest { @Override protected AbstractFSContract createContract(final Configuration conf) { return new OBSContract(conf); } }
TestOBSContractMkdir
java
quarkusio__quarkus
extensions/resteasy-classic/resteasy-client/deployment/src/test/java/io/quarkus/restclient/ft/AsyncRestClientFallbackTest.java
{ "start": 958, "end": 1568 }
class ____ { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(AsyncRestClientFallbackTest.class, Client.class, MyFallback.class)); @TestHTTPResource URL url; @Test public void testFallbackWasUsed() throws ExecutionException, InterruptedException { Client client = RestClientBuilder.newBuilder().baseUrl(url).build(Client.class); assertEquals("pong", client.ping().toCompletableFuture().get()); } @RegisterRestClient public
AsyncRestClientFallbackTest
java
spring-projects__spring-boot
module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/startup/StartupEndpointTests.java
{ "start": 1744, "end": 4288 }
class ____ { @Test void startupEventsAreFound() { BufferingApplicationStartup applicationStartup = new BufferingApplicationStartup(256); testStartupEndpoint(applicationStartup, (startupEndpoint) -> { StartupDescriptor startup = startupEndpoint.startup(); assertThat(startup.getSpringBootVersion()).isEqualTo(SpringBootVersion.getVersion()); assertThat(startup.getTimeline().getStartTime()) .isEqualTo(applicationStartup.getBufferedTimeline().getStartTime()); }); } @Test void bufferWithGetIsNotDrained() { BufferingApplicationStartup applicationStartup = new BufferingApplicationStartup(256); testStartupEndpoint(applicationStartup, (startupEndpoint) -> { StartupDescriptor startup = startupEndpoint.startupSnapshot(); assertThat(startup.getTimeline().getEvents()).isNotEmpty(); assertThat(applicationStartup.getBufferedTimeline().getEvents()).isNotEmpty(); }); } @Test void bufferWithPostIsDrained() { BufferingApplicationStartup applicationStartup = new BufferingApplicationStartup(256); testStartupEndpoint(applicationStartup, (startupEndpoint) -> { StartupDescriptor startup = startupEndpoint.startup(); assertThat(startup.getTimeline().getEvents()).isNotEmpty(); assertThat(applicationStartup.getBufferedTimeline().getEvents()).isEmpty(); }); } @Test void shouldRegisterHints() { RuntimeHints runtimeHints = new RuntimeHints(); new StartupEndpointRuntimeHints().registerHints(runtimeHints, getClass().getClassLoader()); Set<TypeReference> bindingTypes = Set.of( TypeReference.of("org.springframework.boot.context.metrics.buffering.BufferedStartupStep$DefaultTag"), TypeReference.of("org.springframework.core.metrics.jfr.FlightRecorderStartupStep$FlightRecorderTag")); for (TypeReference bindingType : bindingTypes) { assertThat(RuntimeHintsPredicates.reflection() .onType(bindingType) .withMemberCategories(MemberCategory.INVOKE_PUBLIC_METHODS)).accepts(runtimeHints); } } private void testStartupEndpoint(ApplicationStartup applicationStartup, Consumer<StartupEndpoint> startupEndpoint) { 
ApplicationContextRunner contextRunner = new ApplicationContextRunner() .withInitializer((context) -> context.setApplicationStartup(applicationStartup)) .withUserConfiguration(EndpointConfiguration.class); contextRunner.run((context) -> { assertThat(context).hasSingleBean(StartupEndpoint.class); startupEndpoint.accept(context.getBean(StartupEndpoint.class)); }); } @Configuration(proxyBeanMethods = false) static
StartupEndpointTests
java
netty__netty
buffer/src/main/java/io/netty/buffer/ReadOnlyAbstractByteBuf.java
{ "start": 790, "end": 2021 }
class ____ extends ReadOnlyByteBuf { ReadOnlyAbstractByteBuf(AbstractByteBuf buffer) { super(buffer); } @Override public AbstractByteBuf unwrap() { return (AbstractByteBuf) super.unwrap(); } @Override protected byte _getByte(int index) { return unwrap()._getByte(index); } @Override protected short _getShort(int index) { return unwrap()._getShort(index); } @Override protected short _getShortLE(int index) { return unwrap()._getShortLE(index); } @Override protected int _getUnsignedMedium(int index) { return unwrap()._getUnsignedMedium(index); } @Override protected int _getUnsignedMediumLE(int index) { return unwrap()._getUnsignedMediumLE(index); } @Override protected int _getInt(int index) { return unwrap()._getInt(index); } @Override protected int _getIntLE(int index) { return unwrap()._getIntLE(index); } @Override protected long _getLong(int index) { return unwrap()._getLong(index); } @Override protected long _getLongLE(int index) { return unwrap()._getLongLE(index); } }
ReadOnlyAbstractByteBuf
java
elastic__elasticsearch
x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlBaseParser.java
{ "start": 34586, "end": 36712 }
class ____ extends ParserRuleContext { public TerminalNode LEFT_PARENTHESIS() { return getToken(KqlBaseParser.LEFT_PARENTHESIS, 0); } public QueryContext query() { return getRuleContext(QueryContext.class,0); } public TerminalNode RIGHT_PARENTHESIS() { return getToken(KqlBaseParser.RIGHT_PARENTHESIS, 0); } public ParenthesizedQueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_parenthesizedQuery; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).enterParenthesizedQuery(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof KqlBaseListener ) ((KqlBaseListener)listener).exitParenthesizedQuery(this); } @Override public <T> T accept(ParseTreeVisitor<? extends T> visitor) { if ( visitor instanceof KqlBaseVisitor ) return ((KqlBaseVisitor<? extends T>)visitor).visitParenthesizedQuery(this); else return visitor.visitChildren(this); } } public final ParenthesizedQueryContext parenthesizedQuery() throws RecognitionException { ParenthesizedQueryContext _localctx = new ParenthesizedQueryContext(_ctx, getState()); enterRule(_localctx, 18, RULE_parenthesizedQuery); try { enterOuterAlt(_localctx, 1); { setState(105); match(LEFT_PARENTHESIS); setState(106); query(0); setState(107); match(RIGHT_PARENTHESIS); } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } @SuppressWarnings("CheckReturnValue") public static
ParenthesizedQueryContext
java
quarkusio__quarkus
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/simple/HeaderParamResource.java
{ "start": 175, "end": 458 }
class ____ { private final String headerParamValue; public HeaderParamResource(@HeaderParam("h1") String headerParamValue) { this.headerParamValue = headerParamValue; } @GET public String get() { return headerParamValue; } }
HeaderParamResource
java
apache__camel
core/camel-core-model/src/main/java/org/apache/camel/model/EnrichDefinition.java
{ "start": 1464, "end": 15600 }
class ____ extends ExpressionNode implements AggregationStrategyAwareDefinition<EnrichDefinition> { @XmlTransient private AggregationStrategy aggregationStrategyBean; @XmlAttribute private String variableSend; @XmlAttribute private String variableReceive; @XmlAttribute @Metadata(javaType = "org.apache.camel.AggregationStrategy") private String aggregationStrategy; @XmlAttribute @Metadata(label = "advanced") private String aggregationStrategyMethodName; @XmlAttribute @Metadata(label = "advanced") private String aggregationStrategyMethodAllowNull; @XmlAttribute @Metadata(label = "advanced", javaType = "java.lang.Boolean") private String aggregateOnException; @XmlAttribute @Metadata(label = "advanced", javaType = "java.lang.Boolean") private String shareUnitOfWork; @XmlAttribute @Metadata(label = "advanced", javaType = "java.lang.Integer") private String cacheSize; @XmlAttribute @Metadata(label = "advanced", javaType = "java.lang.Boolean") private String ignoreInvalidEndpoint; @XmlAttribute @Metadata(label = "advanced", defaultValue = "true", javaType = "java.lang.Boolean") private String allowOptimisedComponents; @XmlAttribute @Metadata(label = "advanced", defaultValue = "true", javaType = "java.lang.Boolean") private String autoStartComponents; public EnrichDefinition() { this((AggregationStrategy) null); } public EnrichDefinition(AggregationStrategy aggregationStrategy) { this.aggregationStrategyBean = aggregationStrategy; } protected EnrichDefinition(EnrichDefinition source) { super(source); this.aggregationStrategyBean = source.aggregationStrategyBean; this.variableSend = source.variableSend; this.variableReceive = source.variableReceive; this.aggregationStrategy = source.aggregationStrategy; this.aggregationStrategyMethodName = source.aggregationStrategyMethodName; this.aggregationStrategyMethodAllowNull = source.aggregationStrategyMethodAllowNull; this.aggregateOnException = source.aggregateOnException; this.shareUnitOfWork = source.shareUnitOfWork; 
this.cacheSize = source.cacheSize; this.ignoreInvalidEndpoint = source.ignoreInvalidEndpoint; this.allowOptimisedComponents = source.allowOptimisedComponents; this.autoStartComponents = source.autoStartComponents; } @Override public String toString() { return "Enrich[" + getExpression() + "]"; } @Override public String getShortName() { return "enrich"; } @Override public String getLabel() { return "enrich[" + getExpression() + "]"; } // Fluent API // ------------------------------------------------------------------------- /** * To use a variable as the source for the message body to send. This makes it handy to use variables for user data * and to easily control what data to use for sending and receiving. * * Important: When using send variable then the message body is taken from this variable instead of the current * message, however the headers from the message will still be used as well. In other words, the variable is used * instead of the message body, but everything else is as usual. */ public EnrichDefinition variableReceive(String variableReceive) { setVariableReceive(variableReceive); return this; } /** * To use a variable to store the received message body (only body, not headers). This makes it handy to use * variables for user data and to easily control what data to use for sending and receiving. * * Important: When using receive variable then the received body is stored only in this variable and not on the * current message. */ public EnrichDefinition variableSend(String variableSend) { setVariableSend(variableSend); return this; } /** * Sets the AggregationStrategy to be used to merge the reply from the external service, into a single outgoing * message. By default Camel will use the reply from the external service as outgoing message. 
*/ @Override public EnrichDefinition aggregationStrategy(AggregationStrategy aggregationStrategy) { setAggregationStrategy(aggregationStrategy); return this; } /** * Refers to an AggregationStrategy to be used to merge the reply from the external service, into a single outgoing * message. By default Camel will use the reply from the external service as outgoing message. */ @Override public EnrichDefinition aggregationStrategy(String aggregationStrategy) { setAggregationStrategy(aggregationStrategy); return this; } /** * This option can be used to explicit declare the method name to use, when using POJOs as the AggregationStrategy. */ public EnrichDefinition aggregationStrategyMethodName(String aggregationStrategyMethodName) { setAggregationStrategyMethodName(aggregationStrategyMethodName); return this; } /** * If this option is false then the aggregate method is not used if there was no data to enrich. If this option is * true then null values is used as the oldExchange (when no data to enrich), when using POJOs as the * AggregationStrategy. */ public EnrichDefinition aggregationStrategyMethodAllowNull(boolean aggregationStrategyMethodAllowNull) { setAggregationStrategyMethodAllowNull(Boolean.toString(aggregationStrategyMethodAllowNull)); return this; } /** * If this option is false then the aggregate method is not used if there was an exception thrown while trying to * retrieve the data to enrich from the resource. Setting this option to true allows end users to control what to do * if there was an exception in the aggregate method. For example to suppress the exception or set a custom message * body etc. */ public EnrichDefinition aggregateOnException(boolean aggregateOnException) { setAggregateOnException(Boolean.toString(aggregateOnException)); return this; } /** * Shares the {@link org.apache.camel.spi.UnitOfWork} with the parent and the resource exchange. Enrich will by * default not share unit of work between the parent exchange and the resource exchange. 
This means the resource * exchange has its own individual unit of work. */ public EnrichDefinition shareUnitOfWork() { setShareUnitOfWork(Boolean.toString(true)); return this; } /** * Sets the maximum size used by the {@link org.apache.camel.spi.ProducerCache} which is used to cache and reuse * producer when uris are reused. * * Beware that when using dynamic endpoints then it affects how well the cache can be utilized. If each dynamic * endpoint is unique then its best to turn off caching by setting this to -1, which allows Camel to not cache both * the producers and endpoints; they are regarded as prototype scoped and will be stopped and discarded after use. * This reduces memory usage as otherwise producers/endpoints are stored in memory in the caches. * * However if there are a high degree of dynamic endpoints that have been used before, then it can benefit to use * the cache to reuse both producers and endpoints and therefore the cache size can be set accordingly or rely on * the default size (1000). * * If there is a mix of unique and used before dynamic endpoints, then setting a reasonable cache size can help * reduce memory usage to avoid storing too many non frequent used producers. * * @param cacheSize the cache size, use <tt>0</tt> for default cache size, or <tt>-1</tt> to turn cache off. * @return the builder */ public EnrichDefinition cacheSize(int cacheSize) { setCacheSize(Integer.toString(cacheSize)); return this; } /** * Sets the maximum size used by the {@link org.apache.camel.spi.ProducerCache} which is used to cache and reuse * producer when uris are reused. * * Beware that when using dynamic endpoints then it affects how well the cache can be utilized. If each dynamic * endpoint is unique then its best to turn off caching by setting this to -1, which allows Camel to not cache both * the producers and endpoints; they are regarded as prototype scoped and will be stopped and discarded after use. 
* This reduces memory usage as otherwise producers/endpoints are stored in memory in the caches. * * However if there are a high degree of dynamic endpoints that have been used before, then it can benefit to use * the cache to reuse both producers and endpoints and therefore the cache size can be set accordingly or rely on * the default size (1000). * * If there is a mix of unique and used before dynamic endpoints, then setting a reasonable cache size can help * reduce memory usage to avoid storing too many non frequent used producers. * * @param cacheSize the cache size, use <tt>0</tt> for default cache size, or <tt>-1</tt> to turn cache off. * @return the builder */ public EnrichDefinition cacheSize(String cacheSize) { setCacheSize(cacheSize); return this; } /** * Ignore the invalidate endpoint exception when try to create a producer with that endpoint * * @return the builder */ public EnrichDefinition ignoreInvalidEndpoint() { setIgnoreInvalidEndpoint(Boolean.toString(true)); return this; } /** * Whether to allow components to optimise enricher if they are {@link org.apache.camel.spi.SendDynamicAware} * * @return the builder */ public EnrichDefinition allowOptimisedComponents(boolean allowOptimisedComponents) { return allowOptimisedComponents(Boolean.toString(allowOptimisedComponents)); } /** * Whether to allow components to optimise enricher if they are {@link org.apache.camel.spi.SendDynamicAware} * * @return the builder */ public EnrichDefinition allowOptimisedComponents(String allowOptimisedComponents) { setAllowOptimisedComponents(allowOptimisedComponents); return this; } /** * Whether to auto startup components when enricher is starting up. 
* * @return the builder */ public EnrichDefinition autoStartComponents(String autoStartComponents) { setAutoStartComponents(autoStartComponents); return this; } // Properties // ------------------------------------------------------------------------- public AggregationStrategy getAggregationStrategyBean() { return aggregationStrategyBean; } @Override public String getAggregationStrategyRef() { return aggregationStrategy; } /** * Expression that computes the endpoint uri to use as the resource endpoint to enrich from */ @Override public void setExpression(ExpressionDefinition expression) { // override to include javadoc what the expression is used for super.setExpression(expression); } public String getAggregationStrategy() { return aggregationStrategy; } public void setAggregationStrategy(String aggregationStrategy) { this.aggregationStrategy = aggregationStrategy; } public void setAggregationStrategy(AggregationStrategy aggregationStrategy) { this.aggregationStrategyBean = aggregationStrategy; } public String getAggregationStrategyMethodName() { return aggregationStrategyMethodName; } public void setAggregationStrategyMethodName(String aggregationStrategyMethodName) { this.aggregationStrategyMethodName = aggregationStrategyMethodName; } public String getAggregationStrategyMethodAllowNull() { return aggregationStrategyMethodAllowNull; } public void setAggregationStrategyMethodAllowNull(String aggregationStrategyMethodAllowNull) { this.aggregationStrategyMethodAllowNull = aggregationStrategyMethodAllowNull; } public String getAggregateOnException() { return aggregateOnException; } public void setAggregateOnException(String aggregateOnException) { this.aggregateOnException = aggregateOnException; } public String getVariableSend() { return variableSend; } public void setVariableSend(String variableSend) { this.variableSend = variableSend; } public String getVariableReceive() { return variableReceive; } public void setVariableReceive(String variableReceive) { 
this.variableReceive = variableReceive; } public String getShareUnitOfWork() { return shareUnitOfWork; } public void setShareUnitOfWork(String shareUnitOfWork) { this.shareUnitOfWork = shareUnitOfWork; } public String getCacheSize() { return cacheSize; } public void setCacheSize(String cacheSize) { this.cacheSize = cacheSize; } public String getIgnoreInvalidEndpoint() { return ignoreInvalidEndpoint; } public void setIgnoreInvalidEndpoint(String ignoreInvalidEndpoint) { this.ignoreInvalidEndpoint = ignoreInvalidEndpoint; } public String getAllowOptimisedComponents() { return allowOptimisedComponents; } public void setAllowOptimisedComponents(String allowOptimisedComponents) { this.allowOptimisedComponents = allowOptimisedComponents; } public String getAutoStartComponents() { return autoStartComponents; } public void setAutoStartComponents(String autoStartComponents) { this.autoStartComponents = autoStartComponents; } @Override public EnrichDefinition copyDefinition() { return new EnrichDefinition(this); } }
EnrichDefinition
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/alterTable/MySqlAlterTableAddPrimaryKey.java
{ "start": 1053, "end": 2263 }
class ____ extends TestCase { public void test_alter_first() throws Exception { String sql = "ALTER TABLE `test`.`tb1` CHANGE COLUMN `fid` `fid` INT(11) NOT NULL DEFAULT NULL, ADD PRIMARY KEY (`fid`) ;"; MySqlStatementParser parser = new MySqlStatementParser(sql); SQLStatement stmt = parser.parseStatementList().get(0); parser.match(Token.EOF); assertEquals("ALTER TABLE `test`.`tb1`" + "\n\tCHANGE COLUMN `fid` `fid` INT(11) NOT NULL DEFAULT NULL,\n\t" + "ADD PRIMARY KEY (`fid`);", SQLUtils.toMySqlString(stmt)); assertEquals("alter table `test`.`tb1`" + "\n\tchange column `fid` `fid` INT(11) not null default null,\n\t" + "add primary key (`fid`);", SQLUtils.toMySqlString(stmt, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION)); SchemaStatVisitor visitor = new SQLUtils().createSchemaStatVisitor(JdbcConstants.MYSQL); stmt.accept(visitor); TableStat tableStat = visitor.getTableStat("test.tb1"); assertNotNull(tableStat); assertEquals(1, tableStat.getAlterCount()); assertEquals(1, tableStat.getCreateIndexCount()); } }
MySqlAlterTableAddPrimaryKey
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/relationship/JoinedInheritanceWithOneToManyTest.java
{ "start": 1553, "end": 2469 }
class ____ { @Test public void test(SessionFactoryScope scope) { scope.inTransaction( session -> { BuildingList buildingList = new BuildingList(); buildingList.setName( "ABC" ); session.persist( buildingList ); BLEHome home = new BLEHome(); home.setHasCtv( 123 ); home.setList( buildingList ); buildingList.getEntries().add( home ); session.persist( home ); BLENonLiving nonLiving = new BLENonLiving(); nonLiving.setDelayed( true ); nonLiving.setList( buildingList ); buildingList.getEntries().add( nonLiving ); session.persist( nonLiving ); } ); scope.inTransaction( session -> { List<BuildingList> buildingLists = session.createQuery( "from BuildingList" ).getResultList(); BuildingList buildingList = buildingLists.get( 0 ); assertEquals( 2, buildingList.getEntries().size() ); } ); } @MappedSuperclass public static
JoinedInheritanceWithOneToManyTest
java
apache__flink
flink-table/flink-table-planner/src/main/java/org/apache/calcite/rex/RexUtil.java
{ "start": 98115, "end": 101821 }
class ____ { final RexBuilder rexBuilder; int currentCount; final int maxNodeCount; // negative means no limit private CnfHelper(RexBuilder rexBuilder, int maxNodeCount) { this.rexBuilder = rexBuilder; this.maxNodeCount = maxNodeCount; } public RexNode toCnf(RexNode rex) { try { this.currentCount = 0; return toCnf2(rex); } catch (OverflowError e) { Util.swallow(e, null); return rex; } } private RexNode toCnf2(RexNode rex) { final List<RexNode> operands; switch (rex.getKind()) { case AND: incrementAndCheck(); operands = flattenAnd(((RexCall) rex).getOperands()); final List<RexNode> cnfOperands = new ArrayList<>(); for (RexNode node : operands) { RexNode cnf = toCnf2(node); switch (cnf.getKind()) { case AND: incrementAndCheck(); cnfOperands.addAll(((RexCall) cnf).getOperands()); break; default: incrementAndCheck(); cnfOperands.add(cnf); } } return and(cnfOperands); case OR: incrementAndCheck(); operands = flattenOr(((RexCall) rex).getOperands()); final RexNode head = operands.get(0); final RexNode headCnf = toCnf2(head); final List<RexNode> headCnfs = RelOptUtil.conjunctions(headCnf); final RexNode tail = or(Util.skip(operands)); final RexNode tailCnf = toCnf2(tail); final List<RexNode> tailCnfs = RelOptUtil.conjunctions(tailCnf); final List<RexNode> list = new ArrayList<>(); for (RexNode h : headCnfs) { for (RexNode t : tailCnfs) { list.add(or(ImmutableList.of(h, t))); } } return and(list); case NOT: final RexNode arg = ((RexCall) rex).getOperands().get(0); switch (arg.getKind()) { case NOT: return toCnf2(((RexCall) arg).getOperands().get(0)); case OR: operands = ((RexCall) arg).getOperands(); return toCnf2( and(Util.transform(flattenOr(operands), RexUtil::addNot))); case AND: operands = ((RexCall) arg).getOperands(); return toCnf2( or(Util.transform(flattenAnd(operands), RexUtil::addNot))); default: incrementAndCheck(); return rex; } default: incrementAndCheck(); return rex; } } private void incrementAndCheck() { if (maxNodeCount >= 0 && ++currentCount > 
maxNodeCount) { throw OverflowError.INSTANCE; } } /** Exception to catch when we pass the limit. */ @SuppressWarnings("serial") private static
CnfHelper
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverSleepsTests.java
{ "start": 901, "end": 9940 }
class ____ extends AbstractWireSerializingTestCase<DriverSleeps> { public static DriverSleeps randomDriverSleeps() { return randomDriverSleeps(between(0, DriverSleeps.RECORDS * 3)); } private static DriverSleeps randomDriverSleeps(int cycles) { DriverSleeps sleeps = DriverSleeps.empty(); long now = 0; for (int i = 0; i < cycles; i++) { now += between(1, 100000); sleeps = sleeps.sleep(randomSleepReason(), now); if (i != cycles - 1 || randomBoolean()) { // Randomly don't wake on the last sleep now += between(1, 100000); sleeps = sleeps.wake(now); } } return sleeps; } private static String randomSleepReason() { return randomFrom("driver time", "driver iteration", "exchange empty", "exchange full"); } public void testEmptyToXContent() { assertThat(Strings.toString(DriverSleeps.empty(), true, true), equalTo(""" { "counts" : { }, "first" : [ ], "last" : [ ] }""")); } public void testSleepingToXContent() { Formatter formatter = new Formatter(Locale.US); String expected = formatter.format(""" { "counts" : { "driver iterations" : 1 }, "first" : [ { "reason" : "driver iterations", "thread_name" : "%1$s", "sleep" : "2024-08-13T13:29:23.000Z", "sleep_millis" : 1723555763000 } ], "last" : [ { "reason" : "driver iterations", "thread_name" : "%1$s", "sleep" : "2024-08-13T13:29:23.000Z", "sleep_millis" : 1723555763000 } ] }""", Thread.currentThread().getName()).out().toString(); assertThat(Strings.toString(DriverSleeps.empty().sleep("driver iterations", 1723555763000L), true, true), equalTo(expected)); } public void testWakingToXContent() { Formatter formatter = new Formatter(Locale.US); String expected = formatter.format(""" { "counts" : { "driver iterations" : 1 }, "first" : [ { "reason" : "driver iterations", "thread_name" : "%1$s", "sleep" : "2024-08-13T13:29:23.000Z", "sleep_millis" : 1723555763000, "wake" : "2024-08-13T13:31:03.000Z", "wake_millis" : 1723555863000 } ], "last" : [ { "reason" : "driver iterations", "thread_name" : "%1$s", "sleep" : "2024-08-13T13:29:23.000Z", 
"sleep_millis" : 1723555763000, "wake" : "2024-08-13T13:31:03.000Z", "wake_millis" : 1723555863000 } ] }""", Thread.currentThread().getName()).out().toString(); assertThat( Strings.toString(DriverSleeps.empty().sleep("driver iterations", 1723555763000L).wake(1723555863000L), true, true), equalTo(expected) ); } @Override protected Writeable.Reader<DriverSleeps> instanceReader() { return DriverSleeps::read; } @Override protected DriverSleeps createTestInstance() { return randomDriverSleeps(); } @Override protected DriverSleeps mutateInstance(DriverSleeps instance) throws IOException { if (instance.last().isEmpty()) { return instance.sleep(randomSleepReason(), between(1, 10000)); } DriverSleeps.Sleep last = instance.last().get(instance.last().size() - 1); if (last.isStillSleeping()) { return instance.wake(last.sleep() + between(1, 10000)); } return instance.sleep(randomSleepReason(), last.wake() + between(1, 10000)); } public void testTracking() throws IOException { long now = 0; DriverSleeps sleeps = DriverSleeps.empty(); Map<String, Long> expectedCounts = new TreeMap<>(); List<DriverSleeps.Sleep> expectedFirst = new ArrayList<>(); assertThat(sleeps, equalTo(new DriverSleeps(expectedCounts, expectedFirst, expectedFirst))); /* * Simulate sleeping and waking when the records aren't full. * New sleeps and wakes should show up in both the "first" and "last" fields. */ for (int i = 0; i < DriverSleeps.RECORDS; i++) { now++; String reason = randomSleepReason(); expectedCounts.compute(reason, (k, v) -> v == null ? 
1 : v + 1); sleeps = sleeps.sleep(reason, now); expectedFirst.add(new DriverSleeps.Sleep(reason, Thread.currentThread().getName(), now, 0)); assertThat(sleeps, equalTo(new DriverSleeps(expectedCounts, expectedFirst, expectedFirst))); assertXContent(sleeps, expectedCounts, expectedFirst, expectedFirst); now++; sleeps = sleeps.wake(now); expectedFirst.set(expectedFirst.size() - 1, new DriverSleeps.Sleep(reason, Thread.currentThread().getName(), now - 1, now)); assertThat(sleeps, equalTo(new DriverSleeps(expectedCounts, expectedFirst, expectedFirst))); assertXContent(sleeps, expectedCounts, expectedFirst, expectedFirst); } /* * Simulate sleeping and waking when the records are full. * New sleeps and wakes should show up in only the "last" field. */ List<DriverSleeps.Sleep> expectedLast = new ArrayList<>(expectedFirst); for (int i = 0; i < 1000; i++) { now++; String reason = randomSleepReason(); expectedCounts.compute(reason, (k, v) -> v == null ? 1 : v + 1); sleeps = sleeps.sleep(reason, now); expectedLast.remove(0); expectedLast.add(new DriverSleeps.Sleep(reason, Thread.currentThread().getName(), now, 0)); assertThat(sleeps, equalTo(new DriverSleeps(expectedCounts, expectedFirst, expectedLast))); assertXContent(sleeps, expectedCounts, expectedFirst, expectedLast); now++; sleeps = sleeps.wake(now); expectedLast.set(expectedLast.size() - 1, new DriverSleeps.Sleep(reason, Thread.currentThread().getName(), now - 1, now)); assertThat(sleeps, equalTo(new DriverSleeps(expectedCounts, expectedFirst, expectedLast))); assertXContent(sleeps, expectedCounts, expectedFirst, expectedLast); } } public void assertXContent( DriverSleeps sleeps, Map<String, Long> expectedCounts, List<DriverSleeps.Sleep> expectedFirst, List<DriverSleeps.Sleep> expectedLast ) throws IOException { try (BytesStreamOutput expected = new BytesStreamOutput()) { try (XContentBuilder b = new XContentBuilder(XContentType.JSON.xContent(), expected).prettyPrint().humanReadable(true)) { b.startObject(); 
b.startObject("counts"); { for (Map.Entry<String, Long> e : expectedCounts.entrySet()) { b.field(e.getKey(), e.getValue()); } } b.endObject(); { b.startArray("first"); for (DriverSleeps.Sleep sleep : expectedFirst) { sleep.toXContent(b, ToXContent.EMPTY_PARAMS); } b.endArray(); } { b.startArray("last"); for (DriverSleeps.Sleep sleep : expectedLast) { sleep.toXContent(b, ToXContent.EMPTY_PARAMS); } b.endArray(); } b.endObject(); } assertThat(Strings.toString(sleeps, true, true), equalTo(expected.bytes().utf8ToString())); } } public void testWakeNeverSlept() { Exception e = expectThrows(IllegalStateException.class, () -> DriverSleeps.empty().wake(1)); assertThat(e.getMessage(), equalTo("Never slept.")); } public void testWakeWhileAwake() { Exception e = expectThrows(IllegalStateException.class, () -> DriverSleeps.empty().sleep(randomSleepReason(), 1).wake(2).wake(3)); assertThat(e.getMessage(), equalTo("Already awake.")); } public void testSleepWhileSleeping() { Exception e = expectThrows( IllegalStateException.class, () -> DriverSleeps.empty().sleep(randomSleepReason(), 1).sleep(randomSleepReason(), 2) ); assertThat(e.getMessage(), equalTo("Still sleeping.")); } }
DriverSleepsTests
java
apache__flink
flink-core/src/main/java/org/apache/flink/util/TemporaryClassLoaderContext.java
{ "start": 1518, "end": 1612 }
class ____ implements AutoCloseable { /** * Sets the context
TemporaryClassLoaderContext
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/translog/OperationListener.java
{ "start": 540, "end": 940 }
interface ____ { /** * This method is called when a new operation is added to the translog. * * @param operation the serialized operation added to the translog * @param seqNo the sequence number of the operation * @param location the location written */ void operationAdded(Translog.Serialized operation, long seqNo, Translog.Location location); }
OperationListener
java
mockito__mockito
mockito-core/src/main/java/org/mockito/internal/progress/ArgumentMatcherStorage.java
{ "start": 314, "end": 571 }
interface ____ { void reportMatcher(ArgumentMatcher<?> matcher); List<LocalizedMatcher> pullLocalizedMatchers(); void reportAnd(); void reportNot(); void reportOr(); void validateState(); void reset(); }
ArgumentMatcherStorage
java
elastic__elasticsearch
x-pack/plugin/gpu/src/main/java/org/elasticsearch/xpack/gpu/GPUPlugin.java
{ "start": 1382, "end": 6904 }
enum ____ { TRUE, FALSE, AUTO } /** * Setting to control whether to use GPU for vectors indexing. * Currently only applicable for index_options.type: hnsw. * * If unset or "auto", an automatic decision is made based on the presence of GPU, necessary libraries, vectors' index type. * If set to <code>true</code>, GPU must be used for vectors indexing, and if GPU or necessary libraries are not available, * an exception will be thrown. * If set to <code>false</code>, GPU will not be used for vectors indexing. */ public static final Setting<GpuMode> VECTORS_INDEXING_USE_GPU_SETTING = Setting.enumSetting( GpuMode.class, "index.vectors.indexing.use_gpu", GpuMode.AUTO, Setting.Property.IndexScope, Setting.Property.Dynamic ); @Override public List<Setting<?>> getSettings() { if (GPU_FORMAT.isEnabled()) { return List.of(VECTORS_INDEXING_USE_GPU_SETTING); } else { return List.of(); } } // Allow tests to override the license state protected boolean isGpuIndexingFeatureAllowed() { var licenseState = XPackPlugin.getSharedLicenseState(); return licenseState != null && licenseState.isAllowedByLicense(MINIMUM_ALLOWED_LICENSE); } @Override public VectorsFormatProvider getVectorsFormatProvider() { return (indexSettings, indexOptions, similarity) -> { if (GPU_FORMAT.isEnabled()) { GpuMode gpuMode = indexSettings.getValue(VECTORS_INDEXING_USE_GPU_SETTING); if (gpuMode == GpuMode.TRUE) { if (vectorIndexTypeSupported(indexOptions.getType()) == false) { throw new IllegalArgumentException( "[index.vectors.indexing.use_gpu] doesn't support [index_options.type] of [" + indexOptions.getType() + "]." ); } if (GPUSupport.isSupported() == false) { throw new IllegalArgumentException( "[index.vectors.indexing.use_gpu] was set to [true], but GPU resources are not accessible on the node." 
); } if (isGpuIndexingFeatureAllowed() == false) { throw new IllegalArgumentException( "[index.vectors.indexing.use_gpu] was set to [true], but GPU indexing is a [" + MINIMUM_ALLOWED_LICENSE + "] level feature" ); } return getVectorsFormat(indexOptions, similarity); } if (gpuMode == GpuMode.AUTO && vectorIndexTypeSupported(indexOptions.getType()) && GPUSupport.isSupported() && isGpuIndexingFeatureAllowed()) { return getVectorsFormat(indexOptions, similarity); } } return null; }; } private boolean vectorIndexTypeSupported(DenseVectorFieldMapper.VectorIndexType type) { return type == DenseVectorFieldMapper.VectorIndexType.HNSW || type == DenseVectorFieldMapper.VectorIndexType.INT8_HNSW; } private static KnnVectorsFormat getVectorsFormat( DenseVectorFieldMapper.DenseVectorIndexOptions indexOptions, DenseVectorFieldMapper.VectorSimilarity similarity ) { // TODO: cuvs 2025.12 will provide an API for converting HNSW CPU Params to Cagra params; use that instead if (indexOptions.getType() == DenseVectorFieldMapper.VectorIndexType.HNSW) { DenseVectorFieldMapper.HnswIndexOptions hnswIndexOptions = (DenseVectorFieldMapper.HnswIndexOptions) indexOptions; int efConstruction = hnswIndexOptions.efConstruction(); int m = hnswIndexOptions.m(); int gpuM = 2 + m * 2 / 3; int gpuEfConstruction = m + m * efConstruction / 256; return new ES92GpuHnswVectorsFormat(gpuM, gpuEfConstruction); } else if (indexOptions.getType() == DenseVectorFieldMapper.VectorIndexType.INT8_HNSW) { if (similarity == DenseVectorFieldMapper.VectorSimilarity.MAX_INNER_PRODUCT) { throw new IllegalArgumentException( "GPU vector indexing does not support [" + similarity + "] similarity for [int8_hnsw] index type. " + "Instead, consider using [" + DenseVectorFieldMapper.VectorSimilarity.COSINE + "] or " + " [hnsw] index type." 
); } DenseVectorFieldMapper.Int8HnswIndexOptions int8HnswIndexOptions = (DenseVectorFieldMapper.Int8HnswIndexOptions) indexOptions; int efConstruction = int8HnswIndexOptions.efConstruction(); int m = int8HnswIndexOptions.m(); int gpuM = 2 + m * 2 / 3; int gpuEfConstruction = m + m * efConstruction / 256; return new ES92GpuHnswSQVectorsFormat(gpuM, gpuEfConstruction, int8HnswIndexOptions.confidenceInterval(), 7, false); } else { throw new IllegalArgumentException( "GPU vector indexing is not supported on this vector type: [" + indexOptions.getType() + "]" ); } } }
GpuMode
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/bugs/_2352/Issue2352Test.java
{ "start": 889, "end": 1315 }
class ____ { @ProcessorTest public void shouldGenerateValidCode() { TheModels theModels = new TheModels(); theModels.add( new TheModel( "1" ) ); theModels.add( new TheModel( "2" ) ); List<TheDto> theDtos = TheModelsMapper.INSTANCE.convert( theModels ); assertThat( theDtos ) .extracting( TheDto::getId ) .containsExactly( "1", "2" ); } }
Issue2352Test
java
quarkusio__quarkus
integration-tests/spring-data-jpa/src/main/java/io/quarkus/it/spring/data/jpa/AbstractPost.java
{ "start": 153, "end": 370 }
class ____ { private ZonedDateTime posted; public ZonedDateTime getPosted() { return posted; } public void setPosted(ZonedDateTime postedAt) { this.posted = postedAt; } }
AbstractPost
java
elastic__elasticsearch
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithSerializationTests.java
{ "start": 567, "end": 1434 }
class ____ extends AbstractExpressionSerializationTests<EndsWith> { @Override protected EndsWith createTestInstance() { Source source = randomSource(); Expression str = randomChild(); Expression suffix = randomChild(); return new EndsWith(source, str, suffix); } @Override protected EndsWith mutateInstance(EndsWith instance) throws IOException { Source source = instance.source(); Expression str = instance.str(); Expression suffix = instance.suffix(); if (randomBoolean()) { str = randomValueOtherThan(str, AbstractExpressionSerializationTests::randomChild); } else { suffix = randomValueOtherThan(suffix, AbstractExpressionSerializationTests::randomChild); } return new EndsWith(source, str, suffix); } }
EndsWithSerializationTests
java
apache__hadoop
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java
{ "start": 1528, "end": 1941 }
class ____ { static final Logger LOG = LoggerFactory.getLogger(OptionsParser.class); private static final Options cliOptions = new Options(); static { for (DistCpOptionSwitch option : DistCpOptionSwitch.values()) { if (LOG.isDebugEnabled()) { LOG.debug("Adding option " + option.getOption()); } cliOptions.addOption(option.getOption()); } } private static
OptionsParser
java
quarkusio__quarkus
extensions/panache/hibernate-orm-panache/deployment/src/test/java/io/quarkus/hibernate/orm/panache/deployment/test/ErroneousConfigHotReloadTestCase.java
{ "start": 1070, "end": 2451 }
class ____ is not, in fact, an entity. RestAssured.when().get("/unannotatedEntity").then().statusCode(500).body(containsString("@Entity")) .body(not(containsString("NullPointer"))); TEST.modifySourceFile(UnAnnotatedEntity.class, new Function<String, String>() { @Override public String apply(String s) { return s.replace("//", ""); } }); // Once we do have entities, the persistence unit will be active, // but will fail to start since there is no datasource. RestAssured.when() .get("/unannotatedEntity").then().statusCode(500) // Weirdly, in case of build errors, Quarkus will return the error as HTML, even if we set the content type to JSON... // Hence the &lt; / &gt; .body(containsString( "Datasource '&lt;default&gt;' is not configured. To solve this, configure datasource '&lt;default&gt;'.")) .body(not(containsString("NullPointer"))); TEST.modifyResourceFile("application.properties", new Function<String, String>() { @Override public String apply(String s) { return s.replace("#", ""); } }); RestAssured.when().get("/unannotatedEntity").then().statusCode(200); } }
that
java
elastic__elasticsearch
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AnalysisStep.java
{ "start": 1006, "end": 3196 }
class ____ extends AbstractDataFrameAnalyticsStep { private final AnalyticsProcessManager processManager; public AnalysisStep( NodeClient client, DataFrameAnalyticsTask task, DataFrameAnalyticsAuditor auditor, DataFrameAnalyticsConfig config, AnalyticsProcessManager processManager ) { super(client, task, auditor, config); this.processManager = Objects.requireNonNull(processManager); } @Override public Name name() { return Name.ANALYSIS; } @Override public void cancel(String reason, TimeValue timeout) { processManager.stop(task); } @Override public void updateProgress(ActionListener<Void> listener) { // Progress for the analysis step gets handled by the c++ process reporting it and the // results processor parsing the value in memory. listener.onResponse(null); } @Override protected void doExecute(ActionListener<StepResponse> listener) { task.getStatsHolder().getDataCountsTracker().reset(); final ParentTaskAssigningClient parentTaskClient = parentTaskClient(); // Update state to ANALYZING and start process ActionListener<DataFrameDataExtractorFactory> dataExtractorFactoryListener = listener.delegateFailureAndWrap( (l, dataExtractorFactory) -> processManager.runJob(task, config, dataExtractorFactory, l) ); ActionListener<BroadcastResponse> refreshListener = dataExtractorFactoryListener.delegateFailureAndWrap((l, refreshResponse) -> { // TODO This could fail with errors. In that case we get stuck with the copied index. // We could delete the index in case of failure or we could try building the factory before reindexing // to catch the error early on. DataFrameDataExtractorFactory.createForDestinationIndex(parentTaskClient, config, l); }); // First we need to refresh the dest index to ensure data is searchable in case the job // was stopped after reindexing was complete but before the index was refreshed. refreshDestAsync(refreshListener); } }
AnalysisStep
java
apache__maven
api/maven-api-cli/src/main/java/org/apache/maven/api/cli/mvnsh/ShellOptions.java
{ "start": 1014, "end": 1160 }
interface ____ the general {@link Options} interface, adding shell-specific configuration options. * * @since 4.0.0 */ @Experimental public
extends
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/ProfileServlet.java
{ "start": 2037, "end": 4154 }
class ____ instead of FQN * // -o fmt[,fmt...] output format: summary|traces|flat|collapsed|svg|tree|jfr|html * // --width px SVG width pixels (integer) * // --height px SVG frame height pixels (integer) * // --minwidth px skip frames smaller than px (double) * // --reverse generate stack-reversed FlameGraph / Call tree * <p> * Example: * If Namenode http address is localhost:9870, and ResourceManager http address is localhost:8088, * ProfileServlet running with async-profiler setup can be accessed with * http://localhost:9870/prof and http://localhost:8088/prof for Namenode and ResourceManager * processes respectively. * Deep dive into some params: * - To collect 10 second CPU profile of current process i.e. Namenode (returns FlameGraph svg) * curl "http://localhost:9870/prof" * - To collect 10 second CPU profile of pid 12345 (returns FlameGraph svg) * curl "http://localhost:9870/prof?pid=12345" (For instance, provide pid of Datanode) * - To collect 30 second CPU profile of pid 12345 (returns FlameGraph svg) * curl "http://localhost:9870/prof?pid=12345&amp;duration=30" * - To collect 1 minute CPU profile of current process and output in tree format (html) * curl "http://localhost:9870/prof?output=tree&amp;duration=60" * - To collect 10 second heap allocation profile of current process (returns FlameGraph svg) * curl "http://localhost:9870/prof?event=alloc" * - To collect lock contention profile of current process (returns FlameGraph svg) * curl "http://localhost:9870/prof?event=lock" * <p> * Following event types are supported (default is 'cpu') (NOTE: not all OS'es support all events) * // Perf events: * // cpu * // page-faults * // context-switches * // cycles * // instructions * // cache-references * // cache-misses * // branches * // branch-misses * // bus-cycles * // L1-dcache-load-misses * // LLC-load-misses * // dTLB-load-misses * // mem:breakpoint * // trace:tracepoint * // Java events: * // alloc * // lock */ @InterfaceAudience.Private public
names
java
apache__rocketmq
auth/src/test/java/org/apache/rocketmq/auth/authorization/manager/AuthorizationMetadataManagerTest.java
{ "start": 2001, "end": 13029 }
class ____ { private AuthConfig authConfig; private AuthenticationMetadataManager authenticationMetadataManager; private AuthorizationMetadataManager authorizationMetadataManager; @Before public void setUp() throws Exception { if (MixAll.isMac()) { return; } this.authConfig = AuthTestHelper.createDefaultConfig(); this.authenticationMetadataManager = AuthenticationFactory.getMetadataManager(this.authConfig); this.authorizationMetadataManager = AuthorizationFactory.getMetadataManager(this.authConfig); this.clearAllAcls(); this.clearAllUsers(); } @After public void tearDown() throws Exception { if (MixAll.isMac()) { return; } this.clearAllAcls(); this.clearAllUsers(); this.authenticationMetadataManager.shutdown(); this.authorizationMetadataManager.shutdown(); } @Test public void createAcl() { if (MixAll.isMac()) { return; } User user = User.of("test", "test"); this.authenticationMetadataManager.createUser(user).join(); Acl acl1 = AuthTestHelper.buildAcl("User:test", "Topic:test,Group:test", "PUB,SUB", "192.168.0.0/24,10.10.0.0/24", Decision.ALLOW); this.authorizationMetadataManager.createAcl(acl1).join(); Acl acl2 = this.authorizationMetadataManager.getAcl(Subject.of("User:test")).join(); Assert.assertTrue(AuthTestHelper.isEquals(acl1, acl2)); user = User.of("abc", "abc"); this.authenticationMetadataManager.createUser(user).join(); acl1 = AuthTestHelper.buildAcl("User:abc", PolicyType.DEFAULT, "Topic:*,Group:*", "PUB,SUB", null, Decision.DENY); this.authorizationMetadataManager.createAcl(acl1).join(); acl2 = this.authorizationMetadataManager.getAcl(Subject.of("User:abc")).join(); Assert.assertTrue(AuthTestHelper.isEquals(acl1, acl2)); Acl acl3 = AuthTestHelper.buildAcl("User:test", "Topic:test,Group:test", "PUB,SUB", "192.168.0.0/24,10.10.0.0/24", Decision.ALLOW); this.authorizationMetadataManager.createAcl(acl3).join(); Acl acl4 = this.authorizationMetadataManager.getAcl(Subject.of("User:test")).join(); Assert.assertTrue(AuthTestHelper.isEquals(acl3, acl4)); 
Assert.assertThrows(AuthorizationException.class, () -> { try { Acl acl5 = AuthTestHelper.buildAcl("User:ddd", "Topic:test,Group:test", "PUB,SUB", "192.168.0.0/24,10.10.0.0/24", Decision.ALLOW); this.authorizationMetadataManager.createAcl(acl5).join(); } catch (Exception e) { AuthTestHelper.handleException(e); } }); } @Test public void updateAcl() { if (MixAll.isMac()) { return; } User user = User.of("test", "test"); this.authenticationMetadataManager.createUser(user).join(); Acl acl1 = AuthTestHelper.buildAcl("User:test", "Topic:test,Group:test", "PUB,SUB", "192.168.0.0/24,10.10.0.0/24", Decision.ALLOW); this.authorizationMetadataManager.createAcl(acl1).join(); Acl acl2 = AuthTestHelper.buildAcl("User:test", "Topic:abc,Group:abc", "PUB,SUB", "192.168.0.0/24,10.10.0.0/24", Decision.ALLOW); this.authorizationMetadataManager.updateAcl(acl2).join(); Acl acl3 = AuthTestHelper.buildAcl("User:test", "Topic:test,Group:test,Topic:abc,Group:abc", "PUB,SUB", "192.168.0.0/24,10.10.0.0/24", Decision.ALLOW); Acl acl4 = this.authorizationMetadataManager.getAcl(Subject.of("User:test")).join(); Assert.assertTrue(AuthTestHelper.isEquals(acl3, acl4)); Policy policy = AuthTestHelper.buildPolicy("Topic:test,Group:test", "PUB,SUB,Create", "192.168.0.0/24", Decision.DENY); acl4.updatePolicy(policy); this.authorizationMetadataManager.updateAcl(acl4); Acl acl5 = this.authorizationMetadataManager.getAcl(Subject.of("User:test")).join(); Assert.assertTrue(AuthTestHelper.isEquals(acl4, acl5)); User user2 = User.of("abc", "abc"); this.authenticationMetadataManager.createUser(user2).join(); Acl acl6 = AuthTestHelper.buildAcl("User:abc", "Topic:test,Group:test", "PUB,SUB", "192.168.0.0/24,10.10.0.0/24", Decision.ALLOW); this.authorizationMetadataManager.updateAcl(acl6).join(); Acl acl7 = this.authorizationMetadataManager.getAcl(Subject.of("User:abc")).join(); Assert.assertTrue(AuthTestHelper.isEquals(acl6, acl7)); } @Test public void deleteAcl() { if (MixAll.isMac()) { return; } User user = 
User.of("test", "test"); this.authenticationMetadataManager.createUser(user).join(); Acl acl1 = AuthTestHelper.buildAcl("User:test", "Topic:test,Group:test", "PUB,SUB", "192.168.0.0/24,10.10.0.0/24", Decision.ALLOW); this.authorizationMetadataManager.createAcl(acl1).join(); this.authorizationMetadataManager.deleteAcl(Subject.of("User:test"), PolicyType.CUSTOM, Resource.ofTopic("abc")).join(); Acl acl2 = this.authorizationMetadataManager.getAcl(Subject.of("User:test")).join(); Assert.assertTrue(AuthTestHelper.isEquals(acl1, acl2)); this.authorizationMetadataManager.deleteAcl(Subject.of("User:test"), PolicyType.CUSTOM, Resource.ofTopic("test")).join(); Acl acl3 = AuthTestHelper.buildAcl("User:test", "Group:test", "PUB,SUB", "192.168.0.0/24,10.10.0.0/24", Decision.ALLOW); Acl acl4 = this.authorizationMetadataManager.getAcl(Subject.of("User:test")).join(); Assert.assertTrue(AuthTestHelper.isEquals(acl3, acl4)); this.authorizationMetadataManager.deleteAcl(Subject.of("User:test")); Acl acl5 = this.authorizationMetadataManager.getAcl(Subject.of("User:test")).join(); Assert.assertNull(acl5); Assert.assertThrows(AuthorizationException.class, () -> { try { this.authorizationMetadataManager.deleteAcl(Subject.of("User:abc")).join(); } catch (Exception e) { AuthTestHelper.handleException(e); } }); } @Test public void getAcl() { if (MixAll.isMac()) { return; } User user = User.of("test", "test"); this.authenticationMetadataManager.createUser(user).join(); Acl acl1 = AuthTestHelper.buildAcl("User:test", "Topic:test,Group:test", "PUB,SUB", "192.168.0.0/24,10.10.0.0/24", Decision.ALLOW); this.authorizationMetadataManager.createAcl(acl1).join(); Acl acl2 = this.authorizationMetadataManager.getAcl(Subject.of("User:test")).join(); Assert.assertTrue(AuthTestHelper.isEquals(acl1, acl2)); Assert.assertThrows(AuthorizationException.class, () -> { try { this.authorizationMetadataManager.getAcl(Subject.of("User:abc")).join(); } catch (Exception e) { AuthTestHelper.handleException(e); } }); 
} @Test public void testGetAclWithNullSubject() { if (MixAll.isMac()) { return; } AuthorizationException authorizationException = Assert.assertThrows(AuthorizationException.class, () -> { try { this.authorizationMetadataManager.getAcl(null).join(); } catch (Exception e) { AuthTestHelper.handleException(e); } }); Assert.assertEquals("The subject is null.", authorizationException.getMessage()); } @Test public void listAcl() { if (MixAll.isMac()) { return; } User user1 = User.of("test-1", "test-1"); this.authenticationMetadataManager.createUser(user1).join(); User user2 = User.of("test-2", "test-2"); this.authenticationMetadataManager.createUser(user2).join(); Acl acl1 = AuthTestHelper.buildAcl("User:test-1", "Topic:test-1,Group:test-1", "PUB,SUB", "192.168.0.0/24,10.10.0.0/24", Decision.ALLOW); this.authorizationMetadataManager.createAcl(acl1).join(); Acl acl2 = AuthTestHelper.buildAcl("User:test-2", "Topic:test-2,Group:test-2", "PUB,SUB", "192.168.0.0/24,10.10.0.0/24", Decision.ALLOW); this.authorizationMetadataManager.createAcl(acl2).join(); Acl acl3 = AuthTestHelper.buildAcl("User:test-2", "Topic:acl-2,Group:acl-2", "PUB,SUB", "192.168.0.0/24,10.10.0.0/24", Decision.ALLOW); this.authorizationMetadataManager.createAcl(acl3).join(); List<Acl> acls1 = this.authorizationMetadataManager.listAcl(null, null).join(); Assert.assertEquals(acls1.size(), 2); List<Acl> acls2 = this.authorizationMetadataManager.listAcl("User:test-1", null).join(); Assert.assertEquals(acls2.size(), 1); List<Acl> acls3 = this.authorizationMetadataManager.listAcl("test", null).join(); Assert.assertEquals(acls3.size(), 2); List<Acl> acls4 = this.authorizationMetadataManager.listAcl(null, "Topic:test-1").join(); Assert.assertEquals(acls4.size(), 1); Assert.assertEquals(acls4.get(0).getPolicy(PolicyType.CUSTOM).getEntries().size(), 1); List<Acl> acls5 = this.authorizationMetadataManager.listAcl(null, "test-1").join(); Assert.assertEquals(acls5.size(), 1); 
Assert.assertEquals(acls5.get(0).getPolicy(PolicyType.CUSTOM).getEntries().size(), 2); List<Acl> acls6 = this.authorizationMetadataManager.listAcl("User:abc", null).join(); Assert.assertTrue(CollectionUtils.isEmpty(acls6)); List<Acl> acls7 = this.authorizationMetadataManager.listAcl(null, "Topic:abc").join(); Assert.assertTrue(CollectionUtils.isEmpty(acls7)); List<Acl> acls8 = this.authorizationMetadataManager.listAcl("test-2", "test-2").join(); Assert.assertEquals(acls8.size(), 1); List<PolicyEntry> policyEntries = acls8.get(0).getPolicy(PolicyType.CUSTOM).getEntries(); Assert.assertEquals(policyEntries.size(), 2); for (PolicyEntry policyEntry : policyEntries) { Assert.assertTrue(policyEntry.toResourceStr().contains("test-2")); } } private void clearAllUsers() { List<User> users = this.authenticationMetadataManager.listUser(null).join(); if (CollectionUtils.isEmpty(users)) { return; } users.forEach(user -> this.authenticationMetadataManager.deleteUser(user.getUsername()).join()); } private void clearAllAcls() { List<Acl> acls = this.authorizationMetadataManager.listAcl(null, null).join(); if (CollectionUtils.isEmpty(acls)) { return; } acls.forEach(acl -> this.authorizationMetadataManager.deleteAcl(acl.getSubject(), null, null).join()); } }
AuthorizationMetadataManagerTest
java
quarkusio__quarkus
integration-tests/hibernate-reactive-panache/src/test/java/io/quarkus/it/panache/reactive/DuplicateMethodTest.java
{ "start": 275, "end": 835 }
class ____ { @Inject DuplicateRepository repository; @Test public void shouldNotDuplicateMethodsInRepository() { assertThat(repository.findById(1)).isNotNull(); } @Test public void shouldNotDuplicateMethodsInEntity() { DuplicateEntity entity = DuplicateEntity.<DuplicateEntity> findById(1).await().indefinitely(); assertThat(entity).isNotNull(); entity.persist().await().indefinitely(); DuplicateEntity.update("foo", Parameters.with("a", 1)).await().indefinitely(); } }
DuplicateMethodTest
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/pool/basic/ConnectionTest5.java
{ "start": 1137, "end": 6509 }
class ____ extends PoolTestCase { private MockDriver driver; private DruidDataSource dataSource; protected void setUp() throws Exception { DruidDataSourceStatManager.clear(); driver = new MockDriver(); dataSource = new DruidDataSource(); dataSource.setUrl("jdbc:mock:xxx"); dataSource.setDriver(driver); dataSource.setInitialSize(1); dataSource.setMaxActive(2); dataSource.setMaxIdle(2); dataSource.setMinIdle(1); dataSource.setMinEvictableIdleTimeMillis(300 * 1000); // 300 / 10 dataSource.setTimeBetweenEvictionRunsMillis(180 * 1000); // 180 / 10 dataSource.setTestWhileIdle(true); dataSource.setTestOnBorrow(true); dataSource.setTestOnReturn(true); dataSource.setValidationQuery("SELECT 1"); dataSource.setFilters("stat,trace"); JdbcStatContext context = new JdbcStatContext(); context.setTraceEnable(true); JdbcStatManager.getInstance().setStatContext(context); } protected void tearDown() throws Exception { dataSource.close(); assertEquals(0, DruidDataSourceStatManager.getInstance().getDataSourceList().size()); JdbcStatManager.getInstance().setStatContext(null); super.tearDown(); } public void test_basic() throws Exception { DruidPooledConnection conn = (DruidPooledConnection) dataSource.getConnection(); conn.close(); assertEquals(true, dataSource.isResetStatEnable()); dataSource.setResetStatEnable(false); assertEquals(false, dataSource.isResetStatEnable()); assertEquals(1, dataSource.getConnectCount()); dataSource.resetStat(); assertEquals(1, dataSource.getConnectCount()); dataSource.setResetStatEnable(true); dataSource.resetStat(); assertEquals(0, dataSource.getConnectCount()); } public void test_handleException() throws Exception { DruidPooledConnection conn = (DruidPooledConnection) dataSource.getConnection(); conn.close(); SQLException error = new SQLException(); try { conn.handleException(error); } catch (SQLException ex) { assertEquals(error, ex); } } public void test_handleException_2() throws Exception { DruidPooledConnection conn = 
dataSource.getConnection().unwrap(DruidPooledConnection.class); conn.getConnection().close(); { SQLException error = null; try { conn.handleException(new RuntimeException()); } catch (SQLException ex) { error = ex; } assertNotNull(error); } conn.close(); } public void test_handleException_3() throws Exception { DruidPooledConnection conn = dataSource.getConnection().unwrap(DruidPooledConnection.class); conn.getConnection().close(); { SQLException error = null; try { conn.handleException(new RuntimeException()); } catch (SQLException ex) { error = ex; } assertNotNull(error); } conn.close(); } public void test_handleException_4() throws Exception { DruidPooledConnection conn = dataSource.getConnection().unwrap(DruidPooledConnection.class); conn.getConnection().close(); { SQLException error = null; try { conn.prepareStatement("SELECT 1"); } catch (SQLException ex) { error = ex; } assertNotNull(error); } assertEquals(true, conn.isClosed()); } public void test_handleException_5() throws Exception { DruidPooledConnection conn = dataSource.getConnection().unwrap(DruidPooledConnection.class); conn.addConnectionEventListener(new ConnectionEventListener() { @Override public void connectionClosed(ConnectionEvent event) { } @Override public void connectionErrorOccurred(ConnectionEvent event) { } }); conn.close(); { SQLException error = null; try { conn.handleException(new RuntimeException()); } catch (SQLException ex) { error = ex; } assertNotNull(error); } } public void test_setClientInfo() throws Exception { DruidPooledConnection conn = dataSource.getConnection().unwrap(DruidPooledConnection.class); conn.close(); { SQLException error = null; try { conn.setClientInfo("name", "xxx"); } catch (SQLException ex) { error = ex; } assertNotNull(error); } } public void test_setClientInfo_1() throws Exception { DruidPooledConnection conn = dataSource.getConnection().unwrap(DruidPooledConnection.class); conn.close(); { SQLException error = null; try { conn.setClientInfo(new 
Properties()); } catch (SQLException ex) { error = ex; } assertNotNull(error); } } }
ConnectionTest5
java
spring-projects__spring-boot
module/spring-boot-opentelemetry/src/main/java/org/springframework/boot/opentelemetry/autoconfigure/logging/otlp/OtlpLoggingConfigurations.java
{ "start": 1928, "end": 2363 }
class ____ { @Bean @ConditionalOnMissingBean @ConditionalOnProperty("management.opentelemetry.logging.export.otlp.endpoint") PropertiesOtlpLoggingConnectionDetails openTelemetryLoggingConnectionDetails(OtlpLoggingProperties properties) { return new PropertiesOtlpLoggingConnectionDetails(properties); } /** * Adapts {@link OtlpLoggingProperties} to {@link OtlpLoggingConnectionDetails}. */ static
ConnectionDetails
java
elastic__elasticsearch
plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExampleWhitelistedClass.java
{ "start": 546, "end": 767 }
class ____ be whitelisted for use by painless scripts * * Each of the members and methods below are whitelisted for use in search scripts. * See <a href="file:example_whitelist.txt">example_whitelist.txt</a>. */ public
to
java
apache__camel
core/camel-xml-jaxp/src/main/java/org/apache/camel/support/processor/validation/ValidatorErrorHandler.java
{ "start": 1088, "end": 1635 }
interface ____ extends ErrorHandler { /** * Resets any state within this error handler */ void reset(); /** * Process any errors which may have occurred during validation * * @param exchange the exchange * @param schema the schema * @param result the result * @throws ValidationException is thrown in case of validation errors */ void handleErrors(Exchange exchange, Schema schema, Result result) throws ValidationException; }
ValidatorErrorHandler
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/api/SoftAssertions_ThrowableTypeAssert_Test.java
{ "start": 5262, "end": 5924 }
class ____<T extends Throwable> implements Function<SoftAssertions, ThrowableTypeAssert<T>> { private Function<SoftAssertions, ThrowableTypeAssert<T>> function; private String assertionMethod; SoftAssertionsFunction(String assertionMethod, Function<SoftAssertions, ThrowableTypeAssert<T>> softAssertionsFunction) { this.function = softAssertionsFunction; this.assertionMethod = assertionMethod; } @Override public ThrowableTypeAssert<T> apply(SoftAssertions softly) { return function.apply(softly); } @Override public String toString() { return this.assertionMethod; } } }
SoftAssertionsFunction
java
quarkusio__quarkus
test-framework/junit5/src/main/java/io/quarkus/test/junit/main/QuarkusMainTest.java
{ "start": 649, "end": 1246 }
class ____ be annotated with {@link io.quarkus.test.junit.main.Launch} or have a * {@link io.quarkus.test.junit.main.QuarkusMainLauncher} parameter to be able to start the application manually. * * Note that this can be used in conjunction with other {@link io.quarkus.test.junit.QuarkusTest} * based tests. {@code QuarkusMainTest} is used to check a complete execution, while {@code QuarkusTest} can be * used to inject components and perform more fine-grained checks. */ @Target(ElementType.TYPE) @ExtendWith({ QuarkusMainTestExtension.class }) @Retention(RetentionPolicy.RUNTIME) public @
must
java
apache__camel
test-infra/camel-test-infra-qdrant/src/main/java/org/apache/camel/test/infra/qdrant/services/QdrantLocalContainerInfraService.java
{ "start": 2690, "end": 4341 }
class ____ extends QdrantContainer { public TestInfraQdrantContainer() { super(DockerImageName.parse(imageName) .asCompatibleSubstituteFor("qdrant/qdrant")); if (fixedPort) { addFixedExposedPort(6333, 6333); addFixedExposedPort(6334, 6334); } } } return new TestInfraQdrantContainer(); } @Override public void registerProperties() { System.setProperty(QdrantProperties.QDRANT_HTTP_HOST, getHttpHost()); System.setProperty(QdrantProperties.QDRANT_HTTP_PORT, String.valueOf(getHttpPort())); } @Override public void initialize() { LOG.info("Trying to start the Qdrant container"); container.withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger(QdrantContainer.class))); container.start(); registerProperties(); LOG.info("Qdrant instance running at {}:{}", getHttpHost(), getHttpPort()); } @Override public void shutdown() { LOG.info("Stopping the Qdrant container"); container.stop(); } @Override public QdrantContainer getContainer() { return container; } @Override public String getHttpHost() { return container.getHost(); } @Override public int getHttpPort() { return container.getMappedPort(HTTP_PORT); } @Override public String host() { return container.getHost(); } @Override public int port() { return container.getMappedPort(GRPC_PORT); } }
TestInfraQdrantContainer
java
apache__kafka
clients/src/main/java/org/apache/kafka/clients/consumer/internals/events/ShareSubscriptionChangeEvent.java
{ "start": 1215, "end": 1705 }
class ____ extends CompletableApplicationEvent<Void> { private final Set<String> topics; public ShareSubscriptionChangeEvent(final Collection<String> topics) { super(Type.SHARE_SUBSCRIPTION_CHANGE, Long.MAX_VALUE); this.topics = Set.copyOf(topics); } public Set<String> topics() { return topics; } @Override protected String toStringBase() { return super.toStringBase() + ", topics=" + topics; } }
ShareSubscriptionChangeEvent
java
quarkusio__quarkus
extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/mapping/timezone/TimezoneDefaultStorageNormalizeUtcTest.java
{ "start": 427, "end": 1856 }
class ____ extends AbstractTimezoneDefaultStorageTest { @RegisterExtension static QuarkusUnitTest TEST = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(EntityWithTimezones.class) .addClasses(SchemaUtil.class)) .withConfigurationResource("application.properties") .overrideConfigKey("quarkus.hibernate-orm.mapping.timezone.default-storage", "normalize-utc"); @Test public void schema() { assertThat(SchemaUtil.getColumnNames(EntityWithTimezones.class, mappingMetamodel())) .doesNotContain("zonedDateTime_tz", "offsetDateTime_tz", "offsetTime_tz"); assertThat(SchemaUtil.getColumnTypeName(EntityWithTimezones.class, "zonedDateTime", mappingMetamodel())) .isEqualTo("TIMESTAMP_UTC"); assertThat(SchemaUtil.getColumnTypeName(EntityWithTimezones.class, "offsetDateTime", mappingMetamodel())) .isEqualTo("TIMESTAMP_UTC"); } @Test @RunOnVertxContext public void persistAndLoad(UniAsserter asserter) { assertPersistedThenLoadedValues(asserter, PERSISTED_ZONED_DATE_TIME.withZoneSameInstant(ZoneOffset.UTC), PERSISTED_OFFSET_DATE_TIME.withOffsetSameInstant(ZoneOffset.UTC), PERSISTED_OFFSET_TIME.withOffsetSameInstant(ZoneOffset.UTC)); } }
TimezoneDefaultStorageNormalizeUtcTest
java
apache__kafka
clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/ClientsTestUtils.java
{ "start": 14256, "end": 18575 }
class ____ { public static final int BROKER_COUNT = 3; public static final String TOPIC = "topic"; public static final TopicPartition TP = new TopicPartition(TOPIC, 0); private BaseConsumerTestcase() { } public static void testSimpleConsumption( ClusterInstance cluster, Map<String, Object> config ) throws InterruptedException { var numRecords = 10000; var startingTimestamp = System.currentTimeMillis(); sendRecords(cluster, TP, numRecords, startingTimestamp); try (Consumer<byte[], byte[]> consumer = cluster.consumer(config)) { assertEquals(0, consumer.assignment().size()); consumer.assign(List.of(TP)); assertEquals(1, consumer.assignment().size()); consumer.seek(TP, 0); consumeAndVerifyRecords(consumer, TP, numRecords, 0, 0, startingTimestamp); // check async commit callbacks sendAndAwaitAsyncCommit(consumer, Optional.empty()); } } public static void testClusterResourceListener( ClusterInstance cluster, Map<String, Object> consumerConfig ) throws InterruptedException { var numRecords = 100; Map<String, Object> producerConfig = Map.of( KEY_SERIALIZER_CLASS_CONFIG, TestClusterResourceListenerSerializer.class, VALUE_SERIALIZER_CLASS_CONFIG, TestClusterResourceListenerSerializer.class ); Map<String, Object> consumerConfigOverrides = new HashMap<>(consumerConfig); consumerConfigOverrides.put(KEY_DESERIALIZER_CLASS_CONFIG, TestClusterResourceListenerDeserializer.class); consumerConfigOverrides.put(VALUE_DESERIALIZER_CLASS_CONFIG, TestClusterResourceListenerDeserializer.class); try (Producer<byte[], byte[]> producer = cluster.producer(producerConfig); Consumer<byte[], byte[]> consumer = cluster.consumer(consumerConfigOverrides) ) { var startingTimestamp = System.currentTimeMillis(); sendRecords(producer, TP, numRecords, startingTimestamp, -1); consumer.subscribe(List.of(TP.topic())); consumeAndVerifyRecords(consumer, TP, numRecords, 0, 0, startingTimestamp); assertNotEquals(0, UPDATE_PRODUCER_COUNT.get()); assertNotEquals(0, UPDATE_CONSUMER_COUNT.get()); 
TestClusterResourceListenerSerializer.resetCount(); TestClusterResourceListenerDeserializer.resetCount(); } } public static void testCoordinatorFailover( ClusterInstance cluster, Map<String, Object> consumerConfig ) throws InterruptedException { var listener = new TestConsumerReassignmentListener(); try (Consumer<byte[], byte[]> consumer = cluster.consumer(consumerConfig)) { consumer.subscribe(List.of(TOPIC), listener); // the initial subscription should cause a callback execution awaitRebalance(consumer, listener); assertEquals(1, listener.callsToAssigned); // get metadata for the topic List<PartitionInfo> parts = null; while (parts == null) { parts = consumer.partitionsFor(Topic.GROUP_METADATA_TOPIC_NAME); } assertEquals(1, parts.size()); assertNotNull(parts.get(0).leader()); // shutdown the coordinator int coordinator = parts.get(0).leader().id(); cluster.shutdownBroker(coordinator); // the failover should not cause a rebalance ensureNoRebalance(consumer, listener); } } } public static <K, V> void sendAsyncCommit( Consumer<K, V> consumer, OffsetCommitCallback callback, Optional<Map<TopicPartition, OffsetAndMetadata>> offsetsOpt ) { offsetsOpt.ifPresentOrElse( offsets -> consumer.commitAsync(offsets, callback), () -> consumer.commitAsync(callback) ); } public static
BaseConsumerTestcase
java
netty__netty
transport/src/main/java/io/netty/channel/ChannelPipeline.java
{ "start": 10868, "end": 26997 }
interface ChannelPipeline
        extends ChannelInboundInvoker, ChannelOutboundInvoker, Iterable<Entry<String, ChannelHandler>> {

    /**
     * Inserts a {@link ChannelHandler} at the first position of this pipeline.
     *
     * @param name     the name of the handler to insert first
     * @param handler  the handler to insert first
     *
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified handler is {@code null}
     */
    ChannelPipeline addFirst(String name, ChannelHandler handler);

    /**
     * Inserts a {@link ChannelHandler} at the first position of this pipeline.
     *
     * @param group    the {@link EventExecutorGroup} which will be used to execute the {@link ChannelHandler}
     *                 methods
     * @param name     the name of the handler to insert first
     * @param handler  the handler to insert first
     *
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified handler is {@code null}
     * @deprecated use {@link #addFirst(String, ChannelHandler)}
     */
    @Deprecated
    ChannelPipeline addFirst(EventExecutorGroup group, String name, ChannelHandler handler);

    /**
     * Appends a {@link ChannelHandler} at the last position of this pipeline.
     *
     * @param name     the name of the handler to append
     * @param handler  the handler to append
     *
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified handler is {@code null}
     */
    ChannelPipeline addLast(String name, ChannelHandler handler);

    /**
     * Appends a {@link ChannelHandler} at the last position of this pipeline.
     *
     * @param group    the {@link EventExecutorGroup} which will be used to execute the {@link ChannelHandler}
     *                 methods
     * @param name     the name of the handler to append
     * @param handler  the handler to append
     *
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified handler is {@code null}
     * @deprecated use {@link #addLast(String, ChannelHandler)}
     */
    @Deprecated
    ChannelPipeline addLast(EventExecutorGroup group, String name, ChannelHandler handler);

    /**
     * Inserts a {@link ChannelHandler} before an existing handler of this
     * pipeline.
     *
     * @param baseName  the name of the existing handler
     * @param name      the name of the handler to insert before
     * @param handler   the handler to insert before
     *
     * @throws NoSuchElementException
     *         if there's no such entry with the specified {@code baseName}
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified baseName or handler is {@code null}
     */
    ChannelPipeline addBefore(String baseName, String name, ChannelHandler handler);

    /**
     * Inserts a {@link ChannelHandler} before an existing handler of this
     * pipeline.
     *
     * @param group     the {@link EventExecutorGroup} which will be used to execute the {@link ChannelHandler}
     *                  methods
     * @param baseName  the name of the existing handler
     * @param name      the name of the handler to insert before
     * @param handler   the handler to insert before
     *
     * @throws NoSuchElementException
     *         if there's no such entry with the specified {@code baseName}
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified baseName or handler is {@code null}
     * @deprecated use {@link #addBefore(String, String, ChannelHandler)}
     */
    @Deprecated
    ChannelPipeline addBefore(EventExecutorGroup group, String baseName, String name, ChannelHandler handler);

    /**
     * Inserts a {@link ChannelHandler} after an existing handler of this
     * pipeline.
     *
     * @param baseName  the name of the existing handler
     * @param name      the name of the handler to insert after
     * @param handler   the handler to insert after
     *
     * @throws NoSuchElementException
     *         if there's no such entry with the specified {@code baseName}
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified baseName or handler is {@code null}
     */
    ChannelPipeline addAfter(String baseName, String name, ChannelHandler handler);

    /**
     * Inserts a {@link ChannelHandler} after an existing handler of this
     * pipeline.
     *
     * @param group     the {@link EventExecutorGroup} which will be used to execute the {@link ChannelHandler}
     *                  methods
     * @param baseName  the name of the existing handler
     * @param name      the name of the handler to insert after
     * @param handler   the handler to insert after
     *
     * @throws NoSuchElementException
     *         if there's no such entry with the specified {@code baseName}
     * @throws IllegalArgumentException
     *         if there's an entry with the same name already in the pipeline
     * @throws NullPointerException
     *         if the specified baseName or handler is {@code null}
     * @deprecated use {@link #addAfter(String, String, ChannelHandler)}
     */
    @Deprecated
    ChannelPipeline addAfter(EventExecutorGroup group, String baseName, String name, ChannelHandler handler);

    /**
     * Inserts {@link ChannelHandler}s at the first position of this pipeline.
     *
     * @param handlers  the handlers to insert first
     */
    ChannelPipeline addFirst(ChannelHandler... handlers);

    /**
     * Inserts {@link ChannelHandler}s at the first position of this pipeline.
     *
     * @param group     the {@link EventExecutorGroup} which will be used to execute the {@link ChannelHandler}s
     *                  methods.
     * @param handlers  the handlers to insert first
     * @deprecated use {@link #addFirst(ChannelHandler...)}
     */
    @Deprecated
    ChannelPipeline addFirst(EventExecutorGroup group, ChannelHandler... handlers);

    /**
     * Inserts {@link ChannelHandler}s at the last position of this pipeline.
     *
     * @param handlers  the handlers to insert last
     */
    ChannelPipeline addLast(ChannelHandler... handlers);

    /**
     * Inserts {@link ChannelHandler}s at the last position of this pipeline.
     *
     * @param group     the {@link EventExecutorGroup} which will be used to execute the {@link ChannelHandler}s
     *                  methods.
     * @param handlers  the handlers to insert last
     * @deprecated use {@link #addLast(ChannelHandler...)}
     */
    @Deprecated
    ChannelPipeline addLast(EventExecutorGroup group, ChannelHandler... handlers);

    /**
     * Removes the specified {@link ChannelHandler} from this pipeline.
     *
     * @param  handler          the {@link ChannelHandler} to remove
     *
     * @throws NoSuchElementException
     *         if there's no such handler in this pipeline
     * @throws NullPointerException
     *         if the specified handler is {@code null}
     */
    ChannelPipeline remove(ChannelHandler handler);

    /**
     * Removes the {@link ChannelHandler} with the specified name from this pipeline.
     *
     * @param  name             the name under which the {@link ChannelHandler} was stored.
     *
     * @return the removed handler
     *
     * @throws NoSuchElementException
     *         if there's no such handler with the specified name in this pipeline
     * @throws NullPointerException
     *         if the specified name is {@code null}
     */
    ChannelHandler remove(String name);

    /**
     * Removes the {@link ChannelHandler} of the specified type from this pipeline.
     *
     * @param <T>           the type of the handler
     * @param handlerType   the type of the handler
     *
     * @return the removed handler
     *
     * @throws NoSuchElementException
     *         if there's no such handler of the specified type in this pipeline
     * @throws NullPointerException
     *         if the specified handler type is {@code null}
     */
    <T extends ChannelHandler> T remove(Class<T> handlerType);

    /**
     * Removes the first {@link ChannelHandler} in this pipeline.
     *
     * @return the removed handler
     *
     * @throws NoSuchElementException
     *         if this pipeline is empty
     */
    ChannelHandler removeFirst();

    /**
     * Removes the last {@link ChannelHandler} in this pipeline.
     *
     * @return the removed handler
     *
     * @throws NoSuchElementException
     *         if this pipeline is empty
     */
    ChannelHandler removeLast();

    /**
     * Replaces the specified {@link ChannelHandler} with a new handler in this pipeline.
     *
     * @param  oldHandler    the {@link ChannelHandler} to be replaced
     * @param  newName       the name under which the replacement should be added
     * @param  newHandler    the {@link ChannelHandler} which is used as replacement
     *
     * @return itself
     *
     * @throws NoSuchElementException
     *         if the specified old handler does not exist in this pipeline
     * @throws IllegalArgumentException
     *         if a handler with the specified new name already exists in this
     *         pipeline, except for the handler to be replaced
     * @throws NullPointerException
     *         if the specified old handler or new handler is
     *         {@code null}
     */
    ChannelPipeline replace(ChannelHandler oldHandler, String newName, ChannelHandler newHandler);

    /**
     * Replaces the {@link ChannelHandler} of the specified name with a new handler in this pipeline.
     *
     * @param  oldName       the name of the {@link ChannelHandler} to be replaced
     * @param  newName       the name under which the replacement should be added
     * @param  newHandler    the {@link ChannelHandler} which is used as replacement
     *
     * @return the removed handler
     *
     * @throws NoSuchElementException
     *         if the handler with the specified old name does not exist in this pipeline
     * @throws IllegalArgumentException
     *         if a handler with the specified new name already exists in this
     *         pipeline, except for the handler to be replaced
     * @throws NullPointerException
     *         if the specified old handler or new handler is
     *         {@code null}
     */
    ChannelHandler replace(String oldName, String newName, ChannelHandler newHandler);

    /**
     * Replaces the {@link ChannelHandler} of the specified type with a new handler in this pipeline.
     *
     * @param  oldHandlerType   the type of the handler to be removed
     * @param  newName          the name under which the replacement should be added
     * @param  newHandler       the {@link ChannelHandler} which is used as replacement
     *
     * @return the removed handler
     *
     * @throws NoSuchElementException
     *         if the handler of the specified old handler type does not exist
     *         in this pipeline
     * @throws IllegalArgumentException
     *         if a handler with the specified new name already exists in this
     *         pipeline, except for the handler to be replaced
     * @throws NullPointerException
     *         if the specified old handler or new handler is
     *         {@code null}
     */
    <T extends ChannelHandler> T replace(Class<T> oldHandlerType, String newName, ChannelHandler newHandler);

    /**
     * Returns the first {@link ChannelHandler} in this pipeline.
     *
     * @return the first handler.  {@code null} if this pipeline is empty.
     */
    ChannelHandler first();

    /**
     * Returns the context of the first {@link ChannelHandler} in this pipeline.
     *
     * @return the context of the first handler.  {@code null} if this pipeline is empty.
     */
    ChannelHandlerContext firstContext();

    /**
     * Returns the last {@link ChannelHandler} in this pipeline.
     *
     * @return the last handler.  {@code null} if this pipeline is empty.
     */
    ChannelHandler last();

    /**
     * Returns the context of the last {@link ChannelHandler} in this pipeline.
     *
     * @return the context of the last handler.  {@code null} if this pipeline is empty.
     */
    ChannelHandlerContext lastContext();

    /**
     * Returns the {@link ChannelHandler} with the specified name in this
     * pipeline.
     *
     * @return the handler with the specified name.
     *         {@code null} if there's no such handler in this pipeline.
     */
    ChannelHandler get(String name);

    /**
     * Returns the {@link ChannelHandler} of the specified type in this
     * pipeline.
     *
     * @return the handler of the specified handler type.
     *         {@code null} if there's no such handler in this pipeline.
     */
    <T extends ChannelHandler> T get(Class<T> handlerType);

    /**
     * Returns the context object of the specified {@link ChannelHandler} in
     * this pipeline.
     *
     * @return the context object of the specified handler.
     *         {@code null} if there's no such handler in this pipeline.
     */
    ChannelHandlerContext context(ChannelHandler handler);

    /**
     * Returns the context object of the {@link ChannelHandler} with the
     * specified name in this pipeline.
     *
     * @return the context object of the handler with the specified name.
     *         {@code null} if there's no such handler in this pipeline.
     */
    ChannelHandlerContext context(String name);

    /**
     * Returns the context object of the {@link ChannelHandler} of the
     * specified type in this pipeline.
     *
     * @return the context object of the handler of the specified type.
     *         {@code null} if there's no such handler in this pipeline.
     */
    ChannelHandlerContext context(Class<? extends ChannelHandler> handlerType);

    /**
     * Returns the {@link Channel} that this pipeline is attached to.
     *
     * @return the channel. {@code null} if this pipeline is not attached yet.
     */
    Channel channel();

    /**
     * Returns the {@link List} of the handler names.
     */
    List<String> names();

    /**
     * Converts this pipeline into an ordered {@link Map} whose keys are
     * handler names and whose values are handlers.
     */
    Map<String, ChannelHandler> toMap();

    @Override
    ChannelPipeline fireChannelRegistered();

    @Override
    ChannelPipeline fireChannelUnregistered();

    @Override
    ChannelPipeline fireChannelActive();

    @Override
    ChannelPipeline fireChannelInactive();

    @Override
    ChannelPipeline fireExceptionCaught(Throwable cause);

    @Override
    ChannelPipeline fireUserEventTriggered(Object event);

    @Override
    ChannelPipeline fireChannelRead(Object msg);

    @Override
    ChannelPipeline fireChannelReadComplete();

    @Override
    ChannelPipeline fireChannelWritabilityChanged();

    @Override
    ChannelPipeline flush();

    /**
     * Returns a new {@link ChannelPromise} bound to the {@link Channel} this pipeline is attached to.
     */
    @Override
    default ChannelPromise newPromise() {
        return new DefaultChannelPromise(channel());
    }

    /**
     * Returns a new {@link ChannelProgressivePromise} bound to the {@link Channel} this pipeline is attached to.
     */
    @Override
    default ChannelProgressivePromise newProgressivePromise() {
        return new DefaultChannelProgressivePromise(channel());
    }

    /**
     * Returns a new {@link ChannelFuture} which is already failed with the given cause.
     */
    @Override
    default ChannelFuture newFailedFuture(Throwable cause) {
        // NOTE(review): the null executor presumably makes the future fall back to the
        // channel's event loop — confirm against the FailedChannelFuture constructor.
        return new FailedChannelFuture(channel(), null, cause);
    }
}
ChannelPipeline