language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
quarkusio__quarkus
extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/certReload/ManagementHttpServerTlsCertificateReloadWithTlsRegistryTest.java
{ "start": 1963, "end": 5343 }
class ____ { public static final File temp = new File("target/test-certificates-" + UUID.randomUUID()); private static final String APP_PROPS = """ quarkus.management.enabled=true quarkus.tls.key-store.pem.0.cert=%s quarkus.tls.key-store.pem.0.key=%s loc=%s """.formatted(temp.getAbsolutePath() + "/tls.crt", temp.getAbsolutePath() + "/tls.key", temp.getAbsolutePath()); @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addAsResource(new StringAsset(APP_PROPS), "application.properties")) .setBeforeAllCustomizer(() -> { try { // Prepare a random directory to store the certificates. temp.mkdirs(); Files.copy(new File("target/certificates/reload-C.crt").toPath(), new File(temp, "/tls.crt").toPath()); Files.copy(new File("target/certificates/reload-C.key").toPath(), new File(temp, "/tls.key").toPath()); Files.copy(new File("target/certificates/reload-C-ca.crt").toPath(), new File(temp, "/ca.crt").toPath()); } catch (Exception e) { throw new RuntimeException(e); } }) .addBuildChainCustomizer(buildCustomizer()) .setAfterAllCustomizer(() -> { try { Files.deleteIfExists(new File(temp, "/tls.crt").toPath()); Files.deleteIfExists(new File(temp, "/tls.key").toPath()); Files.deleteIfExists(new File(temp, "/ca.crt").toPath()); Files.deleteIfExists(temp.toPath()); } catch (Exception e) { throw new RuntimeException(e); } }); static Consumer<BuildChainBuilder> buildCustomizer() { return new Consumer<BuildChainBuilder>() { @Override public void accept(BuildChainBuilder builder) { builder.addBuildStep(new BuildStep() { @Override public void execute(BuildContext context) { NonApplicationRootPathBuildItem buildItem = context.consume(NonApplicationRootPathBuildItem.class); context.produce(buildItem.routeBuilder() .management() .route("/hello") .handler(new MyHandler()) .build()); } }).produces(RouteBuildItem.class) .consumes(NonApplicationRootPathBuildItem.class) .build(); } }; } @Inject Vertx vertx; @ConfigProperty(name = 
"loc") File certs; @Inject TlsConfigurationRegistry registry; @Inject Event<CertificateUpdatedEvent> event; @Test void test() throws IOException, ExecutionException, InterruptedException, TimeoutException { var options = new HttpClientOptions() .setSsl(true) .setDefaultPort(9001) // Management
ManagementHttpServerTlsCertificateReloadWithTlsRegistryTest
java
apache__hadoop
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/SimpleKeyProvider.java
{ "start": 1228, "end": 2135 }
class ____ implements KeyProvider { private static final Logger LOG = LoggerFactory.getLogger(SimpleKeyProvider.class); protected static final String KEY_ACCOUNT_KEY_PREFIX = "fs.azure.account.key."; @Override public String getStorageAccountKey(String accountName, Configuration conf) throws KeyProviderException { String key = null; try { Configuration c = ProviderUtils.excludeIncompatibleCredentialProviders( conf, NativeAzureFileSystem.class); char[] keyChars = c.getPassword(getStorageAccountKeyName(accountName)); if (keyChars != null) { key = new String(keyChars); } } catch(IOException ioe) { LOG.warn("Unable to get key from credential providers.", ioe); } return key; } protected String getStorageAccountKeyName(String accountName) { return KEY_ACCOUNT_KEY_PREFIX + accountName; } }
SimpleKeyProvider
java
spring-projects__spring-framework
spring-test/src/test/java/org/springframework/test/context/jdbc/AfterTestClassSqlScriptsTests.java
{ "start": 2514, "end": 3029 }
class ____ extends AbstractTransactionalTests { @Test @Order(1) @Sql("data-add-catbert.sql") void databaseHasBeenInitialized() { assertUsers("Catbert"); } @Test @Order(2) @Sql("data-add-dogbert.sql") void databaseIsNotWipedBetweenTests() { assertUsers("Catbert", "Dogbert"); } @Nested @TestMethodOrder(OrderAnnotation.class) @Sql(scripts = "recreate-schema.sql", executionPhase = BEFORE_TEST_CLASS) @Sql(scripts = "drop-schema.sql", executionPhase = AFTER_TEST_CLASS)
AfterTestClassSqlScriptsTests
java
spring-projects__spring-security
oauth2/oauth2-authorization-server/src/test/java/org/springframework/security/oauth2/server/authorization/http/converter/OAuth2AuthorizationServerMetadataHttpMessageConverterTests.java
{ "start": 1713, "end": 13542 }
class ____ { private final OAuth2AuthorizationServerMetadataHttpMessageConverter messageConverter = new OAuth2AuthorizationServerMetadataHttpMessageConverter(); @Test public void supportsWhenOAuth2AuthorizationServerMetadataThenTrue() { assertThat(this.messageConverter.supports(OAuth2AuthorizationServerMetadata.class)).isTrue(); } @Test public void setAuthorizationServerMetadataParametersConverterWhenConverterIsNullThenThrowIllegalArgumentException() { assertThatIllegalArgumentException() .isThrownBy(() -> this.messageConverter.setAuthorizationServerMetadataParametersConverter(null)); } @Test public void setAuthorizationServerMetadataConverterWhenConverterIsNullThenThrowIllegalArgumentException() { assertThatIllegalArgumentException() .isThrownBy(() -> this.messageConverter.setAuthorizationServerMetadataConverter(null)); } @Test public void readInternalWhenRequiredParametersThenSuccess() throws Exception { // @formatter:off String authorizationServerMetadataResponse = "{\n" + " \"issuer\": \"https://example.com\",\n" + " \"authorization_endpoint\": \"https://example.com/oauth2/authorize\",\n" + " \"token_endpoint\": \"https://example.com/oauth2/token\",\n" + " \"response_types_supported\": [\"code\"]\n" + "}\n"; // @formatter:on MockClientHttpResponse response = new MockClientHttpResponse(authorizationServerMetadataResponse.getBytes(), HttpStatus.OK); OAuth2AuthorizationServerMetadata authorizationServerMetadata = this.messageConverter .readInternal(OAuth2AuthorizationServerMetadata.class, response); assertThat(authorizationServerMetadata.getIssuer()).isEqualTo(new URL("https://example.com")); assertThat(authorizationServerMetadata.getAuthorizationEndpoint()) .isEqualTo(new URL("https://example.com/oauth2/authorize")); assertThat(authorizationServerMetadata.getTokenEndpoint()) .isEqualTo(new URL("https://example.com/oauth2/token")); assertThat(authorizationServerMetadata.getTokenEndpointAuthenticationMethods()).isNull(); 
assertThat(authorizationServerMetadata.getJwkSetUrl()).isNull(); assertThat(authorizationServerMetadata.getResponseTypes()).containsExactly("code"); assertThat(authorizationServerMetadata.getScopes()).isNull(); assertThat(authorizationServerMetadata.getGrantTypes()).isNull(); assertThat(authorizationServerMetadata.getTokenRevocationEndpoint()).isNull(); assertThat(authorizationServerMetadata.getTokenRevocationEndpointAuthenticationMethods()).isNull(); assertThat(authorizationServerMetadata.getTokenIntrospectionEndpoint()).isNull(); assertThat(authorizationServerMetadata.getTokenIntrospectionEndpointAuthenticationMethods()).isNull(); assertThat(authorizationServerMetadata.getCodeChallengeMethods()).isNull(); } @Test public void readInternalWhenValidParametersThenSuccess() throws Exception { // @formatter:off String authorizationServerMetadataResponse = "{\n" + " \"issuer\": \"https://example.com\",\n" + " \"authorization_endpoint\": \"https://example.com/oauth2/authorize\",\n" + " \"token_endpoint\": \"https://example.com/oauth2/token\",\n" + " \"token_endpoint_auth_methods_supported\": [\"client_secret_basic\"],\n" + " \"jwks_uri\": \"https://example.com/oauth2/jwks\",\n" + " \"scopes_supported\": [\"openid\"],\n" + " \"response_types_supported\": [\"code\"],\n" + " \"grant_types_supported\": [\"authorization_code\", \"client_credentials\"],\n" + " \"revocation_endpoint\": \"https://example.com/oauth2/revoke\",\n" + " \"revocation_endpoint_auth_methods_supported\": [\"client_secret_basic\"],\n" + " \"introspection_endpoint\": \"https://example.com/oauth2/introspect\",\n" + " \"introspection_endpoint_auth_methods_supported\": [\"client_secret_basic\"],\n" + " \"code_challenge_methods_supported\": [\"S256\"],\n" + " \"custom_claim\": \"value\",\n" + " \"custom_collection_claim\": [\"value1\", \"value2\"]\n" + "}\n"; // @formatter:on MockClientHttpResponse response = new MockClientHttpResponse(authorizationServerMetadataResponse.getBytes(), HttpStatus.OK); 
OAuth2AuthorizationServerMetadata authorizationServerMetadata = this.messageConverter .readInternal(OAuth2AuthorizationServerMetadata.class, response); assertThat(authorizationServerMetadata.getClaims()).hasSize(15); assertThat(authorizationServerMetadata.getIssuer()).isEqualTo(new URL("https://example.com")); assertThat(authorizationServerMetadata.getAuthorizationEndpoint()) .isEqualTo(new URL("https://example.com/oauth2/authorize")); assertThat(authorizationServerMetadata.getTokenEndpoint()) .isEqualTo(new URL("https://example.com/oauth2/token")); assertThat(authorizationServerMetadata.getTokenEndpointAuthenticationMethods()) .containsExactly(ClientAuthenticationMethod.CLIENT_SECRET_BASIC.getValue()); assertThat(authorizationServerMetadata.getJwkSetUrl()).isEqualTo(new URL("https://example.com/oauth2/jwks")); assertThat(authorizationServerMetadata.getScopes()).containsExactly("openid"); assertThat(authorizationServerMetadata.getResponseTypes()).containsExactly("code"); assertThat(authorizationServerMetadata.getGrantTypes()).containsExactlyInAnyOrder("authorization_code", "client_credentials"); assertThat(authorizationServerMetadata.getTokenRevocationEndpoint()) .isEqualTo(new URL("https://example.com/oauth2/revoke")); assertThat(authorizationServerMetadata.getTokenRevocationEndpointAuthenticationMethods()) .containsExactly(ClientAuthenticationMethod.CLIENT_SECRET_BASIC.getValue()); assertThat(authorizationServerMetadata.getTokenIntrospectionEndpoint()) .isEqualTo(new URL("https://example.com/oauth2/introspect")); assertThat(authorizationServerMetadata.getTokenIntrospectionEndpointAuthenticationMethods()) .containsExactly(ClientAuthenticationMethod.CLIENT_SECRET_BASIC.getValue()); assertThat(authorizationServerMetadata.getCodeChallengeMethods()).containsExactly("S256"); assertThat(authorizationServerMetadata.getClaimAsString("custom_claim")).isEqualTo("value"); assertThat(authorizationServerMetadata.getClaimAsStringList("custom_collection_claim")) 
.containsExactlyInAnyOrder("value1", "value2"); } @Test public void readInternalWhenFailingConverterThenThrowException() { String errorMessage = "this is not a valid converter"; this.messageConverter.setAuthorizationServerMetadataConverter((source) -> { throw new RuntimeException(errorMessage); }); MockClientHttpResponse response = new MockClientHttpResponse("{}".getBytes(), HttpStatus.OK); assertThatExceptionOfType(HttpMessageNotReadableException.class) .isThrownBy(() -> this.messageConverter.readInternal(OAuth2AuthorizationServerMetadata.class, response)) .withMessageContaining("An error occurred reading the OAuth 2.0 Authorization Server Metadata") .withMessageContaining(errorMessage); } @Test public void readInternalWhenInvalidOAuth2AuthorizationServerMetadataThenThrowException() { String authorizationServerMetadataResponse = "{ \"issuer\": null }"; MockClientHttpResponse response = new MockClientHttpResponse(authorizationServerMetadataResponse.getBytes(), HttpStatus.OK); assertThatExceptionOfType(HttpMessageNotReadableException.class) .isThrownBy(() -> this.messageConverter.readInternal(OAuth2AuthorizationServerMetadata.class, response)) .withMessageContaining("An error occurred reading the OAuth 2.0 Authorization Server Metadata") .withMessageContaining("issuer cannot be null"); } @Test public void writeInternalWhenOAuth2AuthorizationServerMetadataThenSuccess() { OAuth2AuthorizationServerMetadata authorizationServerMetadata = OAuth2AuthorizationServerMetadata.builder() .issuer("https://example.com") .authorizationEndpoint("https://example.com/oauth2/authorize") .tokenEndpoint("https://example.com/oauth2/token") .tokenEndpointAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC.getValue()) .jwkSetUrl("https://example.com/oauth2/jwks") .scope("openid") .responseType("code") .grantType("authorization_code") .grantType("client_credentials") .tokenRevocationEndpoint("https://example.com/oauth2/revoke") 
.tokenRevocationEndpointAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC.getValue()) .tokenIntrospectionEndpoint("https://example.com/oauth2/introspect") .tokenIntrospectionEndpointAuthenticationMethod(ClientAuthenticationMethod.CLIENT_SECRET_BASIC.getValue()) .codeChallengeMethod("S256") .claim("custom_claim", "value") .claim("custom_collection_claim", Arrays.asList("value1", "value2")) .build(); MockHttpOutputMessage outputMessage = new MockHttpOutputMessage(); this.messageConverter.writeInternal(authorizationServerMetadata, outputMessage); String authorizationServerMetadataResponse = outputMessage.getBodyAsString(); assertThat(authorizationServerMetadataResponse).contains("\"issuer\":\"https://example.com\""); assertThat(authorizationServerMetadataResponse) .contains("\"authorization_endpoint\":\"https://example.com/oauth2/authorize\""); assertThat(authorizationServerMetadataResponse) .contains("\"token_endpoint\":\"https://example.com/oauth2/token\""); assertThat(authorizationServerMetadataResponse) .contains("\"token_endpoint_auth_methods_supported\":[\"client_secret_basic\"]"); assertThat(authorizationServerMetadataResponse).contains("\"jwks_uri\":\"https://example.com/oauth2/jwks\""); assertThat(authorizationServerMetadataResponse).contains("\"scopes_supported\":[\"openid\"]"); assertThat(authorizationServerMetadataResponse).contains("\"response_types_supported\":[\"code\"]"); assertThat(authorizationServerMetadataResponse) .contains("\"grant_types_supported\":[\"authorization_code\",\"client_credentials\"]"); assertThat(authorizationServerMetadataResponse) .contains("\"revocation_endpoint\":\"https://example.com/oauth2/revoke\""); assertThat(authorizationServerMetadataResponse) .contains("\"revocation_endpoint_auth_methods_supported\":[\"client_secret_basic\"]"); assertThat(authorizationServerMetadataResponse) .contains("\"introspection_endpoint\":\"https://example.com/oauth2/introspect\""); assertThat(authorizationServerMetadataResponse) 
.contains("\"introspection_endpoint_auth_methods_supported\":[\"client_secret_basic\"]"); assertThat(authorizationServerMetadataResponse).contains("\"code_challenge_methods_supported\":[\"S256\"]"); assertThat(authorizationServerMetadataResponse).contains("\"custom_claim\":\"value\""); assertThat(authorizationServerMetadataResponse).contains("\"custom_collection_claim\":[\"value1\",\"value2\"]"); } @Test public void writeInternalWhenWriteFailsThenThrowException() { String errorMessage = "this is not a valid converter"; Converter<OAuth2AuthorizationServerMetadata, Map<String, Object>> failingConverter = (source) -> { throw new RuntimeException(errorMessage); }; this.messageConverter.setAuthorizationServerMetadataParametersConverter(failingConverter); MockHttpOutputMessage outputMessage = new MockHttpOutputMessage(); OAuth2AuthorizationServerMetadata authorizationServerMetadata = OAuth2AuthorizationServerMetadata.builder() .issuer("https://example.com") .authorizationEndpoint("https://example.com/oauth2/authorize") .tokenEndpoint("https://example.com/oauth2/token") .responseType("code") .build(); assertThatExceptionOfType(HttpMessageNotWritableException.class) .isThrownBy(() -> this.messageConverter.writeInternal(authorizationServerMetadata, outputMessage)) .withMessageContaining("An error occurred writing the OAuth 2.0 Authorization Server Metadata") .withMessageContaining(errorMessage); } }
OAuth2AuthorizationServerMetadataHttpMessageConverterTests
java
apache__kafka
streams/integration-tests/src/test/java/org/apache/kafka/streams/integration/ResetPartitionTimeIntegrationTest.java
{ "start": 2942, "end": 6720 }
class ____ { private static final int NUM_BROKERS = 1; private static final Properties BROKER_CONFIG; private static final long NOW = Instant.now().toEpochMilli(); static { BROKER_CONFIG = new Properties(); BROKER_CONFIG.put("transaction.state.log.replication.factor", (short) 1); BROKER_CONFIG.put("transaction.state.log.min.isr", 1); } public static final EmbeddedKafkaCluster CLUSTER = new EmbeddedKafkaCluster(NUM_BROKERS, BROKER_CONFIG); @BeforeAll public static void startCluster() throws IOException { CLUSTER.start(); } @AfterAll public static void closeCluster() { CLUSTER.stop(); } private static final StringDeserializer STRING_DESERIALIZER = new StringDeserializer(); private static final StringSerializer STRING_SERIALIZER = new StringSerializer(); private static final Serde<String> STRING_SERDE = Serdes.String(); private static final int DEFAULT_TIMEOUT = 100; private static long lastRecordedTimestamp = -2L; @Test public void shouldPreservePartitionTimeOnKafkaStreamRestart(final TestInfo testInfo) { final String appId = "app-" + safeUniqueTestName(testInfo); final String input = "input"; final String outputRaw = "output-raw"; cleanStateBeforeTest(CLUSTER, 2, input, outputRaw); final StreamsBuilder builder = new StreamsBuilder(); builder .stream(input, Consumed.with(STRING_SERDE, STRING_SERDE)) .to(outputRaw); final Properties streamsConfig = new Properties(); streamsConfig.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, MaxTimestampExtractor.class); streamsConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, appId); streamsConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); streamsConfig.put(StreamsConfig.POLL_MS_CONFIG, Integer.toString(DEFAULT_TIMEOUT)); streamsConfig.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, (long) DEFAULT_TIMEOUT); streamsConfig.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath()); KafkaStreams kafkaStreams = getStartedStreams(streamsConfig, builder, true); try { // start sending 
some records to have partition time committed produceSynchronouslyToPartitionZero( input, Collections.singletonList( new KeyValueTimestamp<>("k3", "v3", NOW + 5000) ) ); verifyOutput( outputRaw, Collections.singletonList( new KeyValueTimestamp<>("k3", "v3", NOW + 5000) ) ); assertThat(lastRecordedTimestamp, is(-1L)); lastRecordedTimestamp = -2L; kafkaStreams.close(); assertThat(kafkaStreams.state(), is(KafkaStreams.State.NOT_RUNNING)); kafkaStreams = getStartedStreams(streamsConfig, builder, true); // resend some records and retrieve the last committed timestamp produceSynchronouslyToPartitionZero( input, Collections.singletonList( new KeyValueTimestamp<>("k5", "v5", NOW + 4999) ) ); verifyOutput( outputRaw, Collections.singletonList( new KeyValueTimestamp<>("k5", "v5", NOW + 4999) ) ); assertThat(lastRecordedTimestamp, is(NOW + 5000L)); } finally { kafkaStreams.close(); quietlyCleanStateAfterTest(CLUSTER, kafkaStreams); } } public static final
ResetPartitionTimeIntegrationTest
java
google__error-prone
core/src/test/java/com/google/errorprone/refaster/UnificationTest.java
{ "start": 4145, "end": 5525 }
class ____ {", " public void example() {", " String foo = \"\";", " foo += 5;", " foo += \"bar\";", " foo += 10;", " }", "}"); expectMatches( template, Match.create(ImmutableMap.of("str", "foo", "n", "5")), Match.create(ImmutableMap.of("str", "foo", "n", "10"))); } @Test public void methodInvocation() { // template : md.digest(str.getBytes()) UType byteArrayType = UArrayType.create(UPrimitiveType.BYTE); ExpressionTemplate template = ExpressionTemplate.create( ImmutableMap.of( "md", UClassType.create("java.security.MessageDigest"), "str", UClassType.create("java.lang.String")), UMethodInvocation.create( UMemberSelect.create( UFreeIdent.create("md"), "digest", UMethodType.create(byteArrayType, byteArrayType)), UMethodInvocation.create( UMemberSelect.create( UFreeIdent.create("str"), "getBytes", UMethodType.create(byteArrayType)))), byteArrayType); compile( "import java.security.MessageDigest;", "import java.security.NoSuchAlgorithmException;", "import java.nio.charset.Charset;", "
CompoundAssignmentExample
java
spring-projects__spring-framework
spring-jms/src/test/java/org/springframework/jms/listener/adapter/MessageContentsDelegate.java
{ "start": 743, "end": 823 }
class ____ usage. * * @author Rick Evans * @author Juergen Hoeller */ public
for
java
elastic__elasticsearch
libs/geo/src/test/java/org/elasticsearch/geometry/PointTests.java
{ "start": 880, "end": 4095 }
class ____ extends BaseGeometryTestCase<Point> { @Override protected Point createTestInstance(boolean hasAlt) { return GeometryTestUtils.randomPoint(hasAlt); } public void testBasicSerialization() throws IOException, ParseException { GeometryValidator validator = GeographyValidator.instance(true); assertEquals("POINT (20.0 10.0)", WellKnownText.toWKT(new Point(20, 10))); assertEquals(new Point(20, 10), WellKnownText.fromWKT(validator, true, "point (20.0 10.0)")); assertEquals("POINT (20.0 10.0 100.0)", WellKnownText.toWKT(new Point(20, 10, 100))); assertEquals(new Point(20, 10, 100), WellKnownText.fromWKT(validator, true, "POINT (20.0 10.0 100.0)")); assertEquals("POINT EMPTY", WellKnownText.toWKT(Point.EMPTY)); assertEquals(Point.EMPTY, WellKnownText.fromWKT(validator, true, "POINT EMPTY)")); } public void testInitValidation() { GeometryValidator validator = GeographyValidator.instance(true); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Point(10, 100))); assertEquals("invalid latitude 100.0; must be between -90.0 and 90.0", ex.getMessage()); ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Point(500, 10))); assertEquals("invalid longitude 500.0; must be between -180.0 and 180.0", ex.getMessage()); ex = expectThrows(IllegalArgumentException.class, () -> StandardValidator.instance(false).validate(new Point(2, 1, 3))); assertEquals("found Z value [3.0] but [ignore_z_value] parameter is [false]", ex.getMessage()); StandardValidator.instance(true).validate(new Point(2, 1, 3)); } public void testWKTValidation() { IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, () -> WellKnownText.fromWKT(GeographyValidator.instance(false), randomBoolean(), "point (20.0 10.0 100.0)") ); assertEquals("found Z value [100.0] but [ignore_z_value] parameter is [false]", ex.getMessage()); } public void testParsePointZorMWithThreeCoordinates() throws IOException, ParseException { 
GeometryValidator validator = GeographyValidator.instance(true); assertEquals(new Point(20, 10, 100), WellKnownText.fromWKT(validator, true, "POINT Z (20.0 10.0 100.0)")); assertEquals(new Point(20, 10, 100), WellKnownText.fromWKT(validator, true, "POINT M (20.0 10.0 100.0)")); } public void testParsePointZorMWithTwoCoordinatesThrowsException() { GeometryValidator validator = GeographyValidator.instance(true); List<String> pointsWkt = List.of("POINT Z (20.0 10.0)", "POINT M (20.0 10.0)"); for (String point : pointsWkt) { IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> WellKnownText.fromWKT(validator, true, point)); assertEquals(ZorMMustIncludeThreeValuesMsg, ex.getMessage()); } } @Override protected Point mutateInstance(Point instance) { return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 } }
PointTests
java
elastic__elasticsearch
plugins/analysis-nori/src/yamlRestTest/java/org/elasticsearch/index/analysis/NoriClientYamlTestSuiteIT.java
{ "start": 874, "end": 1468 }
class ____ extends ESClientYamlSuiteTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local().plugin("analysis-nori").build(); public NoriClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory public static Iterable<Object[]> parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } @Override protected String getTestRestCluster() { return cluster.getHttpAddresses(); } }
NoriClientYamlTestSuiteIT
java
apache__flink
flink-state-backends/flink-statebackend-forst/src/main/java/org/apache/flink/state/forst/fs/cache/FileCacheEntry.java
{ "start": 2753, "end": 11850 }
enum ____ { /** The cache entry is fully loaded and available for use. */ LOADED, /** The cache entry is in the process of being loaded. */ LOADING, /** The cache entry is invalid and should not be used. */ INVALID, /** The cache entry is in the process of being removed. */ REMOVING, /** The cache entry has been removed and is not available. Can be reopeneded for reading. */ REMOVED, /** The cache entry is closed and no longer available for use permanently. */ CLOSED } /** * Constructs a new FileCacheEntry. This entry is initially in the REMOVED state with no * reference. * * @param fileBasedCache the file-based cache that this entry belongs to * @param originalPath the original path of the file * @param cachePath the path in the cache * @param entrySize the size of the file */ FileCacheEntry( FileBasedCache fileBasedCache, Path originalPath, Path cachePath, long entrySize) { super(0); this.fileBasedCache = fileBasedCache; this.cacheFs = fileBasedCache.cacheFs; this.originalPath = originalPath; this.cachePath = cachePath; this.entrySize = entrySize; this.openedStreams = new LinkedBlockingQueue<>(); this.status = new AtomicReference<>(EntryStatus.REMOVED); LOG.trace("Create new cache entry {}.", cachePath); } /** * Opens a new {@link CachedDataInputStream} from this cache entry. If the cache stream is * available, it will be used; otherwise, the original stream will be used. But the cache stream * will be used once available. The opened stream is added to the queue of opened streams * associated with this cache entry. 
* * @param originalStream the original input stream to be used if the cache stream is not * available * @return a new {@link CachedDataInputStream} for reading data * @throws IOException if an I/O error occurs while opening the stream */ public CachedDataInputStream open(FSDataInputStream originalStream) throws IOException { LOG.trace("Open new stream for cache entry {}.", cachePath); FSDataInputStream cacheStream = getCacheStream(); if (cacheStream != null) { CachedDataInputStream inputStream = new CachedDataInputStream(fileBasedCache, this, cacheStream, originalStream); openedStreams.add(inputStream); release(); return inputStream; } else { CachedDataInputStream inputStream = new CachedDataInputStream(fileBasedCache, this, originalStream); openedStreams.add(inputStream); return inputStream; } } /** * Retrieves the cached input stream for this cache entry if it is available and the entry is in * a valid state. The method attempts to open the cached stream if the entry is in the LOADED * state and retains a reference to it. * * @return the cached input stream if available, otherwise null * @throws IOException if an I/O error occurs while opening the cached stream */ FSDataInputStream getCacheStream() throws IOException { if (status.get() == EntryStatus.LOADED && tryRetain() > 0) { return cacheFs.open(cachePath); } return null; } /** * Sets the touch function associated with this cache entry. The reason for setting the touch * function is to update the entry order in {@link FileBasedCache}. The touch function is not * initialized in constructor, since the node in LRU should be created before the touch function * is available, and this all happens after this entry is built. */ void setTouchFunction(Runnable touchFunction) { this.touchFunction = touchFunction; } /** * Invokes the touch function associated with this cache entry. 
This method is called to * indicate that the cache entry has been accessed, and as a result, the entry order in {@link * FileBasedCache} is expected to be updated. */ void touch() { if (touchFunction != null) { touchFunction.run(); } } /** * Loads the file from the original path to the cache path. This method reads the file from the * original path and writes it to the cache path. If the file is successfully loaded, the cache * path is returned. If an I/O error occurs during the loading process, null is returned. * * @see FileBasedCache#movedToFirst(FileCacheEntry) * @return the cache path if the file is successfully loaded, otherwise null. */ Path load() { FSDataInputStream inputStream = null; FSDataOutputStream outputStream = null; try { final byte[] buffer = new byte[READ_BUFFER_SIZE]; inputStream = originalPath.getFileSystem().open(originalPath, READ_BUFFER_SIZE); outputStream = cacheFs.create(cachePath, FileSystem.WriteMode.OVERWRITE); long maxTransferBytes = originalPath.getFileSystem().getFileStatus(originalPath).getLen(); while (maxTransferBytes > 0) { int maxReadBytes = (int) Math.min(maxTransferBytes, READ_BUFFER_SIZE); int readBytes = inputStream.read(buffer, 0, maxReadBytes); if (readBytes == -1) { break; } outputStream.write(buffer, 0, readBytes); maxTransferBytes -= readBytes; } return cachePath; } catch (IOException e) { return null; } finally { try { if (inputStream != null) { inputStream.close(); } if (outputStream != null) { outputStream.close(); } } catch (IOException e) { // ignore } } } /** * Only two scenario that the reference count can reach 0. 1. The cache entry is invalid the * reference count is released. {@see invalidate()} 2. The cache entry is closed and the * reference count is scheduled released. 
{@see close()} */ @Override protected void referenceCountReachedZero(@Nullable Object o) { if (switchStatus(EntryStatus.INVALID, EntryStatus.REMOVING) || checkStatus(EntryStatus.CLOSED)) { fileBasedCache.removeFile(this); } } /** * Removes the cache file associated with this cache entry. This method deletes the cache file * and closes all opened streams associated with this cache entry. */ synchronized void doRemoveFile() { try { Iterator<CachedDataInputStream> iterator = openedStreams.iterator(); while (iterator.hasNext()) { CachedDataInputStream stream = iterator.next(); if (stream.isClosed()) { iterator.remove(); } else { stream.closeCachedStream(); } } cacheFs.delete(cachePath, false); if (status.get() != EntryStatus.CLOSED) { status.set(FileCacheEntry.EntryStatus.REMOVED); } } catch (Exception e) { LOG.warn("Failed to delete cache entry {}.", cachePath, e); } } // ----------------------------------------------------- // Status change methods, invoked by different threads. // ----------------------------------------------------- synchronized void loaded() { // 0 -> 1 if (checkStatus(EntryStatus.LOADED)) { retain(); } } /** * Invalidate the cache entry if it is LOADED. * * @return true if the cache entry is actually invalidated, false otherwise. 
*/ synchronized boolean invalidate() { if (switchStatus(EntryStatus.LOADED, EntryStatus.INVALID)) { release(); return true; } return false; } synchronized void close() { if (getAndSetStatus(EntryStatus.CLOSED) == EntryStatus.LOADED) { release(); } else { status.set(EntryStatus.CLOSED); } } // ---------------------------- // Status related methods // ---------------------------- boolean switchStatus(EntryStatus from, EntryStatus to) { if (status.compareAndSet(from, to)) { LOG.trace( "Cache {} (for {}) Switch status from {} to {}.", originalPath, cachePath, from, to); return true; } else { return false; } } boolean checkStatus(EntryStatus to) { return status.get() == to; } EntryStatus getAndSetStatus(EntryStatus to) { return status.getAndSet(to); } }
EntryStatus
java
grpc__grpc-java
binder/src/main/java/io/grpc/binder/internal/BlockInputStream.java
{ "start": 1107, "end": 1261 }
class ____ owned by the new instance. * * <p>This also assumes byte arrays are created by the BlockPool class, and should be returned to it * when this
are
java
quarkusio__quarkus
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/contexts/request/propagation/ActivateRequestContextInterceptorTest.java
{ "start": 9558, "end": 9849 }
class ____ { @Produces @RequestScoped FakeSession produceSession() { return new FakeSession(); } void disposeSession(@Disposes FakeSession session) { session.close(); } } @Singleton static
FakeSessionProducer
java
apache__camel
core/camel-support/src/main/java/org/apache/camel/support/RowMapper.java
{ "start": 1225, "end": 1437 }
interface ____<T, R> { /** * Maps a value from type T to type R. * * @param value the input value to be mapped * @return the mapped output value */ R map(T value); }
RowMapper
java
quarkusio__quarkus
core/deployment/src/test/java/io/quarkus/deployment/util/JandexUtilTest.java
{ "start": 8749, "end": 8846 }
class ____ implements InverseMultiple<Integer, Double> { } public static
InverseMultipleImpl
java
apache__camel
dsl/camel-jbang/camel-jbang-plugin-kubernetes/src/main/java/org/apache/camel/dsl/jbang/core/commands/kubernetes/MetadataHelper.java
{ "start": 14111, "end": 14519 }
class ____ extends DefaultModelReifierFactory { @Override public Route createRoute(CamelContext camelContext, Object routeDefinition) { if (routeDefinition instanceof RouteDefinition) { ((RouteDefinition) routeDefinition).autoStartup(false); } return super.createRoute(camelContext, routeDefinition); } } }
AgentModelReifierFactory
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/parser/deser/InnerClassDeser3.java
{ "start": 210, "end": 516 }
class ____ extends TestCase { public void test_for_inner_class() throws Exception { Model model = JSON.parseObject("{\"items\":{\"123\":{\"id\":123}}}", Model.class); assertNotNull(model.items); assertEquals(123, model.items.get("123").id); } public static
InnerClassDeser3
java
spring-projects__spring-data-jpa
spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/QueryInformation.java
{ "start": 2247, "end": 2551 }
enum ____ { /** * SELECT statement. */ SELECT, /** * INSERT statement. */ INSERT, /** * UPDATE statement. */ UPDATE, /** * DELETE statement. */ DELETE, /** * MERGE statement. */ MERGE, /** * Other statement types. */ OTHER } }
StatementType
java
apache__hadoop
hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/UGIResolver.java
{ "start": 1472, "end": 6356 }
class ____ { public static final int USER_STRID_OFFSET = 40; public static final int GROUP_STRID_OFFSET = 16; public static final long USER_GROUP_STRID_MASK = (1 << 24) - 1; /** * Permission is serialized as a 64-bit long. [0:24):[25:48):[48:64) (in Big * Endian). * The first and the second parts are the string ids of the user and * group name, and the last 16 bits are the permission bits. * @param owner name of owner * @param group name of group * @param permission Permission octects * @return FSImage encoding of permissions */ protected final long buildPermissionStatus( String owner, String group, short permission) { long userId = users.get(owner); if (0L != ((~USER_GROUP_STRID_MASK) & userId)) { throw new IllegalArgumentException("UID must fit in 24 bits"); } long groupId = groups.get(group); if (0L != ((~USER_GROUP_STRID_MASK) & groupId)) { throw new IllegalArgumentException("GID must fit in 24 bits"); } return ((userId & USER_GROUP_STRID_MASK) << USER_STRID_OFFSET) | ((groupId & USER_GROUP_STRID_MASK) << GROUP_STRID_OFFSET) | permission; } private final Map<String, Integer> users; private final Map<String, Integer> groups; public UGIResolver() { this(new HashMap<String, Integer>(), new HashMap<String, Integer>()); } UGIResolver(Map<String, Integer> users, Map<String, Integer> groups) { this.users = users; this.groups = groups; } public Map<Integer, String> ugiMap() { Map<Integer, String> ret = new HashMap<>(); for (Map<String, Integer> m : Arrays.asList(users, groups)) { for (Map.Entry<String, Integer> e : m.entrySet()) { String s = ret.put(e.getValue(), e.getKey()); if (s != null) { throw new IllegalStateException("Duplicate mapping: " + e.getValue() + " " + s + " " + e.getKey()); } } } return ret; } public abstract void addUser(String name); protected void addUser(String name, int id) { Integer uid = users.put(name, id); if (uid != null) { throw new IllegalArgumentException("Duplicate mapping: " + name + " " + uid + " " + id); } } public abstract void 
addGroup(String name); protected void addGroup(String name, int id) { Integer gid = groups.put(name, id); if (gid != null) { throw new IllegalArgumentException("Duplicate mapping: " + name + " " + gid + " " + id); } } protected void resetUGInfo() { users.clear(); groups.clear(); } public long resolve(FileStatus s) { String resolvedGroup = group(s.getGroup()); String resolvedOwner = user(s.getOwner()); FsPermission resolvedPermission = permission(s.getPermission()); return buildPermissionStatus( resolvedOwner, resolvedGroup, resolvedPermission.toShort()); } private long resolve(AclStatus aclStatus) { String resolvedOwner = user(aclStatus.getOwner()); String resolvedGroup = group(aclStatus.getGroup()); FsPermission resolvedPermision = permission(aclStatus.getPermission()); return buildPermissionStatus( resolvedOwner, resolvedGroup, resolvedPermision.toShort()); } protected String user(String s) { return s; } protected String group(String s) { return s; } public FsPermission permission(FsPermission s) { return s; } /** * Get the serialized, local permissions for the external * {@link FileStatus} or {@link AclStatus}. {@code remoteAcl} is used when it * is not null, otherwise {@code remoteStatus} is used. * * @param remoteStatus FileStatus on remote store. * @param remoteAcl AclStatus on external store. * @return serialized, local permissions the FileStatus or AclStatus map to. */ public long getPermissionsProto(FileStatus remoteStatus, AclStatus remoteAcl) { addUGI(remoteStatus, remoteAcl); if (remoteAcl == null) { return resolve(remoteStatus); } else { return resolve(remoteAcl); } } /** * Add the users and groups specified by the given {@link FileStatus} and * {@link AclStatus}. 
* * @param remoteStatus * @param remoteAcl */ private void addUGI(FileStatus remoteStatus, AclStatus remoteAcl) { if (remoteAcl != null) { addUser(remoteAcl.getOwner()); addGroup(remoteAcl.getGroup()); for (AclEntry entry : remoteAcl.getEntries()) { // add the users and groups in this acl entry to ugi String name = entry.getName(); if (name != null) { if (entry.getType() == AclEntryType.USER) { addUser(name); } else if (entry.getType() == AclEntryType.GROUP) { addGroup(name); } } } } else { addUser(remoteStatus.getOwner()); addGroup(remoteStatus.getGroup()); } } }
UGIResolver
java
ReactiveX__RxJava
src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableSwitchTest.java
{ "start": 1495, "end": 48765 }
class ____ extends RxJavaTest { private TestScheduler scheduler; private Scheduler.Worker innerScheduler; private Subscriber<String> subscriber; @Before public void before() { scheduler = new TestScheduler(); innerScheduler = scheduler.createWorker(); subscriber = TestHelper.mockSubscriber(); } @Test public void switchWhenOuterCompleteBeforeInner() { Flowable<Flowable<String>> source = Flowable.unsafeCreate(new Publisher<Flowable<String>>() { @Override public void subscribe(Subscriber<? super Flowable<String>> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 50, Flowable.unsafeCreate(new Publisher<String>() { @Override public void subscribe(Subscriber<? super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 70, "one"); publishNext(subscriber, 100, "two"); publishCompleted(subscriber, 200); } })); publishCompleted(subscriber, 60); } }); Flowable<String> sampled = Flowable.switchOnNext(source); sampled.subscribe(subscriber); InOrder inOrder = inOrder(subscriber); scheduler.advanceTimeTo(350, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, times(2)).onNext(anyString()); inOrder.verify(subscriber, times(1)).onComplete(); } @Test public void switchWhenInnerCompleteBeforeOuter() { Flowable<Flowable<String>> source = Flowable.unsafeCreate(new Publisher<Flowable<String>>() { @Override public void subscribe(Subscriber<? super Flowable<String>> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 10, Flowable.unsafeCreate(new Publisher<String>() { @Override public void subscribe(Subscriber<? super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 0, "one"); publishNext(subscriber, 10, "two"); publishCompleted(subscriber, 20); } })); publishNext(subscriber, 100, Flowable.unsafeCreate(new Publisher<String>() { @Override public void subscribe(Subscriber<? 
super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 0, "three"); publishNext(subscriber, 10, "four"); publishCompleted(subscriber, 20); } })); publishCompleted(subscriber, 200); } }); Flowable<String> sampled = Flowable.switchOnNext(source); sampled.subscribe(subscriber); InOrder inOrder = inOrder(subscriber); scheduler.advanceTimeTo(150, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, never()).onComplete(); inOrder.verify(subscriber, times(1)).onNext("one"); inOrder.verify(subscriber, times(1)).onNext("two"); inOrder.verify(subscriber, times(1)).onNext("three"); inOrder.verify(subscriber, times(1)).onNext("four"); scheduler.advanceTimeTo(250, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, never()).onNext(anyString()); inOrder.verify(subscriber, times(1)).onComplete(); } @Test public void switchWithComplete() { Flowable<Flowable<String>> source = Flowable.unsafeCreate(new Publisher<Flowable<String>>() { @Override public void subscribe(Subscriber<? super Flowable<String>> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 50, Flowable.unsafeCreate(new Publisher<String>() { @Override public void subscribe(final Subscriber<? super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 60, "one"); publishNext(subscriber, 100, "two"); } })); publishNext(subscriber, 200, Flowable.unsafeCreate(new Publisher<String>() { @Override public void subscribe(final Subscriber<? 
super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 0, "three"); publishNext(subscriber, 100, "four"); } })); publishCompleted(subscriber, 250); } }); Flowable<String> sampled = Flowable.switchOnNext(source); sampled.subscribe(subscriber); InOrder inOrder = inOrder(subscriber); scheduler.advanceTimeTo(90, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, never()).onNext(anyString()); verify(subscriber, never()).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); scheduler.advanceTimeTo(125, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, times(1)).onNext("one"); verify(subscriber, never()).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); scheduler.advanceTimeTo(175, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, times(1)).onNext("two"); verify(subscriber, never()).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); scheduler.advanceTimeTo(225, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, times(1)).onNext("three"); verify(subscriber, never()).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); scheduler.advanceTimeTo(350, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, times(1)).onNext("four"); verify(subscriber, never()).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); } @Test public void switchWithError() { Flowable<Flowable<String>> source = Flowable.unsafeCreate(new Publisher<Flowable<String>>() { @Override public void subscribe(Subscriber<? super Flowable<String>> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 50, Flowable.unsafeCreate(new Publisher<String>() { @Override public void subscribe(final Subscriber<? 
super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 50, "one"); publishNext(subscriber, 100, "two"); } })); publishNext(subscriber, 200, Flowable.unsafeCreate(new Publisher<String>() { @Override public void subscribe(Subscriber<? super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 0, "three"); publishNext(subscriber, 100, "four"); } })); publishError(subscriber, 250, new TestException()); } }); Flowable<String> sampled = Flowable.switchOnNext(source); sampled.subscribe(subscriber); InOrder inOrder = inOrder(subscriber); scheduler.advanceTimeTo(90, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, never()).onNext(anyString()); verify(subscriber, never()).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); scheduler.advanceTimeTo(125, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, times(1)).onNext("one"); verify(subscriber, never()).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); scheduler.advanceTimeTo(175, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, times(1)).onNext("two"); verify(subscriber, never()).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); scheduler.advanceTimeTo(225, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, times(1)).onNext("three"); verify(subscriber, never()).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); scheduler.advanceTimeTo(350, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, never()).onNext(anyString()); verify(subscriber, never()).onComplete(); verify(subscriber, times(1)).onError(any(TestException.class)); } @Test public void switchWithSubsequenceComplete() { Flowable<Flowable<String>> source = Flowable.unsafeCreate(new Publisher<Flowable<String>>() { @Override public void subscribe(Subscriber<? 
super Flowable<String>> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 50, Flowable.unsafeCreate(new Publisher<String>() { @Override public void subscribe(Subscriber<? super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 50, "one"); publishNext(subscriber, 100, "two"); } })); publishNext(subscriber, 130, Flowable.unsafeCreate(new Publisher<String>() { @Override public void subscribe(Subscriber<? super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishCompleted(subscriber, 0); } })); publishNext(subscriber, 150, Flowable.unsafeCreate(new Publisher<String>() { @Override public void subscribe(Subscriber<? super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 50, "three"); } })); } }); Flowable<String> sampled = Flowable.switchOnNext(source); sampled.subscribe(subscriber); InOrder inOrder = inOrder(subscriber); scheduler.advanceTimeTo(90, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, never()).onNext(anyString()); verify(subscriber, never()).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); scheduler.advanceTimeTo(125, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, times(1)).onNext("one"); verify(subscriber, never()).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); scheduler.advanceTimeTo(250, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, times(1)).onNext("three"); verify(subscriber, never()).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); } @Test public void switchWithSubsequenceError() { Flowable<Flowable<String>> source = Flowable.unsafeCreate(new Publisher<Flowable<String>>() { @Override public void subscribe(Subscriber<? 
super Flowable<String>> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 50, Flowable.unsafeCreate(new Publisher<String>() { @Override public void subscribe(Subscriber<? super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 50, "one"); publishNext(subscriber, 100, "two"); } })); publishNext(subscriber, 130, Flowable.unsafeCreate(new Publisher<String>() { @Override public void subscribe(Subscriber<? super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishError(subscriber, 0, new TestException()); } })); publishNext(subscriber, 150, Flowable.unsafeCreate(new Publisher<String>() { @Override public void subscribe(Subscriber<? super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 50, "three"); } })); } }); Flowable<String> sampled = Flowable.switchOnNext(source); sampled.subscribe(subscriber); InOrder inOrder = inOrder(subscriber); scheduler.advanceTimeTo(90, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, never()).onNext(anyString()); verify(subscriber, never()).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); scheduler.advanceTimeTo(125, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, times(1)).onNext("one"); verify(subscriber, never()).onComplete(); verify(subscriber, never()).onError(any(Throwable.class)); scheduler.advanceTimeTo(250, TimeUnit.MILLISECONDS); inOrder.verify(subscriber, never()).onNext("three"); verify(subscriber, never()).onComplete(); verify(subscriber, times(1)).onError(any(TestException.class)); } private <T> void publishCompleted(final Subscriber<T> subscriber, long delay) { innerScheduler.schedule(new Runnable() { @Override public void run() { subscriber.onComplete(); } }, delay, TimeUnit.MILLISECONDS); } private <T> void publishError(final Subscriber<T> subscriber, long delay, final Throwable error) { innerScheduler.schedule(new Runnable() { 
@Override public void run() { subscriber.onError(error); } }, delay, TimeUnit.MILLISECONDS); } private <T> void publishNext(final Subscriber<T> subscriber, long delay, final T value) { innerScheduler.schedule(new Runnable() { @Override public void run() { subscriber.onNext(value); } }, delay, TimeUnit.MILLISECONDS); } @Test public void switchIssue737() { // https://github.com/ReactiveX/RxJava/issues/737 Flowable<Flowable<String>> source = Flowable.unsafeCreate(new Publisher<Flowable<String>>() { @Override public void subscribe(Subscriber<? super Flowable<String>> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 0, Flowable.unsafeCreate(new Publisher<String>() { @Override public void subscribe(Subscriber<? super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 10, "1-one"); publishNext(subscriber, 20, "1-two"); // The following events will be ignored publishNext(subscriber, 30, "1-three"); publishCompleted(subscriber, 40); } })); publishNext(subscriber, 25, Flowable.unsafeCreate(new Publisher<String>() { @Override public void subscribe(Subscriber<? 
super String> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); publishNext(subscriber, 10, "2-one"); publishNext(subscriber, 20, "2-two"); publishNext(subscriber, 30, "2-three"); publishCompleted(subscriber, 40); } })); publishCompleted(subscriber, 30); } }); Flowable<String> sampled = Flowable.switchOnNext(source); sampled.subscribe(subscriber); scheduler.advanceTimeTo(1000, TimeUnit.MILLISECONDS); InOrder inOrder = inOrder(subscriber); inOrder.verify(subscriber, times(1)).onNext("1-one"); inOrder.verify(subscriber, times(1)).onNext("1-two"); inOrder.verify(subscriber, times(1)).onNext("2-one"); inOrder.verify(subscriber, times(1)).onNext("2-two"); inOrder.verify(subscriber, times(1)).onNext("2-three"); inOrder.verify(subscriber, times(1)).onComplete(); inOrder.verifyNoMoreInteractions(); } @Test public void backpressure() { PublishProcessor<String> o1 = PublishProcessor.create(); PublishProcessor<String> o2 = PublishProcessor.create(); PublishProcessor<String> o3 = PublishProcessor.create(); PublishProcessor<PublishProcessor<String>> o = PublishProcessor.create(); publishNext(o, 0, o1); publishNext(o, 5, o2); publishNext(o, 10, o3); publishCompleted(o, 15); for (int i = 0; i < 10; i++) { publishNext(o1, i * 5, "a" + (i + 1)); publishNext(o2, 5 + i * 5, "b" + (i + 1)); publishNext(o3, 10 + i * 5, "c" + (i + 1)); } publishCompleted(o1, 45); publishCompleted(o2, 50); publishCompleted(o3, 55); final TestSubscriberEx<String> testSubscriber = new TestSubscriberEx<>(); Flowable.switchOnNext(o).subscribe(new DefaultSubscriber<String>() { private int requested; @Override public void onStart() { requested = 3; request(3); testSubscriber.onSubscribe(new BooleanSubscription()); } @Override public void onComplete() { testSubscriber.onComplete(); } @Override public void onError(Throwable e) { testSubscriber.onError(e); } @Override public void onNext(String s) { testSubscriber.onNext(s); requested--; if (requested == 0) { requested = 3; request(3); } } }); 
scheduler.advanceTimeBy(1, TimeUnit.SECONDS); testSubscriber.assertValues("a1", "b1", "c1", "c2", "c3", "c4", "c5", "c6", "c7", "c8", "c9", "c10"); testSubscriber.assertNoErrors(); testSubscriber.assertTerminated(); } @Test public void unsubscribe() { final AtomicBoolean isUnsubscribed = new AtomicBoolean(); Flowable.switchOnNext( Flowable.unsafeCreate(new Publisher<Flowable<Integer>>() { @Override public void subscribe(final Subscriber<? super Flowable<Integer>> subscriber) { BooleanSubscription bs = new BooleanSubscription(); subscriber.onSubscribe(bs); subscriber.onNext(Flowable.just(1)); isUnsubscribed.set(bs.isCancelled()); } }) ).take(1).subscribe(); assertTrue("Switch doesn't propagate 'unsubscribe'", isUnsubscribed.get()); } /** The upstream producer hijacked the switch producer stopping the requests aimed at the inner observables. */ @Test public void issue2654() { Flowable<String> oneItem = Flowable.just("Hello").mergeWith(Flowable.<String>never()); Flowable<String> src = oneItem.switchMap(new Function<String, Flowable<String>>() { @Override public Flowable<String> apply(final String s) { return Flowable.just(s) .mergeWith(Flowable.interval(10, TimeUnit.MILLISECONDS) .map(new Function<Long, String>() { @Override public String apply(Long i) { return s + " " + i; } })).take(250); } }) .share() ; TestSubscriberEx<String> ts = new TestSubscriberEx<String>() { @Override public void onNext(String t) { super.onNext(t); if (values().size() == 250) { onComplete(); dispose(); } } }; src.subscribe(ts); ts.awaitDone(10, TimeUnit.SECONDS); System.out.println("> testIssue2654: " + ts.values().size()); ts.assertTerminated(); ts.assertNoErrors(); Assert.assertEquals(250, ts.values().size()); } @Test public void initialRequestsAreAdditive() { TestSubscriber<Long> ts = new TestSubscriber<>(0L); Flowable.switchOnNext( Flowable.interval(100, TimeUnit.MILLISECONDS) .map( new Function<Long, Flowable<Long>>() { @Override public Flowable<Long> apply(Long t) { return 
Flowable.just(1L, 2L, 3L); } } ).take(3)) .subscribe(ts); ts.request(Long.MAX_VALUE - 100); ts.request(1); ts.awaitDone(5, TimeUnit.SECONDS); } @Test public void initialRequestsDontOverflow() { TestSubscriber<Long> ts = new TestSubscriber<>(0L); Flowable.switchOnNext( Flowable.interval(100, TimeUnit.MILLISECONDS) .map(new Function<Long, Flowable<Long>>() { @Override public Flowable<Long> apply(Long t) { return Flowable.fromIterable(Arrays.asList(1L, 2L, 3L)).hide(); } }).take(3)).subscribe(ts); ts.request(Long.MAX_VALUE - 1); ts.request(2); ts.awaitDone(5, TimeUnit.SECONDS); assertTrue(ts.values().size() > 0); } @Test public void secondaryRequestsDontOverflow() throws InterruptedException { TestSubscriber<Long> ts = new TestSubscriber<>(0L); Flowable.switchOnNext( Flowable.interval(100, TimeUnit.MILLISECONDS) .map(new Function<Long, Flowable<Long>>() { @Override public Flowable<Long> apply(Long t) { return Flowable.fromIterable(Arrays.asList(1L, 2L, 3L)).hide(); } }).take(3)).subscribe(ts); ts.request(1); //we will miss two of the first observable Thread.sleep(250); ts.request(Long.MAX_VALUE - 1); ts.request(Long.MAX_VALUE - 1); ts.awaitDone(5, TimeUnit.SECONDS); ts.assertValueCount(7); } @Test public void delayErrors() { PublishProcessor<Publisher<Integer>> source = PublishProcessor.create(); TestSubscriberEx<Integer> ts = source.switchMapDelayError(Functions.<Publisher<Integer>>identity()) .to(TestHelper.<Integer>testConsumer()); ts.assertNoValues() .assertNoErrors() .assertNotComplete(); source.onNext(Flowable.just(1)); source.onNext(Flowable.<Integer>error(new TestException("Forced failure 1"))); source.onNext(Flowable.just(2, 3, 4)); source.onNext(Flowable.<Integer>error(new TestException("Forced failure 2"))); source.onNext(Flowable.just(5)); source.onError(new TestException("Forced failure 3")); ts.assertValues(1, 2, 3, 4, 5) .assertNotComplete() .assertError(CompositeException.class); List<Throwable> errors = ExceptionHelper.flatten(ts.errors().get(0)); 
TestHelper.assertError(errors, 0, TestException.class, "Forced failure 1"); TestHelper.assertError(errors, 1, TestException.class, "Forced failure 2"); TestHelper.assertError(errors, 2, TestException.class, "Forced failure 3"); } @Test public void switchOnNextPrefetch() { final List<Integer> list = new ArrayList<>(); Flowable<Integer> source = Flowable.range(1, 10).hide().doOnNext(new Consumer<Integer>() { @Override public void accept(Integer v) throws Exception { list.add(v); } }); Flowable.switchOnNext(Flowable.just(source).hide(), 2) .test(1); assertEquals(Arrays.asList(1, 2, 3), list); } @Test public void switchOnNextDelayError() { final List<Integer> list = new ArrayList<>(); Flowable<Integer> source = Flowable.range(1, 10).hide().doOnNext(new Consumer<Integer>() { @Override public void accept(Integer v) throws Exception { list.add(v); } }); Flowable.switchOnNextDelayError(Flowable.just(source).hide()) .test(1); assertEquals(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), list); } @Test public void switchOnNextDelayErrorPrefetch() { final List<Integer> list = new ArrayList<>(); Flowable<Integer> source = Flowable.range(1, 10).hide().doOnNext(new Consumer<Integer>() { @Override public void accept(Integer v) throws Exception { list.add(v); } }); Flowable.switchOnNextDelayError(Flowable.just(source).hide(), 2) .test(1); assertEquals(Arrays.asList(1, 2, 3), list); } @Test public void switchOnNextDelayErrorWithError() { PublishProcessor<Flowable<Integer>> pp = PublishProcessor.create(); TestSubscriber<Integer> ts = Flowable.switchOnNextDelayError(pp).test(); pp.onNext(Flowable.just(1)); pp.onNext(Flowable.<Integer>error(new TestException())); pp.onNext(Flowable.range(2, 4)); pp.onComplete(); ts.assertFailure(TestException.class, 1, 2, 3, 4, 5); } @Test public void switchOnNextDelayErrorBufferSize() { PublishProcessor<Flowable<Integer>> pp = PublishProcessor.create(); TestSubscriber<Integer> ts = Flowable.switchOnNextDelayError(pp, 2).test(); 
pp.onNext(Flowable.just(1)); pp.onNext(Flowable.range(2, 4)); pp.onComplete(); ts.assertResult(1, 2, 3, 4, 5); } @Test public void switchMapDelayErrorEmptySource() { assertSame(Flowable.empty(), Flowable.<Object>empty() .switchMapDelayError(new Function<Object, Publisher<Integer>>() { @Override public Publisher<Integer> apply(Object v) throws Exception { return Flowable.just(1); } }, 16)); } @Test public void switchMapDelayErrorJustSource() { Flowable.just(0) .switchMapDelayError(new Function<Object, Publisher<Integer>>() { @Override public Publisher<Integer> apply(Object v) throws Exception { return Flowable.just(1); } }, 16) .test() .assertResult(1); } @Test public void switchMapErrorEmptySource() { assertSame(Flowable.empty(), Flowable.<Object>empty() .switchMap(new Function<Object, Publisher<Integer>>() { @Override public Publisher<Integer> apply(Object v) throws Exception { return Flowable.just(1); } }, 16)); } @Test public void switchMapJustSource() { Flowable.just(0) .switchMap(new Function<Object, Publisher<Integer>>() { @Override public Publisher<Integer> apply(Object v) throws Exception { return Flowable.just(1); } }, 16) .test() .assertResult(1); } @Test public void switchMapInnerCancelled() { PublishProcessor<Integer> pp = PublishProcessor.create(); TestSubscriber<Integer> ts = Flowable.just(1) .switchMap(Functions.justFunction(pp)) .test(); assertTrue(pp.hasSubscribers()); ts.cancel(); assertFalse(pp.hasSubscribers()); } @Test public void dispose() { TestHelper.checkDisposed(Flowable.switchOnNext( Flowable.just(Flowable.just(1)).hide())); } @Test public void nextSourceErrorRace() { for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) { List<Throwable> errors = TestHelper.trackPluginErrors(); try { final PublishProcessor<Integer> pp1 = PublishProcessor.create(); final PublishProcessor<Integer> pp2 = PublishProcessor.create(); pp1.switchMap(new Function<Integer, Flowable<Integer>>() { @Override public Flowable<Integer> apply(Integer v) throws 
Exception { if (v == 1) { return pp2; } return Flowable.never(); } }) .test(); Runnable r1 = new Runnable() { @Override public void run() { pp1.onNext(2); } }; final TestException ex = new TestException(); Runnable r2 = new Runnable() { @Override public void run() { pp2.onError(ex); } }; TestHelper.race(r1, r2); for (Throwable e : errors) { assertTrue(e.toString(), e instanceof TestException); } } finally { RxJavaPlugins.reset(); } } } @Test public void outerInnerErrorRace() { for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) { List<Throwable> errors = TestHelper.trackPluginErrors(); try { final PublishProcessor<Integer> pp1 = PublishProcessor.create(); final PublishProcessor<Integer> pp2 = PublishProcessor.create(); pp1.switchMap(new Function<Integer, Flowable<Integer>>() { @Override public Flowable<Integer> apply(Integer v) throws Exception { if (v == 1) { return pp2; } return Flowable.never(); } }) .test(); final TestException ex1 = new TestException(); Runnable r1 = new Runnable() { @Override public void run() { pp1.onError(ex1); } }; final TestException ex2 = new TestException(); Runnable r2 = new Runnable() { @Override public void run() { pp2.onError(ex2); } }; TestHelper.race(r1, r2); for (Throwable e : errors) { assertTrue(e.toString(), e instanceof TestException); } } finally { RxJavaPlugins.reset(); } } } @Test public void nextCancelRace() { for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) { final PublishProcessor<Integer> pp1 = PublishProcessor.create(); final TestSubscriber<Integer> ts = pp1.switchMap(new Function<Integer, Flowable<Integer>>() { @Override public Flowable<Integer> apply(Integer v) throws Exception { return Flowable.never(); } }) .test(); Runnable r1 = new Runnable() { @Override public void run() { pp1.onNext(2); } }; Runnable r2 = new Runnable() { @Override public void run() { ts.cancel(); } }; TestHelper.race(r1, r2); } } @Test public void mapperThrows() { Flowable.just(1).hide() .switchMap(new Function<Integer, 
Flowable<Object>>() { @Override public Flowable<Object> apply(Integer v) throws Exception { throw new TestException(); } }) .test() .assertFailure(TestException.class); } @Test public void badMainSource() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { new Flowable<Integer>() { @Override protected void subscribeActual(Subscriber<? super Integer> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); subscriber.onComplete(); subscriber.onError(new TestException()); subscriber.onComplete(); } } .switchMap(Functions.justFunction(Flowable.never())) .test() .assertResult(); TestHelper.assertUndeliverable(errors, 0, TestException.class); } finally { RxJavaPlugins.reset(); } } @Test public void emptyInner() { Flowable.range(1, 5) .switchMap(Functions.justFunction(Flowable.empty())) .test() .assertResult(); } @Test public void justInner() { Flowable.range(1, 5) .switchMap(Functions.justFunction(Flowable.just(1))) .test() .assertResult(1, 1, 1, 1, 1); } @Test public void badInnerSource() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { Flowable.just(1).hide() .switchMap(Functions.justFunction(new Flowable<Integer>() { @Override protected void subscribeActual(Subscriber<? 
super Integer> subscriber) { subscriber.onSubscribe(new BooleanSubscription()); subscriber.onError(new TestException()); subscriber.onComplete(); subscriber.onError(new TestException()); subscriber.onComplete(); } })) .test() .assertFailure(TestException.class); TestHelper.assertUndeliverable(errors, 0, TestException.class); } finally { RxJavaPlugins.reset(); } } @Test public void innerCompletesReentrant() { final PublishProcessor<Integer> pp = PublishProcessor.create(); TestSubscriber<Integer> ts = new TestSubscriber<Integer>() { @Override public void onNext(Integer t) { super.onNext(t); pp.onComplete(); } }; Flowable.just(1).hide() .switchMap(Functions.justFunction(pp)) .subscribe(ts); pp.onNext(1); ts.assertResult(1); } @Test public void innerErrorsReentrant() { final PublishProcessor<Integer> pp = PublishProcessor.create(); TestSubscriber<Integer> ts = new TestSubscriber<Integer>() { @Override public void onNext(Integer t) { super.onNext(t); pp.onError(new TestException()); } }; Flowable.just(1).hide() .switchMap(Functions.justFunction(pp)) .subscribe(ts); pp.onNext(1); ts.assertFailure(TestException.class, 1); } @Test public void scalarMap() { Flowable.switchOnNext(Flowable.just(Flowable.just(1))) .test() .assertResult(1); } @Test public void scalarMapDelayError() { Flowable.switchOnNextDelayError(Flowable.just(Flowable.just(1))) .test() .assertResult(1); } @Test public void scalarXMap() { Flowable.fromCallable(Functions.justCallable(1)) .switchMap(Functions.justFunction(Flowable.just(1))) .test() .assertResult(1); } @Test public void badSource() { TestHelper.checkBadSourceFlowable(new Function<Flowable<Integer>, Object>() { @Override public Object apply(Flowable<Integer> f) throws Exception { return f.switchMap(Functions.justFunction(Flowable.just(1))); } }, false, 1, 1, 1); } @Test public void innerOverflow() { Flowable.just(1).hide() .switchMap(Functions.justFunction(new Flowable<Integer>() { @Override protected void subscribeActual(Subscriber<? 
super Integer> s) { s.onSubscribe(new BooleanSubscription()); for (int i = 0; i < 10; i++) { s.onNext(i); } } }), 8) .test(1L) .assertFailure(QueueOverflowException.class, 0); } @Test public void drainCancelRace() { for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) { final TestSubscriber<Integer> ts = new TestSubscriber<>(); final PublishProcessor<Integer> pp = PublishProcessor.create(); Flowable.just(1).hide() .switchMap(Functions.justFunction(pp)) .subscribe(ts); Runnable r1 = new Runnable() { @Override public void run() { ts.cancel(); } }; Runnable r2 = new Runnable() { @Override public void run() { pp.onNext(1); } }; TestHelper.race(r1, r2); } } @Test public void fusedInnerCrash() { Flowable.just(1).hide() .switchMap(Functions.justFunction(Flowable.just(1) .map(new Function<Integer, Object>() { @Override public Object apply(Integer v) throws Exception { throw new TestException(); } }) .compose(TestHelper.<Object>flowableStripBoundary()) ) ) .test() .assertFailure(TestException.class); } @Test public void innerCancelledOnMainError() { final PublishProcessor<Integer> main = PublishProcessor.create(); final PublishProcessor<Integer> inner = PublishProcessor.create(); TestSubscriber<Integer> ts = main.switchMap(Functions.justFunction(inner)) .test(); assertTrue(main.hasSubscribers()); main.onNext(1); assertTrue(inner.hasSubscribers()); main.onError(new TestException()); assertFalse(inner.hasSubscribers()); ts.assertFailure(TestException.class); } @Test public void fusedBoundary() { String thread = Thread.currentThread().getName(); Flowable.range(1, 10000) .switchMap(new Function<Integer, Flowable<Object>>() { @Override public Flowable<Object> apply(Integer v) throws Exception { return Flowable.just(2).hide() .observeOn(Schedulers.single()) .map(new Function<Integer, Object>() { @Override public Object apply(Integer w) throws Exception { return Thread.currentThread().getName(); } }); } }) .to(TestHelper.<Object>testConsumer()) .awaitDone(5, TimeUnit.SECONDS) 
.assertNever(thread) .assertNoErrors() .assertComplete(); } @Test public void undeliverableUponCancel() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { final TestSubscriberEx<Integer> ts = new TestSubscriberEx<>(); Flowable.just(1) .map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) throws Throwable { ts.cancel(); throw new TestException(); } }) .switchMap(new Function<Integer, Publisher<Integer>>() { @Override public Publisher<Integer> apply(Integer v) throws Throwable { return Flowable.just(v).hide(); } }) .subscribe(ts); ts.assertEmpty(); TestHelper.assertUndeliverable(errors, 0, TestException.class); } finally { RxJavaPlugins.reset(); } } @Test public void switchMapFusedIterable() { Flowable.range(1, 2) .switchMap(new Function<Integer, Publisher<Integer>>() { @Override public Publisher<Integer> apply(Integer v) throws Throwable { return Flowable.fromIterable(Arrays.asList(v * 10)); } }) .test() .assertResult(10, 20); } @Test public void switchMapHiddenIterable() { Flowable.range(1, 2) .switchMap(new Function<Integer, Publisher<Integer>>() { @Override public Publisher<Integer> apply(Integer v) throws Throwable { return Flowable.fromIterable(Arrays.asList(v * 10)).hide(); } }) .test() .assertResult(10, 20); } @Test public void asyncFusedInner() { Flowable.just(1) .hide() .switchMap(v -> Flowable.fromCallable(() -> 1)) .test() .assertResult(1); } @Test public void innerIgnoresCancelAndErrors() throws Throwable { TestHelper.withErrorTracking(errors -> { PublishProcessor<Integer> pp = PublishProcessor.create(); TestSubscriber<Object> ts = pp .switchMap(v -> { if (v == 1) { return Flowable.unsafeCreate(s -> { s.onSubscribe(new BooleanSubscription()); pp.onNext(2); s.onError(new TestException()); }); } return Flowable.never(); }) .test(); pp.onNext(1); ts.assertEmpty(); TestHelper.assertUndeliverable(errors, 0, TestException.class); }); } @Test public void doubleOnSubscribe() { TestHelper.checkDoubleOnSubscribeFlowable(f 
-> f.switchMap(v -> Flowable.never())); } @Test public void badRequest() { TestHelper.assertBadRequestReported(Flowable.never().switchMap(v -> Flowable.never())); } @Test public void innerFailed() { BehaviorProcessor.createDefault(Flowable.error(new TestException())) .switchMap(v -> v) .test() .assertFailure(TestException.class) ; } @Test public void innerCompleted() { BehaviorProcessor.createDefault(Flowable.empty().hide()) .switchMap(v -> v) .test() .assertEmpty() ; } @Test public void innerCompletedBackpressureBoundary() { PublishProcessor<Integer> pp = PublishProcessor.create(); TestSubscriber<Integer> ts = BehaviorProcessor.createDefault(pp) .onBackpressureBuffer() .switchMap(v -> v) .test(1L) ; ts.assertEmpty(); pp.onNext(1); pp.onComplete(); ts.assertValuesOnly(1); } @Test public void innerCompletedDelayError() { BehaviorProcessor.createDefault(Flowable.empty().hide()) .switchMapDelayError(v -> v) .test() .assertEmpty() ; } @Test public void innerCompletedBackpressureBoundaryDelayError() { PublishProcessor<Integer> pp = PublishProcessor.create(); TestSubscriber<Integer> ts = BehaviorProcessor.createDefault(pp) .onBackpressureBuffer() .switchMapDelayError(v -> v) .test(1L) ; ts.assertEmpty(); pp.onNext(1); pp.onComplete(); ts.assertValuesOnly(1); } @Test public void cancellationShouldTriggerInnerCancellationRace() throws Throwable { AtomicInteger outer = new AtomicInteger(); AtomicInteger inner = new AtomicInteger(); int n = 10_000; for (int i = 0; i < n; i++) { Flowable.<Integer>create(it -> { it.onNext(0); }, BackpressureStrategy.MISSING) .switchMap(v -> createFlowable(inner)) .observeOn(Schedulers.computation()) .doFinally(() -> { outer.incrementAndGet(); }) .take(1) .blockingSubscribe(v -> { }, Throwable::printStackTrace); } Thread.sleep(100); assertEquals(inner.get(), outer.get()); assertEquals(n, inner.get()); } Flowable<Integer> createFlowable(AtomicInteger inner) { return Flowable.<Integer>unsafeCreate(s -> { SerializedSubscriber<Integer> it = new 
SerializedSubscriber<>(s); it.onSubscribe(new BooleanSubscription()); Schedulers.io().scheduleDirect(() -> { it.onNext(1); }, 0, TimeUnit.MILLISECONDS); Schedulers.io().scheduleDirect(() -> { it.onNext(2); }, 0, TimeUnit.MILLISECONDS); }) .doFinally(() -> { inner.incrementAndGet(); }); } @Test public void innerOnSubscribeOuterCancelRace() { TestSubscriber<Integer> ts = new TestSubscriber<Integer>(); Flowable.just(1) .hide() .switchMap(v -> Flowable.just(1) .doOnSubscribe(d -> ts.cancel()) .scan(1, (a, b) -> a) ) .subscribe(ts); ts.assertEmpty(); } }
FlowableSwitchTest
java
netty__netty
codec-redis/src/test/java/io/netty/handler/codec/redis/RedisEncoderTest.java
{ "start": 1293, "end": 6448 }
class ____ { private EmbeddedChannel channel; @BeforeEach public void setup() throws Exception { channel = new EmbeddedChannel(new RedisEncoder()); } @AfterEach public void teardown() throws Exception { assertFalse(channel.finish()); } @Test public void shouldEncodeInlineCommand() { RedisMessage msg = new InlineCommandRedisMessage("ping"); boolean result = channel.writeOutbound(msg); assertTrue(result); ByteBuf written = readAll(channel); assertArrayEquals(bytesOf("ping\r\n"), bytesOf(written)); written.release(); } @Test public void shouldEncodeSimpleString() { RedisMessage msg = new SimpleStringRedisMessage("simple"); boolean result = channel.writeOutbound(msg); assertTrue(result); ByteBuf written = readAll(channel); assertArrayEquals(bytesOf("+simple\r\n"), bytesOf(written)); written.release(); } @Test public void shouldEncodeError() { RedisMessage msg = new ErrorRedisMessage("error1"); boolean result = channel.writeOutbound(msg); assertTrue(result); ByteBuf written = readAll(channel); assertArrayEquals(bytesOf("-error1\r\n"), bytesOf(written)); written.release(); } @Test public void shouldEncodeInteger() { RedisMessage msg = new IntegerRedisMessage(1234L); boolean result = channel.writeOutbound(msg); assertTrue(result); ByteBuf written = readAll(channel); assertArrayEquals(bytesOf(":1234\r\n"), bytesOf(written)); written.release(); } @Test public void shouldEncodeBulkStringContent() { RedisMessage header = new BulkStringHeaderRedisMessage(16); RedisMessage body1 = new DefaultBulkStringRedisContent(byteBufOf("bulk\nstr").retain()); RedisMessage body2 = new DefaultLastBulkStringRedisContent(byteBufOf("ing\ntest").retain()); assertTrue(channel.writeOutbound(header)); assertTrue(channel.writeOutbound(body1)); assertTrue(channel.writeOutbound(body2)); ByteBuf written = readAll(channel); assertArrayEquals(bytesOf("$16\r\nbulk\nstring\ntest\r\n"), bytesOf(written)); written.release(); } @Test public void shouldEncodeFullBulkString() { ByteBuf bulkString = 
byteBufOf("bulk\nstring\ntest").retain(); int length = bulkString.readableBytes(); RedisMessage msg = new FullBulkStringRedisMessage(bulkString); boolean result = channel.writeOutbound(msg); assertTrue(result); ByteBuf written = readAll(channel); assertArrayEquals(bytesOf("$" + length + "\r\nbulk\nstring\ntest\r\n"), bytesOf(written)); written.release(); } @Test public void shouldEncodeSimpleArray() { List<RedisMessage> children = new ArrayList<RedisMessage>(); children.add(new FullBulkStringRedisMessage(byteBufOf("foo").retain())); children.add(new FullBulkStringRedisMessage(byteBufOf("bar").retain())); RedisMessage msg = new ArrayRedisMessage(children); boolean result = channel.writeOutbound(msg); assertTrue(result); ByteBuf written = readAll(channel); assertArrayEquals(bytesOf("*2\r\n$3\r\nfoo\r\n$3\r\nbar\r\n"), bytesOf(written)); written.release(); } @Test public void shouldEncodeNullArray() { RedisMessage msg = ArrayRedisMessage.NULL_INSTANCE; boolean result = channel.writeOutbound(msg); assertTrue(result); ByteBuf written = readAll(channel); assertArrayEquals(bytesOf("*-1\r\n"), bytesOf(written)); written.release(); } @Test public void shouldEncodeEmptyArray() { RedisMessage msg = ArrayRedisMessage.EMPTY_INSTANCE; boolean result = channel.writeOutbound(msg); assertTrue(result); ByteBuf written = readAll(channel); assertArrayEquals(bytesOf("*0\r\n"), bytesOf(written)); written.release(); } @Test public void shouldEncodeNestedArray() { List<RedisMessage> grandChildren = new ArrayList<RedisMessage>(); grandChildren.add(new FullBulkStringRedisMessage(byteBufOf("bar"))); grandChildren.add(new IntegerRedisMessage(-1234L)); List<RedisMessage> children = new ArrayList<RedisMessage>(); children.add(new SimpleStringRedisMessage("foo")); children.add(new ArrayRedisMessage(grandChildren)); RedisMessage msg = new ArrayRedisMessage(children); boolean result = channel.writeOutbound(msg); assertTrue(result); ByteBuf written = readAll(channel); 
assertArrayEquals(bytesOf("*2\r\n+foo\r\n*2\r\n$3\r\nbar\r\n:-1234\r\n"), bytesOf(written)); written.release(); } private static ByteBuf readAll(EmbeddedChannel channel) { ByteBuf buf = Unpooled.buffer(); ByteBuf read; while ((read = channel.readOutbound()) != null) { buf.writeBytes(read); read.release(); } return buf; } }
RedisEncoderTest
java
apache__kafka
server-common/src/main/java/org/apache/kafka/server/metrics/TimeRatio.java
{ "start": 1352, "end": 3817 }
class ____ implements MeasurableStat { private long intervalStartTimestampMs = -1; private long lastRecordedTimestampMs = -1; private double totalRecordedDurationMs = 0; private final double defaultRatio; public TimeRatio(double defaultRatio) { if (defaultRatio < 0.0 || defaultRatio > 1.0) { throw new IllegalArgumentException("Invalid ratio: value " + defaultRatio + " is not between 0 and 1."); } this.defaultRatio = defaultRatio; } @Override public double measure(MetricConfig config, long currentTimestampMs) { return measure(); } @Override public void record(MetricConfig config, double value, long currentTimestampMs) { record(value, currentTimestampMs); } /** * Measures the ratio of recorded duration to the interval duration * since the last measurement. * * @return The measured ratio value between 0.0 and 1.0 */ public double measure() { if (lastRecordedTimestampMs < 0) { // Return the default value if no recordings have been captured. return defaultRatio; } else { // We measure the ratio over the interval double intervalDurationMs = Math.max(lastRecordedTimestampMs - intervalStartTimestampMs, 0); final double ratio; if (intervalDurationMs == 0) { ratio = defaultRatio; } else if (totalRecordedDurationMs > intervalDurationMs) { ratio = 1.0; } else { ratio = totalRecordedDurationMs / intervalDurationMs; } // The next interval begins at the last recorded timestamp intervalStartTimestampMs = lastRecordedTimestampMs; totalRecordedDurationMs = 0; return ratio; } } /** * Records a duration value at the specified timestamp. * * @param value The duration value to record * @param currentTimestampMs The current timestamp in milliseconds */ public void record(double value, long currentTimestampMs) { if (intervalStartTimestampMs < 0) { // Discard the initial value since the value occurred prior to the interval start intervalStartTimestampMs = currentTimestampMs; } else { totalRecordedDurationMs += value; lastRecordedTimestampMs = currentTimestampMs; } } }
TimeRatio
java
micronaut-projects__micronaut-core
inject/src/main/java/io/micronaut/context/DefaultBeanContext.java
{ "start": 11712, "end": 77475 }
class ____. * * @param resourceLoader The resource loader */ public DefaultBeanContext(@NonNull ClassPathResourceLoader resourceLoader) { this(new BeanContextConfiguration() { @NonNull @Override public ClassLoader getClassLoader() { ArgumentUtils.requireNonNull("resourceLoader", resourceLoader); return resourceLoader.getClassLoader(); } }); } /** * Creates a new bean context with the given configuration. * * @param contextConfiguration The context configuration */ public DefaultBeanContext(@NonNull BeanContextConfiguration contextConfiguration) { ArgumentUtils.requireNonNull("contextConfiguration", contextConfiguration); // enable classloader logging System.setProperty(ClassUtils.PROPERTY_MICRONAUT_CLASSLOADER_LOGGING, "true"); this.classLoader = contextConfiguration.getClassLoader(); this.customScopeRegistry = Objects.requireNonNull(createCustomScopeRegistry(), "Scope registry cannot be null"); Set<Class<? extends Annotation>> eagerInitAnnotated = contextConfiguration.getEagerInitAnnotated(); List<String> configuredEagerSingletonAnnotations = new ArrayList<>(eagerInitAnnotated.size()); for (Class<? extends Annotation> ann : eagerInitAnnotated) { configuredEagerSingletonAnnotations.add(ann.getName()); } this.beanContextConfiguration = contextConfiguration; BeanResolutionTraceConfiguration traceConfiguration = beanContextConfiguration .getTraceConfiguration(); this.traceMode = traceConfiguration.mode(); this.tracePatterns = traceConfiguration.classPatterns(); this.eventsEnabled = contextConfiguration.eventsEnabled(); this.eagerBeansEnabled = contextConfiguration.eagerBeansEnabled(); this.conversionService = new DefaultMutableConversionService(); beanDefinitionProvider = new DefaultBeanDefinitionService(beanContextConfiguration); } /** * Allows customizing the custom scope registry. * * @return The custom scope registry to use. 
* @since 3.0.0 */ @NonNull protected CustomScopeRegistry createCustomScopeRegistry() { return new DefaultCustomScopeRegistry(this); } /** * @return The custom scope registry */ @Internal @NonNull CustomScopeRegistry getCustomScopeRegistry() { return customScopeRegistry; } @Override public boolean isRunning() { return running.get() && !initializing.get(); } /** * The start method will read all bean definition classes found on the classpath and initialize any pre-required * state. */ @Override public synchronized BeanContext start() { if (!isRunning()) { if (initializing.compareAndSet(false, true)) { if (LOG.isDebugEnabled()) { LOG.debug("Starting BeanContext"); } configureAndStartContext(); if (LOG.isDebugEnabled()) { String activeConfigurations = beanConfigurations .values() .stream() .filter(config -> config.isEnabled(this)) .map(BeanConfiguration::getName) .collect(Collectors.joining(",")); if (StringUtils.isNotEmpty(activeConfigurations)) { LOG.debug("Loaded active configurations: {}", activeConfigurations); } } if (LOG.isDebugEnabled()) { LOG.debug("BeanContext Started."); } publishEvent(new StartupEvent(this)); } running.set(true); initializing.set(false); } return this; } /** * Registers conversion service. */ protected void registerConversionService() { //noinspection resource registerSingleton(MutableConversionService.class, conversionService, null, false); } /** * Tracks when a bean or configuration is disabled. 
* * @param conditionContext The conditional context * @param <C> The component type */ @Internal <C extends AnnotationMetadataProvider> void trackDisabledComponent(@NonNull ConditionContext<C> conditionContext) { C component = conditionContext.getComponent(); List<String> reasons = conditionContext.getFailures().stream().map(Failure::getMessage).toList(); if (component instanceof QualifiedBeanType<?> beanType) { beanDefinitionProvider.trackDisabled(beanType, reasons); } else if (component instanceof BeanConfiguration configuration) { this.disabledConfigurations.put(configuration.getName(), reasons); } } /** * The close method will shut down the context calling {@link jakarta.annotation.PreDestroy} hooks on loaded * singletons. */ @Override public synchronized BeanContext stop() { if (terminating.compareAndSet(false, true) && isRunning()) { if (LOG.isDebugEnabled()) { LOG.debug("Stopping BeanContext"); } publishEvent(new ShutdownEvent(this)); attributes.clear(); // need to sort registered singletons so that beans with that require other beans appear first List<BeanRegistration> objects = topologicalSort(singletonScope.getBeanRegistrations()); Map<Boolean, List<BeanRegistration>> result = objects.stream().collect(Collectors.groupingBy(br -> br.bean != null && (br.bean instanceof BeanPreDestroyEventListener || br.bean instanceof BeanDestroyedEventListener))); List<BeanRegistration> listeners = result.get(true); if (listeners != null) { // destroy all bean destroy listeners at the end objects.clear(); objects.addAll(result.get(false)); objects.addAll(listeners); } Set<Integer> processed = new HashSet<>(); for (BeanRegistration beanRegistration : objects) { Object bean = beanRegistration.bean; int sysId = System.identityHashCode(bean); if (processed.contains(sysId)) { continue; } if (LOG_LIFECYCLE.isDebugEnabled()) { LOG_LIFECYCLE.debug("Destroying bean [{}] with identifier [{}]", bean, beanRegistration.identifier); } processed.add(sysId); try { 
destroyBean(beanRegistration); } catch (BeanDestructionException e) { if (LOG.isErrorEnabled()) { LOG.error(e.getMessage(), e); } } } if (checkEnabledBeans != null) { checkEnabledBeans.cancel(true); } singlesInCreation.clear(); singletonBeanRegistrations.clear(); beanConcreteCandidateCache.clear(); beanCandidateCache.clear(); beanProxyTargetCache.clear(); containsBeanCache.clear(); beanConfigurations.clear(); disabledConfigurations.clear(); singletonScope.clear(); attributes.clear(); beanInitializedEventListeners = null; beanCreationEventListeners = null; beanPreDestroyEventListeners = null; beanDestroyedEventListeners = null; terminating.set(false); running.set(false); configured.set(false); if (traceMode != BeanResolutionTraceMode.NONE) { traceMode.getTracer().ifPresent(tracer -> { tracer.traceContextShutdown(this); }); } beanDefinitionProvider.reset(); } return this; } @Override @NonNull public AnnotationMetadata resolveMetadata(Class<?> type) { if (type == null) { return AnnotationMetadata.EMPTY_METADATA; } return findBeanDefinitionInternal(Argument.of(type), null) .map(AnnotationMetadataProvider::getAnnotationMetadata) .orElse(AnnotationMetadata.EMPTY_METADATA); } @Override public <T> Optional<T> refreshBean(@Nullable BeanIdentifier identifier) { if (identifier == null) { return Optional.empty(); } BeanRegistration<T> beanRegistration = singletonScope.findBeanRegistration(identifier); if (beanRegistration != null) { refreshBean(beanRegistration); return Optional.of(beanRegistration.bean); } return Optional.empty(); } @Override public <T> void refreshBean(@NonNull BeanRegistration<T> beanRegistration) { Objects.requireNonNull(beanRegistration, "BeanRegistration cannot be null"); T bean = beanRegistration.bean; if (bean != null) { BeanDefinition<T> definition = beanRegistration.definition(); if (definition instanceof InjectableBeanDefinition<T> injectableBeanDefinition) { injectableBeanDefinition.inject(this, bean); } } } @Override public 
Collection<BeanRegistration<?>> getActiveBeanRegistrations(Qualifier<?> qualifier) { if (qualifier == null) { return Collections.emptyList(); } return singletonScope.getBeanRegistrations(qualifier); } @Override public <T> Collection<BeanRegistration<T>> getActiveBeanRegistrations(Class<T> beanType) { if (beanType == null) { return Collections.emptyList(); } return singletonScope.getBeanRegistrations(beanType); } @Override public <T> Collection<BeanRegistration<T>> getBeanRegistrations(Class<T> beanType) { if (beanType == null) { return Collections.emptyList(); } return getBeanRegistrations(null, Argument.of(beanType), null); } @Override public <T> BeanRegistration<T> getBeanRegistration(Class<T> beanType, Qualifier<T> qualifier) { return getBeanRegistration(null, Argument.of(beanType), qualifier); } @Override public <T> Collection<BeanRegistration<T>> getBeanRegistrations(Class<T> beanType, Qualifier<T> qualifier) { if (beanType == null) { return Collections.emptyList(); } return getBeanRegistrations(null, Argument.of(beanType), null); } @Override public <T> Collection<BeanRegistration<T>> getBeanRegistrations(Argument<T> beanType, Qualifier<T> qualifier) { return getBeanRegistrations( null, Objects.requireNonNull(beanType, "Bean type cannot be null"), qualifier ); } @Override public <T> BeanRegistration<T> getBeanRegistration(Argument<T> beanType, Qualifier<T> qualifier) { return getBeanRegistration( null, Objects.requireNonNull(beanType, "Bean type cannot be null"), qualifier ); } @Override public <T> BeanRegistration<T> getBeanRegistration(BeanDefinition<T> beanDefinition) { return resolveBeanRegistration(null, beanDefinition); } @Override public <T> Optional<BeanRegistration<T>> findBeanRegistration(T bean) { if (bean == null) { return Optional.empty(); } BeanRegistration<T> beanRegistration = singletonScope.findBeanRegistration(bean); if (beanRegistration != null) { return Optional.of(beanRegistration); } return customScopeRegistry.findBeanRegistration(bean); 
} @Override public <T, R> Optional<MethodExecutionHandle<T, R>> findExecutionHandle(Class<T> beanType, String method, Class<?>... arguments) { return findExecutionHandle(beanType, null, method, arguments); } @Override public MethodExecutionHandle<?, Object> createExecutionHandle(BeanDefinition<?> beanDefinition, ExecutableMethod<Object, ?> method) { if (method instanceof UnsafeExecutable<?, ?>) { return new BeanContextUnsafeExecutionHandle(method, beanDefinition, (UnsafeExecutable<Object, Object>) method); } return new BeanContextExecutionHandle(method, beanDefinition); } @SuppressWarnings("unchecked") @Override public <T, R> Optional<MethodExecutionHandle<T, R>> findExecutionHandle(Class<T> beanType, Qualifier<?> q, String method, Class<?>... arguments) { Qualifier<T> qualifier = (Qualifier<T>) q; Optional<BeanDefinition<T>> foundBean = findBeanDefinition(beanType, qualifier); if (foundBean.isEmpty()) { return Optional.empty(); } BeanDefinition<T> beanDefinition = foundBean.get(); Optional<ExecutableMethod<T, R>> foundMethod = beanDefinition.findMethod(method, arguments); if (foundMethod.isEmpty()) { foundMethod = beanDefinition.<R>findPossibleMethods(method) .findFirst() .filter(m -> { Class<?>[] argTypes = m.getArgumentTypes(); if (argTypes.length == arguments.length) { for (int i = 0; i < argTypes.length; i++) { if (!arguments[i].isAssignableFrom(argTypes[i])) { return false; } } return true; } return false; }); } return foundMethod.map(executableMethod -> new BeanExecutionHandle<>(this, beanDefinition, beanType, qualifier, executableMethod)); } @Override public <T, R> Optional<ExecutableMethod<T, R>> findExecutableMethod(Class<T> beanType, String method, Class<?>[] arguments) { if (beanType == null) { return Optional.empty(); } Collection<BeanDefinition<T>> definitions = getBeanDefinitions(beanType); if (definitions.isEmpty()) { return Optional.empty(); } BeanDefinition<T> beanDefinition = definitions.iterator().next(); Optional<ExecutableMethod<T, R>> 
foundMethod = beanDefinition.findMethod(method, arguments); if (foundMethod.isPresent()) { return foundMethod; } return beanDefinition.<R>findPossibleMethods(method).findFirst(); } @SuppressWarnings("unchecked") @Override public <T, R> Optional<MethodExecutionHandle<T, R>> findExecutionHandle(T bean, String method, Class<?>[] arguments) { if (bean != null) { Class<T> aClass = (Class<T>) bean.getClass(); return findExecutionHandle(aClass, method, arguments); } return Optional.empty(); } @Override public <T> BeanContext registerSingleton(@NonNull Class<T> type, @NonNull T singleton, Qualifier<T> qualifier, boolean inject) { purgeCacheForBeanInstance(singleton); BeanDefinition<T> beanDefinition; if (inject && running.get()) { // Bean cannot be injected before the start of the context beanDefinition = findConcreteCandidate(null, Argument.of(type), qualifier, false).orElse(null); if (beanDefinition == null) { // Purge cache miss purgeCacheForBeanInstance(singleton); } } else { beanDefinition = null; } if (beanDefinition != null && !(beanDefinition instanceof RuntimeBeanDefinition<T>) && beanDefinition.getBeanType().isInstance(singleton)) { if (inject) { try (BeanResolutionContext context = newResolutionContext(beanDefinition, null)) { doInjectAndInitialize(context, singleton, beanDefinition); } } } else { RuntimeBeanDefinition<T> runtimeBeanDefinition = RuntimeBeanDefinition.builder(type, () -> singleton) .singleton(true) .exposedTypes(ReflectionUtils.getAllClassesInHierarchy(type).toArray(Class<?>[]::new)) .qualifier(qualifier) .build(); registerBeanDefinition(runtimeBeanDefinition); beanDefinition = runtimeBeanDefinition; } var registration = BeanRegistration.of( this, new BeanKey<>(beanDefinition, qualifier), beanDefinition, singleton ); singletonScope.registerSingletonBean(registration, qualifier); return this; } private <T> void purgeCacheForBeanInstance(T singleton) { beanCandidateCache.entrySet().removeIf(entry -> entry.getKey().isInstance(singleton)); 
beanConcreteCandidateCache.entrySet().removeIf(entry -> entry.getKey().beanType.isInstance(singleton)); singletonBeanRegistrations.entrySet().removeIf(entry -> entry.getKey().beanType.isInstance(singleton)); containsBeanCache.entrySet().removeIf(entry -> entry.getKey().beanType.isInstance(singleton)); } @NonNull final BeanResolutionContext newResolutionContext(BeanDefinition<?> beanDefinition, @Nullable BeanResolutionContext currentContext) { if (currentContext == null) { return new SingletonBeanResolutionContext(beanDefinition); } else { return currentContext; } } @Override public ClassLoader getClassLoader() { return classLoader; } @Override public BeanDefinitionValidator getBeanValidator() { if (beanValidator == null) { this.beanValidator = findBean(BeanDefinitionValidator.class).orElse(BeanDefinitionValidator.DEFAULT); } return beanValidator; } @Override public Optional<BeanConfiguration> findBeanConfiguration(String configurationName) { BeanConfiguration configuration = beanConfigurations.get(configurationName); if (configuration != null) { return Optional.of(configuration); } else { return Optional.empty(); } } @Override public <T> BeanDefinition<T> getBeanDefinition(Argument<T> beanType, Qualifier<T> qualifier) { return findBeanDefinition(beanType, qualifier) .orElseThrow(() -> newNoSuchBeanException(null, beanType, qualifier, null)); } @Override public <T> Optional<BeanDefinition<T>> findBeanDefinition(Argument<T> beanType, Qualifier<T> qualifier) { BeanDefinition<T> beanDefinition = singletonScope.findCachedSingletonBeanDefinition(beanType, qualifier); if (beanDefinition != null) { return Optional.of(beanDefinition); } return findConcreteCandidate(null, beanType, qualifier, true); } private <T> Optional<BeanDefinition<T>> findBeanDefinitionInternal(Argument<T> beanType, Qualifier<T> qualifier) { return findConcreteCandidate(null, beanType, qualifier, false); } @Override public <T> Optional<BeanDefinition<T>> findBeanDefinition(Class<T> beanType, 
Qualifier<T> qualifier) { return findBeanDefinition(Argument.of(beanType), qualifier); } @Override public <T> Collection<BeanDefinition<T>> getBeanDefinitions(Class<T> beanType) { return getBeanDefinitions(Argument.of(beanType)); } @Override public <T> Collection<BeanDefinition<T>> getBeanDefinitions(Argument<T> beanType) { Objects.requireNonNull(beanType, "Bean type cannot be null"); Collection<BeanDefinition<T>> candidates = findBeanCandidatesInternal(null, beanType); return Collections.unmodifiableCollection(candidates); } @Override public <T> Collection<BeanDefinition<T>> getBeanDefinitions(Class<T> beanType, Qualifier<T> qualifier) { Objects.requireNonNull(beanType, "Bean type cannot be null"); return getBeanDefinitions(Argument.of(beanType), qualifier); } @Override public <T> Collection<BeanDefinition<T>> getBeanDefinitions(Argument<T> beanType, Qualifier<T> qualifier) { Objects.requireNonNull(beanType, "Bean type cannot be null"); Collection<BeanDefinition<T>> candidates = findBeanCandidatesInternal(null, beanType); if (qualifier != null) { candidates = qualifier.filterQualified(beanType.getType(), candidates); } return Collections.unmodifiableCollection(candidates); } @Override public <T> boolean containsBean(@NonNull Class<T> beanType, Qualifier<T> qualifier) { return containsBean(Argument.of(beanType), qualifier); } @Override public <T> boolean containsBean(Argument<T> beanType, Qualifier<T> qualifier) { ArgumentUtils.requireNonNull("beanType", beanType); BeanKey<T> beanKey = new BeanKey<>(beanType, qualifier); if (containsBeanCache.containsKey(beanKey)) { return containsBeanCache.get(beanKey); } else { boolean result = singletonScope.containsBean(beanType, qualifier) || isCandidatePresent(beanKey.beanType, qualifier); containsBeanCache.put(beanKey, result); return result; } } @NonNull @Override public <T> T getBean(@NonNull Class<T> beanType, @Nullable Qualifier<T> qualifier) { Objects.requireNonNull(beanType, "Bean type cannot be null"); return 
getBean(Argument.of(beanType), qualifier); } @NonNull @Override public <T> T getBean(@NonNull Class<T> beanType) { Objects.requireNonNull(beanType, "Bean type cannot be null"); return getBean(Argument.of(beanType), null); } @NonNull @Override public <T> T getBean(@NonNull Argument<T> beanType, @Nullable Qualifier<T> qualifier) { Objects.requireNonNull(beanType, "Bean type cannot be null"); try { return getBean(null, beanType, qualifier); } catch (DisabledBeanException e) { if (AbstractBeanContextConditional.ConditionLog.LOG.isDebugEnabled()) { AbstractBeanContextConditional.ConditionLog.LOG.debug("Bean of type [{}] disabled for reason: {}", beanType.getSimpleName(), e.getMessage(), e); } throw newNoSuchBeanException( null, beanType, qualifier, "Bean of type [" + beanType.getTypeString(true) + "] disabled for reason: " + e.getMessage() ); } } @Override public <T> Optional<T> findBean(Class<T> beanType, Qualifier<T> qualifier) { return findBean(null, beanType, qualifier); } @Override public <T> Optional<T> findBean(Argument<T> beanType, Qualifier<T> qualifier) { return findBean(null, beanType, qualifier); } @Override public <T> Collection<T> getBeansOfType(Class<T> beanType) { return getBeansOfType(null, Argument.of(beanType)); } @Override public <T> Collection<T> getBeansOfType(Class<T> beanType, Qualifier<T> qualifier) { return getBeansOfType(Argument.of(beanType), qualifier); } @Override public <T> Collection<T> getBeansOfType(Argument<T> beanType) { return getBeansOfType(null, beanType); } @Override public <T> Collection<T> getBeansOfType(Argument<T> beanType, Qualifier<T> qualifier) { return getBeansOfType(null, beanType, qualifier); } @Override public <T> Stream<T> streamOfType(Class<T> beanType, Qualifier<T> qualifier) { return streamOfType(null, beanType, qualifier); } @Override public <T> Stream<T> streamOfType(Argument<T> beanType, Qualifier<T> qualifier) { return streamOfType(null, beanType, qualifier); } @Override public <V> Map<String, V> 
mapOfType(Argument<V> beanType, Qualifier<V> qualifier) { return mapOfType(null, beanType, qualifier); } /** * Obtains a stream of beans of the given type and qualifier. * * @param resolutionContext The bean resolution context * @param beanType The bean type * @param qualifier The qualifier * @param <T> The bean concrete type * @return A stream */ protected <T> Stream<T> streamOfType(BeanResolutionContext resolutionContext, Class<T> beanType, Qualifier<T> qualifier) { return streamOfType(resolutionContext, Argument.of(beanType), qualifier); } /** * Obtains a map of beans of the given type and qualifier. * * @param resolutionContext The resolution context * @param beanType The bean type * @param qualifier The qualifier * @param <V> The bean type * @return A map of beans, never {@code null}. * @since 4.0.0 */ protected <V> @NonNull Map<String, V> mapOfType(@Nullable BeanResolutionContext resolutionContext, @NonNull Argument<V> beanType, @Nullable Qualifier<V> qualifier) { // try and find a bean that implements the map with the generics Argument<Map<String, V>> mapType = Argument.mapOf(Argument.STRING, beanType); @SuppressWarnings("unchecked") Qualifier<Map<String, V>> mapQualifier = (Qualifier<Map<String, V>>) qualifier; BeanDefinition<Map<String, V>> existingBean = findBeanDefinitionInternal(mapType, mapQualifier).orElse(null); if (existingBean != null) { return getBean(existingBean); } Collection<BeanRegistration<V>> beanRegistrations = getBeanRegistrations(resolutionContext, beanType, qualifier); if (beanRegistrations.isEmpty()) { return Collections.emptyMap(); } try { return beanRegistrations.stream().collect(Collectors.toUnmodifiableMap( DefaultBeanContext::resolveKey, reg -> reg.bean )); } catch (IllegalStateException e) { // occurs for duplicate keys throw new DependencyInjectionException( resolutionContext, "Injecting a map of beans requires `@Named` qualifier. 
Multiple beans were found missing a qualifier resulting in duplicate keys: " + e.getMessage(), new NonUniqueBeanException( beanType.getType(), beanRegistrations.stream().map(reg -> reg.beanDefinition).iterator() ) ); } } @NonNull private static String resolveKey(BeanRegistration<?> reg) { BeanDefinition<?> definition = reg.beanDefinition; if (definition instanceof NameResolver resolver && resolver.resolveName().isPresent()) { return resolver.resolveName().get(); } Qualifier<?> declaredQualifier = definition.getDeclaredQualifier(); if (declaredQualifier != null) { String name = Qualifiers.findName(declaredQualifier); if (name != null) { return name; } } // Must be the primary or a single bean Class<?> candidateType = reg.beanDefinition.getBeanType(); String candidateSimpleName = candidateType.getSimpleName(); return NameUtils.decapitalize(candidateSimpleName); } /** * Obtains a stream of beans of the given type and qualifier. * * @param resolutionContext The bean resolution context * @param beanType The bean type * @param qualifier The qualifier * @param <T> The bean concrete type * @return A stream */ @Internal public <T> Stream<T> streamOfType(BeanResolutionContext resolutionContext, Argument<T> beanType, Qualifier<T> qualifier) { Objects.requireNonNull(beanType, "Bean type cannot be null"); return getBeanRegistrations(resolutionContext, beanType, qualifier).stream() .map(BeanRegistration::getBean); } @NonNull @Override public <T> T inject(@NonNull T instance) { Objects.requireNonNull(instance, "Instance cannot be null"); Collection<BeanDefinition<T>> candidates = findBeanCandidatesForInstance(instance); BeanDefinition<T> beanDefinition; if (candidates.size() == 1) { beanDefinition = candidates.iterator().next(); } else if (!candidates.isEmpty()) { beanDefinition = lastChanceResolve(Argument.of((Class<T>) instance.getClass()), null, true, candidates); } else { beanDefinition = null; } if (beanDefinition != null && !(beanDefinition instanceof 
RuntimeBeanDefinition<T>)) { try (BeanResolutionContext resolutionContext = newResolutionContext(beanDefinition, null)) { final BeanKey<T> beanKey = new BeanKey<>(beanDefinition.getBeanType(), null); resolutionContext.addInFlightBean( beanKey, new BeanRegistration<>(beanKey, beanDefinition, instance) ); doInjectAndInitialize( resolutionContext, instance, beanDefinition ); } } return instance; } @NonNull @Override public <T> T createBean(@NonNull Class<T> beanType, @Nullable Qualifier<T> qualifier) { return createBean(null, beanType, qualifier); } @NonNull @Override public <T> T createBean(@NonNull Class<T> beanType, @Nullable Qualifier<T> qualifier, @Nullable Map<String, Object> argumentValues) { ArgumentUtils.requireNonNull("beanType", beanType); Argument<T> beanArg = Argument.of(beanType); Optional<BeanDefinition<T>> candidate = findBeanDefinition(beanArg, qualifier); if (candidate.isPresent()) { BeanDefinition<T> beanDefinition = candidate.get(); try (BeanResolutionContext resolutionContext = newResolutionContext(beanDefinition, null)) { if (beanDefinition instanceof InstantiatableBeanDefinition<T> instantiatableBeanDefinition) { T bean = resolveByBeanFactory(resolutionContext, instantiatableBeanDefinition, qualifier, argumentValues); return postBeanCreated(resolutionContext, beanDefinition, beanArg, qualifier, bean); } } } throw newNoSuchBeanException( null, beanArg, qualifier, null ); } @NonNull @Override public <T> T createBean(@NonNull Class<T> beanType, @Nullable Qualifier<T> qualifier, @Nullable Object... 
args) { ArgumentUtils.requireNonNull("beanType", beanType); final Argument<T> beanArg = Argument.of(beanType); Optional<BeanDefinition<T>> candidate = findBeanDefinition(beanArg, qualifier); if (candidate.isPresent()) { BeanDefinition<T> definition = candidate.get(); try (BeanResolutionContext resolutionContext = newResolutionContext(definition, null)) { return doCreateBeanWithArguments(resolutionContext, definition, beanArg, qualifier, args); } } throw newNoSuchBeanException( null, Argument.of(beanType), qualifier, null ); } @NonNull private <T> T doCreateBeanWithArguments(@NonNull BeanResolutionContext resolutionContext, @NonNull BeanDefinition<T> definition, @NonNull Argument<T> beanType, @Nullable Qualifier<T> qualifier, @Nullable Object... args) { Map<String, Object> argumentValues = resolveArgumentValues(resolutionContext, definition, args); if (LOG.isTraceEnabled()) { LOG.trace("Computed bean argument values: {}", argumentValues); } if (definition instanceof InstantiatableBeanDefinition<T> instantiatableBeanDefinition) { T bean = resolveByBeanFactory(resolutionContext, instantiatableBeanDefinition, qualifier, argumentValues); return postBeanCreated(resolutionContext, definition, beanType, qualifier, bean); } else { throw new BeanInstantiationException("BeanDefinition doesn't support creating a new instance of the bean"); } } @NonNull private <T> Map<String, Object> resolveArgumentValues(BeanResolutionContext resolutionContext, BeanDefinition<T> definition, Object[] args) { Argument[] requiredArguments; if (definition instanceof ParametrizedInstantiatableBeanDefinition<T> parametrizedInstantiatableBeanDefinition) { requiredArguments = parametrizedInstantiatableBeanDefinition.getRequiredArguments(); } else { return null; } if (LOG.isTraceEnabled()) { LOG.trace("Creating bean for parameters: {}", ArrayUtils.toString(args)); } MutableConversionService conversionService = getConversionService(); Map<String, Object> argumentValues = 
CollectionUtils.newLinkedHashMap(requiredArguments.length); BeanResolutionContext.Path currentPath = resolutionContext.getPath(); for (int i = 0; i < requiredArguments.length; i++) { Argument<?> requiredArgument = requiredArguments[i]; try (BeanResolutionContext.Path ignored = currentPath.pushConstructorResolve(definition, requiredArgument)) { Class<?> argumentType = requiredArgument.getType(); if (args.length > i) { Object val = args[i]; if (val != null) { if (argumentType.isInstance(val) && !CollectionUtils.isIterableOrMap(argumentType)) { argumentValues.put(requiredArgument.getName(), val); } else { argumentValues.put(requiredArgument.getName(), conversionService.convert(val, requiredArgument).orElseThrow(() -> new BeanInstantiationException(resolutionContext, "Invalid bean @Argument [" + requiredArgument + "]. Cannot convert object [" + val + "] to required type: " + argumentType) )); } } else if (!requiredArgument.isDeclaredNullable()) { throw new BeanInstantiationException(resolutionContext, "Invalid bean @Argument [" + requiredArgument + "]. Argument cannot be null"); } } else { // attempt resolve from context Optional<?> existingBean = findBean(resolutionContext, argumentType, null); if (existingBean.isPresent()) { argumentValues.put(requiredArgument.getName(), existingBean.get()); } else if (!requiredArgument.isDeclaredNullable()) { throw new BeanInstantiationException(resolutionContext, "Invalid bean @Argument [" + requiredArgument + "]. 
No bean found for type: " + argumentType); } } } } return argumentValues; } @Nullable @Override public <T> T destroyBean(@NonNull Argument<T> beanType, Qualifier<T> qualifier) { ArgumentUtils.requireNonNull("beanType", beanType); return findBeanDefinition(beanType, qualifier) .map(this::destroyBean) .orElse(null); } @Override @NonNull public <T> T destroyBean(@NonNull T bean) { ArgumentUtils.requireNonNull("bean", bean); Optional<BeanRegistration<T>> beanRegistration = findBeanRegistration(bean); if (beanRegistration.isPresent()) { destroyBean(beanRegistration.get()); } else { Optional<BeanDefinition<T>> beanDefinition = findBeanDefinition((Class<T>) bean.getClass()); if (beanDefinition.isPresent()) { BeanDefinition<T> definition = beanDefinition.get(); BeanKey<T> key = new BeanKey<>(definition, definition.getDeclaredQualifier()); destroyBean(BeanRegistration.of(this, key, definition, bean)); } } return bean; } @Override @Nullable public <T> T destroyBean(@NonNull Class<T> beanType) { ArgumentUtils.requireNonNull("beanType", beanType); return destroyBean(Argument.of(beanType), null); } @Nullable private <T> T destroyBean(@NonNull BeanDefinition<T> beanDefinition) { if (beanDefinition.isSingleton()) { BeanRegistration<T> beanRegistration = singletonScope.findBeanRegistration(beanDefinition); if (beanRegistration != null) { destroyBean(beanRegistration); return beanRegistration.bean; } } throw new IllegalArgumentException("Cannot destroy non-singleton bean using bean definition! 
Use 'destroyBean(BeanRegistration)` or `destroyBean(<BeanInstance>)`."); } @Override public <T> void destroyBean(@NonNull BeanRegistration<T> registration) { destroyBean(registration, false); } private <T> void destroyBean(@NonNull BeanRegistration<T> registration, boolean dependent) { if (LOG_LIFECYCLE.isDebugEnabled()) { LOG_LIFECYCLE.debug("Destroying bean [{}] with identifier [{}]", registration.bean, registration.identifier); } if (registration.beanDefinition instanceof ProxyBeanDefinition) { if (registration.bean instanceof InterceptedBeanProxy) { // Ignore the proxy and destroy the target destroyProxyTargetBean(registration, dependent); return; } if (dependent && registration.beanDefinition.isSingleton()) { return; } } T beanToDestroy = registration.getBean(); BeanDefinition<T> definition = registration.getBeanDefinition(); if (beanToDestroy != null) { purgeCacheForBeanInstance(beanToDestroy); if (definition.isSingleton()) { singletonScope.purgeCacheForBeanInstance(definition, beanToDestroy); } } beanToDestroy = triggerPreDestroyListeners(definition, beanToDestroy); if (definition instanceof DisposableBeanDefinition) { try { ((DisposableBeanDefinition<T>) definition).dispose(this, beanToDestroy); } catch (Exception e) { if (LOG.isWarnEnabled()) { LOG.warn("Error disposing bean [{}]... 
Continuing...", beanToDestroy, e); } } } if (beanToDestroy instanceof LifeCycle<?> cycle && !dependent) { destroyLifeCycleBean(cycle, definition); } if (registration instanceof BeanDisposingRegistration) { List<BeanRegistration<?>> dependents = ((BeanDisposingRegistration<T>) registration).getDependents(); if (CollectionUtils.isNotEmpty(dependents)) { final ListIterator<BeanRegistration<?>> i = dependents.listIterator(dependents.size()); while (i.hasPrevious()) { destroyBean(i.previous(), true); } } } else { try { registration.close(); } catch (Exception e) { throw new BeanDestructionException(definition, e); } } triggerBeanDestroyedListeners(definition, beanToDestroy); } /** * Destroy a lifecycle bean. * * @param cycle The cycle * @param definition The definition * @param <T> The bean type */ @Internal protected <T> void destroyLifeCycleBean(LifeCycle<?> cycle, BeanDefinition<T> definition) { try { cycle.stop(); } catch (Exception e) { throw new BeanDestructionException(definition, e); } } @SuppressWarnings("unchecked") @NonNull private <T> T triggerPreDestroyListeners(@NonNull BeanDefinition<T> beanDefinition, @NonNull T bean) { if (beanPreDestroyEventListeners == null) { beanPreDestroyEventListeners = loadBeanEventListeners(BeanPreDestroyEventListener.class); } if (!beanPreDestroyEventListeners.isEmpty()) { BeanPreDestroyEvent<T> event = new BeanPreDestroyEvent<>(this, beanDefinition, bean); Class<T> beanType = getBeanType(beanDefinition); List<ListenersSupplier.ListenerAndOrder<BeanPreDestroyEventListener>> listeners = new ArrayList<>(); for (Map.Entry<Class<?>, ListenersSupplier<BeanPreDestroyEventListener>> entry : beanPreDestroyEventListeners) { if (entry.getKey().isAssignableFrom(beanType)) { for (ListenersSupplier.ListenerAndOrder<BeanPreDestroyEventListener> listener : entry.getValue().get(null)) { listeners.add(listener); } } } if (listeners.size() > 1) { listeners.sort(OrderUtil.COMPARATOR_ZERO); } for 
(ListenersSupplier.ListenerAndOrder<BeanPreDestroyEventListener> listener : listeners) { try { bean = (T) Objects.requireNonNull( listener.bean.onPreDestroy(event), "PreDestroy event listener illegally returned null: " + listener.getClass() ); } catch (Exception e) { throw new BeanDestructionException(beanDefinition, e); } } } return bean; } private <T> void destroyProxyTargetBean(@NonNull BeanRegistration<T> registration, boolean dependent) { Set<Object> destroyed = Collections.emptySet(); if (registration instanceof BeanDisposingRegistration<?> disposingRegistration) { if (disposingRegistration.getDependents() != null) { destroyed = Collections.newSetFromMap(new IdentityHashMap<>()); for (BeanRegistration<?> beanRegistration : disposingRegistration.getDependents()) { destroyBean(beanRegistration, true); destroyed.add(beanRegistration.bean); } } } BeanDefinition<T> proxyTargetBeanDefinition = findProxyTargetBeanDefinition(registration.beanDefinition) .orElseThrow(() -> new IllegalStateException("Cannot find a proxy target bean definition for: " + registration.beanDefinition)); Optional<CustomScope<?>> declaredScope = customScopeRegistry.findDeclaredScope(proxyTargetBeanDefinition); if (declaredScope.isEmpty()) { if (proxyTargetBeanDefinition.isSingleton()) { return; } // Scope is not present, try to get the actual target bean and destroy it if (registration.bean instanceof InterceptedBeanProxy) { InterceptedBeanProxy<T> interceptedProxy = (InterceptedBeanProxy<T>) registration.bean; if (interceptedProxy.hasCachedInterceptedTarget()) { T interceptedTarget = interceptedProxy.interceptedTarget(); if (destroyed.contains(interceptedTarget)) { return; } destroyBean(BeanRegistration.of(this, new BeanKey<>(proxyTargetBeanDefinition, proxyTargetBeanDefinition.getDeclaredQualifier()), proxyTargetBeanDefinition, interceptedTarget, registration instanceof BeanDisposingRegistration ? 
((BeanDisposingRegistration<T>) registration).getDependents() : null )); } } return; } CustomScope<?> customScope = declaredScope.get(); if (dependent) { return; } Optional<BeanRegistration<T>> targetBeanRegistration = customScope.findBeanRegistration(proxyTargetBeanDefinition); if (targetBeanRegistration.isPresent()) { BeanRegistration<T> targetRegistration = targetBeanRegistration.get(); customScope.remove(targetRegistration.identifier); } } private <T> void triggerBeanDestroyedListeners(@NonNull BeanDefinition<T> beanDefinition, @NonNull T bean) { if (beanDestroyedEventListeners == null) { beanDestroyedEventListeners = loadBeanEventListeners(BeanDestroyedEventListener.class); } if (!beanDestroyedEventListeners.isEmpty()) { BeanDestroyedEvent<T> event = new BeanDestroyedEvent<>(this, beanDefinition, bean); Class<T> beanType = getBeanType(beanDefinition); List<ListenersSupplier.ListenerAndOrder<BeanDestroyedEventListener>> listeners = new ArrayList<>(); for (Map.Entry<Class<?>, ListenersSupplier<BeanDestroyedEventListener>> entry : beanDestroyedEventListeners) { if (entry.getKey().isAssignableFrom(beanType)) { for (ListenersSupplier.ListenerAndOrder<BeanDestroyedEventListener> listener : entry.getValue().get(null)) { listeners.add(listener); } } } if (listeners.size() > 1) { listeners.sort(OrderUtil.COMPARATOR_ZERO); } for (ListenersSupplier.ListenerAndOrder<BeanDestroyedEventListener> listener : listeners) { try { listener.bean.onDestroyed(event); } catch (Exception e) { throw new BeanDestructionException(beanDefinition, e); } } } } @NonNull private <T> Class<T> getBeanType(@NonNull BeanDefinition<T> beanDefinition) { if (beanDefinition instanceof ProxyBeanDefinition) { return ((ProxyBeanDefinition<T>) beanDefinition).getTargetType(); } return beanDefinition.getBeanType(); } /** * Find an active singleton bean for the given definition and qualifier. 
* * @param beanDefinition The bean definition * @param qualifier The qualifier * @param <T> The bean generic type * @return The bean registration */ @Nullable protected <T> BeanRegistration<T> getActiveBeanRegistration(BeanDefinition<T> beanDefinition, Qualifier qualifier) { if (beanDefinition == null) { return null; } return singletonScope.findBeanRegistration(beanDefinition, qualifier); } /** * Creates a bean. * * @param resolutionContext The bean resolution context * @param beanType The bean type * @param qualifier The qualifier * @param <T> The bean generic type * @return The instance */ @NonNull protected <T> T createBean(@Nullable BeanResolutionContext resolutionContext, @NonNull Class<T> beanType, @Nullable Qualifier<T> qualifier) { ArgumentUtils.requireNonNull("beanType", beanType); Optional<BeanDefinition<T>> concreteCandidate = findBeanDefinition(beanType, qualifier); if (concreteCandidate.isPresent()) { BeanDefinition<T> candidate = concreteCandidate.get(); try (BeanResolutionContext context = newResolutionContext(candidate, resolutionContext)) { if (candidate instanceof InstantiatableBeanDefinition<T> instantiatableBeanDefinition) { T bean = resolveByBeanFactory(context, instantiatableBeanDefinition, qualifier, Collections.emptyMap()); return postBeanCreated(context, candidate, Argument.of(beanType), qualifier, bean); } else { throw new BeanInstantiationException("BeanDefinition doesn't support creating a new instance of the bean"); } } } throw newNoSuchBeanException( resolutionContext, Argument.of(beanType), qualifier, null ); } /** * Injects a bean. 
* * @param resolutionContext The bean resolution context * @param requestingBeanDefinition The requesting bean definition * @param instance The instance * @param <T> The instance type * @return The instance */ @Internal @NonNull protected <T> T inject(@NonNull BeanResolutionContext resolutionContext, @Nullable BeanDefinition<?> requestingBeanDefinition, @NonNull T instance) { @SuppressWarnings("unchecked") Class<T> beanType = (Class<T>) instance.getClass(); Optional<BeanDefinition<T>> concreteCandidate = findBeanDefinition(beanType, null); if (concreteCandidate.isPresent()) { BeanDefinition<T> definition = concreteCandidate.get(); if (requestingBeanDefinition != null && requestingBeanDefinition.equals(definition)) { // bail out, don't inject for bean definition in creation return instance; } doInjectAndInitialize(resolutionContext, instance, definition); } return instance; } /** * Get all beans of the given type. * * @param resolutionContext The bean resolution context * @param beanType The bean type * @param <T> The bean type parameter * @return The found beans */ @NonNull protected <T> Collection<T> getBeansOfType(@Nullable BeanResolutionContext resolutionContext, @NonNull Argument<T> beanType) { return getBeansOfType(resolutionContext, beanType, null); } /** * Get all beans of the given type and qualifier. 
* * @param resolutionContext The bean resolution context * @param beanType The bean type * @param qualifier The qualifier * @param <T> The bean type parameter * @return The found beans */ @Internal @NonNull public <T> Collection<T> getBeansOfType(@Nullable BeanResolutionContext resolutionContext, @NonNull Argument<T> beanType, @Nullable Qualifier<T> qualifier) { Collection<BeanRegistration<T>> beanRegistrations = getBeanRegistrations(resolutionContext, beanType, qualifier); List<T> list = new ArrayList<>(beanRegistrations.size()); for (BeanRegistration<T> beanRegistration : beanRegistrations) { list.add(beanRegistration.getBean()); } return list; } @Override @NonNull public <T> T getProxyTargetBean(@NonNull Class<T> beanType, @Nullable Qualifier<T> qualifier) { ArgumentUtils.requireNonNull("beanType", beanType); return getProxyTargetBean(null, Argument.of(beanType), qualifier); } @NonNull @Override public <T> T getProxyTargetBean(@NonNull Argument<T> beanType, @Nullable Qualifier<T> qualifier) { ArgumentUtils.requireNonNull("beanType", beanType); return getProxyTargetBean(null, beanType, qualifier); } /** * Resolves the proxy target for a given bean type. If the bean has no proxy then the original bean is returned. * * @param resolutionContext The bean resolution context * @param beanType The bean type * @param qualifier The bean qualifier * @param <T> The generic type * @return The proxied instance * @since 3.1.0 */ @NonNull @UsedByGeneratedCode public <T> T getProxyTargetBean(@Nullable BeanResolutionContext resolutionContext, @NonNull Argument<T> beanType, @Nullable Qualifier<T> qualifier) { BeanDefinition<T> definition = getProxyTargetBeanDefinition(beanType, qualifier); return resolveBeanRegistration(resolutionContext, definition, beanType, qualifier).bean; } /** * Resolves the proxy target for a given proxy bean definition. If the bean has no proxy then the original bean is returned. 
* * @param resolutionContext The bean resolution context * @param definition The proxy bean definition * @param beanType The bean type * @param qualifier The bean qualifier * @param <T> The generic type * @return The proxied instance * @since 4.3.0 */ @Internal @NonNull @UsedByGeneratedCode public <T> T getProxyTargetBean(@Nullable BeanResolutionContext resolutionContext, @NonNull BeanDefinition<T> definition, @NonNull Argument<T> beanType, @Nullable Qualifier<T> qualifier) { return resolveBeanRegistration(resolutionContext, definition, beanType, qualifier).bean; } @NonNull @Override public <T, R> Optional<ExecutableMethod<T, R>> findProxyTargetMethod(@NonNull Class<T> beanType, @NonNull String method, @NonNull Class<?>[] arguments) { ArgumentUtils.requireNonNull("beanType", beanType); ArgumentUtils.requireNonNull("method", method); BeanDefinition<T> definition = getProxyTargetBeanDefinition(beanType, null); return definition.findMethod(method, arguments); } @NonNull @Override public <T, R> Optional<ExecutableMethod<T, R>> findProxyTargetMethod(@NonNull Class<T> beanType, Qualifier<T> qualifier, @NonNull String method, Class<?>... arguments) { ArgumentUtils.requireNonNull("beanType", beanType); ArgumentUtils.requireNonNull("method", method); BeanDefinition<T> definition = getProxyTargetBeanDefinition(beanType, qualifier); return definition.findMethod(method, arguments); } @Override public <T, R> Optional<ExecutableMethod<T, R>> findProxyTargetMethod(@NonNull Argument<T> beanType, Qualifier<T> qualifier, @NonNull String method, Class<?>... 
arguments) { ArgumentUtils.requireNonNull("beanType", beanType); ArgumentUtils.requireNonNull("method", method); BeanDefinition<T> definition = getProxyTargetBeanDefinition(beanType, qualifier); return definition.findMethod(method, arguments); } @NonNull @Override public <T> Optional<BeanDefinition<T>> findProxyTargetBeanDefinition(@NonNull Class<T> beanType, @Nullable Qualifier<T> qualifier) { return findProxyTargetBeanDefinition(Argument.of(beanType), qualifier); } @Override @SuppressWarnings("java:S2789") // performance optimization public <T> Optional<BeanDefinition<T>> findProxyTargetBeanDefinition(@NonNull Argument<T> beanType, @Nullable Qualifier<T> qualifier) { ArgumentUtils.requireNonNull("beanType", beanType); BeanCandidateKey<T> key = new BeanCandidateKey<>(beanType, qualifier, true); Optional beanDefinition = beanProxyTargetCache.get(key); if (beanDefinition == null) { beanDefinition = findProxyTargetNoCache(null, beanType, qualifier); beanProxyTargetCache.put(key, beanDefinition); } return beanDefinition; } @NonNull @Override public Collection<BeanDefinition<Object>> getBeanDefinitions(@Nullable Qualifier<Object> qualifier) { if (qualifier == null) { return Collections.emptyList(); } if (LOG.isDebugEnabled()) { LOG.debug("Finding candidate beans for qualifier: {}", qualifier); } Collection<BeanDefinition<Object>> candidates; if (qualifier instanceof FilteringQualifier<Object> filteringQualifier) { // Keep anonymous Predicate<BeanDefinitionReference<Object>> predicate = new Predicate<>() { @Override public boolean test(BeanDefinitionReference<Object> qbt) { return filteringQualifier.doesQualify(Object.class, qbt); } }; candidates = beanDefinitionProvider.getBeanDefinitions(this, predicate, null); } else { Stream<BeanDefinition<Object>> beanDefinitionsClasses = StreamSupport.stream( beanDefinitionProvider.getBeanDefinitions(this, Argument.OBJECT_ARGUMENT, null, null).spliterator(), false); candidates = qualifier.reduce(Object.class, 
beanDefinitionsClasses) .toList(); } filterReplacedBeans(candidates); return candidates; } @NonNull @Override public Collection<BeanDefinition<Object>> getAllBeanDefinitions() { if (LOG.isDebugEnabled()) { LOG.debug("Finding all bean definitions"); } return beanDefinitionProvider.getBeanDefinitions(this, null); } @Override public Collection<DisabledBean<?>> getDisabledBeans() { return beanDefinitionProvider.getDisabledBeans(this); } @NonNull @Override public Collection<BeanDefinitionReference<Object>> getBeanDefinitionReferences() { return beanDefinitionProvider.getBeanReferences(); } @Override public BeanContext registerBeanConfiguration(BeanConfiguration configuration) { Objects.requireNonNull(configuration, "Configuration cannot be null"); this.beanConfigurations.put(configuration.getName(), configuration); beanDefinitionProvider.registerConfiguration(configuration); return this; } @Override @NonNull public <B> BeanContext registerBeanDefinition(@NonNull RuntimeBeanDefinition<B> definition) { beanDefinitionProvider.addBeanDefinition(definition); purgeCacheForBeanType(definition.getBeanType()); return this; } private <B> void purgeCacheForBeanType(Class<B> beanType) { beanCandidateCache.entrySet().removeIf(entry -> entry.getKey().isAssignableFrom(beanType)); beanConcreteCandidateCache.entrySet().removeIf(entry -> entry.getKey().beanType.isAssignableFrom(beanType)); singletonBeanRegistrations.entrySet().removeIf(entry -> entry.getKey().beanType.isAssignableFrom(beanType)); containsBeanCache.entrySet().removeIf(entry -> entry.getKey().beanType.isAssignableFrom(beanType)); } /** * Get a bean of the given type. 
* * @param resolutionContext The bean context resolution * @param beanType The bean type * @param <T> The bean type parameter * @return The found bean */ @UsedByGeneratedCode @NonNull public <T> T getBean(@Nullable BeanResolutionContext resolutionContext, @NonNull Class<T> beanType) { ArgumentUtils.requireNonNull("beanType", beanType); return getBean(resolutionContext, Argument.of(beanType), null); } @NonNull @Override public <T> T getBean(@NonNull BeanDefinition<T> definition) { ArgumentUtils.requireNonNull("definition", definition); return resolveBeanRegistration(null, definition).bean; } /** * Get a bean of the given type and qualifier. * * @param resolutionContext The bean context resolution * @param beanType The bean type * @param qualifier The qualifier * @param <T> The bean type parameter * @return The found bean */ @NonNull public <T> T getBean(@Nullable BeanResolutionContext resolutionContext, @NonNull Class<T> beanType, @Nullable Qualifier<T> qualifier) { return getBean(resolutionContext, Argument.of(beanType), qualifier); } /** * Get a bean of the given type and qualifier. * * @param resolutionContext The bean context resolution * @param beanType The bean type * @param qualifier The qualifier * @param <T> The bean type parameter * @return The found bean * @since 3.0.0 */ @NonNull public <T> T getBean(@Nullable BeanResolutionContext resolutionContext, @NonNull Argument<T> beanType, @Nullable Qualifier<T> qualifier) { ArgumentUtils.requireNonNull("beanType", beanType); return resolveBeanRegistration(resolutionContext, beanType, qualifier, true).bean; } /** * Get a bean of the given bean definition, type and qualifier. 
* * @param resolutionContext The bean context resolution * @param beanDefinition The bean definition * @param beanType The bean type * @param qualifier The qualifier * @param <T> The bean type parameter * @return The found bean * @since 3.5.0 */ @Internal @NonNull public <T> T getBean(@Nullable BeanResolutionContext resolutionContext, @NonNull BeanDefinition<T> beanDefinition, @NonNull Argument<T> beanType, @Nullable Qualifier<T> qualifier) { ArgumentUtils.requireNonNull("beanDefinition", beanDefinition); ArgumentUtils.requireNonNull("beanType", beanType); return resolveBeanRegistration(resolutionContext, beanDefinition, beanType, qualifier).bean; } /** * Find an optional bean of the given type and qualifier. * * @param resolutionContext The bean context resolution * @param beanType The bean type * @param qualifier The qualifier * @param <T> The bean type parameter * @return The found bean wrapped as an {@link Optional} */ @NonNull public <T> Optional<T> findBean(@Nullable BeanResolutionContext resolutionContext, @NonNull Class<T> beanType, @Nullable Qualifier<T> qualifier) { return findBean(resolutionContext, Argument.of(beanType), qualifier); } /** * Find an optional bean of the given type and qualifier. 
* * @param resolutionContext The bean context resolution * @param beanType The bean type * @param qualifier The qualifier * @param <T> The bean type parameter * @return The found bean wrapped as an {@link Optional} * @since 3.0.0 */ @Internal @NonNull public <T> Optional<T> findBean(@Nullable BeanResolutionContext resolutionContext, @NonNull Argument<T> beanType, @Nullable Qualifier<T> qualifier) { ArgumentUtils.requireNonNull("beanType", beanType); // allow injection the bean context if (thisInterfaces.contains(beanType.getType())) { return Optional.of((T) this); } try { BeanRegistration<T> beanRegistration = resolveBeanRegistration(resolutionContext, beanType, qualifier, false); if (beanRegistration == null || beanRegistration.bean == null) { return Optional.empty(); } else { return Optional.of(beanRegistration.bean); } } catch (DisabledBeanException e) { if (AbstractBeanContextConditional.ConditionLog.LOG.isDebugEnabled()) { AbstractBeanContextConditional.ConditionLog.LOG.debug("Bean of type [{}] disabled for reason: {}", beanType.getSimpleName(), e.getMessage()); } return Optional.empty(); } } @Override public BeanContextConfiguration getContextConfiguration() { return this.beanContextConfiguration; } @SuppressWarnings("unchecked") @Override public void publishEvent(@NonNull Object event) { if (eventsEnabled) { Objects.requireNonNull(event, "Event cannot be null"); getBean(Argument.of(ApplicationEventPublisher.class, event.getClass())).publishEvent(event); } } @Override public @NonNull Future<Void> publishEventAsync(@NonNull Object event) { if (eventsEnabled) { Objects.requireNonNull(event, "Event cannot be null"); return getBean(Argument.of(ApplicationEventPublisher.class, event.getClass())).publishEventAsync(event); } return CompletableFuture.completedFuture(null); } @NonNull @Override public <T> Optional<BeanDefinition<T>> findProxyBeanDefinition(@NonNull Argument<T> beanType, @Nullable Qualifier<T> qualifier) { ArgumentUtils.requireNonNull("beanType", 
beanType); for (BeanDefinition<T> beanDefinition : getBeanDefinitions(beanType, qualifier)) { if (beanDefinition.isProxy()) { return Optional.of(beanDefinition); } } return Optional.empty(); } /** * Invalidates the bean caches. For testing only. */ @Internal protected void invalidateCaches() { beanCandidateCache.clear(); beanConcreteCandidateCache.clear(); singletonBeanRegistrations.clear(); } /** * Resolves the {@link BeanConfiguration}
loader
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/type/descriptor/jdbc/BigIntJdbcType.java
{ "start": 732, "end": 2890 }
class ____ implements JdbcType { public static final BigIntJdbcType INSTANCE = new BigIntJdbcType(); public BigIntJdbcType() { } @Override public int getJdbcTypeCode() { return Types.BIGINT; } @Override public <T> JavaType<T> getJdbcRecommendedJavaTypeMapping( Integer length, Integer scale, TypeConfiguration typeConfiguration) { return typeConfiguration.getJavaTypeRegistry().getDescriptor( Long.class ); } @Override public <T> JdbcLiteralFormatter<T> getJdbcLiteralFormatter(JavaType<T> javaType) { return new JdbcLiteralFormatterNumericData<>( javaType, Long.class ); } @Override public Class<?> getPreferredJavaTypeClass(WrapperOptions options) { return Long.class; } @Override public <X> ValueBinder<X> getBinder(final JavaType<X> javaType) { return new BasicBinder<>( javaType, this ) { @Override protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options) throws SQLException { st.setLong( index, javaType.unwrap( value, Long.class, options ) ); } @Override protected void doBind(CallableStatement st, X value, String name, WrapperOptions options) throws SQLException { st.setLong( name, javaType.unwrap( value, Long.class, options ) ); } }; } @Override public <X> ValueExtractor<X> getExtractor(final JavaType<X> javaType) { return new BasicExtractor<>( javaType, this ) { @Override protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException { return javaType.wrap( rs.getLong( paramIndex ), options ); } @Override protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException { return javaType.wrap( statement.getLong( index ), options ); } @Override protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException { return javaType.wrap( statement.getLong( name ), options ); } }; } @Override public String getFriendlyName() { return "BIGINT"; } @Override public String toString() { return "BigIntTypeDescriptor(" + getFriendlyName() + 
")"; } }
BigIntJdbcType
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesHttpStaticUserPermissions.java
{ "start": 2126, "end": 2570 }
class ____ { private static final File testRootDir = new File("target", TestRMWebServicesHttpStaticUserPermissions.class.getName() + "-root"); private static File spnegoKeytabFile = new File( KerberosTestUtils.getKeytabFile()); private static String spnegoPrincipal = KerberosTestUtils .getServerPrincipal(); private static MiniKdc testMiniKDC; private static MockRM rm; static
TestRMWebServicesHttpStaticUserPermissions
java
netty__netty
transport/src/main/java/io/netty/channel/nio/AbstractNioByteChannel.java
{ "start": 1700, "end": 1770 }
class ____ {@link Channel}s that operate on bytes. */ public abstract
for
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/sql/ast/spi/SqlSelectionProducer.java
{ "start": 335, "end": 1057 }
interface ____ { /** * Create a SqlSelection for the given JDBC ResultSet position * * @param jdbcPosition The index position used to read values from JDBC * @param valuesArrayPosition The position in our {@linkplain RowProcessingState#getJdbcValue(SqlSelection) "current JDBC values array"} * @param javaType The descriptor for the Java type to read the value as * @param virtual Whether the select is virtual or real. See {@link SqlSelection#isVirtual()} * @param typeConfiguration The associated TypeConfiguration */ SqlSelection createSqlSelection( int jdbcPosition, int valuesArrayPosition, JavaType javaType, boolean virtual, TypeConfiguration typeConfiguration); }
SqlSelectionProducer
java
elastic__elasticsearch
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ai21/completion/Ai21ChatCompletionServiceSettings.java
{ "start": 1349, "end": 1454 }
class ____ the model ID and rate limit settings for the AI21 chat completion service. */ public
encapsulates
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/readonly/AbstractReadOnlyTest.java
{ "start": 989, "end": 1961 }
class ____ implements SettingProvider.Provider<CacheMode> { @Override public CacheMode getSetting() { return CacheMode.IGNORE; } } protected void clearCounts(SessionFactoryScope scope) { scope.getSessionFactory().getStatistics().clear(); } protected void assertInsertCount(int expected, SessionFactoryScope scope) { int inserts = (int) scope.getSessionFactory().getStatistics().getEntityInsertCount(); assertEquals( expected, inserts, "unexpected insert count" ); } protected void assertUpdateCount(int expected, SessionFactoryScope scope) { int updates = (int) scope.getSessionFactory().getStatistics().getEntityUpdateCount(); assertEquals( expected, updates, "unexpected update counts" ); } protected void assertDeleteCount(int expected, SessionFactoryScope scope) { int deletes = (int) scope.getSessionFactory().getStatistics().getEntityDeleteCount(); assertEquals( expected, deletes, "unexpected delete counts" ); } }
CacheModeProvider
java
quarkusio__quarkus
extensions/smallrye-reactive-messaging/deployment/src/test/java/io/quarkus/smallrye/reactivemessaging/signatures/ProcessorSignatureTest.java
{ "start": 9508, "end": 10087 }
class ____ extends Spy { @Inject @Channel("H") Emitter<Integer> emitter; public Emitter<Integer> getEmitter() { return emitter; } @Incoming("H") @Outgoing("HH") public PublisherBuilder<String> process(int item) { return ReactiveStreams.of(item, item).map(i -> Integer.toString(i)); } @Incoming("HH") public void consume(String item) { getItems().add(item); } } @ApplicationScoped public static
BeanProducingAPublisherBuilderOfPayload
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/bugs/_3153/Issue3153Test.java
{ "start": 476, "end": 1107 }
class ____ { @ProcessorTest void shouldNotTrimStringValueSource() { assertThat( Issue3153Mapper.INSTANCE.mapToEnum( "PR" ) ).isEqualTo( Issue3153Mapper.Target.PR ); assertThat( Issue3153Mapper.INSTANCE.mapToEnum( " PR" ) ).isEqualTo( Issue3153Mapper.Target.PR ); assertThat( Issue3153Mapper.INSTANCE.mapToEnum( " PR" ) ).isEqualTo( Issue3153Mapper.Target.PR ); assertThat( Issue3153Mapper.INSTANCE.mapToEnum( " PR" ) ).isEqualTo( Issue3153Mapper.Target.PR ); assertThat( Issue3153Mapper.INSTANCE.mapFromEnum( Issue3153Mapper.Target.PR ) ).isEqualTo( " PR" ); } }
Issue3153Test
java
mybatis__mybatis-3
src/test/java/org/apache/ibatis/submitted/constructor_columnprefix/ConstructorColumnPrefixTest.java
{ "start": 1132, "end": 2914 }
class ____ { private static SqlSessionFactory sqlSessionFactory; @BeforeAll static void setUp() throws Exception { // create an SqlSessionFactory try (Reader reader = Resources .getResourceAsReader("org/apache/ibatis/submitted/constructor_columnprefix/mybatis-config.xml")) { sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader); } // populate in-memory database BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(), "org/apache/ibatis/submitted/constructor_columnprefix/CreateDB.sql"); } @Test void shouldGetArticles() { try (SqlSession sqlSession = sqlSessionFactory.openSession()) { Mapper mapper = sqlSession.getMapper(Mapper.class); List<Article> articles = mapper.getArticles(); assertArticles(articles); } } @Test void shouldGetArticlesAnno() { try (SqlSession sqlSession = sqlSessionFactory.openSession()) { Mapper mapper = sqlSession.getMapper(Mapper.class); List<Article> articles = mapper.getArticlesAnno(); assertArticles(articles); } } void assertArticles(List<Article> articles) { assertEquals(2, articles.size()); Article article1 = articles.get(0); assertEquals(Integer.valueOf(1), article1.getId().getId()); assertEquals("Article 1", article1.getName()); assertEquals("Mary", article1.getAuthor().getName()); assertEquals("Bob", article1.getCoauthor().getName()); Article article2 = articles.get(1); assertEquals(Integer.valueOf(2), article2.getId().getId()); assertEquals("Article 2", article2.getName()); assertEquals("Jane", article2.getAuthor().getName()); assertEquals("Mary", article2.getCoauthor().getName()); } }
ConstructorColumnPrefixTest
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java
{ "start": 2134, "end": 6199 }
class ____ implements Writeable, ToXContentFragment { public static final ParseField INCLUDE_FIELD = new ParseField("include"); public static final ParseField EXCLUDE_FIELD = new ParseField("exclude"); public static final ParseField PARTITION_FIELD = new ParseField("partition"); public static final ParseField NUM_PARTITIONS_FIELD = new ParseField("num_partitions"); // Needed to add this seed for a deterministic term hashing policy // otherwise tests fail to get expected results and worse, shards // can disagree on which terms hash to the required partition. private static final int HASH_PARTITIONING_SEED = 31; // for parsing purposes only // TODO: move all aggs to the same package so that this stuff could be pkg-private public static IncludeExclude merge(IncludeExclude include, IncludeExclude exclude) { if (include == null) { return exclude; } if (exclude == null) { return include; } if (include.isPartitionBased()) { throw new IllegalArgumentException("Cannot specify any excludes when using a partition-based include"); } return new IncludeExclude( include.include == null ? null : include.include.getOriginalString(), exclude.exclude == null ? 
null : exclude.exclude.getOriginalString(), include.includeValues, exclude.excludeValues ); } public static IncludeExclude parseInclude(XContentParser parser) throws IOException { XContentParser.Token token = parser.currentToken(); if (token == XContentParser.Token.VALUE_STRING) { return new IncludeExclude(parser.text(), null, null, null); } else if (token == XContentParser.Token.START_ARRAY) { return new IncludeExclude(null, null, new TreeSet<>(parseArrayToSet(parser)), null); } else if (token == XContentParser.Token.START_OBJECT) { String currentFieldName = null; Integer partition = null, numPartitions = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (NUM_PARTITIONS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { numPartitions = parser.intValue(); } else if (PARTITION_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { partition = parser.intValue(); } else { throw new ElasticsearchParseException("Unknown parameter in Include/Exclude clause: " + currentFieldName); } } if (partition == null) { throw new IllegalArgumentException( "Missing [" + PARTITION_FIELD.getPreferredName() + "] parameter for partition-based include" ); } if (numPartitions == null) { throw new IllegalArgumentException( "Missing [" + NUM_PARTITIONS_FIELD.getPreferredName() + "] parameter for partition-based include" ); } return new IncludeExclude(partition, numPartitions); } else { throw new IllegalArgumentException("Unrecognized token for an include [" + token + "]"); } } public static IncludeExclude parseExclude(XContentParser parser) throws IOException { XContentParser.Token token = parser.currentToken(); if (token == XContentParser.Token.VALUE_STRING) { return new IncludeExclude(null, parser.text(), null, null); } else if (token == XContentParser.Token.START_ARRAY) { return new IncludeExclude(null, null, null, new 
TreeSet<>(parseArrayToSet(parser))); } else { throw new IllegalArgumentException("Unrecognized token for an exclude [" + token + "]"); } } public abstract static
IncludeExclude
java
apache__kafka
clients/src/main/java/org/apache/kafka/common/TopicCollection.java
{ "start": 930, "end": 1047 }
class ____ to represent a collection of topics. This collection may define topics by name or ID. */ public abstract
used
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/PatternMatchingInstanceofTest.java
{ "start": 16435, "end": 16735 }
class ____ { private String val; public String stringify(Object o) { return !(o instanceof Test) ? "not a test" : ((Test) o).val; } } """) .addOutputLines( "Test.java", """
Test
java
spring-projects__spring-boot
core/spring-boot/src/test/java/org/springframework/boot/logging/structured/StructuredLoggingJsonPropertiesTests.java
{ "start": 1962, "end": 5239 }
class ____ { @Test void getWhenHasNoStackTracePropertiesBindsFromEnvironment() { MockEnvironment environment = new MockEnvironment(); setupJsonProperties(environment); StructuredLoggingJsonProperties properties = StructuredLoggingJsonProperties.get(environment); assertThat(properties).isEqualTo(new StructuredLoggingJsonProperties(Set.of("a", "b"), Set.of("c", "d"), Map.of("e", "f"), Map.of("g", "h"), null, null, Set.of(TestCustomizer.class))); } @Test void getWhenHasStackTracePropertiesBindsFromEnvironment() { MockEnvironment environment = new MockEnvironment(); setupJsonProperties(environment); environment.setProperty("logging.structured.json.stacktrace.printer", "standard"); environment.setProperty("logging.structured.json.stacktrace.root", "first"); environment.setProperty("logging.structured.json.stacktrace.max-length", "1024"); environment.setProperty("logging.structured.json.stacktrace.max-throwable-depth", "5"); environment.setProperty("logging.structured.json.stacktrace.include-common-frames", "true"); environment.setProperty("logging.structured.json.stacktrace.include-hashes", "true"); StructuredLoggingJsonProperties properties = StructuredLoggingJsonProperties.get(environment); assertThat(properties).isNotNull(); assertThat(properties.stackTrace()) .isEqualTo(new StructuredLoggingJsonProperties.StackTrace("standard", Root.FIRST, 1024, 5, true, true)); } private void setupJsonProperties(MockEnvironment environment) { environment.setProperty("logging.structured.json.include", "a,b"); environment.setProperty("logging.structured.json.exclude", "c,d"); environment.setProperty("logging.structured.json.rename.e", "f"); environment.setProperty("logging.structured.json.add.g", "h"); environment.setProperty("logging.structured.json.customizer", TestCustomizer.class.getName()); } @Test void getWhenNoBoundPropertiesReturnsNull() { MockEnvironment environment = new MockEnvironment(); StructuredLoggingJsonProperties.get(environment); } @Test void 
shouldRegisterRuntimeHints() throws Exception { RuntimeHints hints = new RuntimeHints(); new StructuredLoggingJsonPropertiesRuntimeHints().registerHints(hints, getClass().getClassLoader()); assertThat(RuntimeHintsPredicates.reflection().onType(StructuredLoggingJsonProperties.class)).accepts(hints); assertThat( RuntimeHintsPredicates.reflection() .onConstructorInvocation(StructuredLoggingJsonProperties.class.getDeclaredConstructor(Set.class, Set.class, Map.class, Map.class, StackTrace.class, Context.class, Set.class))) .accepts(hints); assertThat(RuntimeHintsPredicates.reflection() .onConstructorInvocation(StackTrace.class.getDeclaredConstructor(String.class, Root.class, Integer.class, Integer.class, Boolean.class, Boolean.class))) .accepts(hints); assertThat(RuntimeHintsPredicates.reflection() .onConstructorInvocation(Context.class.getDeclaredConstructor(boolean.class, String.class))).accepts(hints); } @Test void structuredLoggingJsonPropertiesRuntimeHintsIsRegistered() { assertThat(AotServices.factories().load(RuntimeHintsRegistrar.class)) .anyMatch(StructuredLoggingJsonPropertiesRuntimeHints.class::isInstance); } @Nested
StructuredLoggingJsonPropertiesTests
java
apache__kafka
clients/src/main/java/org/apache/kafka/clients/consumer/internals/AbstractCoordinator.java
{ "start": 80086, "end": 81812 }
class ____ { public static final Generation NO_GENERATION = new Generation( OffsetCommitRequest.DEFAULT_GENERATION_ID, JoinGroupRequest.UNKNOWN_MEMBER_ID, null); public final int generationId; public final String memberId; public final String protocolName; public Generation(int generationId, String memberId, String protocolName) { this.generationId = generationId; this.memberId = memberId; this.protocolName = protocolName; } /** * @return true if this generation has a valid member id, false otherwise. A member might have an id before * it becomes part of a group generation. */ public boolean hasMemberId() { return !memberId.isEmpty(); } @Override public boolean equals(final Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; final Generation that = (Generation) o; return generationId == that.generationId && Objects.equals(memberId, that.memberId) && Objects.equals(protocolName, that.protocolName); } @Override public int hashCode() { return Objects.hash(generationId, memberId, protocolName); } @Override public String toString() { return "Generation{" + "generationId=" + generationId + ", memberId='" + memberId + '\'' + ", protocol='" + protocolName + '\'' + '}'; } } private static
Generation
java
apache__avro
lang/java/ipc-jetty/src/test/java/org/apache/avro/ipc/jetty/TestStatsPluginAndServlet.java
{ "start": 2649, "end": 5047 }
class ____ extends GenericResponder { public TestResponder(Protocol local) { super(local); } @Override public Object respond(Message message, Object request) throws AvroRemoteException { assertEquals(0, ((GenericRecord) request).get("x")); return 1; } } private void makeRequest(Transceiver t) throws Exception { GenericRecord params = new GenericData.Record(protocol.getMessages().get("m").getRequest()); params.put("x", 0); GenericRequestor r = new GenericRequestor(protocol, t); assertEquals(1, r.request("m", params)); } @Test void fullServerPath() throws Exception { Responder r = new TestResponder(protocol); StatsPlugin statsPlugin = new StatsPlugin(); r.addRPCPlugin(statsPlugin); Transceiver t = new LocalTransceiver(r); for (int i = 0; i < 10; ++i) { makeRequest(t); } String o = generateServletResponse(statsPlugin); assertTrue(o.contains("10 calls")); } @Test void multipleRPCs() throws IOException { org.apache.avro.ipc.stats.FakeTicks t = new org.apache.avro.ipc.stats.FakeTicks(); StatsPlugin statsPlugin = new StatsPlugin(t, StatsPlugin.LATENCY_SEGMENTER, StatsPlugin.PAYLOAD_SEGMENTER); RPCContext context1 = makeContext(); RPCContext context2 = makeContext(); statsPlugin.serverReceiveRequest(context1); t.passTime(100 * MS); // first takes 100ms statsPlugin.serverReceiveRequest(context2); String r = generateServletResponse(statsPlugin); // Check in progress RPCs assertTrue(r.contains("m: 0ms")); assertTrue(r.contains("m: 100ms")); statsPlugin.serverSendResponse(context1); t.passTime(900 * MS); // second takes 900ms statsPlugin.serverSendResponse(context2); r = generateServletResponse(statsPlugin); assertTrue(r.contains("Average: 500.0ms")); } @Test void payloadSize() throws Exception { Responder r = new TestResponder(protocol); StatsPlugin statsPlugin = new StatsPlugin(); r.addRPCPlugin(statsPlugin); Transceiver t = new LocalTransceiver(r); makeRequest(t); String resp = generateServletResponse(statsPlugin); assertTrue(resp.contains("Average: 2.0")); } private 
RPCContext makeContext() { RPCContext context = new RPCContext(); context.setMessage(message); return context; } /** Sleeps as requested. */ private static
TestResponder
java
spring-projects__spring-security
config/src/test/java/org/springframework/security/config/annotation/web/reactive/WebFluxSecurityConfigurationTests.java
{ "start": 5685, "end": 5752 }
class ____ extends WebFluxSecurityConfiguration { } }
SubclassConfig
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/api/url/UrlAssert_hasHost_Test.java
{ "start": 871, "end": 1216 }
class ____ extends UrlAssertBaseTest { private String expected = "host"; @Override protected UrlAssert invoke_api_method() { return assertions.hasHost(expected); } @Override protected void verify_internal_effects() { verify(urls).assertHasHost(getInfo(assertions), getActual(assertions), expected); } }
UrlAssert_hasHost_Test
java
alibaba__druid
core/src/main/java/com/alibaba/druid/sql/dialect/informix/visitor/InformixOutputVisitor.java
{ "start": 361, "end": 1264 }
class ____ extends SQLASTOutputVisitor { public InformixOutputVisitor(StringBuilder appender) { this(appender, false); } public InformixOutputVisitor(StringBuilder appender, boolean parameterized) { super(appender, DbType.informix, Informix.DIALECT, parameterized); } protected void printSelectListBefore(SQLSelectQueryBlock x) { print(' '); SQLLimit limit = x.getLimit(); if (limit == null) { return; } SQLExpr offset = limit.getOffset(); SQLExpr first = limit.getRowCount(); if (offset != null) { print0(ucase ? "SKIP " : "skip "); offset.accept(this); } print0(ucase ? " FIRST " : " first "); first.accept(this); print(' '); } protected void printFetchFirst(SQLSelectQueryBlock x) { // ignore } }
InformixOutputVisitor
java
apache__hadoop
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemCreate.java
{ "start": 15493, "end": 94267 }
class ____ a double failure on close badly if the second * exception rethrows the first. */ @Test public void testTryWithResources() throws Throwable { final AzureBlobFileSystem fs = getFileSystem(); Path testFolderPath = path(TEST_FOLDER_PATH); Path testPath = new Path(testFolderPath, TEST_CHILD_FILE); try (FSDataOutputStream out = fs.create(testPath)) { out.write('1'); out.hsync(); // this will cause the next write to failAll fs.delete(testPath, false); out.write('2'); out.hsync(); fail("Expected a failure"); } catch (IOException fnfe) { //appendblob outputStream does not generate suppressed exception on close as it is //single threaded code if (!fs.getAbfsStore().isAppendBlobKey(fs.makeQualified(testPath).toString())) { // the exception raised in close() must be in the caught exception's // suppressed list Throwable[] suppressed = fnfe.getSuppressed(); Assertions.assertThat(suppressed.length) .describedAs("suppressed count should be 1").isEqualTo(1); Throwable inner = suppressed[0]; if (!(inner instanceof IOException)) { throw inner; } GenericTestUtils.assertExceptionContains(fnfe.getMessage(), inner); } } } /** * Attempts to write to the azure stream after it is closed will raise * an IOException. */ @Test public void testFilterFSWriteAfterClose() throws Throwable { try (AzureBlobFileSystem fs = getFileSystem()) { Path testPath = new Path(TEST_FOLDER_PATH, TEST_CHILD_FILE); FSDataOutputStream out = fs.create(testPath); intercept(IOException.class, () -> { try (FilterOutputStream fos = new FilterOutputStream(out)) { byte[] bytes = new byte[8 * ONE_MB]; fos.write(bytes); fos.write(bytes); fos.flush(); out.hsync(); fs.delete(testPath, false); // trigger the first failure throw intercept(IOException.class, () -> { fos.write('b'); out.hsync(); return "hsync didn't raise an IOE"; }); } }); } } /** * Tests if the number of connections made for: * 1. create overwrite=false of a file that doesnt pre-exist * 2. create overwrite=false of a file that pre-exists * 3. 
create overwrite=true of a file that doesnt pre-exist * 4. create overwrite=true of a file that pre-exists * matches the expectation when run against both combinations of * fs.azure.enable.conditional.create.overwrite=true and * fs.azure.enable.conditional.create.overwrite=false * @throws Throwable */ @Test public void testDefaultCreateOverwriteFileTest() throws Throwable { testCreateFileOverwrite(true); testCreateFileOverwrite(false); } public void testCreateFileOverwrite(boolean enableConditionalCreateOverwrite) throws Throwable { if (enableConditionalCreateOverwrite) { assumeHnsEnabled(); assumeDfsServiceType(); assumeThat(getIngressServiceType()) .as("DFS service type is required for this test") .isEqualTo(AbfsServiceType.DFS); } try (AzureBlobFileSystem currentFs = getFileSystem()) { Configuration config = new Configuration(this.getRawConfiguration()); config.set("fs.azure.enable.conditional.create.overwrite", Boolean.toString(enableConditionalCreateOverwrite)); config.set("fs.azure.enable.create.idempotency", "false"); AzureBlobFileSystemStore store = currentFs.getAbfsStore(); AbfsClient client = store.getClientHandler().getIngressClient(); try (AzureBlobFileSystem fs = (AzureBlobFileSystem) FileSystem.newInstance(currentFs.getUri(), config)) { long totalConnectionMadeBeforeTest = fs.getInstrumentationMap() .get(CONNECTIONS_MADE.getStatName()); int createRequestCount = 0; final Path nonOverwriteFile = new Path("/NonOverwriteTest_FileName_" + UUID.randomUUID().toString()); // Case 1: Not Overwrite - File does not pre-exist // create should be successful fs.create(nonOverwriteFile, false); // One request to server to create path should be issued // two calls added for - // 1. getFileStatus on DFS endpoint : 1 // getFileStatus on Blob endpoint: 1 ListBlobcall // 2. actual create call: 1 createRequestCount += ( client instanceof AbfsBlobClient && !getIsNamespaceEnabled(fs) ? 
2 : 1); assertAbfsStatistics( CONNECTIONS_MADE, totalConnectionMadeBeforeTest + createRequestCount, fs.getInstrumentationMap()); // Case 2: Not Overwrite - File pre-exists fs.registerListener(new TracingHeaderValidator( fs.getAbfsStore().getAbfsConfiguration().getClientCorrelationId(), fs.getFileSystemId(), FSOperationType.CREATE, false, 0)); intercept(FileAlreadyExistsException.class, () -> fs.create(nonOverwriteFile, false)); fs.registerListener(null); // One request to server to create path should be issued // Only single tryGetFileStatus should happen // 1. getFileStatus on DFS endpoint : 1 // getFileStatus on Blob endpoint: 1 (No Additional List blob call as file exists) createRequestCount += ( client instanceof AbfsBlobClient && !getIsNamespaceEnabled(fs) ? 2 : 1); assertAbfsStatistics( CONNECTIONS_MADE, totalConnectionMadeBeforeTest + createRequestCount, fs.getInstrumentationMap()); final Path overwriteFilePath = new Path("/OverwriteTest_FileName_" + UUID.randomUUID().toString()); // Case 3: Overwrite - File does not pre-exist // create should be successful fs.create(overwriteFilePath, true); /// One request to server to create path should be issued // two calls added for - // 1. getFileStatus on DFS endpoint : 1 // getFileStatus on Blob endpoint: 1 ListBlobCall + 1 GPS // 2. actual create call: 1 // 1 extra call when conditional overwrite is not enabled to check for empty directory createRequestCount += (client instanceof AbfsBlobClient && !getIsNamespaceEnabled(fs)) ? (enableConditionalCreateOverwrite ? 
2 : 3) : 1; assertAbfsStatistics( CONNECTIONS_MADE, totalConnectionMadeBeforeTest + createRequestCount, fs.getInstrumentationMap()); // Case 4: Overwrite - File pre-exists fs.registerListener(new TracingHeaderValidator( fs.getAbfsStore().getAbfsConfiguration().getClientCorrelationId(), fs.getFileSystemId(), FSOperationType.CREATE, true, 0)); fs.create(overwriteFilePath, true); fs.registerListener(null); createRequestCount += ( client instanceof AbfsBlobClient && !getIsNamespaceEnabled(fs) ? 1 : 0); // Second actual create call will hap if (enableConditionalCreateOverwrite) { // Three requests will be sent to server to create path, // 1. create without overwrite // 2. GetFileStatus to get eTag // 3. create with overwrite createRequestCount += 3; } else { createRequestCount += (client instanceof AbfsBlobClient && !getIsNamespaceEnabled(fs)) ? 2 : 1; } assertAbfsStatistics( CONNECTIONS_MADE, totalConnectionMadeBeforeTest + createRequestCount, fs.getInstrumentationMap()); } } } /** * Test negative scenarios with Create overwrite=false as default * With create overwrite=true ending in 3 calls: * A. Create overwrite=false * B. GFS * C. 
Create overwrite=true * * Scn1: A fails with HTTP409, leading to B which fails with HTTP404, * detect parallel access * Scn2: A fails with HTTP409, leading to B which fails with HTTP500, * fail create with HTTP500 * Scn3: A fails with HTTP409, leading to B and then C, * which fails with HTTP412, detect parallel access * Scn4: A fails with HTTP409, leading to B and then C, * which fails with HTTP500, fail create with HTTP500 * Scn5: A fails with HTTP500, fail create with HTTP500 */ @Test public void testNegativeScenariosForCreateOverwriteDisabled() throws Throwable { assumeHnsEnabled(); assumeDfsServiceType(); assumeThat(getIngressServiceType()) .as("DFS service type is required for this test") .isEqualTo(AbfsServiceType.DFS); try (AzureBlobFileSystem currentFs = getFileSystem()) { Configuration config = new Configuration(this.getRawConfiguration()); config.set("fs.azure.enable.conditional.create.overwrite", Boolean.toString(true)); try (AzureBlobFileSystem fs = (AzureBlobFileSystem) FileSystem.newInstance(currentFs.getUri(), config)) { // Get mock AbfsClient with current config AbfsClient mockClient = ITestAbfsClient.getMockAbfsClient( fs.getAbfsStore().getClient(), fs.getAbfsStore().getAbfsConfiguration()); AbfsClientHandler clientHandler = Mockito.mock(AbfsClientHandler.class); when(clientHandler.getIngressClient()).thenReturn(mockClient); when(clientHandler.getClient(Mockito.any())).thenReturn(mockClient); AzureBlobFileSystemStore abfsStore = fs.getAbfsStore(); ReflectionUtils.setFinalField(AzureBlobFileSystemStore.class, abfsStore, "clientHandler", clientHandler); ReflectionUtils.setFinalField(AzureBlobFileSystemStore.class, abfsStore, "client", mockClient); AbfsRestOperation successOp = mock( AbfsRestOperation.class); AbfsHttpOperation http200Op = mock( AbfsHttpOperation.class); when(http200Op.getStatusCode()).thenReturn(HTTP_OK); when(successOp.getResult()).thenReturn(http200Op); AbfsRestOperationException conflictResponseEx = 
getMockAbfsRestOperationException(HTTP_CONFLICT); AbfsRestOperationException serverErrorResponseEx = getMockAbfsRestOperationException(HTTP_INTERNAL_ERROR); AbfsRestOperationException fileNotFoundResponseEx = getMockAbfsRestOperationException(HTTP_NOT_FOUND); AbfsRestOperationException preConditionResponseEx = getMockAbfsRestOperationException(HTTP_PRECON_FAILED); doCallRealMethod().when(mockClient) .conditionalCreateOverwriteFile(anyString(), Mockito.nullable(FileSystem.Statistics.class), Mockito.nullable(AzureBlobFileSystemStore.Permissions.class), anyBoolean(), Mockito.nullable(ContextEncryptionAdapter.class), Mockito.nullable(TracingContext.class)); // mock for overwrite=false doThrow(conflictResponseEx) // Scn1: GFS fails with Http404 .doThrow(conflictResponseEx) // Scn2: GFS fails with Http500 .doThrow( conflictResponseEx) // Scn3: create overwrite=true fails with Http412 .doThrow( conflictResponseEx) // Scn4: create overwrite=true fails with Http500 .doThrow( serverErrorResponseEx) // Scn5: create overwrite=false fails with Http500 .when(mockClient) .createPath(any(String.class), eq(true), eq(false), any(AzureBlobFileSystemStore.Permissions.class), any(boolean.class), eq(null), any(), any(TracingContext.class)); doThrow(fileNotFoundResponseEx) // Scn1: GFS fails with Http404 .doThrow(serverErrorResponseEx) // Scn2: GFS fails with Http500 .doReturn( successOp) // Scn3: create overwrite=true fails with Http412 .doReturn( successOp) // Scn4: create overwrite=true fails with Http500 .when(mockClient) .getPathStatus(any(String.class), eq(false), any(TracingContext.class), nullable( ContextEncryptionAdapter.class)); // mock for overwrite=true doThrow( preConditionResponseEx) // Scn3: create overwrite=true fails with Http412 .doThrow( serverErrorResponseEx) // Scn4: create overwrite=true fails with Http500 .when(mockClient) .createPath(any(String.class), eq(true), eq(true), any(AzureBlobFileSystemStore.Permissions.class), any(boolean.class), eq(null), any(), 
any(TracingContext.class)); if (mockClient instanceof AbfsBlobClient) { doReturn(false).when((AbfsBlobClient) mockClient) .isNonEmptyDirectory(anyString(), Mockito.nullable(TracingContext.class)); doNothing().when((AbfsBlobClient) mockClient) .tryMarkerCreation(anyString(), anyBoolean(), Mockito.nullable(String.class), Mockito.nullable(ContextEncryptionAdapter.class), Mockito.nullable(TracingContext.class)); // mock for overwrite=true doThrow( preConditionResponseEx) // Scn3: create overwrite=true fails with Http412 .doThrow( serverErrorResponseEx) // Scn4: create overwrite=true fails with Http500 .when((AbfsBlobClient) mockClient) .createPathRestOp(any(String.class), eq(true), eq(true), any(boolean.class), eq(null), any(), any(TracingContext.class)); // mock for overwrite=false doThrow(conflictResponseEx) // Scn1: GFS fails with Http404 .doThrow(conflictResponseEx) // Scn2: GFS fails with Http500 .doThrow( conflictResponseEx) // Scn3: create overwrite=true fails with Http412 .doThrow( conflictResponseEx) // Scn4: create overwrite=true fails with Http500 .doThrow( serverErrorResponseEx) // Scn5: create overwrite=false fails with Http500 .when((AbfsBlobClient) mockClient) .createPathRestOp(any(String.class), eq(true), eq(false), any(boolean.class), eq(null), any(), any(TracingContext.class)); doThrow(fileNotFoundResponseEx) // Scn1: GFS fails with Http404 .doThrow(serverErrorResponseEx) // Scn2: GFS fails with Http500 .doReturn( successOp) // Scn3: create overwrite=true fails with Http412 .doReturn( successOp) // Scn4: create overwrite=true fails with Http500 .when((AbfsBlobClient) mockClient) .getPathStatus(any(String.class), any(TracingContext.class), nullable( ContextEncryptionAdapter.class), eq(false)); } // Scn1: GFS fails with Http404 // Sequence of events expected: // 1. create overwrite=false - fail with conflict // 2. 
GFS - fail with File Not found // Create will fail with ConcurrentWriteOperationDetectedException validateCreateFileException( ConcurrentWriteOperationDetectedException.class, abfsStore); // Scn2: GFS fails with Http500 // Sequence of events expected: // 1. create overwrite=false - fail with conflict // 2. GFS - fail with Server error // Create will fail with 500 validateCreateFileException(AbfsRestOperationException.class, abfsStore); // Scn3: create overwrite=true fails with Http412 // Sequence of events expected: // 1. create overwrite=false - fail with conflict // 2. GFS - pass // 3. create overwrite=true - fail with Pre-Condition // Create will fail with ConcurrentWriteOperationDetectedException validateCreateFileException( ConcurrentWriteOperationDetectedException.class, abfsStore); // Scn4: create overwrite=true fails with Http500 // Sequence of events expected: // 1. create overwrite=false - fail with conflict // 2. GFS - pass // 3. create overwrite=true - fail with Server error // Create will fail with 500 validateCreateFileException(AbfsRestOperationException.class, abfsStore); // Scn5: create overwrite=false fails with Http500 // Sequence of events expected: // 1. 
create overwrite=false - fail with server error // Create will fail with 500 validateCreateFileException(AbfsRestOperationException.class, abfsStore); } } } @Test public void testCreateMarkerFailExceptionIsSwallowed() throws Throwable { assumeBlobServiceType(); try (AzureBlobFileSystem currentFs = getFileSystem()) { Configuration config = new Configuration(this.getRawConfiguration()); config.set("fs.azure.enable.conditional.create.overwrite", Boolean.toString(true)); try (AzureBlobFileSystem fs = (AzureBlobFileSystem) FileSystem.newInstance(currentFs.getUri(), config)) { AbfsClient mockClient = Mockito.spy(fs.getAbfsClient()); AzureBlobFileSystemStore spiedStore = Mockito.spy(fs.getAbfsStore()); spiedStore.setClient(mockClient); AbfsClientHandler clientHandler = Mockito.mock(AbfsClientHandler.class); when(clientHandler.getIngressClient()).thenReturn(mockClient); when(clientHandler.getClient(Mockito.any())).thenReturn(mockClient); Path testFolder = new Path("/dir1"); createAzCopyFolder(testFolder); AzureBlobFileSystemStore abfsStore = fs.getAbfsStore(); ReflectionUtils.setFinalField(AzureBlobFileSystemStore.class, abfsStore, "clientHandler", clientHandler); ReflectionUtils.setFinalField(AzureBlobFileSystemStore.class, abfsStore, "client", mockClient); AbfsRestOperation successOp = mock(AbfsRestOperation.class); AbfsHttpOperation http200Op = mock(AbfsHttpOperation.class); when(http200Op.getStatusCode()).thenReturn(HTTP_OK); when(successOp.getResult()).thenReturn(http200Op); AbfsRestOperationException preConditionResponseEx = getMockAbfsRestOperationException(HTTP_PRECON_FAILED); doCallRealMethod().when(mockClient) .conditionalCreateOverwriteFile(anyString(), Mockito.nullable(FileSystem.Statistics.class), Mockito.nullable(AzureBlobFileSystemStore.Permissions.class), anyBoolean(), Mockito.nullable(ContextEncryptionAdapter.class), Mockito.nullable(TracingContext.class)); doCallRealMethod().when((AbfsBlobClient) mockClient) .tryMarkerCreation(anyString(), anyBoolean(), 
Mockito.nullable(String.class), Mockito.nullable(ContextEncryptionAdapter.class), Mockito.nullable(TracingContext.class)); Mockito.doReturn(new ArrayList<>(Collections.singletonList(testFolder))) .when((AbfsBlobClient) mockClient) .getMarkerPathsTobeCreated(any(Path.class), Mockito.nullable(TracingContext.class)); doReturn(false).when((AbfsBlobClient) mockClient) .isNonEmptyDirectory(anyString(), Mockito.nullable(TracingContext.class)); doAnswer(new Answer<Void>() { private boolean firstCall = true; @Override public Void answer(InvocationOnMock invocation) throws Throwable { if (firstCall) { firstCall = false; throw preConditionResponseEx; } return null; } }).doCallRealMethod() .when((AbfsBlobClient) mockClient) .createPathRestOp(anyString(), anyBoolean(), anyBoolean(), anyBoolean(), Mockito.nullable(String.class), Mockito.nullable(ContextEncryptionAdapter.class), Mockito.nullable(TracingContext.class)); AbfsClientTestUtil.hookOnRestOpsForTracingContextSingularity(mockClient); doReturn(successOp) .when((AbfsBlobClient) mockClient) .getPathStatus(any(String.class), any(TracingContext.class), nullable(ContextEncryptionAdapter.class), eq(false)); FsPermission permission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL); FsPermission umask = new FsPermission(FsAction.NONE, FsAction.NONE, FsAction.NONE); Path testPath = new Path("/dir1/testFile"); abfsStore.createFile(testPath, null, true, permission, umask, getTestTracingContext(getFileSystem(), true)); Assertions.assertThat(fs.exists(testPath)) .describedAs("File not created when marker creation failed.") .isTrue(); } } } private <E extends Throwable> void validateCreateFileException(final Class<E> exceptionClass, final AzureBlobFileSystemStore abfsStore) throws Exception { FsPermission permission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL); FsPermission umask = new FsPermission(FsAction.NONE, FsAction.NONE, FsAction.NONE); Path testPath = new Path("/testFile"); intercept( exceptionClass, 
() -> abfsStore.createFile(testPath, null, true, permission, umask, getTestTracingContext(getFileSystem(), true))); } private AbfsRestOperationException getMockAbfsRestOperationException(int status) { return new AbfsRestOperationException(status, "", "", new Exception()); } /** * Attempts to test multiple flush calls. */ @Test public void testMultipleFlush() throws Throwable { try (AzureBlobFileSystem fs = getFileSystem()) { Path testPath = new Path(TEST_FOLDER_PATH, TEST_CHILD_FILE); try (FSDataOutputStream out = fs.create(testPath)) { out.write('1'); out.hsync(); out.write('2'); out.hsync(); } } } /** * Delete the blob before flush and verify that an exception should be thrown. */ @Test public void testDeleteBeforeFlush() throws Throwable { try (AzureBlobFileSystem fs = getFileSystem()) { Path testPath = new Path(TEST_FOLDER_PATH, TEST_CHILD_FILE); try (FSDataOutputStream out = fs.create(testPath)) { out.write('1'); fs.delete(testPath, false); out.hsync(); // this will cause the next write to failAll } catch (IOException fnfe) { //appendblob outputStream does not generate suppressed exception on close as it is //single threaded code if (!fs.getAbfsStore() .isAppendBlobKey(fs.makeQualified(testPath).toString())) { // the exception raised in close() must be in the caught exception's // suppressed list Throwable[] suppressed = fnfe.getSuppressed(); assertEquals(1, suppressed.length, "suppressed count"); Throwable inner = suppressed[0]; if (!(inner instanceof IOException)) { throw inner; } GenericTestUtils.assertExceptionContains(fnfe.getMessage(), inner.getCause(), inner.getCause().getMessage()); } } } } /** * Creating subdirectory on existing file path should fail. 
* @throws Exception */ @Test public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.create(new Path("a/b/c")); fs.mkdirs(new Path("a/b/d")); intercept(IOException.class, () -> fs.mkdirs(new Path("a/b/c/d/e"))); Assertions.assertThat(fs.exists(new Path("a/b/c"))).isTrue(); Assertions.assertThat(fs.exists(new Path("a/b/d"))).isTrue(); // Asserting directory created still exists as explicit. Assertions.assertThat( DirectoryStateHelper.isExplicitDirectory(new Path("a/b/d"), fs, getTestTracingContext(fs, true))) .describedAs("Path is not an explicit directory") .isTrue(); } } /** * Calling mkdir for existing implicit directory. * @throws Exception */ @Test public void testMkdirSameFolder() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { createAzCopyFolder(new Path("a/b/d")); fs.mkdirs(new Path("a/b/d")); } } /** * Try creating file same as an existing directory. * @throws Exception */ @Test public void testCreateDirectoryAndFile() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.mkdirs(new Path("a/b/c")); Assertions.assertThat(fs.exists(new Path("a/b/c"))).isTrue(); intercept(IOException.class, () -> fs.create(new Path("a/b/c"))); // Asserting that directory still exists as explicit Assertions.assertThat( DirectoryStateHelper.isExplicitDirectory(new Path("a/b/c"), fs, getTestTracingContext(fs, true))) .describedAs("Path is not an explicit directory") .isTrue(); } } /** * Creating same file without specifying overwrite. * @throws Exception */ @Test public void testCreateSameFile() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.create(new Path("a/b/c")); fs.create(new Path("a/b/c")); Assertions.assertThat(fs.exists(new Path("a/b/c"))) .describedAs("Path does not exist") .isTrue(); } } /** * Test the creation of a file without conditional overwrite. 
* This test sets the configuration `fs.azure.enable.conditional.create.overwrite` to false, * creates a directory, and then attempts to create a file at the same path with overwrite set to true. * It expects an IOException to be thrown. * * @throws Exception if any exception occurs during the test execution */ @Test public void testCreationWithoutConditionalOverwrite() throws Exception { try (AzureBlobFileSystem currentFs = getFileSystem()) { Configuration config = new Configuration(this.getRawConfiguration()); config.set("fs.azure.enable.conditional.create.overwrite", String.valueOf(false)); try (AzureBlobFileSystem fs = (AzureBlobFileSystem) FileSystem.newInstance(currentFs.getUri(), config)) { fs.mkdirs(new Path("a/b/c")); intercept(IOException.class, () -> fs.create(new Path("a/b/c"), true)); } } } /** * Test the creation of a file with overwrite set to false without conditional overwrite. * This test sets the configuration `fs.azure.enable.conditional.create.overwrite` to false, * creates a directory, and then attempts to create a file at the same path with overwrite set to false. * It expects an IOException to be thrown. * * @throws Exception if any exception occurs during the test execution */ @Test public void testCreationOverwriteFalseWithoutConditionalOverwrite() throws Exception { try (AzureBlobFileSystem currentFs = getFileSystem()) { Configuration config = new Configuration(this.getRawConfiguration()); config.set("fs.azure.enable.conditional.create.overwrite", String.valueOf(false)); try (AzureBlobFileSystem fs = (AzureBlobFileSystem) FileSystem.newInstance( currentFs.getUri(), config)) { fs.mkdirs(new Path("a/b/c")); intercept(IOException.class, () -> fs.create(new Path("a/b/c"), false)); } } } /** * Creating same file with overwrite flag set to false. 
* @throws Exception */ @Test public void testCreateSameFileWithOverwriteFalse() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.create(new Path("a/b/c")); Assertions.assertThat(fs.exists(new Path("a/b/c"))) .describedAs("Path does not exist") .isTrue(); intercept(IOException.class, () -> fs.create(new Path("a/b/c"), false)); } } /** * Creation of already existing subpath should fail. * @throws Exception */ @Test public void testCreateSubPath() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.create(new Path("a/b/c")); Assertions.assertThat(fs.exists(new Path("a/b/c"))) .describedAs("Path does not exist") .isTrue(); intercept(IOException.class, () -> fs.create(new Path("a/b"))); } } /** * Test create path in parallel with overwrite false. **/ @Test public void testParallelCreateOverwriteFalse() throws Exception { Configuration configuration = getRawConfiguration(); configuration.set(FS_AZURE_ENABLE_CONDITIONAL_CREATE_OVERWRITE, "false"); configuration.set(FS_AZURE_ENABLE_CREATE_BLOB_IDEMPOTENCY, "false"); try (AzureBlobFileSystem fs = (AzureBlobFileSystem) FileSystem.newInstance( configuration)) { ExecutorService executorService = Executors.newFixedThreadPool(5); List<Future<?>> futures = new ArrayList<>(); final byte[] b = new byte[8 * ONE_MB]; new Random().nextBytes(b); final Path filePath = path("/testPath"); futures.add(executorService.submit(() -> { try { fs.create(filePath, false); } catch (IOException e) { throw new RuntimeException(e); } })); futures.add(executorService.submit(() -> { try { fs.create(filePath, false); } catch (IOException e) { throw new RuntimeException(e); } })); futures.add(executorService.submit(() -> { try { fs.create(filePath, false); } catch (IOException e) { throw new RuntimeException(e); } })); checkFuturesForExceptions(futures, 2); } } /** * Test create path in parallel with overwrite true. 
**/ @Test public void testParallelCreateOverwriteTrue() throws Exception { Configuration configuration = getRawConfiguration(); configuration.set(FS_AZURE_ENABLE_CONDITIONAL_CREATE_OVERWRITE, "false"); try (AzureBlobFileSystem fs = (AzureBlobFileSystem) FileSystem.newInstance( configuration)) { ExecutorService executorService = Executors.newFixedThreadPool(5); List<Future<?>> futures = new ArrayList<>(); final byte[] b = new byte[8 * ONE_MB]; new Random().nextBytes(b); final Path filePath = path("/testPath"); futures.add(executorService.submit(() -> { try { fs.create(filePath); } catch (IOException e) { throw new RuntimeException(e); } })); futures.add(executorService.submit(() -> { try { fs.create(filePath); } catch (IOException e) { throw new RuntimeException(e); } })); futures.add(executorService.submit(() -> { try { fs.create(filePath); } catch (IOException e) { throw new RuntimeException(e); } })); checkFuturesForExceptions(futures, 0); } } /** * Creating path with parent explicit. 
*/ @Test public void testCreatePathParentExplicit() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.mkdirs(new Path("a/b/c")); Assertions.assertThat(fs.exists(new Path("a/b/c"))) .describedAs("Path does not exist") .isTrue(); fs.create(new Path("a/b/c/d")); Assertions.assertThat(fs.exists(new Path("a/b/c/d"))) .describedAs("Path does not exist") .isTrue(); // asserting that parent stays explicit Assertions.assertThat( DirectoryStateHelper.isExplicitDirectory(new Path("a/b/c"), fs, getTestTracingContext(fs, true))) .describedAs("Path is not an explicit directory") .isTrue(); } } // Creation with append blob should succeed for blob endpoint @Test public void testCreateWithAppendBlobEnabled() throws IOException, NoSuchFieldException, IllegalAccessException { Configuration conf = getRawConfiguration(); try (AzureBlobFileSystem fs = Mockito.spy( (AzureBlobFileSystem) FileSystem.newInstance(conf))) { AzureBlobFileSystemStore store = Mockito.spy(fs.getAbfsStore()); doReturn(true).when(store).isAppendBlobKey(anyString()); // Set abfsStore as our mocked value. Field privateField = AzureBlobFileSystem.class.getDeclaredField( "abfsStore"); privateField.setAccessible(true); privateField.set(fs, store); Path testPath = path("/testPath"); AzureBlobFileSystemStore.Permissions permissions = new AzureBlobFileSystemStore.Permissions(false, FsPermission.getDefault(), FsPermission.getUMask(fs.getConf())); fs.getAbfsStore().getClientHandler().getBlobClient(). createPath(makeQualified(testPath).toUri().getPath(), true, false, permissions, true, null, null, getTestTracingContext(fs, true)); } } /** * Test create on implicit directory with explicit parent. 
* @throws Exception */ @Test public void testParentExplicitPathImplicit() throws Exception { assumeBlobServiceType(); try (AzureBlobFileSystem fs = getFileSystem()) { fs.mkdirs(new Path("/explicitParent")); String sourcePathName = "/explicitParent/implicitDir"; Path sourcePath = new Path(sourcePathName); createAzCopyFolder(sourcePath); intercept(IOException.class, () -> fs.create(sourcePath, true)); intercept(IOException.class, () -> fs.create(sourcePath, false)); Assertions.assertThat( DirectoryStateHelper.isExplicitDirectory(sourcePath.getParent(), fs, getTestTracingContext(fs, true))) .describedAs("Parent directory should be explicit.") .isTrue(); Assertions.assertThat( DirectoryStateHelper.isImplicitDirectory(sourcePath, fs, getTestTracingContext(fs, true))) .describedAs("Path should be implicit.") .isTrue(); } } /** * Test create on implicit directory with implicit parent * @throws Exception */ @Test public void testParentImplicitPathImplicit() throws Exception { assumeBlobServiceType(); try (AzureBlobFileSystem fs = getFileSystem()) { String parentPathName = "/implicitParent"; Path parentPath = new Path(parentPathName); String sourcePathName = "/implicitParent/implicitDir"; Path sourcePath = new Path(sourcePathName); createAzCopyFolder(parentPath); createAzCopyFolder(sourcePath); intercept(IOException.class, () -> fs.create(sourcePath, true)); intercept(IOException.class, () -> fs.create(sourcePath, false)); Assertions.assertThat( DirectoryStateHelper.isImplicitDirectory(parentPath, fs, getTestTracingContext(fs, true))) .describedAs("Parent directory is implicit.") .isTrue(); Assertions.assertThat( DirectoryStateHelper.isImplicitDirectory(sourcePath, fs, getTestTracingContext(fs, true))) .describedAs("Path should also be implicit.") .isTrue(); } } /** * Tests create file when file exists already and parent is implicit * Verifies using eTag for overwrite = true/false */ @Test public void testCreateFileExistsImplicitParent() throws Exception { try 
(AzureBlobFileSystem fs = getFileSystem()) { String parentPathName = "/implicitParent"; Path parentPath = new Path(parentPathName); createAzCopyFolder(parentPath); String fileName = "/implicitParent/testFile"; Path filePath = new Path(fileName); fs.create(filePath); String eTag = extractFileEtag(fileName); // testing createFile on already existing file path fs.create(filePath, true); String eTagAfterCreateOverwrite = extractFileEtag(fileName); Assertions.assertThat(eTag.equals(eTagAfterCreateOverwrite)) .describedAs( "New file eTag after create overwrite should be different from old") .isFalse(); intercept(IOException.class, () -> fs.create(filePath, false)); String eTagAfterCreate = extractFileEtag(fileName); Assertions.assertThat(eTagAfterCreateOverwrite.equals(eTagAfterCreate)) .describedAs("File eTag should not change as creation fails") .isTrue(); Assertions.assertThat( DirectoryStateHelper.isExplicitDirectory(parentPath, fs, getTestTracingContext(fs, true))) .describedAs("Parent path should also change to explicit.") .isTrue(); } } /** * Tests create file when file exists already and parent is explicit * Verifies using eTag for overwrite = true/false */ @Test public void testCreateFileExistsExplicitParent() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { String parentPathName = "/explicitParent"; Path parentPath = new Path(parentPathName); fs.mkdirs(parentPath); String fileName = "/explicitParent/testFile"; Path filePath = new Path(fileName); fs.create(filePath); String eTag = extractFileEtag(fileName); // testing createFile on already existing file path fs.create(filePath, true); String eTagAfterCreateOverwrite = extractFileEtag(fileName); Assertions.assertThat(eTag.equals(eTagAfterCreateOverwrite)) .describedAs( "New file eTag after create overwrite should be different from old") .isFalse(); intercept(IOException.class, () -> fs.create(filePath, false)); String eTagAfterCreate = extractFileEtag(fileName); 
Assertions.assertThat(eTagAfterCreateOverwrite.equals(eTagAfterCreate)) .describedAs("File eTag should not change as creation fails") .isTrue(); Assertions.assertThat( DirectoryStateHelper.isExplicitDirectory(parentPath, fs, getTestTracingContext(fs, true))) .describedAs("Parent path should also change to explicit.") .isTrue(); } } /** * Tests create file when the parent is an existing file * should fail. * @throws Exception FileAlreadyExists for blob and IOException for dfs. */ @Test public void testCreateFileParentFile() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { String parentName = "/testParentFile"; Path parent = new Path(parentName); fs.create(parent); String childName = "/testParentFile/testChildFile"; Path child = new Path(childName); IOException e = intercept(IOException.class, () -> fs.create(child, false)); // asserting that parent stays explicit FileStatus status = fs.getAbfsStore() .getFileStatus(fs.makeQualified(new Path(parentName)), new TracingContext(getTestTracingContext(fs, true))); Assertions.assertThat(status.isDirectory()) .describedAs("Path is not a file") .isFalse(); } } /** * Creating directory on existing file path should fail. * @throws Exception */ @Test public void testCreateMkdirs() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.create(new Path("a/b/c")); intercept(IOException.class, () -> fs.mkdirs(new Path("a/b/c/d"))); } } /** * Test mkdirs. 
* @throws Exception */ @Test public void testMkdirs() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.mkdirs(new Path("a/b")); fs.mkdirs(new Path("a/b/c/d")); fs.mkdirs(new Path("a/b/c/e")); Assertions.assertThat(fs.exists(new Path("a/b"))) .describedAs("Path a/b does not exist") .isTrue(); Assertions.assertThat(fs.exists(new Path("a/b/c/d"))) .describedAs("Path a/b/c/d does not exist") .isTrue(); Assertions.assertThat(fs.exists(new Path("a/b/c/e"))) .describedAs("Path a/b/c/e does not exist") .isTrue(); // Asserting that directories created as explicit FileStatus status = fs.getAbfsStore() .getFileStatus(fs.makeQualified(new Path("a/b")), new TracingContext(getTestTracingContext(fs, true))); Assertions.assertThat(status.isDirectory()) .describedAs("Path a/b is not an explicit directory") .isTrue(); FileStatus status1 = fs.getAbfsStore() .getFileStatus(fs.makeQualified(new Path("a/b/c/d")), new TracingContext(getTestTracingContext(fs, true))); Assertions.assertThat(status1.isDirectory()) .describedAs("Path a/b/c/d is not an explicit directory") .isTrue(); FileStatus status2 = fs.getAbfsStore() .getFileStatus(fs.makeQualified(new Path("a/b/c/e")), new TracingContext(getTestTracingContext(fs, true))); Assertions.assertThat(status2.isDirectory()) .describedAs("Path a/b/c/e is not an explicit directory") .isTrue(); } } /** * Creating subpath of directory path should fail. 
* @throws Exception */ @Test public void testMkdirsCreateSubPath() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.mkdirs(new Path("a/b/c")); Assertions.assertThat(fs.exists(new Path("a/b/c"))) .describedAs("Path a/b/c does not exist") .isTrue(); intercept(IOException.class, () -> fs.create(new Path("a/b"))); // Asserting that directories created as explicit FileStatus status2 = fs.getAbfsStore() .getFileStatus(fs.makeQualified(new Path("a/b/c")), new TracingContext(getTestTracingContext(fs, true))); Assertions.assertThat(status2.isDirectory()) .describedAs("Path a/b/c is not an explicit directory") .isTrue(); } } /** * Test creation of directory by level. * @throws Exception */ @Test public void testMkdirsByLevel() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.mkdirs(new Path("a")); fs.mkdirs(new Path("a/b/c")); fs.mkdirs(new Path("a/b/c/d/e")); Assertions.assertThat(fs.exists(new Path("a"))) .describedAs("Path a does not exist") .isTrue(); Assertions.assertThat(fs.exists(new Path("a/b/c"))) .describedAs("Path a/b/c does not exist") .isTrue(); Assertions.assertThat(fs.exists(new Path("a/b/c/d/e"))) .describedAs("Path a/b/c/d/e does not exist") .isTrue(); // Asserting that directories created as explicit FileStatus status = fs.getAbfsStore() .getFileStatus(fs.makeQualified(new Path("a/")), new TracingContext(getTestTracingContext(fs, true))); Assertions.assertThat(status.isDirectory()) .describedAs("Path a is not an explicit directory") .isTrue(); FileStatus status1 = fs.getAbfsStore() .getFileStatus(fs.makeQualified(new Path("a/b/c")), new TracingContext(getTestTracingContext(fs, true))); Assertions.assertThat(status1.isDirectory()) .describedAs("Path a/b/c is not an explicit directory") .isTrue(); FileStatus status2 = fs.getAbfsStore() .getFileStatus(fs.makeQualified(new Path("a/b/c/d/e")), new TracingContext(getTestTracingContext(fs, true))); Assertions.assertThat(status2.isDirectory()) .describedAs("Path a/b/c/d/e 
is not an explicit directory") .isTrue(); } } /* Delete part of a path and validate sub path exists. */ @Test public void testMkdirsWithDelete() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.mkdirs(new Path("a/b")); fs.mkdirs(new Path("a/b/c/d")); fs.delete(new Path("a/b/c/d")); fs.getFileStatus(new Path("a/b/c")); Assertions.assertThat(fs.exists(new Path("a/b/c"))) .describedAs("Path a/b/c does not exist") .isTrue(); } } /** * Verify mkdir and rename of parent. */ @Test public void testMkdirsWithRename() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.mkdirs(new Path("a/b/c/d")); fs.create(new Path("e/file")); fs.delete(new Path("a/b/c/d")); Assertions.assertThat(fs.rename(new Path("e"), new Path("a/b/c/d"))) .describedAs("Failed to rename path e to a/b/c/d") .isTrue(); Assertions.assertThat(fs.exists(new Path("a/b/c/d/file"))) .describedAs("Path a/b/c/d/file does not exist") .isTrue(); } } /** * Create a file with name /dir1 and then mkdirs for /dir1/dir2 should fail. * @throws Exception */ @Test public void testFileCreateMkdirsRoot() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.setWorkingDirectory(new Path("/")); final Path p1 = new Path("dir1"); fs.create(p1); intercept(IOException.class, () -> fs.mkdirs(new Path("dir1/dir2"))); } } /** * Create a file with name /dir1 and then mkdirs for /dir1/dir2 should fail. * @throws Exception */ @Test public void testFileCreateMkdirsNonRoot() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { final Path p1 = new Path("dir1"); fs.create(p1); intercept(IOException.class, () -> fs.mkdirs(new Path("dir1/dir2"))); } } /** * Creation of same directory without overwrite flag should pass. 
* @throws Exception */ @Test public void testCreateSameDirectory() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.mkdirs(new Path("a/b/c")); fs.mkdirs(new Path("a/b/c")); Assertions.assertThat(fs.exists(new Path("a/b/c"))) .describedAs("Path a/b/c does not exist") .isTrue(); // Asserting that directories created as explicit FileStatus status = fs.getAbfsStore() .getFileStatus(fs.makeQualified(new Path("a/b/c")), new TracingContext(getTestTracingContext(fs, true))); Assertions.assertThat(status.isDirectory()) .describedAs("Path a/b/c is not an explicit directory") .isTrue(); } } /** * Creation of same directory without overwrite flag should pass. * @throws Exception */ @Test public void testCreateSamePathDirectory() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.create(new Path("a")); intercept(IOException.class, () -> fs.mkdirs(new Path("a"))); } } /** * Creation of directory with root as parent */ @Test public void testMkdirOnRootAsParent() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { final Path path = new Path("a"); fs.setWorkingDirectory(new Path("/")); fs.mkdirs(path); // Asserting that the directory created by mkdir exists as explicit. 
FileStatus status = fs.getAbfsStore() .getFileStatus(fs.makeQualified(new Path("a")), new TracingContext(getTestTracingContext(fs, true))); Assertions.assertThat(status.isDirectory()) .describedAs("Path a is not an explicit directory") .isTrue(); } } /** * Creation of directory on root */ @Test public void testMkdirOnRoot() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { final Path path = new Path("/"); fs.setWorkingDirectory(new Path("/")); fs.mkdirs(path); FileStatus status = fs.getAbfsStore() .getFileStatus(fs.makeQualified(new Path("/")), new TracingContext(getTestTracingContext(fs, true))); Assertions.assertThat(status.isDirectory()) .describedAs("Path is not an explicit directory") .isTrue(); } } /** * Creation of file on path with unicode chars */ @Test public void testCreateUnicode() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { final Path path = new Path("/file\u0031"); fs.create(path); Assertions.assertThat(fs.exists(path)) .describedAs("Path with unicode does not exist") .isTrue(); } } /** * Creation of directory on path with unicode chars */ @Test public void testMkdirUnicode() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { final Path path = new Path("/dir\u0031"); fs.mkdirs(path); // Asserting that the directory created by mkdir exists as explicit. FileStatus status = fs.getAbfsStore() .getFileStatus(fs.makeQualified(path), new TracingContext(getTestTracingContext(fs, true))); Assertions.assertThat(status.isDirectory()) .describedAs("Path is not an explicit directory") .isTrue(); } } /** * Creation of directory on same path with parallel threads. 
*/ @Test public void testMkdirParallelRequests() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { final Path path = new Path("/dir1"); ExecutorService es = Executors.newFixedThreadPool(3); List<CompletableFuture<Void>> tasks = new ArrayList<>(); for (int i = 0; i < 3; i++) { CompletableFuture<Void> future = CompletableFuture.runAsync(() -> { try { fs.mkdirs(path); } catch (IOException e) { throw new CompletionException(e); } }, es); tasks.add(future); } // Wait for all the tasks to complete CompletableFuture.allOf(tasks.toArray(new CompletableFuture[0])).join(); // Assert that the directory created by mkdir exists as explicit FileStatus status = fs.getAbfsStore() .getFileStatus(fs.makeQualified(path), new TracingContext(getTestTracingContext(fs, true))); Assertions.assertThat(status.isDirectory()) .describedAs("Path is not an explicit directory") .isTrue(); } } /** * Creation of directory with overwrite set to false should not fail according to DFS code. * @throws Exception */ @Test public void testCreateSameDirectoryOverwriteFalse() throws Exception { Configuration configuration = getRawConfiguration(); configuration.setBoolean(FS_AZURE_ENABLE_MKDIR_OVERWRITE, false); try (AzureBlobFileSystem fs1 = (AzureBlobFileSystem) FileSystem.newInstance(configuration)) { fs1.mkdirs(new Path("a/b/c")); fs1.mkdirs(new Path("a/b/c")); // Asserting that directories created as explicit FileStatus status = fs1.getAbfsStore() .getFileStatus(fs1.makeQualified(new Path("a/b/c")), new TracingContext(getTestTracingContext(fs1, true))); Assertions.assertThat(status.isDirectory()) .describedAs("Path is not an explicit directory") .isTrue(); } } /** * Try creating directory same as an existing file. 
*/ @Test public void testCreateDirectoryAndFileRecreation() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { fs.mkdirs(new Path("a/b/c")); fs.create(new Path("a/b/c/d")); Assertions.assertThat(fs.exists(new Path("a/b/c"))) .describedAs("Directory a/b/c does not exist") .isTrue(); Assertions.assertThat(fs.exists(new Path("a/b/c/d"))) .describedAs("File a/b/c/d does not exist") .isTrue(); intercept(IOException.class, () -> fs.mkdirs(new Path("a/b/c/d"))); } } @Test public void testCreateNonRecursiveForAtomicDirectoryFile() throws Exception { try (AzureBlobFileSystem fileSystem = getFileSystem()) { fileSystem.setWorkingDirectory(new Path("/")); fileSystem.mkdirs(new Path("/hbase/dir")); fileSystem.createFile(new Path("/hbase/dir/file")) .overwrite(false) .replication((short) 1) .bufferSize(1024) .blockSize(1024) .build(); Assertions.assertThat(fileSystem.exists(new Path("/hbase/dir/file"))) .describedAs("File /hbase/dir/file does not exist") .isTrue(); } } /** * Test creating a file on a non-existing path with an implicit parent directory. * This test creates an implicit directory, then creates a file * inside this implicit directory and asserts that it gets created. * * @throws Exception if any exception occurs during the test execution */ @Test public void testCreateOnNonExistingPathWithImplicitParentDir() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { final Path implicitPath = new Path("dir1"); final Path path = new Path("dir1/dir2"); createAzCopyFolder(implicitPath); // Creating a directory on non-existing path inside an implicit directory fs.create(path); // Asserting that path created by azcopy becomes explicit. 
Assertions.assertThat(fs.exists(path)) .describedAs("File dir1/dir2 does not exist") .isTrue(); } } @Test public void testMkdirOnNonExistingPathWithImplicitParentDir() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { final Path implicitPath = new Path("dir1"); final Path path = new Path("dir1/dir2"); createAzCopyFolder(implicitPath); // Creating a directory on non-existing path inside an implicit directory fs.mkdirs(path); // Asserting that path created by azcopy becomes explicit. Assertions.assertThat( DirectoryStateHelper.isExplicitDirectory(implicitPath, fs, getTestTracingContext(fs, true))) .describedAs("Path created by azcopy did not become explicit") .isTrue(); // Asserting that the directory created by mkdir exists as explicit. Assertions.assertThat(DirectoryStateHelper.isExplicitDirectory(path, fs, getTestTracingContext(fs, true))) .describedAs("Directory created by mkdir does not exist as explicit") .isTrue(); } } /** * Creation of directory with parent directory existing as implicit. * And the directory to be created existing as explicit directory * @throws Exception */ @Test public void testMkdirOnExistingExplicitDirWithImplicitParentDir() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { final Path implicitPath = new Path("dir1"); final Path path = new Path("dir1/dir2"); // Creating implicit directory to be used as parent createAzCopyFolder(implicitPath); // Creating an explicit directory on the path first fs.mkdirs(path); // Creating a directory on existing explicit directory inside an implicit directory fs.mkdirs(path); // Asserting that path created by azcopy becomes explicit. Assertions.assertThat( DirectoryStateHelper.isExplicitDirectory(implicitPath, fs, getTestTracingContext(fs, true))) .describedAs("Path created by azcopy did not become explicit") .isTrue(); // Asserting that the directory created by mkdir exists as explicit. 
Assertions.assertThat(DirectoryStateHelper.isExplicitDirectory(path, fs, getTestTracingContext(fs, true))) .describedAs("Directory created by mkdir does not exist as explicit") .isTrue(); } } /** * Creation of directory with parent directory existing as explicit. * And the directory to be created existing as implicit directory * @throws Exception */ @Test public void testMkdirOnExistingImplicitDirWithExplicitParentDir() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { final Path explicitPath = new Path("dir1"); final Path path = new Path("dir1/dir2"); // Creating an explicit directory to be used a parent fs.mkdirs(explicitPath); createAzCopyFolder(path); // Creating a directory on existing implicit directory inside an explicit directory fs.mkdirs(path); // Asserting that the directory created by mkdir exists as explicit. Assertions.assertThat( DirectoryStateHelper.isExplicitDirectory(explicitPath, fs, getTestTracingContext(fs, true))) .describedAs("Explicit parent directory does not exist as explicit") .isTrue(); // Asserting that the directory created by mkdir exists as explicit. Assertions.assertThat(DirectoryStateHelper.isImplicitDirectory(path, fs, getTestTracingContext(fs, true))) .describedAs("Mkdir created explicit directory") .isTrue(); } } /** * Creation of directory with parent directory existing as implicit. 
* And the directory to be created existing as implicit directory * @throws Exception */ @Test public void testMkdirOnExistingImplicitDirWithImplicitParentDir() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { final Path implicitPath = new Path("dir3"); final Path path = new Path("dir3/dir4"); createAzCopyFolder(implicitPath); // Creating an implicit directory on path createAzCopyFolder(path); // Creating a directory on existing implicit directory inside an implicit directory fs.mkdirs(path); Assertions.assertThat( DirectoryStateHelper.isImplicitDirectory(implicitPath, fs, getTestTracingContext(fs, true))) .describedAs("Marker is present for path created by azcopy") .isTrue(); // Asserting that the mkdir didn't create markers for existing directory. Assertions.assertThat(DirectoryStateHelper.isImplicitDirectory(path, fs, getTestTracingContext(fs, true))) .describedAs("Marker is present for existing directory") .isTrue(); } } /** * Creation of directory with parent directory existing as implicit. * And the directory to be created existing as file * @throws Exception */ @Test public void testMkdirOnExistingFileWithImplicitParentDir() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { final Path implicitPath = new Path("dir1"); final Path path = new Path("dir1/dir2"); createAzCopyFolder(implicitPath); // Creating a file on path fs.create(path); // Creating a directory on existing file inside an implicit directory // Asserting that the mkdir fails LambdaTestUtils.intercept(FileAlreadyExistsException.class, () -> { fs.mkdirs(path); }); // Asserting that the file still exists at path. Assertions.assertThat(DirectoryStateHelper.isExplicitDirectory(path, fs, getTestTracingContext(fs, true))) .describedAs("File still exists at path") .isFalse(); } } /** * 1. a/b/c as implicit. * 2. Create marker for b. * 3. Do mkdir on a/b/c/d. * 4. Verify all b,c,d have marker but a is implicit. 
*/ @Test public void testImplicitExplicitFolder() throws Exception { Configuration configuration = Mockito.spy(getRawConfiguration()); try (AzureBlobFileSystem fs = (AzureBlobFileSystem) FileSystem.newInstance(configuration)) { final Path implicitPath = new Path("a/b/c"); createAzCopyFolder(implicitPath); Path path = makeQualified(new Path("a/b")); AbfsBlobClient blobClient = (AbfsBlobClient) fs.getAbfsStore() .getClient(AbfsServiceType.BLOB); blobClient.createPathRestOp(path.toUri().getPath(), false, true, false, null, null, getTestTracingContext(fs, true)); fs.mkdirs(new Path("a/b/c/d")); Assertions.assertThat( DirectoryStateHelper.isImplicitDirectory(new Path("a"), fs, getTestTracingContext(fs, true))) .describedAs("Directory 'a' should be implicit") .isTrue(); Assertions.assertThat( DirectoryStateHelper.isExplicitDirectory(new Path("a/b"), fs, getTestTracingContext(fs, true))) .describedAs("Directory 'a/b' should be explicit") .isTrue(); Assertions.assertThat( DirectoryStateHelper.isExplicitDirectory(new Path("a/b/c"), fs, getTestTracingContext(fs, true))) .describedAs("Directory 'a/b/c' should be explicit") .isTrue(); Assertions.assertThat( DirectoryStateHelper.isExplicitDirectory(new Path("a/b/c/d"), fs, getTestTracingContext(fs, true))) .describedAs("Directory 'a/b/c/d' should be explicit") .isTrue(); } } /** * 1. a/b/c implicit. * 2. Marker for a and c. * 3. mkdir on a/b/c/d. * 4. Verify a,c,d are explicit but b is implicit. 
*/ @Test public void testImplicitExplicitFolder1() throws Exception { Configuration configuration = Mockito.spy(getRawConfiguration()); try (AzureBlobFileSystem fs = (AzureBlobFileSystem) FileSystem.newInstance(configuration)) { final Path implicitPath = new Path("a/b/c"); createAzCopyFolder(implicitPath); Path path = makeQualified(new Path("a")); AbfsBlobClient blobClient = (AbfsBlobClient) fs.getAbfsStore() .getClient(AbfsServiceType.BLOB); blobClient.createPathRestOp(path.toUri().getPath(), false, true, false, null, null, getTestTracingContext(fs, true)); Path newPath = makeQualified(new Path("a/b/c")); blobClient.createPathRestOp(newPath.toUri().getPath(), false, true, false, null, null, getTestTracingContext(fs, true)); fs.mkdirs(new Path("a/b/c/d")); Assertions.assertThat( DirectoryStateHelper.isImplicitDirectory(new Path("a/b"), fs, getTestTracingContext(fs, true))) .describedAs("Directory 'a/b' should be implicit") .isTrue(); // Asserting that the directory created by mkdir exists as explicit. 
Assertions.assertThat( DirectoryStateHelper.isExplicitDirectory(new Path("a"), fs, getTestTracingContext(fs, true))) .describedAs("Directory 'a' should be explicit") .isTrue(); Assertions.assertThat( DirectoryStateHelper.isExplicitDirectory(new Path("a/b/c"), fs, getTestTracingContext(fs, true))) .describedAs("Directory 'a/b/c' should be explicit") .isTrue(); Assertions.assertThat( DirectoryStateHelper.isExplicitDirectory(new Path("a/b/c/d"), fs, getTestTracingContext(fs, true))) .describedAs("Directory 'a/b/c/d' should be explicit") .isTrue(); } } /** * Extracts the eTag for an existing file * @param fileName file Path in String from container root * @return String etag for the file * @throws IOException */ private String extractFileEtag(String fileName) throws IOException { final AzureBlobFileSystem fs = getFileSystem(); final AbfsClient client = fs.getAbfsClient(); final TracingContext testTracingContext = getTestTracingContext(fs, false); AbfsRestOperation op; op = client.getPathStatus(fileName, true, testTracingContext, null); return AzureBlobFileSystemStore.extractEtagHeader(op.getResult()); } /** * Tests the idempotency of creating a path with retries by simulating * a conflict response (HTTP 409) from the Azure Blob File System client. * The method ensures that the path creation operation retries correctly * with the proper transaction ID headers, verifying idempotency during * failure recovery. * * @throws Exception if any error occurs during the operation. 
*/ @Test public void testCreatePathRetryIdempotency() throws Exception { Configuration configuration = new Configuration(getRawConfiguration()); configuration.set(FS_AZURE_ENABLE_CLIENT_TRANSACTION_ID, "true"); try (AzureBlobFileSystem fs = getFileSystem(configuration)) { assumeRecoveryThroughClientTransactionID(true); AbfsDfsClient abfsClient = mockIngressClientHandler(fs); final Path nonOverwriteFile = new Path( "/NonOverwriteTest_FileName_" + UUID.randomUUID()); final List<AbfsHttpHeader> headers = new ArrayList<>(); mockRetriedRequest(abfsClient, headers); AbfsRestOperation getPathRestOp = Mockito.mock(AbfsRestOperation.class); AbfsHttpOperation op = Mockito.mock(AbfsHttpOperation.class); Mockito.doAnswer(answer -> { String requiredHeader = null; for (AbfsHttpHeader httpHeader : headers) { if (X_MS_CLIENT_TRANSACTION_ID.equalsIgnoreCase( httpHeader.getName())) { requiredHeader = httpHeader.getValue(); break; } } return requiredHeader; }).when(op).getResponseHeader(X_MS_CLIENT_TRANSACTION_ID); Mockito.doReturn(true).when(getPathRestOp).hasResult(); Mockito.doReturn(op).when(getPathRestOp).getResult(); Mockito.doReturn(getPathRestOp).when(abfsClient).getPathStatus( Mockito.nullable(String.class), Mockito.nullable(Boolean.class), Mockito.nullable(TracingContext.class), Mockito.nullable(ContextEncryptionAdapter.class)); fs.create(nonOverwriteFile, false); } } /** * Test to verify that the client transaction ID is included in the response header * during the creation of a new file in Azure Blob Storage. * * This test ensures that when a new file is created, the Azure Blob FileSystem client * correctly includes the client transaction ID in the response header for the created file. * The test uses a configuration where client transaction ID is enabled and verifies * its presence after the file creation operation. 
* * @throws Exception if any error occurs during test execution */ @Test public void testGetClientTransactionIdAfterCreate() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { assumeRecoveryThroughClientTransactionID(true); final String[] clientTransactionId = new String[1]; AbfsDfsClient abfsDfsClient = mockIngressClientHandler(fs); mockAddClientTransactionIdToHeader(abfsDfsClient, clientTransactionId); final Path nonOverwriteFile = new Path( "/NonOverwriteTest_FileName_" + UUID.randomUUID()); fs.create(nonOverwriteFile, false); final AbfsHttpOperation getPathStatusOp = abfsDfsClient.getPathStatus(nonOverwriteFile.toUri().getPath(), false, getTestTracingContext(fs, true), null).getResult(); Assertions.assertThat( getPathStatusOp.getResponseHeader(X_MS_CLIENT_TRANSACTION_ID)) .describedAs("Client transaction ID should be set during create") .isNotNull(); Assertions.assertThat( getPathStatusOp.getResponseHeader(X_MS_CLIENT_TRANSACTION_ID)) .describedAs("Client transaction ID should be equal to the one set in the header") .isEqualTo(clientTransactionId[0]); } } /** * Test to verify that the client transaction ID is included in the response header * after two consecutive create operations on the same file in Azure Blob Storage. * * This test ensures that even after performing two create operations (with overwrite) * on the same file, the Azure Blob FileSystem client includes the client transaction ID * in the response header for the created file. The test checks for the presence of * the client transaction ID in the response after the second create call. 
* * @throws Exception if any error occurs during test execution */ @Test public void testClientTransactionIdAfterTwoCreateCalls() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { assumeRecoveryThroughClientTransactionID(true); final String[] clientTransactionId = new String[1]; AbfsDfsClient abfsDfsClient = mockIngressClientHandler(fs); mockAddClientTransactionIdToHeader(abfsDfsClient, clientTransactionId); Path testPath = path("testfile"); AzureBlobFileSystemStore.Permissions permissions = new AzureBlobFileSystemStore.Permissions(false, FsPermission.getDefault(), FsPermission.getUMask(fs.getConf())); fs.create(testPath, false); fs.create(testPath, true); final AbfsHttpOperation getPathStatusOp = abfsDfsClient.getPathStatus(testPath.toUri().getPath(), false, getTestTracingContext(fs, true), null).getResult(); Assertions.assertThat( getPathStatusOp.getResponseHeader(X_MS_CLIENT_TRANSACTION_ID)) .describedAs("Client transaction ID should be set during create") .isNotNull(); Assertions.assertThat( getPathStatusOp.getResponseHeader(X_MS_CLIENT_TRANSACTION_ID)) .describedAs("Client transaction ID should be equal to the one set in the header") .isEqualTo(clientTransactionId[0]); } } /** * Test case to simulate a failure scenario during the recovery process while * creating a file in Azure Blob File System. This test verifies that when the * `getPathStatus` method encounters a timeout exception during recovery, it * triggers an appropriate failure response. * * The test mocks the `AbfsDfsClient` to simulate the failure behavior, including * a retry logic. It also verifies that an exception is correctly thrown and the * error message contains the expected details for recovery failure. * * @throws Exception If an error occurs during the test setup or execution. 
*/ @Test public void testFailureInGetPathStatusDuringCreateRecovery() throws Exception { try (AzureBlobFileSystem fs = getFileSystem()) { assumeRecoveryThroughClientTransactionID(true); final String[] clientTransactionId = new String[1]; AbfsDfsClient abfsDfsClient = mockIngressClientHandler(fs); mockAddClientTransactionIdToHeader(abfsDfsClient, clientTransactionId); mockRetriedRequest(abfsDfsClient, new ArrayList<>()); boolean[] flag = new boolean[1]; Mockito.doAnswer(getPathStatus -> { if (!flag[0]) { flag[0] = true; throw new AbfsRestOperationException(HTTP_CLIENT_TIMEOUT, "", "", new Exception()); } return getPathStatus.callRealMethod(); }).when(abfsDfsClient).getPathStatus( Mockito.nullable(String.class), Mockito.nullable(Boolean.class), Mockito.nullable(TracingContext.class), Mockito.nullable(ContextEncryptionAdapter.class)); final Path nonOverwriteFile = new Path( "/NonOverwriteTest_FileName_" + UUID.randomUUID()); String errorMessage = intercept(AbfsDriverException.class, () -> fs.create(nonOverwriteFile, false)).getErrorMessage(); Assertions.assertThat(errorMessage) .describedAs("getPathStatus should fail while recovering") .contains(ERR_CREATE_RECOVERY); } } /** * Test to simulate a successful create operation followed by a connection reset * on the response, triggering a retry. * * This test verifies that the create operation is retried in the event of a * connection reset during the response phase. The test creates a mock * AzureBlobFileSystem and its associated components to simulate the create * operation and the connection reset. It then verifies that the create * operation is retried once before succeeding. * * @throws Exception if an error occurs during the test execution. 
*/ @Test public void testCreateIdempotencyForNonHnsBlob() throws Exception { assumeThat(isAppendBlobEnabled()).as("Not valid for APPEND BLOB").isFalse(); assumeHnsDisabled(); assumeBlobServiceType(); // Create a spy of AzureBlobFileSystem try (AzureBlobFileSystem fs = Mockito.spy( (AzureBlobFileSystem) FileSystem.newInstance(getRawConfiguration()))) { // Create a spy of AzureBlobFileSystemStore AzureBlobFileSystemStore store = Mockito.spy(fs.getAbfsStore()); // Create spies for the client handler and blob client AbfsClientHandler clientHandler = Mockito.spy(store.getClientHandler()); AbfsBlobClient blobClient = Mockito.spy(clientHandler.getBlobClient()); fs.getAbfsStore().setClient(blobClient); fs.getAbfsStore().setClientHandler(clientHandler); // Set up the spies to return the mocked objects Mockito.doReturn(clientHandler).when(store).getClientHandler(); Mockito.doReturn(blobClient).when(clientHandler).getBlobClient(); Mockito.doReturn(blobClient).when(clientHandler).getIngressClient(); AtomicInteger createCount = new AtomicInteger(0); Mockito.doAnswer(answer -> { // Set up the mock for the create operation AbfsClientTestUtil.setMockAbfsRestOperationForCreateOperation(blobClient, (httpOperation) -> { Mockito.doAnswer(invocation -> { // Call the real processResponse method invocation.callRealMethod(); int currentCount = createCount.incrementAndGet(); if (currentCount == 2) { Mockito.when(httpOperation.getStatusCode()) .thenReturn( HTTP_INTERNAL_ERROR); // Status code 500 for Internal Server Error Mockito.when(httpOperation.getStorageErrorMessage()) .thenReturn("CONNECTION_RESET"); // Error message throw new IOException("Connection Reset"); } return null; }).when(httpOperation).processResponse( Mockito.nullable(byte[].class), Mockito.anyInt(), Mockito.anyInt() ); return httpOperation; }); return answer.callRealMethod(); }).when(blobClient).createPath( Mockito.anyString(), Mockito.anyBoolean(), Mockito.anyBoolean(), 
Mockito.any(AzureBlobFileSystemStore.Permissions.class), Mockito.anyBoolean(), Mockito.nullable(String.class), Mockito.any(ContextEncryptionAdapter.class), any(TracingContext.class) ); Path path = new Path("/test/file"); fs.create(path, false); Mockito.verify(blobClient, Mockito.times(1)).createPath( Mockito.anyString(), Mockito.anyBoolean(), Mockito.anyBoolean(), Mockito.any(AzureBlobFileSystemStore.Permissions.class), Mockito.anyBoolean(), Mockito.nullable(String.class), Mockito.any(ContextEncryptionAdapter.class), any(TracingContext.class)); Mockito.verify(blobClient, Mockito.times(2)).createPathRestOp( Mockito.anyString(), Mockito.anyBoolean(), Mockito.anyBoolean(), Mockito.anyBoolean(), Mockito.nullable(String.class), Mockito.any(ContextEncryptionAdapter.class), any(TracingContext.class)); assertIsFile(fs, path); } } /** * Mocks and returns an instance of {@link AbfsDfsClient} for the given AzureBlobFileSystem. * This method sets up the necessary mock behavior for the client handler and ingress client. * * @param fs The {@link AzureBlobFileSystem} instance for which the client handler will be mocked. * @return A mocked {@link AbfsDfsClient} instance associated with the provided file system. */ private AbfsDfsClient mockIngressClientHandler(AzureBlobFileSystem fs) { AzureBlobFileSystemStore store = Mockito.spy(fs.getAbfsStore()); AbfsClientHandler clientHandler = Mockito.spy(store.getClientHandler()); AbfsDfsClient abfsDfsClient = (AbfsDfsClient) Mockito.spy( clientHandler.getClient()); fs.getAbfsStore().setClient(abfsDfsClient); fs.getAbfsStore().setClientHandler(clientHandler); Mockito.doReturn(abfsDfsClient).when(clientHandler).getIngressClient(); return abfsDfsClient; } /** * Mocks the retry behavior for an AbfsDfsClient request. The method intercepts * the Abfs operation and simulates an HTTP conflict (HTTP 409) error on the * first invocation. It creates a mock HTTP operation with a PUT method and * specific status codes and error messages. 
* * @param abfsDfsClient The AbfsDfsClient to mock operations for. * @param headers The list of HTTP headers to which request headers will be added. * * @throws Exception If an error occurs during mock creation or operation execution. */ private void mockRetriedRequest(AbfsDfsClient abfsDfsClient, final List<AbfsHttpHeader> headers) throws Exception { TestAbfsClient.mockAbfsOperationCreation(abfsDfsClient, new MockIntercept<AbfsRestOperation>() { private int count = 0; @Override public void answer(final AbfsRestOperation mockedObj, final InvocationOnMock answer) throws AbfsRestOperationException { if (count == 0) { count = 1; AbfsHttpOperation op = Mockito.mock(AbfsHttpOperation.class); Mockito.doReturn(HTTP_METHOD_PUT).when(op).getMethod(); Mockito.doReturn(EMPTY_STRING).when(op).getStorageErrorMessage(); Mockito.doReturn(true).when(mockedObj).hasResult(); Mockito.doReturn(op).when(mockedObj).getResult(); Mockito.doReturn(HTTP_CONFLICT).when(op).getStatusCode(); headers.addAll(mockedObj.getRequestHeaders()); throw new AbfsRestOperationException(HTTP_CONFLICT, AzureServiceErrorCode.PATH_CONFLICT.getErrorCode(), EMPTY_STRING, null, op); } } }, 0); } }
handles
java
elastic__elasticsearch
modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java
{ "start": 785, "end": 1017 }
class ____ extends Plugin implements ScriptPlugin { @Override public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) { return new ExpressionScriptEngine(); } }
ExpressionPlugin
java
apache__camel
components/camel-schematron/src/main/java/org/apache/camel/component/schematron/SchematronEndpoint.java
{ "start": 5128, "end": 5704 }
class ____, attempting file system {}", path); try { InputStream schRules = FileUtils.openInputStream(new File(path)); rules = TemplatesFactory.newInstance().getTemplates(schRules, transformerFactory); } catch (FileNotFoundException e) { LOG.debug("Schematron rules not found in the file system {}", path); throw classPathException; // Can be more meaningful, for example, xslt compilation error. } } // rules not found in
path
java
redisson__redisson
redisson/src/main/java/org/redisson/client/protocol/BatchCommandData.java
{ "start": 1077, "end": 2662 }
class ____<T, R> extends CommandData<T, R> implements Comparable<BatchCommandData<T, R>> { private final int index; private final AtomicReference<RedisException> retryError = new AtomicReference<>(); public BatchCommandData(RedisCommand<T> command, Object[] params, int index) { this(new CompletableFuture<>(), StringCodec.INSTANCE, command, params, index); } public BatchCommandData(CompletableFuture<R> promise, Codec codec, RedisCommand<T> command, Object[] params, int index) { super(promise, codec, command, params); this.index = index; } @Override public boolean tryFailure(Throwable cause) { if (retryError.get() != null) { return false; } if (cause instanceof RedisRedirectException || cause instanceof RedisRetryException) { return retryError.compareAndSet(null, (RedisException) cause); } return super.tryFailure(cause); } @Override public boolean isSuccess() { return retryError.get() == null && super.isSuccess(); } @Override public Throwable cause() { if (retryError.get() != null) { return retryError.get(); } return super.cause(); } public void clearError() { retryError.set(null); } @Override public int compareTo(BatchCommandData<T, R> o) { return index - o.index; } public void updateCommand(RedisCommand command) { this.command = command; } public int getIndex() { return index; } }
BatchCommandData
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/basic/WrapperArrayHandlingLegacyTests.java
{ "start": 8756, "end": 9207 }
class ____ { @Id public Integer id; // mapped as VARCHAR char[] primitive; Character[] wrapper; // mapped as CLOB @Lob char[] primitiveClob; @Lob Character[] wrapperClob; // mapped as NVARCHAR @Nationalized char[] primitiveNVarchar; @Nationalized Character[] wrapperNVarchar; // mapped as NCLOB @Lob @Nationalized char[] primitiveNClob; @Lob @Nationalized Character[] wrapperNClob; } }
EntityWithCharArrays
java
google__dagger
dagger-runtime/main/java/dagger/internal/LazyClassKeyMap.java
{ "start": 1579, "end": 2503 }
class ____, therefore no need to use @LazyClassKey annotated // bindings. throw new UnsupportedOperationException( "Maps created with @LazyClassKey do not support usage of keySet(). Consider @ClassKey" + " instead."); } @Override public Collection<V> values() { return delegate.values(); } @Override public boolean isEmpty() { return delegate.isEmpty(); } @Override public boolean containsKey(@Nullable Object key) { if (!(key instanceof Class)) { throw new IllegalArgumentException("Key must be a class"); } return delegate.containsKey(((Class<?>) key).getName()); } @Override public boolean containsValue(@Nullable Object value) { return delegate.containsValue(value); } @Override public int size() { return delegate.size(); } @Override public Set<Map.Entry<Class<?>, V>> entrySet() { // This method will load all
keys
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationBaseProtocol.java
{ "start": 9926, "end": 11168 }
interface ____ by clients to get a report of Containers for an * application attempt from the <code>ResourceManager</code> or * <code>ApplicationHistoryServer</code> * </p> * * <p> * The client, via {@link GetContainersRequest} provides the * {@link ApplicationAttemptId} of the application attempt. * </p> * * <p> * In secure mode,the <code>ResourceManager</code> or * <code>ApplicationHistoryServer</code> verifies access to the method before * accepting the request. * </p> * * <p> * The <code>ResourceManager</code> or <code>ApplicationHistoryServer</code> * responds with a {@link GetContainersResponse} which includes a list of * {@link ContainerReport} for all the containers of a specific application * attempt. * </p> * * @param request * request for a list of container reports of an application attempt. * @return reports on all containers of an application attempt * @throws YarnException exceptions from yarn servers. * @throws IOException io error occur. */ @Public @Unstable @Idempotent public GetContainersResponse getContainers(GetContainersRequest request) throws YarnException, IOException; /** * <p> * The
used
java
apache__spark
launcher/src/main/java/org/apache/spark/launcher/InProcessAppHandle.java
{ "start": 1063, "end": 2776 }
class ____ extends AbstractAppHandle { private static final String THREAD_NAME_FMT = "spark-app-%d: '%s'"; private static final Logger LOG = Logger.getLogger(InProcessAppHandle.class.getName()); private static final AtomicLong THREAD_IDS = new AtomicLong(); // Avoid really long thread names. private static final int MAX_APP_NAME_LEN = 16; private volatile Throwable error; private Thread app; InProcessAppHandle(LauncherServer server) { super(server); } @Override public synchronized void kill() { if (!isDisposed()) { LOG.warning("kill() may leave the underlying app running in in-process mode."); setState(State.KILLED); disconnect(); // Interrupt the thread. This is not guaranteed to kill the app, though. if (app != null) { app.interrupt(); } } } @Override public Optional<Throwable> getError() { return Optional.ofNullable(error); } synchronized void start(String appName, Method main, String[] args) { CommandBuilderUtils.checkState(app == null, "Handle already started."); if (appName.length() > MAX_APP_NAME_LEN) { appName = "..." + appName.substring(appName.length() - MAX_APP_NAME_LEN); } app = new Thread(() -> { try { main.invoke(null, (Object) args); } catch (Throwable t) { if (t instanceof InvocationTargetException) { t = t.getCause(); } LOG.log(Level.WARNING, "Application failed with exception.", t); error = t; setState(State.FAILED); } dispose(); }); app.setName(String.format(THREAD_NAME_FMT, THREAD_IDS.incrementAndGet(), appName)); app.start(); } }
InProcessAppHandle
java
apache__dubbo
dubbo-compatible/src/test/java/org/apache/dubbo/rpc/support/Person.java
{ "start": 901, "end": 1413 }
class ____ implements Serializable { private static final long serialVersionUID = 1L; private String name; private int age; public Person() {} public Person(String name, int age) { this.name = name; this.age = age; } public String getName() { return name; } public void setName(String name) { this.name = name; } public int getAge() { return age; } public void setAge(int age) { this.age = age; } }
Person
java
apache__flink
flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/operators/windowing/EvictingWindowOperatorTest.java
{ "start": 54403, "end": 54798 }
class ____ implements ReduceFunction<Tuple2<String, Integer>> { private static final long serialVersionUID = 1L; @Override public Tuple2<String, Integer> reduce( Tuple2<String, Integer> value1, Tuple2<String, Integer> value2) throws Exception { return new Tuple2<>(value2.f0, value1.f1 + value2.f1); } } private static
SumReducer
java
apache__avro
lang/java/avro/src/main/java/org/apache/avro/Schema.java
{ "start": 27140, "end": 27854 }
class ____ { private final Object s1; private final Object s2; public SeenPair(Object s1, Object s2) { this.s1 = s1; this.s2 = s2; } public boolean equals(Object o) { if (!(o instanceof SeenPair)) return false; return this.s1 == ((SeenPair) o).s1 && this.s2 == ((SeenPair) o).s2; } @Override public int hashCode() { return System.identityHashCode(s1) + System.identityHashCode(s2); } } private static final ThreadLocal<Set<SeenPair>> SEEN_EQUALS = ThreadLocalWithInitial.of(HashSet::new); private static final ThreadLocal<Map<Schema, Schema>> SEEN_HASHCODE = ThreadLocalWithInitial.of(IdentityHashMap::new); private static
SeenPair
java
quarkusio__quarkus
independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/jaxrs/RuntimeDelegateImpl.java
{ "start": 1481, "end": 6019 }
class ____ extends RuntimeDelegate { static final ResponseBuilderFactory factory; static { ResponseBuilderFactory result = new ResponseBuilderFactory() { @Override public Response.ResponseBuilder create() { throw new RuntimeException("Quarkus REST server side components are not installed."); } @Override public int priority() { return 0; } @Override public <T> ResponseBuilder<T> createRestResponse() { throw new RuntimeException("Quarkus REST server side components are not installed."); } }; ServiceLoader<ResponseBuilderFactory> sl = ServiceLoader.load(ResponseBuilderFactory.class, RuntimeDelegateImpl.class.getClassLoader()); for (ResponseBuilderFactory i : sl) { if (result.priority() < i.priority()) { result = i; } } factory = result; } @Override public UriBuilder createUriBuilder() { return new UriBuilderImpl(); } @Override public Response.ResponseBuilder createResponseBuilder() { return factory.create(); } public <T> RestResponse.ResponseBuilder<T> createRestResponseBuilder() { return factory.createRestResponse(); } @Override public Variant.VariantListBuilder createVariantListBuilder() { return new VariantListBuilderImpl(); } @Override public <T> T createEndpoint(Application application, Class<T> endpointType) throws IllegalArgumentException, UnsupportedOperationException { throw new UnsupportedOperationException(); } @Override public <T> HeaderDelegate<T> createHeaderDelegate(Class<T> type) throws IllegalArgumentException { if (type == null) { throw new IllegalArgumentException("type cannot be null"); } if (type.equals(MediaType.class)) { return (HeaderDelegate<T>) MediaTypeHeaderDelegate.INSTANCE; } else if (Date.class.isAssignableFrom(type)) { // for Date, we do subtypes too, because ORM will instantiate java.util.Date as subtypes // and it's extremely likely we get those here, and we still have to generate a valid // date representation for them, rather than Object.toString which will be wrong return (HeaderDelegate<T>) DateDelegate.INSTANCE; } else if 
(type.equals(CacheControl.class)) { return (HeaderDelegate<T>) CacheControlDelegate.INSTANCE; } else if (type.equals(NewCookie.class)) { return (HeaderDelegate<T>) NewCookieHeaderDelegate.INSTANCE; } else if (type.equals(Cookie.class)) { return (HeaderDelegate<T>) CookieHeaderDelegate.INSTANCE; } else if (type.equals(EntityTag.class)) { return (HeaderDelegate<T>) EntityTagDelegate.INSTANCE; } else if (type.equals(Locale.class)) { return (HeaderDelegate<T>) LocaleDelegate.INSTANCE; } else if (type.equals(Link.class)) { return (HeaderDelegate<T>) LinkDelegate.INSTANCE; } else { return (HeaderDelegate<T>) ObjectToStringDelegate.INSTANCE; } } @Override public Link.Builder createLinkBuilder() { return new LinkBuilderImpl(); } @Override public SeBootstrap.Configuration.Builder createConfigurationBuilder() { // RR does not implement currently implement the bootstrapping API throw new UnsupportedOperationException("Pending implementation"); } @Override public CompletionStage<SeBootstrap.Instance> bootstrap(Application application, SeBootstrap.Configuration configuration) { // RR does not implement currently implement the bootstrapping API throw new UnsupportedOperationException("Pending implementation"); } @Override public CompletionStage<SeBootstrap.Instance> bootstrap(Class<? extends Application> aClass, SeBootstrap.Configuration configuration) { // RR does not implement currently implement the bootstrapping API throw new UnsupportedOperationException("Pending implementation"); } @Override public EntityPart.Builder createEntityPartBuilder(String s) throws IllegalArgumentException { // TODO: figure out how to implement this throw new UnsupportedOperationException("Pending implementation"); } }
RuntimeDelegateImpl
java
square__retrofit
retrofit/java-test/src/test/java/retrofit2/RequestFactoryTest.java
{ "start": 5997, "end": 6329 }
class ____ { @GET("/") // Call<ResponseBody> method(@Query("maybe") @NonNull Object o) { return null; } } Request request = buildRequest(Example.class, "yep"); assertThat(request.url().toString()).isEqualTo("http://example.com/?maybe=yep"); } @Test public void twoMethodsFail() {
Example
java
apache__camel
dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java
{ "start": 392577, "end": 398178 }
class ____ extends YamlDeserializerBase<ForyDataFormat> { public ForyDataFormatDeserializer() { super(ForyDataFormat.class); } @Override protected ForyDataFormat newInstance() { return new ForyDataFormat(); } @Override protected boolean setProperty(ForyDataFormat target, String propertyKey, String propertyName, Node node) { propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey); switch(propertyKey) { case "allowAutoWiredFory": { String val = asText(node); target.setAllowAutoWiredFory(val); break; } case "id": { String val = asText(node); target.setId(val); break; } case "requireClassRegistration": { String val = asText(node); target.setRequireClassRegistration(val); break; } case "threadSafe": { String val = asText(node); target.setThreadSafe(val); break; } case "unmarshalType": { String val = asText(node); target.setUnmarshalTypeName(val); break; } default: { return false; } } return true; } } @YamlType( nodes = "get", types = org.apache.camel.model.rest.GetDefinition.class, order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1, displayName = "Get", description = "Rest GET command", deprecated = false, properties = { @YamlProperty(name = "apiDocs", type = "boolean", defaultValue = "true", description = "Whether to include or exclude this rest operation in API documentation. The default value is true.", displayName = "Api Docs"), @YamlProperty(name = "bindingMode", type = "enum:off,auto,json,xml,json_xml", defaultValue = "off", description = "Sets the binding mode to use. This option will override what may be configured on a parent level The default value is off", displayName = "Binding Mode"), @YamlProperty(name = "clientRequestValidation", type = "boolean", defaultValue = "false", description = "Whether to enable validation of the client request to check: 1) Content-Type header matches what the Rest DSL consumes; returns HTTP Status 415 if validation error. 
2) Accept header matches what the Rest DSL produces; returns HTTP Status 406 if validation error. 3) Missing required data (query parameters, HTTP headers, body); returns HTTP Status 400 if validation error. 4) Parsing error of the message body (JSon, XML or Auto binding mode must be enabled); returns HTTP Status 400 if validation error.", displayName = "Client Request Validation"), @YamlProperty(name = "clientResponseValidation", type = "boolean", defaultValue = "false", description = "Whether to check what Camel is returning as response to the client: 1) Status-code and Content-Type matches Rest DSL response messages. 2) Check whether expected headers is included according to the Rest DSL repose message headers. 3) If the response body is JSon then check whether its valid JSon. Returns 500 if validation error detected.", displayName = "Client Response Validation"), @YamlProperty(name = "consumes", type = "string", description = "To define the content type what the REST service consumes (accept as input), such as application/xml or application/json. This option will override what may be configured on a parent level", displayName = "Consumes"), @YamlProperty(name = "deprecated", type = "boolean", defaultValue = "false", description = "Marks this rest operation as deprecated in OpenApi documentation.", displayName = "Deprecated"), @YamlProperty(name = "description", type = "string", description = "Sets the description of this node", displayName = "Description"), @YamlProperty(name = "disabled", type = "boolean", defaultValue = "false", description = "Whether to disable this REST service from the route during build time. Once an REST service has been disabled then it cannot be enabled later at runtime.", displayName = "Disabled"), @YamlProperty(name = "enableCORS", type = "boolean", defaultValue = "false", description = "Whether to enable CORS headers in the HTTP response. 
This option will override what may be configured on a parent level The default value is false.", displayName = "Enable CORS"), @YamlProperty(name = "enableNoContentResponse", type = "boolean", defaultValue = "false", description = "Whether to return HTTP 204 with an empty body when a response contains an empty JSON object or XML root object. The default value is false.", displayName = "Enable No Content Response"), @YamlProperty(name = "id", type = "string", description = "Sets the id of this node", displayName = "Id"), @YamlProperty(name = "note", type = "string", description = "Sets the note of this node", displayName = "Note"), @YamlProperty(name = "outType", type = "string", description = "Sets the
ForyDataFormatDeserializer
java
redisson__redisson
redisson-micronaut/redisson-micronaut-20/src/main/java/org/redisson/micronaut/RedissonConfiguration.java
{ "start": 1194, "end": 6222 }
class ____ extends Config { public RedissonConfiguration() { } @Override public SingleServerConfig getSingleServerConfig() { if (isNotDefined()) { return useSingleServer(); } return super.getSingleServerConfig(); } @Override @ConfigurationBuilder("singleServerConfig") protected void setSingleServerConfig(SingleServerConfig singleConnectionConfig) { super.setSingleServerConfig(singleConnectionConfig); } @Override public ClusterServersConfig getClusterServersConfig() { if (isNotDefined()) { return useClusterServers(); } return super.getClusterServersConfig(); } @Override @ConfigurationBuilder(value = "clusterServersConfig", includes = {"nodeAddresses"}) protected void setClusterServersConfig(ClusterServersConfig clusterServersConfig) { super.setClusterServersConfig(clusterServersConfig); } private boolean isNotDefined() { return super.getSingleServerConfig() == null && super.getClusterServersConfig() == null && super.getReplicatedServersConfig() == null && super.getSentinelServersConfig() == null && super.getMasterSlaveServersConfig() == null; } @Override public ReplicatedServersConfig getReplicatedServersConfig() { if (isNotDefined()) { return useReplicatedServers(); } return super.getReplicatedServersConfig(); } @Override @ConfigurationBuilder(value = "replicatedServersConfig", includes = {"nodeAddresses"}) protected void setReplicatedServersConfig(ReplicatedServersConfig replicatedServersConfig) { super.setReplicatedServersConfig(replicatedServersConfig); } @Override public SentinelServersConfig getSentinelServersConfig() { if (isNotDefined()) { return useSentinelServers(); } return super.getSentinelServersConfig(); } @Override @ConfigurationBuilder(value = "sentinelServersConfig", includes = {"sentinelAddresses"}) protected void setSentinelServersConfig(SentinelServersConfig sentinelConnectionConfig) { super.setSentinelServersConfig(sentinelConnectionConfig); } @Override public MasterSlaveServersConfig getMasterSlaveServersConfig() { if (isNotDefined()) { return 
useMasterSlaveServers(); } return super.getMasterSlaveServersConfig(); } @Override @ConfigurationBuilder(value = "masterSlaveServersConfig", includes = {"slaveAddresses"}) protected void setMasterSlaveServersConfig(MasterSlaveServersConfig masterSlaveConnectionConfig) { super.setMasterSlaveServersConfig(masterSlaveConnectionConfig); } @Override @ConfigurationBuilder(value = "codec1") public Config setCodec(Codec codec) { return super.setCodec(codec); } public Config setCodec(String className) { try { Codec codec = (Codec) Class.forName(className).getDeclaredConstructor().newInstance(); return super.setCodec(codec); } catch (Exception e) { throw new IllegalArgumentException(e); } } @Override @ConfigurationBuilder(value = "nettyHook1") public Config setNettyHook(NettyHook nettyHook) { return super.setNettyHook(nettyHook); } public Config setNettyHook(String className) { try { NettyHook nettyHook = (NettyHook) Class.forName(className).getDeclaredConstructor().newInstance(); return super.setNettyHook(nettyHook); } catch (Exception e) { throw new IllegalArgumentException(e); } } @Override @ConfigurationBuilder(value = "addressResolverGroupFactory1") public Config setAddressResolverGroupFactory(AddressResolverGroupFactory addressResolverGroupFactory) { return super.setAddressResolverGroupFactory(addressResolverGroupFactory); } public Config setAddressResolverGroupFactory(String className) { try { AddressResolverGroupFactory value = (AddressResolverGroupFactory) Class.forName(className).getDeclaredConstructor().newInstance(); return super.setAddressResolverGroupFactory(value); } catch (Exception e) { throw new IllegalArgumentException(e); } } @Override @ConfigurationBuilder(value = "connectionListener1") public Config setConnectionListener(ConnectionListener connectionListener) { return super.setConnectionListener(connectionListener); } public Config setConnectionListener(String className) { try { ConnectionListener connectionListener = (ConnectionListener) 
Class.forName(className).getDeclaredConstructor().newInstance(); return super.setConnectionListener(connectionListener); } catch (Exception e) { throw new IllegalArgumentException(e); } } }
RedissonConfiguration
java
spring-projects__spring-framework
spring-core/src/test/java/org/springframework/core/convert/TypeDescriptorTests.java
{ "start": 40364, "end": 40693 }
class ____ implements GenericType<Integer> { @Override public Integer getProperty() { return null; } @Override public void setProperty(Integer t) { } @Override public List<Integer> getListProperty() { return null; } @Override public void setListProperty(List<Integer> t) { } } public
IntegerType
java
netty__netty
codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketFrameMaskGenerator.java
{ "start": 845, "end": 1045 }
interface ____ { /** * Return the next mask that is used to mask the payload of the {@link WebSocketFrame}. * * @return mask. */ int nextMask(); }
WebSocketFrameMaskGenerator
java
apache__camel
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/DigitalOceanEndpointBuilderFactory.java
{ "start": 1452, "end": 1587 }
interface ____ { /** * Builder for endpoint for the DigitalOcean component. */ public
DigitalOceanEndpointBuilderFactory
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/sql/db2/DB2TruncateTest5.java
{ "start": 1037, "end": 2767 }
class ____ extends DB2Test { public void test_0() throws Exception { String sql = "TRUNCATE TABLE INVENTORY REUSE STORAGE IGNORE DELETE TRIGGERS CONTINUE IDENTITY IMMEDIATE"; DB2StatementParser parser = new DB2StatementParser(sql); List<SQLStatement> statementList = parser.parseStatementList(); SQLStatement stmt = statementList.get(0); print(statementList); assertEquals(1, statementList.size()); DB2SchemaStatVisitor visitor = new DB2SchemaStatVisitor(); stmt.accept(visitor); // System.out.println("Tables : " + visitor.getTables()); // System.out.println("fields : " + visitor.getColumns()); // System.out.println("coditions : " + visitor.getConditions()); // System.out.println("orderBy : " + visitor.getOrderByColumns()); assertEquals(1, visitor.getTables().size()); assertEquals(0, visitor.getColumns().size()); assertEquals(0, visitor.getConditions().size()); assertTrue(visitor.getTables().containsKey(new TableStat.Name("INVENTORY"))); // assertTrue(visitor.getColumns().contains(new Column("A", "F_0201"))); // assertTrue(visitor.getColumns().contains(new Column("mytable", "first_name"))); // assertTrue(visitor.getColumns().contains(new Column("mytable", "full_name"))); assertEquals("TRUNCATE TABLE INVENTORY REUSE STORAGE IGNORE DELETE TRIGGERS IMMEDIATE", // SQLUtils.toSQLString(stmt, JdbcConstants.DB2)); assertEquals("truncate table INVENTORY reuse storage ignore delete triggers immediate", // SQLUtils.toSQLString(stmt, JdbcConstants.DB2, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION)); } }
DB2TruncateTest5
java
hibernate__hibernate-orm
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/query/InheritanceAssociationToOneInnerJoinTest.java
{ "start": 1338, "end": 4737 }
class ____ { @BeforeClassTemplate public void initData(EntityManagerFactoryScope scope) { scope.inTransaction( em -> { final EntityC c = new EntityC(); c.setId( 1 ); c.setFoo( "bar" ); em.persist( c ); final EntityD d = new EntityD(); d.setId( 1 ); d.setFoo( "bar" ); em.persist( d ); final EntityB b1 = new EntityB(); b1.setId( 1 ); b1.setName( "b1" ); b1.setRelationToC( c ); b1.setRelationToD( d ); em.persist( b1 ); final EntityB b2 = new EntityB(); b2.setId( 2 ); b2.setName( "b2" ); b2.setRelationToC( c ); b2.setRelationToD( d ); em.persist( b2 ); } ); } @Test public void testAuditQueryWithJoinedInheritanceUsingWithSemanticsManyToOne(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> { List results = AuditReaderFactory.get( em ).createQuery().forEntitiesAtRevision( EntityB.class, 1 ) .add( disjunction() .add( property( "name" ).like( "b1" ) ) .add( property( "name" ).like( "b2" ) ) ) .traverseRelation( "relationToC", JoinType.INNER ) .add( property( "foo" ).like( "bar" ) ) .getResultList(); assertEquals( 2, results.size() ); } ); } @Test public void testAuditQueryWithJoinedInheritanceUsingWithSemanticsOneToOne(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> { List results = AuditReaderFactory.get( em ).createQuery().forEntitiesAtRevision( EntityB.class, 1 ) .add( disjunction() .add( property( "name" ).like( "b1" ) ) .add( property( "name" ).like( "b2" ) ) ) .traverseRelation( "relationToD", JoinType.INNER ) .add( property( "foo" ).like( "bar" ) ) .getResultList(); assertEquals( 2, results.size() ); } ); } @Test public void testAuditQueryWithJoinedInheritanceUsingWithSemanticsToOne(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> { List results = AuditReaderFactory.get( em ).createQuery().forEntitiesAtRevision( EntityB.class, 1 ) .add( disjunction() .add( property( "name" ).like( "b1" ) ) .add( property( "name" ).like( "b2" ) ) ) .traverseRelation( "relationToC", JoinType.INNER ) .add( property( "foo" ).like( "bar" ) ) 
.up() .traverseRelation( "relationToD", JoinType.INNER ) .add( property( "foo" ).like( "bar" ) ) .getResultList(); assertEquals( 2, results.size() ); } ); } @Test public void testAuditQueryWithJoinedInheritanceSubclassPropertyProjectionWithRelationTraversal(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> { // HHH-11383 // This test was requested by the reporter so that we have a test that shows Hibernate is // automatically adding "INNER JOIN EntityA_AUD" despite the fact whether the query uses // the traverseRelation API or not. This test makes sure that if the SQL generation is // changed in the future, Envers would properly fail if so. List results = AuditReaderFactory.get( em ).createQuery().forEntitiesAtRevision( EntityB.class, 1 ) .addProjection( property( "name" ) ) .traverseRelation( "relationToC", JoinType.INNER ) .add( property( "foo" ).like( "bar" ) ) .getResultList(); assertEquals( 2, results.size() ); } ); } @Entity(name = "EntityA") @Audited @Inheritance(strategy = InheritanceType.JOINED) public static
InheritanceAssociationToOneInnerJoinTest
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
{ "start": 1539, "end": 2221 }
class ____ to handle is that of symlink traversal. * <br> * An example of such an attack is: * <ol> * <li> Malicious user removes his task's syslog file, and puts a link to the * jobToken file of a target user.</li> * <li> Malicious user tries to open the syslog file via the servlet on the * tasktracker.</li> * <li> The tasktracker is unaware of the symlink, and simply streams the contents * of the jobToken file. The malicious user can now access potentially sensitive * map outputs, etc. of the target user's job.</li> * </ol> * A similar attack is possible involving task log truncation, but in that case * due to an insecure write to a file. * <br> */ public
tries
java
google__truth
core/src/main/java/com/google/common/truth/Correspondence.java
{ "start": 2778, "end": 3063 }
class ____ also provide functionality to format the difference * between values which do not correspond. This results in failure messages including formatted * diffs between expected and actual value, where possible. * * <p>The recommended approach for creating an instance of this
can
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/aot/nativex/substitution/Target_Introspector.java
{ "start": 1246, "end": 1782 }
class ____ { @Substitute private static Class<?> findCustomizerClass(Class<?> type) { String name = type.getName() + "Customizer"; try { type = Target_ClassFinder.findClass(name, type.getClassLoader()); if (Customizer.class.isAssignableFrom(type)) { Class<?> c = type; do { c = c.getSuperclass(); if (c.getName().equals("java.awt.Component")) { return type; } } while (!c.getName().equals("java.lang.Object")); } } catch (Exception ignored) { } return null; } }
Target_Introspector
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhance/internal/bytebuddy/DirtyCheckingWithEmbeddableExtendingMappedSuperclassTest.java
{ "start": 1890, "end": 2086 }
class ____ { private String name; public String getName() { return name; } public void setName(String name) { this.name = name; } } @Embeddable public static
MyAbstractEmbeddable
java
apache__camel
components/camel-braintree/src/test/java/org/apache/camel/component/braintree/BraintreeComponentTest.java
{ "start": 1271, "end": 3064 }
class ____ { @Test public void testLoggerConfiguration() { BraintreeConfiguration configuration = createBraintreeConfiguration(); configuration.setHttpLogLevel(Level.WARNING.getName()); BraintreeComponent component = new BraintreeComponent(); component.createEndpoint("braintree:clientToken", "generate", BraintreeApiName.CLIENT_TOKEN, configuration); BraintreeGateway braintreeGateway = component.getGateway(configuration); Logger logger = braintreeGateway.getConfiguration().getLogger(); assertEquals(Level.WARNING, logger.getLevel()); assertEquals(1, logger.getHandlers().length); assertTrue(logger.getHandlers()[0] instanceof BraintreeLogHandler); } @Test public void testBraintreeLogHandlerDisabled() { BraintreeConfiguration configuration = createBraintreeConfiguration(); BraintreeComponent component = new BraintreeComponent(); component.setConfiguration(configuration); component.getConfiguration().setLogHandlerEnabled(false); component.createEndpoint("", "", BraintreeApiName.CLIENT_TOKEN, configuration); BraintreeGateway braintreeGateway = component.getGateway(configuration); Logger logger = braintreeGateway.getConfiguration().getLogger(); assertEquals(0, logger.getHandlers().length); } private BraintreeConfiguration createBraintreeConfiguration() { BraintreeConfiguration configuration = new BraintreeConfiguration(); configuration.setEnvironment("SANDBOX"); configuration.setMerchantId("dummy-merchant-id"); configuration.setPublicKey("dummy-public-key"); configuration.setPrivateKey("dummy-private-key"); return configuration; } }
BraintreeComponentTest
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
{ "start": 2529, "end": 5327 }
enum ____ { NAMENODE("-namenodes", "gets list of namenodes in the cluster."), SECONDARY("-secondaryNameNodes", "gets list of secondary namenodes in the cluster."), BACKUP("-backupNodes", "gets list of backup nodes in the cluster."), JOURNALNODE("-journalNodes", "gets list of journal nodes in the cluster."), INCLUDE_FILE("-includeFile", "gets the include file path that defines the datanodes " + "that can join the cluster."), EXCLUDE_FILE("-excludeFile", "gets the exclude file path that defines the datanodes " + "that need to decommissioned."), NNRPCADDRESSES("-nnRpcAddresses", "gets the namenode rpc addresses"), CONFKEY("-confKey [key]", "gets a specific key from the configuration"); private static final Map<String, CommandHandler> map; static { map = new HashMap<String, CommandHandler>(); map.put(StringUtils.toLowerCase(NAMENODE.getName()), new NameNodesCommandHandler()); map.put(StringUtils.toLowerCase(SECONDARY.getName()), new SecondaryNameNodesCommandHandler()); map.put(StringUtils.toLowerCase(BACKUP.getName()), new BackupNodesCommandHandler()); map.put(StringUtils.toLowerCase(JOURNALNODE.getName()), new JournalNodeCommandHandler()); map.put(StringUtils.toLowerCase(INCLUDE_FILE.getName()), new CommandHandler(DFSConfigKeys.DFS_HOSTS)); map.put(StringUtils.toLowerCase(EXCLUDE_FILE.getName()), new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE)); map.put(StringUtils.toLowerCase(NNRPCADDRESSES.getName()), new NNRpcAddressesCommandHandler()); map.put(StringUtils.toLowerCase(CONFKEY.getName()), new PrintConfKeyCommandHandler()); } private final String cmd; private final String description; Command(String cmd, String description) { this.cmd = cmd; this.description = description; } public String getName() { return cmd.split(" ")[0]; } public String getUsage() { return cmd; } public String getDescription() { return description; } public static CommandHandler getHandler(String cmd) { return map.get(StringUtils.toLowerCase(cmd)); } } static final String USAGE; static { 
HdfsConfiguration.init(); /* Initialize USAGE based on Command values */ StringBuilder usage = new StringBuilder(DESCRIPTION); usage.append("\nhadoop getconf \n"); for (Command cmd : Command.values()) { usage.append("\t[" + cmd.getUsage() + "]\t\t\t" + cmd.getDescription() + "\n"); } USAGE = usage.toString(); } /** * Handler to return value for key corresponding to the {@link Command} */ static
Command
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/error/ShouldHaveSameHashCode_create_Test.java
{ "start": 1909, "end": 2474 }
class ____ { private int code; public FixedHashCode(int code) { this.code = code; } @Override public int hashCode() { return code; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; FixedHashCode other = (FixedHashCode) obj; return code == other.code; } @Override public String toString() { return "FixedHashCode[code=%s]".formatted(code); } } }
FixedHashCode
java
apache__flink
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/table/DataGeneratorConnectorITCase.java
{ "start": 1515, "end": 5054 }
class ____ extends BatchTestBase { private static final String TABLE = "CREATE TABLE datagen_t (\n" + " f0 CHAR(1),\n" + " f1 VARCHAR(10),\n" + " f2 STRING,\n" + " f3 BOOLEAN,\n" + " f4 DECIMAL(32,2),\n" + " f5 TINYINT,\n" + " f6 SMALLINT,\n" + " f7 INT,\n" + " f8 BIGINT,\n" + " f9 FLOAT,\n" + " f10 DOUBLE,\n" + " f11 DATE,\n" + " f12 TIME,\n" + " f13 TIMESTAMP(3),\n" + " f14 TIMESTAMP WITH LOCAL TIME ZONE,\n" + " f15 INT ARRAY,\n" + " f16 MAP<STRING, DATE>,\n" + " f17 DECIMAL(32,2) MULTISET,\n" + " f18 ROW<a BIGINT, b TIME, c ROW<d TIMESTAMP>>\n" + ") WITH (" + " 'connector' = 'datagen',\n" + " 'number-of-rows' = '10'\n" + ")"; @Test void testTypes() throws Exception { tEnv().executeSql(TABLE); List<Row> results = new ArrayList<>(); try (CloseableIterator<Row> iter = tEnv().executeSql("select * from datagen_t").collect()) { while (iter.hasNext()) { results.add(iter.next()); } } assertThat(results).as("Unexpected number of results").hasSize(10); } @Test void testLimitPushDown() { final TestingTableEnvironment env = TestingTableEnvironment.create( EnvironmentSettings.newInstance().inStreamingMode().build(), null, TableConfig.getDefault()); env.executeSql( "CREATE TABLE datagen_t (\n" + " f0 CHAR(1)\n" + ") WITH (" + " 'connector' = 'datagen'" + ")"); final Table table = env.sqlQuery("select * from datagen_t limit 5"); assertThat(table.explain()) .contains( "table=[[default_catalog, default_database, datagen_t, limit=[5]]], fields=[f0]"); assertThat(CollectionUtil.iteratorToList(table.execute().collect())) .as("Unexpected number of results") .hasSize(5); } @Test void testWithParallelism() { final TestingTableEnvironment env = TestingTableEnvironment.create( EnvironmentSettings.newInstance().inStreamingMode().build(), null, TableConfig.getDefault()); env.executeSql( "CREATE TABLE datagen_t (\n" + " f0 CHAR(1)\n" + ") WITH (" + " 'connector' = 'datagen'," + " 'scan.parallelism' = '2'" + ")"); final Table table = env.sqlQuery("select * from datagen_t"); final String 
explain = table.explain(ExplainDetail.JSON_EXECUTION_PLAN); final String expectedPhysicalExecutionPlanFragment = "table=[[default_catalog, default_database, datagen_t]], fields=[f0])\",\n" + " \"parallelism\" : 2"; assertThat(explain).contains(expectedPhysicalExecutionPlanFragment); } }
DataGeneratorConnectorITCase
java
apache__hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestKillAMPreemptionPolicy.java
{ "start": 2402, "end": 6255 }
class ____ { private final RecordFactory recordFactory = RecordFactoryProvider .getRecordFactory(null); @SuppressWarnings("unchecked") @Test public void testKillAMPreemptPolicy() { ApplicationId appId = ApplicationId.newInstance(123456789, 1); ContainerId container = ContainerId.newContainerId( ApplicationAttemptId.newInstance(appId, 1), 1); AMPreemptionPolicy.Context mPctxt = mock(AMPreemptionPolicy.Context.class); when(mPctxt.getTaskAttempt(any(ContainerId.class))).thenReturn( MRBuilderUtils.newTaskAttemptId(MRBuilderUtils.newTaskId( MRBuilderUtils.newJobId(appId, 1), 1, TaskType.MAP), 0)); List<Container> p = new ArrayList<Container>(); p.add(Container.newInstance(container, null, null, null, null, null)); when(mPctxt.getContainers(any(TaskType.class))).thenReturn(p); KillAMPreemptionPolicy policy = new KillAMPreemptionPolicy(); // strictContract is null & contract is null RunningAppContext mActxt = getRunningAppContext(); policy.init(mActxt); PreemptionMessage pM = getPreemptionMessage(false, false, container); policy.preempt(mPctxt, pM); verify(mActxt.getEventHandler(), times(0)).handle( any(TaskAttemptEvent.class)); verify(mActxt.getEventHandler(), times(0)).handle( any(JobCounterUpdateEvent.class)); // strictContract is not null & contract is null mActxt = getRunningAppContext(); policy.init(mActxt); pM = getPreemptionMessage(true, false, container); policy.preempt(mPctxt, pM); verify(mActxt.getEventHandler(), times(1)).handle( any(TaskAttemptEvent.class)); verify(mActxt.getEventHandler(), times(1)).handle( any(JobCounterUpdateEvent.class)); // strictContract is null & contract is not null mActxt = getRunningAppContext(); policy.init(mActxt); pM = getPreemptionMessage(false, true, container); policy.preempt(mPctxt, pM); verify(mActxt.getEventHandler(), times(1)).handle( any(TaskAttemptEvent.class)); verify(mActxt.getEventHandler(), times(1)).handle( any(JobCounterUpdateEvent.class)); // strictContract is not null & contract is not null mActxt = 
getRunningAppContext(); policy.init(mActxt); pM = getPreemptionMessage(true, true, container); policy.preempt(mPctxt, pM); verify(mActxt.getEventHandler(), times(2)).handle( any(TaskAttemptEvent.class)); verify(mActxt.getEventHandler(), times(2)).handle( any(JobCounterUpdateEvent.class)); } private RunningAppContext getRunningAppContext() { RunningAppContext mActxt = mock(RunningAppContext.class); @SuppressWarnings("unchecked") EventHandler<Event> eventHandler = mock(EventHandler.class); when(mActxt.getEventHandler()).thenReturn(eventHandler); return mActxt; } private PreemptionMessage getPreemptionMessage(boolean strictContract, boolean contract, final ContainerId container) { PreemptionMessage preemptionMessage = recordFactory .newRecordInstance(PreemptionMessage.class); Set<PreemptionContainer> cntrs = new HashSet<PreemptionContainer>(); PreemptionContainer preemptContainer = recordFactory .newRecordInstance(PreemptionContainer.class); preemptContainer.setId(container); cntrs.add(preemptContainer); if (strictContract) { StrictPreemptionContract set = recordFactory .newRecordInstance(StrictPreemptionContract.class); set.setContainers(cntrs); preemptionMessage.setStrictContract(set); } if (contract) { PreemptionContract preemptContract = recordFactory .newRecordInstance(PreemptionContract.class); preemptContract.setContainers(cntrs); preemptionMessage.setContract(preemptContract); } return preemptionMessage; } }
TestKillAMPreemptionPolicy
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerNode.java
{ "start": 2602, "end": 18921 }
class ____ { private static final Logger LOG = LoggerFactory.getLogger(SchedulerNode.class); private Resource unallocatedResource = Resource.newInstance(0, 0); private Resource allocatedResource = Resource.newInstance(0, 0); private Resource totalResource; private RMContainer reservedContainer; private volatile int numContainers; private volatile ResourceUtilization containersUtilization = ResourceUtilization.newInstance(0, 0, 0f); private volatile ResourceUtilization nodeUtilization = ResourceUtilization.newInstance(0, 0, 0f); /** Time stamp for overcommitted resources to time out. */ private long overcommitTimeout = -1; /* set of containers that are allocated containers */ private final Map<ContainerId, ContainerInfo> launchedContainers = new HashMap<>(); private final RMNode rmNode; private final String nodeName; private final RMContext rmContext; private volatile Set<String> labels = null; private volatile Set<NodeAttribute> nodeAttributes = null; // Last updated time private volatile long lastHeartbeatMonotonicTime; public SchedulerNode(RMNode node, boolean usePortForNodeName, Set<String> labels) { this.rmNode = node; this.rmContext = node.getRMContext(); this.unallocatedResource = Resources.clone(node.getTotalCapability()); this.totalResource = Resources.clone(node.getTotalCapability()); if (usePortForNodeName) { nodeName = rmNode.getHostName() + ":" + node.getNodeID().getPort(); } else { nodeName = rmNode.getHostName(); } this.labels = ImmutableSet.copyOf(labels); this.lastHeartbeatMonotonicTime = Time.monotonicNow(); } public SchedulerNode(RMNode node, boolean usePortForNodeName) { this(node, usePortForNodeName, CommonNodeLabelsManager.EMPTY_STRING_SET); } public RMNode getRMNode() { return this.rmNode; } /** * Set total resources on the node. * @param resource Total resources on the node. 
*/ public synchronized void updateTotalResource(Resource resource){ this.totalResource = resource; this.unallocatedResource = Resources.subtract(totalResource, this.allocatedResource); } /** * Set the timeout for the node to stop overcommitting the resources. After * this time the scheduler will start killing containers until the resources * are not overcommitted anymore. This may reset a previous timeout. * @param timeOut Time out in milliseconds. */ public synchronized void setOvercommitTimeOut(long timeOut) { if (timeOut >= 0) { if (this.overcommitTimeout != -1) { LOG.debug("The overcommit timeout for {} was already set to {}", getNodeID(), this.overcommitTimeout); } this.overcommitTimeout = Time.now() + timeOut; } } /** * Check if the time out has passed. * @return If the node is overcommitted. */ public synchronized boolean isOvercommitTimedOut() { return this.overcommitTimeout >= 0 && Time.now() >= this.overcommitTimeout; } /** * Check if the node has a time out for overcommit resources. * @return If the node has a time out for overcommit resources. */ public synchronized boolean isOvercommitTimeOutSet() { return this.overcommitTimeout >= 0; } /** * Get the ID of the node which contains both its hostname and port. * @return The ID of the node. */ public NodeId getNodeID() { return this.rmNode.getNodeID(); } /** * Get HTTP address for the node. * @return HTTP address for the node. */ public String getHttpAddress() { return this.rmNode.getHttpAddress(); } /** * Get the name of the node for scheduling matching decisions. * <p> * Typically this is the 'hostname' reported by the node, but it could be * configured to be 'hostname:port' reported by the node via the * {@link YarnConfiguration#RM_SCHEDULER_INCLUDE_PORT_IN_NODE_NAME} constant. * The main usecase of this is YARN minicluster to be able to differentiate * node manager instances by their port number. * @return Name of the node for scheduling matching decisions. 
*/ public String getNodeName() { return nodeName; } /** * Get rackname. * @return rackname */ public String getRackName() { return this.rmNode.getRackName(); } /** * The Scheduler has allocated containers on this node to the given * application. * @param rmContainer Allocated container */ public void allocateContainer(RMContainer rmContainer) { allocateContainer(rmContainer, false); } /** * The Scheduler has allocated containers on this node to the given * application. * @param rmContainer Allocated container * @param launchedOnNode True if the container has been launched */ protected synchronized void allocateContainer(RMContainer rmContainer, boolean launchedOnNode) { Container container = rmContainer.getContainer(); if (rmContainer.getExecutionType() == ExecutionType.GUARANTEED) { deductUnallocatedResource(container.getResource()); ++numContainers; } launchedContainers.put(container.getId(), new ContainerInfo(rmContainer, launchedOnNode)); } /** * Get unallocated resources on the node. * @return Unallocated resources on the node */ public synchronized Resource getUnallocatedResource() { return this.unallocatedResource; } /** * Get allocated resources on the node. * @return Allocated resources on the node */ public synchronized Resource getAllocatedResource() { return this.allocatedResource; } /** * Get total resources on the node. * @return Total resources on the node. */ public synchronized Resource getTotalResource() { return this.totalResource; } /** * Check if a container is launched by this node. * * @param containerId containerId. * @return If the container is launched by the node. */ public synchronized boolean isValidContainer(ContainerId containerId) { if (launchedContainers.containsKey(containerId)) { return true; } return false; } /** * Update the resources of the node when releasing a container. * @param container Container to release. 
*/ protected synchronized void updateResourceForReleasedContainer( Container container) { if (container.getExecutionType() == ExecutionType.GUARANTEED) { addUnallocatedResource(container.getResource()); --numContainers; } } /** * Release an allocated container on this node. * @param containerId ID of container to be released. * @param releasedByNode whether the release originates from a node update. */ public synchronized void releaseContainer(ContainerId containerId, boolean releasedByNode) { ContainerInfo info = launchedContainers.get(containerId); if (info == null) { return; } if (!releasedByNode && info.launchedOnNode) { // wait until node reports container has completed return; } launchedContainers.remove(containerId); Container container = info.container.getContainer(); // We remove allocation tags when a container is actually // released on NM. This is to avoid running into situation // when AM releases a container and NM has some delay to // actually release it, then the tag can still be visible // at RM so that RM can respect it during scheduling new containers. if (rmContext != null && rmContext.getAllocationTagsManager() != null) { rmContext.getAllocationTagsManager() .removeContainer(container.getNodeId(), container.getId(), container.getAllocationTags()); } updateResourceForReleasedContainer(container); if (LOG.isDebugEnabled()) { LOG.debug("Released container " + container.getId() + " of capacity " + container.getResource() + " on host " + rmNode.getNodeAddress() + ", which currently has " + numContainers + " containers, " + getAllocatedResource() + " used and " + getUnallocatedResource() + " available" + ", release resources=" + true); } } /** * Inform the node that a container has launched. 
* @param containerId ID of the launched container */ public synchronized void containerStarted(ContainerId containerId) { ContainerInfo info = launchedContainers.get(containerId); if (info != null) { info.launchedOnNode = true; } } /** * Add unallocated resources to the node. This is used when unallocating a * container. * @param resource Resources to add. */ private synchronized void addUnallocatedResource(Resource resource) { if (resource == null) { LOG.error("Invalid resource addition of null resource for " + rmNode.getNodeAddress()); return; } Resources.addTo(unallocatedResource, resource); Resources.subtractFrom(allocatedResource, resource); } /** * Deduct unallocated resources from the node. This is used when allocating a * container. * @param resource Resources to deduct. */ @VisibleForTesting public synchronized void deductUnallocatedResource(Resource resource) { if (resource == null) { LOG.error("Invalid deduction of null resource for " + rmNode.getNodeAddress()); return; } Resources.subtractFrom(unallocatedResource, resource); Resources.addTo(allocatedResource, resource); } /** * Reserve container for the attempt on this node. * @param attempt Application attempt asking for the reservation. * @param schedulerKey Priority of the reservation. * @param container Container reserving resources for. */ public abstract void reserveResource(SchedulerApplicationAttempt attempt, SchedulerRequestKey schedulerKey, RMContainer container); /** * Unreserve resources on this node. * @param attempt Application attempt that had done the reservation. */ public abstract void unreserveResource(SchedulerApplicationAttempt attempt); @Override public String toString() { return "host: " + rmNode.getNodeAddress() + " #containers=" + getNumContainers() + " available=" + getUnallocatedResource() + " used=" + getAllocatedResource(); } /** * Get number of active containers on the node. * @return Number of active containers on the node. 
*/ public int getNumContainers() { return numContainers; } /** * Get the containers running on the node. * @return A copy of containers running on the node. */ public synchronized List<RMContainer> getCopiedListOfRunningContainers() { List<RMContainer> result = new ArrayList<>(launchedContainers.size()); for (ContainerInfo info : launchedContainers.values()) { result.add(info.container); } return result; } /** * Get the containers running on the node with AM containers at the end. * @return A copy of running containers with AM containers at the end. */ public synchronized List<RMContainer> getRunningContainersWithAMsAtTheEnd() { LinkedList<RMContainer> result = new LinkedList<>(); for (ContainerInfo info : launchedContainers.values()) { if(info.container.isAMContainer()) { result.addLast(info.container); } else { result.addFirst(info.container); } } return result; } /** * Get the containers running on the node ordered by which to kill first. It * tries to kill AMs last, then GUARANTEED containers, and it kills * OPPORTUNISTIC first. If the same time, it uses the creation time. * @return A copy of the running containers ordered by which to kill first. */ public List<RMContainer> getContainersToKill() { List<RMContainer> result = getLaunchedContainers(); Collections.sort(result, (c1, c2) -> { return new CompareToBuilder() .append(c1.isAMContainer(), c2.isAMContainer()) .append(c2.getExecutionType(), c1.getExecutionType()) // reversed .append(c2.getCreationTime(), c1.getCreationTime()) // reversed .toComparison(); }); return result; } /** * Get the launched containers in the node. * @return List of launched containers. */ protected synchronized List<RMContainer> getLaunchedContainers() { List<RMContainer> result = new ArrayList<>(); for (ContainerInfo info : launchedContainers.values()) { result.add(info.container); } return result; } /** * Get the container for the specified container ID. 
* @param containerId The container ID * @return The container for the specified container ID */ protected synchronized RMContainer getContainer(ContainerId containerId) { RMContainer container = null; ContainerInfo info = launchedContainers.get(containerId); if (info != null) { container = info.container; } return container; } /** * Get the reserved container in the node. * @return Reserved container in the node. */ public synchronized RMContainer getReservedContainer() { return reservedContainer; } /** * Set the reserved container in the node. * @param reservedContainer Reserved container in the node. */ public synchronized void setReservedContainer(RMContainer reservedContainer) { this.reservedContainer = reservedContainer; } /** * Recover a container. * @param rmContainer Container to recover. */ public synchronized void recoverContainer(RMContainer rmContainer) { if (rmContainer.getState().equals(RMContainerState.COMPLETED)) { return; } allocateContainer(rmContainer, true); } /** * Get the labels for the node. * @return Set of labels for the node. */ public Set<String> getLabels() { return labels; } /** * Update the labels for the node. * @param labels Set of labels for the node. */ public void updateLabels(Set<String> labels) { this.labels = labels; } /** * Get partition of which the node belongs to, if node-labels of this node is * empty or null, it belongs to NO_LABEL partition. And since we only support * one partition for each node (YARN-2694), first label will be its partition. * @return Partition for the node. */ public String getPartition() { if (this.labels == null || this.labels.isEmpty()) { return RMNodeLabelsManager.NO_LABEL; } else { return this.labels.iterator().next(); } } /** * Set the resource utilization of the containers in the node. * @param containersUtilization Resource utilization of the containers. 
*/ public void setAggregatedContainersUtilization( ResourceUtilization containersUtilization) { this.containersUtilization = containersUtilization; } /** * Get the resource utilization of the containers in the node. * @return Resource utilization of the containers. */ public ResourceUtilization getAggregatedContainersUtilization() { return this.containersUtilization; } /** * Set the resource utilization of the node. This includes the containers. * @param nodeUtilization Resource utilization of the node. */ public void setNodeUtilization(ResourceUtilization nodeUtilization) { this.nodeUtilization = nodeUtilization; } /** * Get the resource utilization of the node. * @return Resource utilization of the node. */ public ResourceUtilization getNodeUtilization() { return this.nodeUtilization; } public long getLastHeartbeatMonotonicTime() { return lastHeartbeatMonotonicTime; } /** * This will be called for each node heartbeat. */ public void notifyNodeUpdate() { this.lastHeartbeatMonotonicTime = Time.monotonicNow(); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (!(o instanceof SchedulerNode)) { return false; } SchedulerNode that = (SchedulerNode) o; return getNodeID().equals(that.getNodeID()); } @Override public int hashCode() { return getNodeID().hashCode(); } public Set<NodeAttribute> getNodeAttributes() { return nodeAttributes; } public void updateNodeAttributes(Set<NodeAttribute> attributes) { this.nodeAttributes = attributes; } private static
SchedulerNode
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/cglib/beans/BulkBean.java
{ "start": 2130, "end": 4085 }
class ____ extends AbstractClassGenerator { private static final Source SOURCE = new Source(BulkBean.class.getName()); private Class target; private String[] getters; private String[] setters; private Class[] types; public Generator() { super(SOURCE); } public void setTarget(Class target) { this.target = target; // SPRING PATCH BEGIN setContextClass(target); // SPRING PATCH END } public void setGetters(String[] getters) { this.getters = getters; } public void setSetters(String[] setters) { this.setters = setters; } public void setTypes(Class[] types) { this.types = types; } @Override protected ClassLoader getDefaultClassLoader() { return target.getClassLoader(); } @Override protected ProtectionDomain getProtectionDomain() { return ReflectUtils.getProtectionDomain(target); } public BulkBean create() { setNamePrefix(target.getName()); String targetClassName = target.getName(); String[] typeClassNames = ReflectUtils.getNames(types); Object key = KEY_FACTORY.newInstance(targetClassName, getters, setters, typeClassNames); return (BulkBean)super.create(key); } @Override public void generateClass(ClassVisitor v) throws Exception { new BulkBeanEmitter(v, getClassName(), target, getters, setters, types); } @Override protected Object firstInstance(Class type) { BulkBean instance = (BulkBean)ReflectUtils.newInstance(type); instance.target = target; int length = getters.length; instance.getters = new String[length]; System.arraycopy(getters, 0, instance.getters, 0, length); instance.setters = new String[length]; System.arraycopy(setters, 0, instance.setters, 0, length); instance.types = new Class[types.length]; System.arraycopy(types, 0, instance.types, 0, types.length); return instance; } @Override protected Object nextInstance(Object instance) { return instance; } } }
Generator
java
apache__maven
impl/maven-core/src/main/java/org/apache/maven/eventspy/internal/EventSpyExecutionListener.java
{ "start": 1086, "end": 4006 }
class ____ extends AbstractExecutionListener { private final EventSpyDispatcher dispatcher; private final ExecutionListener delegate; EventSpyExecutionListener(EventSpyDispatcher dispatcher, ExecutionListener delegate) { this.dispatcher = dispatcher; this.delegate = delegate; } @Override public void projectDiscoveryStarted(ExecutionEvent event) { dispatcher.onEvent(event); delegate.projectDiscoveryStarted(event); } @Override public void sessionStarted(ExecutionEvent event) { dispatcher.onEvent(event); delegate.sessionStarted(event); } @Override public void sessionEnded(ExecutionEvent event) { dispatcher.onEvent(event); delegate.sessionEnded(event); } @Override public void projectSkipped(ExecutionEvent event) { dispatcher.onEvent(event); delegate.projectSkipped(event); } @Override public void projectStarted(ExecutionEvent event) { dispatcher.onEvent(event); delegate.projectStarted(event); } @Override public void projectSucceeded(ExecutionEvent event) { dispatcher.onEvent(event); delegate.projectSucceeded(event); } @Override public void projectFailed(ExecutionEvent event) { dispatcher.onEvent(event); delegate.projectFailed(event); } @Override public void forkStarted(ExecutionEvent event) { dispatcher.onEvent(event); delegate.forkStarted(event); } @Override public void forkSucceeded(ExecutionEvent event) { dispatcher.onEvent(event); delegate.forkSucceeded(event); } @Override public void forkFailed(ExecutionEvent event) { dispatcher.onEvent(event); delegate.forkFailed(event); } @Override public void mojoSkipped(ExecutionEvent event) { dispatcher.onEvent(event); delegate.mojoSkipped(event); } @Override public void mojoStarted(ExecutionEvent event) { dispatcher.onEvent(event); delegate.mojoStarted(event); } @Override public void mojoSucceeded(ExecutionEvent event) { dispatcher.onEvent(event); delegate.mojoSucceeded(event); } @Override public void mojoFailed(ExecutionEvent event) { dispatcher.onEvent(event); delegate.mojoFailed(event); } @Override public void 
forkedProjectStarted(ExecutionEvent event) { dispatcher.onEvent(event); delegate.forkedProjectStarted(event); } @Override public void forkedProjectSucceeded(ExecutionEvent event) { dispatcher.onEvent(event); delegate.forkedProjectSucceeded(event); } @Override public void forkedProjectFailed(ExecutionEvent event) { dispatcher.onEvent(event); delegate.forkedProjectFailed(event); } }
EventSpyExecutionListener
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleFallibleArrayState.java
{ "start": 1509, "end": 5047 }
class ____ extends AbstractFallibleArrayState implements GroupingAggregatorState { private final double init; private DoubleArray values; DoubleFallibleArrayState(BigArrays bigArrays, double init) { super(bigArrays); this.values = bigArrays.newDoubleArray(1, false); this.values.set(0, init); this.init = init; } double get(int groupId) { return values.get(groupId); } double getOrDefault(int groupId) { return groupId < values.size() ? values.get(groupId) : init; } void set(int groupId, double value) { ensureCapacity(groupId); values.set(groupId, value); trackGroupId(groupId); } Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds() && false == anyFailure()) { try (var builder = driverContext.blockFactory().newDoubleVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { builder.appendDouble(i, values.get(selected.getInt(i))); } return builder.build().asBlock(); } } try (DoubleBlock.Builder builder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); if (hasValue(group) && !hasFailed(group)) { builder.appendDouble(values.get(group)); } else { builder.appendNull(); } } return builder.build(); } } private void ensureCapacity(int groupId) { if (groupId >= values.size()) { long prevSize = values.size(); values = bigArrays.grow(values, groupId + 1); values.fill(prevSize, values.size(), init); } } /** Extracts an intermediate view of the contents of this state. 
*/ @Override public void toIntermediate( Block[] blocks, int offset, IntVector selected, org.elasticsearch.compute.operator.DriverContext driverContext ) { assert blocks.length >= offset + 3; try ( var valuesBuilder = driverContext.blockFactory().newDoubleBlockBuilder(selected.getPositionCount()); var hasValueBuilder = driverContext.blockFactory().newBooleanVectorFixedBuilder(selected.getPositionCount()); var hasFailedBuilder = driverContext.blockFactory().newBooleanVectorFixedBuilder(selected.getPositionCount()) ) { for (int i = 0; i < selected.getPositionCount(); i++) { int group = selected.getInt(i); if (group < values.size()) { valuesBuilder.appendDouble(values.get(group)); } else { valuesBuilder.appendDouble(0); // TODO can we just use null? } hasValueBuilder.appendBoolean(i, hasValue(group)); hasFailedBuilder.appendBoolean(i, hasFailed(group)); } blocks[offset + 0] = valuesBuilder.build(); blocks[offset + 1] = hasValueBuilder.build().asBlock(); blocks[offset + 2] = hasFailedBuilder.build().asBlock(); } } @Override public void close() { Releasables.close(values, super::close); } }
DoubleFallibleArrayState
java
elastic__elasticsearch
x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/StringContainsFunctionPipe.java
{ "start": 694, "end": 3445 }
class ____ extends Pipe { private final Pipe string, substring; private final boolean caseInsensitive; public StringContainsFunctionPipe(Source source, Expression expression, Pipe string, Pipe substring, boolean caseInsensitive) { super(source, expression, Arrays.asList(string, substring)); this.string = string; this.substring = substring; this.caseInsensitive = caseInsensitive; } @Override public final Pipe replaceChildren(List<Pipe> newChildren) { return replaceChildren(newChildren.get(0), newChildren.get(1)); } @Override public final Pipe resolveAttributes(AttributeResolver resolver) { Pipe newString = string.resolveAttributes(resolver); Pipe newSubstring = substring.resolveAttributes(resolver); if (newString == string && newSubstring == substring) { return this; } return replaceChildren(newString, newSubstring); } @Override public boolean supportedByAggsOnlyQuery() { return string.supportedByAggsOnlyQuery() && substring.supportedByAggsOnlyQuery(); } @Override public boolean resolved() { return string.resolved() && substring.resolved(); } protected StringContainsFunctionPipe replaceChildren(Pipe string, Pipe substring) { return new StringContainsFunctionPipe(source(), expression(), string, substring, caseInsensitive); } @Override public final void collectFields(QlSourceBuilder sourceBuilder) { string.collectFields(sourceBuilder); substring.collectFields(sourceBuilder); } @Override protected NodeInfo<StringContainsFunctionPipe> info() { return NodeInfo.create(this, StringContainsFunctionPipe::new, expression(), string, substring, caseInsensitive); } @Override public StringContainsFunctionProcessor asProcessor() { return new StringContainsFunctionProcessor(string.asProcessor(), substring.asProcessor(), caseInsensitive); } public Pipe string() { return string; } public Pipe substring() { return substring; } protected boolean isCaseInsensitive() { return caseInsensitive; } @Override public int hashCode() { return Objects.hash(string(), substring()); } @Override 
public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } StringContainsFunctionPipe other = (StringContainsFunctionPipe) obj; return Objects.equals(string(), other.string()) && Objects.equals(substring(), other.substring()); } }
StringContainsFunctionPipe
java
spring-projects__spring-security
web/src/main/java/org/springframework/security/web/server/csrf/ServerCsrfTokenRequestHandlerLoggerHolder.java
{ "start": 782, "end": 863 }
class ____ holding the logger for {@link ServerCsrfTokenRequestHandler} */ final
for
java
quarkusio__quarkus
extensions/resteasy-classic/resteasy-client/deployment/src/test/java/io/quarkus/restclient/compression/ClientUsingGzipCompressionTest.java
{ "start": 603, "end": 1590 }
class ____ { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(MyResource.class, MyClient.class)) .withConfigurationResource("client-using-gzip-application.properties"); @RestClient MyClient client; /** * Test that covers the property `quarkus.resteasy.gzip.max-input`. * Larger payloads than 10 bytes should return HTTP Request Too Large. */ @Test public void testGzipMaxInput() { WebApplicationException ex = Assertions.assertThrows(WebApplicationException.class, () -> client.gzip(new byte[11])); assertEquals(HttpStatus.SC_REQUEST_TOO_LONG, ex.getResponse().getStatus()); // verify shorter message works fine Assertions.assertEquals("Worked!", client.gzip(new byte[10])); } @Path("/client") @RegisterRestClient(configKey = "my-client") public
ClientUsingGzipCompressionTest
java
spring-projects__spring-framework
spring-beans/src/main/java/org/springframework/beans/factory/annotation/Value.java
{ "start": 2529, "end": 2725 }
interface ____ { /** * The actual value expression such as <code>#{systemProperties.myProp}</code> * or property placeholder such as <code>${my.app.myProp}</code>. */ String value(); }
Value
java
micronaut-projects__micronaut-core
management/src/main/java/io/micronaut/management/endpoint/EndpointConfiguration.java
{ "start": 1002, "end": 3612 }
class ____ { /** * The prefix for endpoints configurations. */ public static final String PREFIX = "endpoints"; private static final String SLASH = "/"; private Boolean enabled; private Boolean sensitive; @Nullable private String path; private final String id; private EndpointDefaultConfiguration defaultConfiguration; /** * @param id The id of the endpoint * @param defaultConfiguration The default endpoint configuration */ public EndpointConfiguration(@Parameter String id, EndpointDefaultConfiguration defaultConfiguration) { this.id = id; this.defaultConfiguration = defaultConfiguration; } /** * @return The ID of the endpoint * @see io.micronaut.management.endpoint.annotation.Endpoint#value() */ public String getId() { return id; } /** * @return Is the endpoint enabled. If not present, use the value of {@link io.micronaut.management.endpoint.annotation.Endpoint#defaultEnabled()} */ public Optional<Boolean> isEnabled() { if (enabled != null) { return Optional.of(enabled); } return defaultConfiguration.isEnabled(); } /** * @return Does the endpoint expose sensitive information. If not present, use the value of {@link io.micronaut.management.endpoint.annotation.Endpoint#defaultSensitive()} */ public Optional<Boolean> isSensitive() { if (sensitive != null) { return Optional.of(sensitive); } return defaultConfiguration.isSensitive(); } /** * Sets whether the endpoint is enabled. * * @param enabled True it is enabled, null for the default behaviour */ public void setEnabled(Boolean enabled) { this.enabled = enabled; } /** * Sets whether the endpoint is sensitive. * * @param sensitive True it is sensitive, null for the default behaviour */ public void setSensitive(Boolean sensitive) { this.sensitive = sensitive; } /** * Endpoint's path. If not set the endpoint name is used as the path. * @param path Endpoint's path * @since 4.8.0 */ public void setPath(@Nullable String path) { this.path = path != null && path.startsWith(SLASH) ? 
path.substring(1) : path; } /** * Endpoint's path. If not set the endpoint name is used as the path. * @return Endpoint's path * @since 4.8.0 */ @Nullable public String getPath() { return path; } }
EndpointConfiguration
java
apache__dubbo
dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/reference/ReferenceKeyTest.java
{ "start": 19534, "end": 19977 }
interface ____, fixed bean name @DubboReference( id = "demoService", group = "demo", version = "1.2.3", consumer = "my-consumer", init = false, url = "dubbo://127.0.0.1:20813") private HelloService demoService; // @Autowired // private HelloService helloService; } @Configuration static
type
java
apache__camel
components/camel-opentelemetry/src/main/java/org/apache/camel/opentelemetry/OpenTelemetryTracer.java
{ "start": 2745, "end": 2988 }
class ____") public String getInstrumentationName() { return instrumentationName; } /** * A name uniquely identifying the instrumentation scope, such as the instrumentation library, package, or fully * qualified
name
java
elastic__elasticsearch
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/DelegatingProcessorTests.java
{ "start": 968, "end": 7801 }
class ____ extends ESTestCase { public static <T, R> R onNext(DelegatingProcessor<T, R> processor, T item) { var response = new AtomicReference<R>(); var error = new AtomicReference<Throwable>(); processor.onSubscribe(mock()); Flow.Subscriber<R> downstream = mock(); doAnswer(ans -> { response.set(ans.getArgument(0)); return null; }).when(downstream).onNext(any()); doAnswer(ans -> { error.set(ans.getArgument(0)); return null; }).when(downstream).onError(any()); processor.subscribe(downstream); processor.onNext(item); assertThat("onError should not be called", error.get(), nullValue()); assertThat("Response from processor was null", response.get(), notNullValue()); return response.get(); } public static <T, R> Throwable onError(DelegatingProcessor<T, R> processor, T item) { var response = new AtomicReference<Throwable>(); Flow.Subscription upstream = mock(); processor.onSubscribe(upstream); Flow.Subscriber<R> downstream = mock(); doAnswer(ans -> { response.set(ans.getArgument(0)); return null; }).when(downstream).onError(any()); processor.subscribe(downstream); processor.onNext(item); assertThat("Error from processor was null", response.get(), notNullValue()); verify(upstream, times(1)).cancel(); return response.get(); } public void testRequestBeforeOnSubscribe() { var processor = delegatingProcessor(); var expectedRequestCount = randomLongBetween(2, 100); Flow.Subscriber<String> downstream = mock(); processor.subscribe(downstream); var subscription = ArgumentCaptor.forClass(Flow.Subscription.class); verify(downstream, times(1)).onSubscribe(subscription.capture()); subscription.getValue().request(expectedRequestCount); Flow.Subscription upstream = mock(); processor.onSubscribe(upstream); verify(upstream, times(1)).request(eq(expectedRequestCount)); } public void testRequestAfterOnSubscribe() { var processor = delegatingProcessor(); var expectedRequestCount = randomLongBetween(2, 100); Flow.Subscription upstream = mock(); processor.onSubscribe(upstream); 
verify(upstream, never()).request(anyInt()); Flow.Subscriber<String> downstream = mock(); processor.subscribe(downstream); var subscription = ArgumentCaptor.forClass(Flow.Subscription.class); verify(downstream, times(1)).onSubscribe(subscription.capture()); subscription.getValue().request(expectedRequestCount); verify(upstream, times(1)).request(eq(expectedRequestCount)); } public void testOnNextAfterCancelDoesNotForwardItem() { var expectedItem = "hello"; var processor = delegatingProcessor(); processor.onSubscribe(mock()); Flow.Subscriber<String> downstream = mock(); doAnswer(ans -> { Flow.Subscription sub = ans.getArgument(0); sub.cancel(); return null; }).when(downstream).onSubscribe(any()); processor.subscribe(downstream); processor.onNext(expectedItem); verify(downstream, never()).onNext(any()); } public void testCancelForwardsToUpstream() { var processor = delegatingProcessor(); Flow.Subscription upstream = mock(); processor.onSubscribe(upstream); Flow.Subscriber<String> downstream = mock(); doAnswer(ans -> { Flow.Subscription sub = ans.getArgument(0); sub.cancel(); return null; }).when(downstream).onSubscribe(any()); processor.subscribe(downstream); verify(upstream, times(1)).cancel(); } public void testRequestForwardsToUpstream() { var expectedRequestCount = randomLongBetween(2, 20); var processor = delegatingProcessor(); Flow.Subscription upstream = mock(); processor.onSubscribe(upstream); Flow.Subscriber<String> downstream = mock(); doAnswer(ans -> { Flow.Subscription sub = ans.getArgument(0); sub.request(expectedRequestCount); return null; }).when(downstream).onSubscribe(any()); processor.subscribe(downstream); verify(upstream, times(1)).request(expectedRequestCount); } public void testOnErrorBeforeSubscriptionThrowsException() { assertThrows(IllegalStateException.class, () -> delegatingProcessor().onError(new NullPointerException())); } public void testOnError() { var expectedException = new IllegalStateException("hello"); var processor = 
delegatingProcessor(); Flow.Subscriber<String> downstream = mock(); processor.subscribe(downstream); processor.onError(expectedException); verify(downstream, times(1)).onError(eq(expectedException)); } public void testOnCompleteBeforeSubscriptionInvokesOnComplete() { var processor = delegatingProcessor(); Flow.Subscriber<String> downstream = mock(); doAnswer(ans -> { Flow.Subscription sub = ans.getArgument(0); sub.request(1); return null; }).when(downstream).onSubscribe(any()); processor.onComplete(); verify(downstream, times(0)).onComplete(); processor.subscribe(downstream); verify(downstream, times(1)).onComplete(); } public void testOnComplete() { var processor = delegatingProcessor(); Flow.Subscriber<String> downstream = mock(); processor.subscribe(downstream); processor.onComplete(); verify(downstream, times(1)).onComplete(); } public void testSubscriberOnlyAllowsOnePublisher() { var publisher1 = delegatingProcessor(); var publisher2 = delegatingProcessor(); var subscriber1 = spy(delegatingProcessor()); publisher1.subscribe(subscriber1); verify(subscriber1, times(1)).onSubscribe(any()); // verify we cannot reuse subscribers assertThrows(IllegalStateException.class, () -> publisher2.subscribe(subscriber1)); // verify publisher resets its subscriber var subscriber2 = spy(delegatingProcessor()); publisher2.subscribe(subscriber2); verify(subscriber2, times(1)).onSubscribe(any()); } private DelegatingProcessor<String, String> delegatingProcessor() { return new DelegatingProcessor<>() { @Override public void next(String item) { downstream().onNext(item); } }; } }
DelegatingProcessorTests
java
apache__kafka
server-common/src/main/java/org/apache/kafka/timeline/SnapshottableHashTable.java
{ "start": 1525, "end": 3024 }
class ____ a building block. * <p> * Each snapshot tier contains a size and a hash table. The size reflects the size at * the time the snapshot was taken. Note that, as an optimization, snapshot tiers will * be null if they don't contain anything. So for example, if snapshot 20 of Object O * contains the same entries as snapshot 10 of that object, the snapshot 20 tier for * object O will be null. * <p> * The current tier's data is stored in the fields inherited from BaseHashTable. It * would be conceptually simpler to have a separate BaseHashTable object, but since Java * doesn't have value types, subclassing is the only way to avoid another pointer * indirection and the associated extra memory cost. * <p> * Note that each element in the hash table contains a start epoch, and a value. The * start epoch is there to identify when the object was first inserted. This in turn * determines which snapshots it is a member of. * <p> * In order to retrieve an object from snapshot E, we start by checking to see if the * object exists in the "current" hash tier. If it does, and its startEpoch extends back * to E, we return that object. Otherwise, we check all the snapshot tiers, starting * with E, and ending with the most recent snapshot, to see if the object is there. * As an optimization, if we encounter the object in a snapshot tier but its epoch is too * new, we know that its value at epoch E must be null, so we can return that immediately. * <p> * The
as
java
quarkusio__quarkus
integration-tests/smallrye-config/src/test/java/io/quarkus/it/smallrye/config/QuarkusConfigTest.java
{ "start": 376, "end": 967 }
class ____ { @Test void uuid() { given() .get("/config/{name}", "quarkus.uuid") .then() .statusCode(OK.getStatusCode()) .body("value", is(notNullValue())) .body("configSourceName", equalTo("QuarkusUUIDConfigSource")); given() .get("/config/uuid") .then() .statusCode(OK.getStatusCode()) .body("value", is(notNullValue())) .body("configSourceName", equalTo("QuarkusUUIDConfigSource")); } }
QuarkusConfigTest
java
apache__camel
components/camel-smb/src/test/java/org/apache/camel/component/smb/SmbConsumerDirectoriesNotMatchedIT.java
{ "start": 986, "end": 2519 }
class ____ extends SmbServerTestSupport { private String getSbmUrl() { return String.format( "smb:%s/%s/dirnotmatched?username=%s&password=%s&recursive=true&delete=true&initialDelay=3000&include=^.*txt$", service.address(), service.shareName(), service.userName(), service.password()); } @Override public void doPostSetup() throws Exception { prepareSmbServer(); } @Test public void testSkipDirectories() throws Exception { MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedMessageCount(3); mock.assertIsSatisfied(); } private void prepareSmbServer() { // prepares the SMB Server by creating files on the server that we want // to unit test that we can pool and store as a local file sendFile(getSbmUrl() + "/?password=admin", "This is a dot file", ".skipme"); sendFile(getSbmUrl() + "/?password=admin", "This is a web file", "index.html"); sendFile(getSbmUrl() + "/?password=admin", "This is a readme file", "readme.txt"); sendFile(getSbmUrl() + "/2007/?password=admin", "2007 report", "report2007.txt"); sendFile(getSbmUrl() + "/2008/?password=admin", "2008 report", "report2008.txt"); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { from(getSbmUrl()).to("mock:result"); } }; } }
SmbConsumerDirectoriesNotMatchedIT
java
google__guava
android/guava-tests/test/com/google/common/cache/TestingWeighers.java
{ "start": 810, "end": 1392 }
class ____ { /** Returns a {@link Weigher} that returns the given {@code constant} for every request. */ static Weigher<Object, Object> constantWeigher(int constant) { return new ConstantWeigher(constant); } /** Returns a {@link Weigher} that uses the integer key as the weight. */ static Weigher<Integer, Object> intKeyWeigher() { return new IntKeyWeigher(); } /** Returns a {@link Weigher} that uses the integer value as the weight. */ static Weigher<Object, Integer> intValueWeigher() { return new IntValueWeigher(); } static final
TestingWeighers
java
quarkusio__quarkus
extensions/smallrye-reactive-messaging/runtime/src/main/java/io/quarkus/smallrye/reactivemessaging/runtime/ConnectorContextPropagationDecorator.java
{ "start": 2638, "end": 3222 }
class ____<T> extends MultiOperatorProcessor<T, T> { private final Executor tcExecutor; public ContextPropagationProcessor(MultiSubscriber<? super T> downstream, ThreadContext tc) { super(downstream); this.tcExecutor = tc.currentContextExecutor(); } @Override public void onItem(T item) { // Even though the executor is called, this is a synchronous call tcExecutor.execute(() -> super.onItem(item)); } } } }
ContextPropagationProcessor
java
spring-projects__spring-framework
spring-webmvc/src/test/java/org/springframework/web/servlet/handler/CorsAbstractHandlerMappingTests.java
{ "start": 8344, "end": 9960 }
class ____ extends AbstractHandlerMapping { private @Nullable CorsConfiguration savedCorsConfig; TestHandlerMapping() { this(null); } TestHandlerMapping(@Nullable PathPatternParser parser) { setInterceptors(mock(HandlerInterceptor.class)); setApplicationContext(new StaticWebApplicationContext()); if (parser != null) { setPatternParser(parser); } } boolean hasSavedCorsConfig() { return this.savedCorsConfig != null; } CorsConfiguration getRequiredCorsConfig() { assertThat(this.savedCorsConfig).isNotNull(); return this.savedCorsConfig; } @Override protected Object getHandlerInternal(HttpServletRequest request) { String lookupPath = initLookupPath(request); if (lookupPath.equals("/cors")) { return new CorsAwareHandler(); } else if (lookupPath.equals("/chain")) { return new HandlerExecutionChain(new CorsAwareHandler()); } return new SimpleHandler(); } @Override protected String initLookupPath(HttpServletRequest request) { // At runtime this is done by the DispatcherServlet if (getPatternParser() != null) { RequestPath requestPath = ServletRequestPathUtils.parseAndCache(request); return requestPath.pathWithinApplication().value(); } return super.initLookupPath(request); } @Override protected HandlerExecutionChain getCorsHandlerExecutionChain( HttpServletRequest request, HandlerExecutionChain chain, @Nullable CorsConfiguration config) { this.savedCorsConfig = config; return super.getCorsHandlerExecutionChain(request, chain, config); } } private static
TestHandlerMapping
java
apache__camel
components/camel-azure/camel-azure-cosmosdb/src/generated/java/org/apache/camel/component/azure/cosmosdb/CosmosDbTypeConverterLoader.java
{ "start": 890, "end": 2232 }
class ____ implements TypeConverterLoader, CamelContextAware { private CamelContext camelContext; public CosmosDbTypeConverterLoader() { } @Override public void setCamelContext(CamelContext camelContext) { this.camelContext = camelContext; } @Override public CamelContext getCamelContext() { return camelContext; } @Override public void load(TypeConverterRegistry registry) throws TypeConverterLoaderException { registerConverters(registry); } private void registerConverters(TypeConverterRegistry registry) { addTypeConverter(registry, com.azure.cosmos.models.PartitionKey.class, java.lang.String.class, true, (type, exchange, value) -> { Object answer = org.apache.camel.component.azure.cosmosdb.CosmosDbTypeConverter.toPartitionKey((java.lang.String) value); if (true && answer == null) { answer = Void.class; } return answer; }); } private static void addTypeConverter(TypeConverterRegistry registry, Class<?> toType, Class<?> fromType, boolean allowNull, SimpleTypeConverter.ConversionMethod method) { registry.addTypeConverter(toType, fromType, new SimpleTypeConverter(allowNull, method)); } }
CosmosDbTypeConverterLoader
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/util/ClassUtils.java
{ "start": 62118, "end": 63105 }
interface ____; on an implementation class, * implementations of the {@code GroovyObject} methods will be marked as synthetic anyway). * Note that, despite being synthetic, bridge methods ({@link Method#isBridge()}) are considered * as user-level methods since they are eventually pointing to a user-declared generic method. * @param method the method to check * @return {@code true} if the method can be considered as user-declared; {@code false} otherwise */ public static boolean isUserLevelMethod(Method method) { Assert.notNull(method, "Method must not be null"); return (method.isBridge() || (!method.isSynthetic() && !isGroovyObjectMethod(method))); } private static boolean isGroovyObjectMethod(Method method) { return method.getDeclaringClass().getName().equals("groovy.lang.GroovyObject"); } /** * Determine whether the given method is overridable in the given target class. * @param method the method to check * @param targetClass the target
methods
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/orphan/one2one/OneToOneEagerNonOptionalOrphanRemovalTest.java
{ "start": 968, "end": 2934 }
class ____ { @AfterEach public void tearDown(SessionFactoryScope scope) { scope.getSessionFactory().getSchemaManager().truncate(); } @Test public void testOneToOneLazyNonOptionalOrphanRemoval(SessionFactoryScope scope) { // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // Initialize the data scope.inTransaction( session -> { final PaintColor color = new PaintColor( 1, "Red" ); final Engine engine1 = new Engine( 1, 275 ); final Engine engine2 = new Engine( 2, 295 ); final Car car = new Car( 1, engine1, color ); session.persist( engine1 ); session.persist( engine2 ); session.persist( color ); session.persist( car ); } ); // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // Test orphan removal for unidirectional relationship scope.inTransaction( session -> { final Car car = session.find( Car.class, 1 ); final Engine engine = session.find( Engine.class, 2 ); car.setEngine( engine ); session.merge( car ); } ); scope.inTransaction( session -> { final Car car = session.find( Car.class, 1 ); assertNotNull( car.getEngine() ); final Engine engine = session.find( Engine.class, 1 ); assertNull( engine ); } ); // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // Test orphan removal for bidirectional relationship scope.inTransaction( session -> { final PaintColor color = new PaintColor( 2, "Blue" ); final Car car = session.find( Car.class, 1 ); car.setPaintColor( color ); session.persist( color ); session.merge( car ); } ); scope.inTransaction( session -> { final Car car = session.find( Car.class, 1 ); assertNotNull( car.getPaintColor() ); final PaintColor color = session.find( PaintColor.class, 1 ); assertNull( color ); } ); } @Entity(name = "Car") public static
OneToOneEagerNonOptionalOrphanRemovalTest
java
ReactiveX__RxJava
src/test/java/io/reactivex/rxjava3/flowable/FlowableConversionTest.java
{ "start": 4880, "end": 5172 }
class ____<T> implements FlowableConverter<T, CylonDetectorObservable<T>> { @Override public CylonDetectorObservable<T> apply(final Flowable<T> onSubscribe) { return CylonDetectorObservable.create(onSubscribe); } } public static
ConvertToCylonDetector
java
apache__flink
flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/nfa/sharedbuffer/SharedBufferAccessor.java
{ "start": 14183, "end": 14328 }
class ____ store the extraction state while extracting a sequence of values following * the versioned entry edges. */ private static
to
java
quarkusio__quarkus
independent-projects/qute/core/src/main/java/io/quarkus/qute/Engine.java
{ "start": 423, "end": 6754 }
interface ____ extends ErrorInitializer { /** * * @return a new builder instance */ static EngineBuilder builder() { return new EngineBuilder(); } /** * Parse the template contents. * <p> * Note that this method always returns a new {@link Template} instance. * * @param content * @return the template */ default Template parse(String content) { return parse(content, null, null); } /** * Parse the template contents with the specified variant. * <p> * Note that this method always returns a new {@link Template} instance. * * @param content * @param variant * @return the template */ default Template parse(String content, Variant variant) { return parse(content, variant, null); } /** * Parse the template contents with the specified variant and id. * <p> * Note that this method always returns a new {@link Template} instance. * * @param content * @param variant * @param id * @return the template */ Template parse(String content, Variant variant, String id); /** * * @return an immutable list of result mappers */ List<ResultMapper> getResultMappers(); /** * Maps the given result to a string value. If no result mappers are available the {@link Object#toString()} value is used. * * @param result Must not be null * @param expression Must not be null * @return the string value * @see Engine#getResultMappers() */ String mapResult(Object result, Expression expression); /** * A valid identifier is a sequence of non-whitespace characters. * * @param id * @param template * @return the previous value or null */ Template putTemplate(String id, Template template); /** * Obtain a template for the given identifier. A template may be registered using * {@link #putTemplate(String, Template)} or loaded by a template locator. * * @param id * @return the template or null * @see EngineBuilder#addLocator(TemplateLocator) */ Template getTemplate(String id); /** * Note that template locators are not used in this method. 
* * @param id * @return {@code true} if a template with the given identifier is loaded, {@code false} otherwise */ boolean isTemplateLoaded(String id); /** * Removes all templates from the cache. */ void clearTemplates(); /** * Removes the templates for which the mapping id matches the given predicate. * * @param test */ void removeTemplates(Predicate<String> test); /** * * @param name * @return the section helper factory for the giben name */ SectionHelperFactory<?> getSectionHelperFactory(String name); /** * * @return an immutable map of section helper factories */ Map<String, SectionHelperFactory<?>> getSectionHelperFactories(); /** * * @return an immutable list of value resolvers */ List<ValueResolver> getValueResolvers(); /** * * @return an immutable list of namespace resolvers */ List<NamespaceResolver> getNamespaceResolvers(); /** * * @return the evaluator used to evaluate expressions */ Evaluator getEvaluator(); /** * @return an immutable list of template instance initializers */ List<TemplateInstance.Initializer> getTemplateInstanceInitializers(); /** * The global rendering timeout in milliseconds. It is used if no {@code timeout} instance attribute is set. * * @return the global rendering timeout * @see TemplateInstance#TIMEOUT */ long getTimeout(); /** * * @return {@code true} if the timeout should also used for asynchronous rendering methods */ boolean useAsyncTimeout(); /** * Locates the template with the given id. * <p> * All locators registered via {@link EngineBuilder#addLocator(TemplateLocator)} are used. 
* * @param id * @return the template location for the given id, or an empty {@link Optional} if no template was found * @see TemplateLocator#locate(String) */ Optional<TemplateLocation> locate(String id); /** * @return {@code true} if the parser should remove standalone lines from the output, {@code false} otherwise */ boolean removeStandaloneLines(); /** * Returns the {@link TraceManager} responsible for managing trace listeners and * firing trace events during template rendering. * * @return the trace manager instance or {@code null} if tracing is disabled * @see EngineBuilder#enableTracing(boolean) */ TraceManager getTraceManager(); /** * Registers a new {@link TraceListener} to receive trace events. * <p> * The listener will be notified of template rendering and resolution events. * * @param listener the trace listener to add; must not be {@code null} */ default void addTraceListener(TraceListener listener) { TraceManager manager = getTraceManager(); if (manager == null) { throw new IllegalStateException("Tracing not enabled"); } manager.addTraceListener(listener); } /** * Unregisters a previously registered {@link TraceListener}. * <p> * After removal, the listener will no longer receive trace events. * * @param listener the trace listener to remove; must not be {@code null} */ default void removeTraceListener(TraceListener listener) { TraceManager manager = getTraceManager(); if (manager == null) { throw new IllegalStateException("Tracing not enabled"); } manager.removeTraceListener(listener); } /** * Initializes a new {@link EngineBuilder} instance from this engine. * <p> * The {@link EngineBuilder#iterationMetadataPrefix(String) is not set but if a * {@link io.quarkus.qute.LoopSectionHelper.Factory} is registered then the original prefix should be honored. * * @return a new builder instance initialized from this engine */ EngineBuilder newBuilder(); }
Engine
java
elastic__elasticsearch
x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianCentroidTests.java
{ "start": 560, "end": 1227 }
class ____ extends BaseAggregationTestCase<CartesianCentroidAggregationBuilder> { @Override protected Collection<Class<? extends Plugin>> getPlugins() { return List.of(LocalStateSpatialPlugin.class); } @Override protected CartesianCentroidAggregationBuilder createTestAggregatorBuilder() { CartesianCentroidAggregationBuilder factory = new CartesianCentroidAggregationBuilder(randomAlphaOfLengthBetween(1, 20)); String field = randomNumericField(); randomFieldOrScript(factory, field); if (randomBoolean()) { factory.missing("0,0"); } return factory; } }
CartesianCentroidTests
java
quarkusio__quarkus
integration-tests/main/src/test/java/io/quarkus/it/main/ParameterizedSimpleTestCase.java
{ "start": 426, "end": 854 }
class ____ { @ParameterizedTest @NullSource public void nullArgument(String arg) { assertNull(arg); } @ParameterizedTest @EmptySource public void emptyArgument(String arg) { assertEquals("", arg); } @ParameterizedTest @ValueSource(strings = { "foobar" }) public void nonemptyArgument(String arg) { assertEquals("foobar", arg); } }
ParameterizedSimpleTestCase
java
spring-projects__spring-boot
core/spring-boot/src/main/java/org/springframework/boot/SpringApplication.java
{ "start": 69164, "end": 70331 }
class ____ implements OrderSourceProvider { private final ConfigurableBeanFactory beanFactory; private final Map<?, String> instancesToBeanNames; FactoryAwareOrderSourceProvider(ConfigurableBeanFactory beanFactory, Map<?, String> instancesToBeanNames) { this.beanFactory = beanFactory; this.instancesToBeanNames = instancesToBeanNames; } @Override public @Nullable Object getOrderSource(Object obj) { String beanName = this.instancesToBeanNames.get(obj); return (beanName != null) ? getOrderSource(beanName, obj.getClass()) : null; } private @Nullable Object getOrderSource(String beanName, Class<?> instanceType) { try { RootBeanDefinition beanDefinition = (RootBeanDefinition) this.beanFactory .getMergedBeanDefinition(beanName); Method factoryMethod = beanDefinition.getResolvedFactoryMethod(); Class<?> targetType = beanDefinition.getTargetType(); targetType = (targetType != instanceType) ? targetType : null; return Stream.of(factoryMethod, targetType).filter(Objects::nonNull).toArray(); } catch (NoSuchBeanDefinitionException ex) { return null; } } } }
FactoryAwareOrderSourceProvider
java
apache__flink
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/util/collections/OptimizableHashSet.java
{ "start": 1415, "end": 5121 }
class ____ {

    /** The initial default size of a hash table. */
    public static final int DEFAULT_INITIAL_SIZE = 16;

    /** The default load factor of a hash table. */
    public static final float DEFAULT_LOAD_FACTOR = 0.75f;

    /**
     * Decide whether to convert to dense mode if it does not require more memory or could fit
     * within L1 cache.
     */
    public static final int DENSE_THRESHOLD = 8192;

    /** The acceptable load factor. */
    protected final float f;

    /** The mask for wrapping a position counter. */
    protected int mask;

    /** The current table size. */
    protected int n;

    /** Threshold after which we rehash. It must be the table size times {@link #f}. */
    protected int maxFill;

    /** Whether this set contains a null key. */
    protected boolean containsNull;

    /** Whether this set contains a zero key. */
    protected boolean containsZero;

    /** Number of entries in the set. */
    protected int size;

    /** Whether the set is currently in dense mode. */
    protected boolean isDense = false;

    /** Occupancy flags used in dense mode. */
    protected boolean[] used;

    public OptimizableHashSet(final int expected, final float f) {
        checkArgument(f > 0 && f <= 1);
        checkArgument(expected >= 0);
        this.f = f;
        this.n = arraySize(expected, f);
        this.mask = this.n - 1;
        this.maxFill = maxFill(this.n, f);
    }

    /** Add a null key. */
    public void addNull() {
        this.containsNull = true;
    }

    /** Is there a null key. */
    public boolean containsNull() {
        return containsNull;
    }

    protected int realSize() {
        // The zero key is tracked out of band, so exclude it from the count.
        if (this.containsZero) {
            return this.size - 1;
        }
        return this.size;
    }

    /** Decide whether to convert to dense mode. */
    public abstract void optimize();

    /**
     * Return the least power of two greater than or equal to the specified value.
     *
     * <p>Note that this function will return 1 when the argument is 0.
     *
     * @param x a long integer smaller than or equal to 2<sup>62</sup>.
     * @return the least power of two greater than or equal to the specified value.
     */
    public static long nextPowerOfTwo(long x) {
        if (x == 0L) {
            return 1L;
        }
        // 64 minus the leading-zero count of (x - 1) is the exponent of the
        // smallest power of two that is >= x (exact powers of two map to themselves).
        return 1L << (64 - Long.numberOfLeadingZeros(x - 1));
    }

    /**
     * Returns the maximum number of entries that can be filled before rehashing.
     *
     * @param n the size of the backing array.
     * @param f the load factor.
     * @return the maximum number of entries before rehashing.
     */
    public static int maxFill(int n, float f) {
        // Always leave at least one slot free so probes for absent keys terminate.
        return Math.min((int) Math.ceil((double) ((float) n * f)), n - 1);
    }

    /**
     * Returns the least power of two smaller than or equal to 2<sup>30</sup> and larger than or
     * equal to <code>Math.ceil( expected / f )</code>.
     *
     * @param expected the expected number of elements in a hash table.
     * @param f the load factor.
     * @return the minimum possible size for a backing array.
     * @throws IllegalArgumentException if the necessary size is larger than 2<sup>30</sup>.
     */
    public static int arraySize(int expected, float f) {
        final long s = Math.max(2L, nextPowerOfTwo((long) Math.ceil((double) ((float) expected / f))));
        if (s > (Integer.MAX_VALUE / 2 + 1)) {
            throw new IllegalArgumentException(
                    "Too large (" + expected + " expected elements with load factor " + f + ")");
        }
        return (int) s;
    }
}
OptimizableHashSet
java
spring-projects__spring-framework
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/RequestResponseBodyMethodProcessorTests.java
{ "start": 55902, "end": 56876 }
class ____ implements RequestBodyAdvice { @Override public boolean supports(MethodParameter methodParameter, Type targetType, Class<? extends HttpMessageConverter<?>> converterType) { return StringHttpMessageConverter.class.equals(converterType); } @Override public HttpInputMessage beforeBodyRead(HttpInputMessage inputMessage, MethodParameter parameter, Type targetType, Class<? extends HttpMessageConverter<?>> converterType) { return inputMessage; } @Override public Object afterBodyRead(Object body, HttpInputMessage inputMessage, MethodParameter parameter, Type targetType, Class<? extends HttpMessageConverter<?>> converterType) { return body; } @Override public Object handleEmptyBody(@Nullable Object body, HttpInputMessage inputMessage, MethodParameter parameter, Type targetType, Class<? extends HttpMessageConverter<?>> converterType) { return "default value for empty body"; } }
EmptyRequestBodyAdvice
java
junit-team__junit5
platform-tests/src/test/java/org/junit/platform/commons/util/ToStringBuilderTests.java
{ "start": 725, "end": 4407 }
class ____ { @SuppressWarnings("DataFlowIssue") @Test void withNullObject() { assertPreconditionViolationFor(() -> new ToStringBuilder((Object) null)); } @SuppressWarnings("DataFlowIssue") @Test void withNullClass() { assertPreconditionViolationFor(() -> new ToStringBuilder((Class<?>) null)); } @SuppressWarnings("DataFlowIssue") @Test void appendWithIllegalName() { var builder = new ToStringBuilder(""); assertPreconditionViolationFor(() -> builder.append(null, "")); assertPreconditionViolationFor(() -> builder.append("", "")); assertPreconditionViolationFor(() -> builder.append(" ", "")); } @Test void withZeroFields() { assertEquals("RoleModel []", new ToStringBuilder(new RoleModel()).toString()); assertEquals("RoleModel []", new ToStringBuilder(RoleModel.class).toString()); } @Test void withOneField() { assertEquals("RoleModel [name = 'Dilbert']", new ToStringBuilder(new RoleModel()).append("name", "Dilbert").toString()); } @Test void withNullField() { assertEquals("RoleModel [name = null]", new ToStringBuilder(new RoleModel()).append("name", null).toString()); } @Test void withTwoFields() { assertEquals("RoleModel [name = 'Dilbert', age = 42]", new ToStringBuilder(new RoleModel()).append("name", "Dilbert").append("age", 42).toString()); } @Test void withIntegerArrayField() { assertEquals("RoleModel [magic numbers = [1, 42, 99]]", new ToStringBuilder(new RoleModel()).append("magic numbers", new Integer[] { 1, 42, 99 }).toString()); } @Test void withIntArrayField() { assertEquals("RoleModel [magic numbers = [1, 42, 23]]", new ToStringBuilder(new RoleModel()).append("magic numbers", new int[] { 1, 42, 23 }).toString()); } @Test void withCharArrayField() { assertEquals("RoleModel [magic characters = [a, b]]", new ToStringBuilder(new RoleModel()).append("magic characters", new char[] { 'a', 'b' }).toString()); } @Test void withPrimitiveBooleanArrayField() { assertEquals("RoleModel [booleans = [true, false, true]]", new ToStringBuilder(new 
RoleModel()).append("booleans", new boolean[] { true, false, true }).toString()); } @Test void withShortArrayField() { assertEquals("RoleModel [values = [23, 42]]", new ToStringBuilder(new RoleModel()).append("values", new short[] { 23, 42 }).toString()); } @Test void withByteArrayField() { assertEquals("RoleModel [values = [23, 42]]", new ToStringBuilder(new RoleModel()).append("values", new byte[] { 23, 42 }).toString()); } @Test void withPrimitiveLongArrayField() { assertEquals("RoleModel [values = [23, 42]]", new ToStringBuilder(new RoleModel()).append("values", new long[] { 23, 42 }).toString()); } @Test void withPrimitiveFloatArrayField() { assertEquals("RoleModel [values = [23.45, 17.13]]", new ToStringBuilder(new RoleModel()).append("values", new float[] { 23.45f, 17.13f }).toString()); } @Test void withPrimitiveDoubleArrayField() { assertEquals("RoleModel [values = [23.45, 17.13]]", new ToStringBuilder(new RoleModel()).append("values", new double[] { 23.45d, 17.13d }).toString()); } @Test @SuppressWarnings("serial") void withMapField() { // @formatter:off Map<String,Object> map = new LinkedHashMap<>() {{ put("foo", 42); put("bar", "enigma"); }}; // @formatter:on assertEquals("RoleModel [mystery map = {foo=42, bar=enigma}]", new ToStringBuilder(new RoleModel()).append("mystery map", map).toString()); } @Test void withDemoImplementation() { var roleModel = new RoleModel("Dilbert", 42); assertEquals("RoleModel [name = 'Dilbert', age = 42]", roleModel.toString()); } @NullUnmarked static
ToStringBuilderTests
java
elastic__elasticsearch
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/StringTemplatePlugin.java
{ "start": 826, "end": 1623 }
class ____ implements Plugin<Project> {

    @Override
    public void apply(Project project) {
        final File generatedSrcDir = project.file("src/main/generated-src/");
        TaskProvider<StringTemplateTask> stringTemplates =
            project.getTasks().register("stringTemplates", StringTemplateTask.class);
        stringTemplates.configure(task -> task.getOutputFolder().set(generatedSrcDir));
        // Only wire the generated sources into the main source set when the
        // Java plugin is actually applied to the project.
        project.getPlugins().withType(JavaPlugin.class, javaPlugin -> {
            SourceSetContainer sourceSets =
                project.getExtensions().getByType(JavaPluginExtension.class).getSourceSets();
            sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME).getJava().srcDir(stringTemplates);
        });
    }
}
StringTemplatePlugin