language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/security/ssl/SslFactoryTest.java | {
"start": 3011,
"end": 32374
} | class ____ {
private final String tlsProtocol;
public SslFactoryTest(String tlsProtocol) {
this.tlsProtocol = tlsProtocol;
}
@Test
public void testSslFactoryConfiguration() throws Exception {
File trustStoreFile = TestUtils.tempFile("truststore", ".jks");
Map<String, Object> serverSslConfig = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile)
.build();
try (SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER, null, true)) {
sslFactory.configure(serverSslConfig);
//host and port are hints
SSLEngine engine = sslFactory.createSslEngine("localhost", 0);
assertNotNull(engine);
assertEquals(Set.of(tlsProtocol), Set.of(engine.getEnabledProtocols()));
assertFalse(engine.getUseClientMode());
}
}
@Test
public void testSslFactoryConfigWithManyKeyStoreEntries() throws Exception {
//generate server configs for keystore with multiple certificate chain
Map<String, Object> serverSslConfig = TestSslUtils.generateConfigsWithCertificateChains(tlsProtocol);
try (SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER, null, true)) {
sslFactory.configure(serverSslConfig);
SSLEngine engine = sslFactory.createSslEngine("localhost", 0);
assertNotNull(engine);
assertEquals(Set.of(tlsProtocol), Set.of(engine.getEnabledProtocols()));
assertFalse(engine.getUseClientMode());
}
}
@Test
public void testSslFactoryWithCustomKeyManagerConfiguration() {
TestProviderCreator testProviderCreator = new TestProviderCreator();
Map<String, Object> serverSslConfig = TestSslUtils.createSslConfig(
TestKeyManagerFactory.ALGORITHM,
TestTrustManagerFactory.ALGORITHM,
tlsProtocol
);
serverSslConfig.put(SecurityConfig.SECURITY_PROVIDERS_CONFIG, testProviderCreator.getClass().getName());
SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER);
sslFactory.configure(serverSslConfig);
assertNotNull(sslFactory.sslEngineFactory(), "SslEngineFactory not created");
Security.removeProvider(testProviderCreator.getProvider().getName());
}
@Test
public void testSslFactoryWithoutProviderClassConfiguration() {
// An exception is thrown as the algorithm is not registered through a provider
Map<String, Object> serverSslConfig = TestSslUtils.createSslConfig(
TestKeyManagerFactory.ALGORITHM,
TestTrustManagerFactory.ALGORITHM,
tlsProtocol
);
SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER);
assertThrows(KafkaException.class, () -> sslFactory.configure(serverSslConfig));
}
@Test
public void testSslFactoryWithIncorrectProviderClassConfiguration() {
// An exception is thrown as the algorithm is not registered through a provider
Map<String, Object> serverSslConfig = TestSslUtils.createSslConfig(
TestKeyManagerFactory.ALGORITHM,
TestTrustManagerFactory.ALGORITHM,
tlsProtocol
);
serverSslConfig.put(SecurityConfig.SECURITY_PROVIDERS_CONFIG,
"com.fake.ProviderClass1,com.fake.ProviderClass2");
SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER);
assertThrows(KafkaException.class, () -> sslFactory.configure(serverSslConfig));
}
@Test
public void testSslFactoryWithoutPasswordConfiguration() throws Exception {
File trustStoreFile = TestUtils.tempFile("truststore", ".jks");
Map<String, Object> serverSslConfig = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile)
.build();
// unset the password
serverSslConfig.remove(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG);
SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER);
try {
sslFactory.configure(serverSslConfig);
} catch (Exception e) {
fail("An exception was thrown when configuring the truststore without a password: " + e);
}
}
@Test
public void testClientMode() throws Exception {
File trustStoreFile = TestUtils.tempFile("truststore", ".jks");
Map<String, Object> clientSslConfig = sslConfigsBuilder(ConnectionMode.CLIENT)
.createNewTrustStore(trustStoreFile)
.useClientCert(false)
.build();
SslFactory sslFactory = new SslFactory(ConnectionMode.CLIENT);
sslFactory.configure(clientSslConfig);
//host and port are hints
SSLEngine engine = sslFactory.createSslEngine("localhost", 0);
assertTrue(engine.getUseClientMode());
}
@Test
public void staleSslEngineFactoryShouldBeClosed() throws IOException, GeneralSecurityException {
File trustStoreFile = TestUtils.tempFile("truststore", ".jks");
Map<String, Object> clientSslConfig = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile)
.useClientCert(false)
.build();
clientSslConfig.put(SslConfigs.SSL_ENGINE_FACTORY_CLASS_CONFIG, TestSslUtils.TestSslEngineFactory.class);
SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER);
sslFactory.configure(clientSslConfig);
TestSslUtils.TestSslEngineFactory sslEngineFactory = (TestSslUtils.TestSslEngineFactory) sslFactory.sslEngineFactory();
assertNotNull(sslEngineFactory);
assertFalse(sslEngineFactory.closed);
trustStoreFile = TestUtils.tempFile("truststore", ".jks");
clientSslConfig = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile)
.build();
clientSslConfig.put(SslConfigs.SSL_ENGINE_FACTORY_CLASS_CONFIG, TestSslUtils.TestSslEngineFactory.class);
sslFactory.reconfigure(clientSslConfig);
TestSslUtils.TestSslEngineFactory newSslEngineFactory = (TestSslUtils.TestSslEngineFactory) sslFactory.sslEngineFactory();
assertNotEquals(sslEngineFactory, newSslEngineFactory);
// the older one should be closed
assertTrue(sslEngineFactory.closed);
}
@Test
public void testReconfiguration() throws Exception {
File trustStoreFile = TestUtils.tempFile("truststore", ".jks");
Map<String, Object> sslConfig = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile)
.build();
SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER);
// Verify that we'll throw an exception if validateReconfiguration is called before sslFactory is configured
Exception e = assertThrows(ConfigException.class, () -> sslFactory.validateReconfiguration(sslConfig));
assertEquals("SSL reconfiguration failed due to java.lang.IllegalStateException: SslFactory has not been configured.", e.getMessage());
sslFactory.configure(sslConfig);
SslEngineFactory sslEngineFactory = sslFactory.sslEngineFactory();
assertNotNull(sslEngineFactory, "SslEngineFactory not created");
// Verify that SslEngineFactory is not recreated on reconfigure() if config and
// file are not changed
sslFactory.reconfigure(sslConfig);
assertSame(sslEngineFactory, sslFactory.sslEngineFactory(), "SslEngineFactory recreated unnecessarily");
// Verify that the SslEngineFactory is recreated on reconfigure() if config is changed
trustStoreFile = TestUtils.tempFile("truststore", ".jks");
Map<String, Object> newSslConfig = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile)
.build();
sslFactory.reconfigure(newSslConfig);
assertNotSame(sslEngineFactory, sslFactory.sslEngineFactory(), "SslEngineFactory not recreated");
sslEngineFactory = sslFactory.sslEngineFactory();
// Verify that builder is recreated on reconfigure() if config is not changed, but truststore file was modified
trustStoreFile.setLastModified(System.currentTimeMillis() + 10000);
sslFactory.reconfigure(newSslConfig);
assertNotSame(sslEngineFactory, sslFactory.sslEngineFactory(), "SslEngineFactory not recreated");
sslEngineFactory = sslFactory.sslEngineFactory();
// Verify that builder is recreated on reconfigure() if config is not changed, but keystore file was modified
File keyStoreFile = new File((String) newSslConfig.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG));
keyStoreFile.setLastModified(System.currentTimeMillis() + 10000);
sslFactory.reconfigure(newSslConfig);
assertNotSame(sslEngineFactory, sslFactory.sslEngineFactory(), "SslEngineFactory not recreated");
sslEngineFactory = sslFactory.sslEngineFactory();
// Verify that builder is recreated after validation on reconfigure() if config is not changed, but keystore file was modified
keyStoreFile.setLastModified(System.currentTimeMillis() + 15000);
sslFactory.validateReconfiguration(newSslConfig);
sslFactory.reconfigure(newSslConfig);
assertNotSame(sslEngineFactory, sslFactory.sslEngineFactory(), "SslEngineFactory not recreated");
sslEngineFactory = sslFactory.sslEngineFactory();
// Verify that the builder is not recreated if modification time cannot be determined
keyStoreFile.setLastModified(System.currentTimeMillis() + 20000);
Files.delete(keyStoreFile.toPath());
sslFactory.reconfigure(newSslConfig);
assertSame(sslEngineFactory, sslFactory.sslEngineFactory(), "SslEngineFactory recreated unnecessarily");
}
@Test
public void testReconfigurationWithoutTruststore() throws Exception {
File trustStoreFile = TestUtils.tempFile("truststore", ".jks");
Map<String, Object> sslConfig = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile)
.build();
sslConfig.remove(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG);
sslConfig.remove(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG);
sslConfig.remove(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG);
SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER);
sslFactory.configure(sslConfig);
SSLContext sslContext = ((DefaultSslEngineFactory) sslFactory.sslEngineFactory()).sslContext();
assertNotNull(sslContext, "SSL context not created");
assertSame(sslContext, ((DefaultSslEngineFactory) sslFactory.sslEngineFactory()).sslContext(),
"SSL context recreated unnecessarily");
assertFalse(sslFactory.createSslEngine("localhost", 0).getUseClientMode());
Map<String, Object> sslConfig2 = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile)
.build();
try {
sslFactory.validateReconfiguration(sslConfig2);
fail("Truststore configured dynamically for listener without previous truststore");
} catch (ConfigException e) {
// Expected exception
}
}
@Test
public void testReconfigurationWithoutKeystore() throws Exception {
File trustStoreFile = TestUtils.tempFile("truststore", ".jks");
Map<String, Object> sslConfig = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile)
.build();
sslConfig.remove(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG);
sslConfig.remove(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG);
sslConfig.remove(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG);
SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER);
sslFactory.configure(sslConfig);
SSLContext sslContext = ((DefaultSslEngineFactory) sslFactory.sslEngineFactory()).sslContext();
assertNotNull(sslContext, "SSL context not created");
assertSame(sslContext, ((DefaultSslEngineFactory) sslFactory.sslEngineFactory()).sslContext(),
"SSL context recreated unnecessarily");
assertFalse(sslFactory.createSslEngine("localhost", 0).getUseClientMode());
File newTrustStoreFile = TestUtils.tempFile("truststore", ".jks");
sslConfig = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(newTrustStoreFile)
.build();
sslConfig.remove(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG);
sslConfig.remove(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG);
sslConfig.remove(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG);
sslFactory.reconfigure(sslConfig);
assertNotSame(sslContext, ((DefaultSslEngineFactory) sslFactory.sslEngineFactory()).sslContext(),
"SSL context not recreated");
sslConfig = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(newTrustStoreFile)
.build();
try {
sslFactory.validateReconfiguration(sslConfig);
fail("Keystore configured dynamically for listener without previous keystore");
} catch (ConfigException e) {
// Expected exception
}
}
@Test
public void testPemReconfiguration() throws Exception {
Properties props = new Properties();
props.putAll(sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(null)
.usePem(true)
.build());
TestSecurityConfig sslConfig = new TestSecurityConfig(props);
SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER);
sslFactory.configure(sslConfig.values());
SslEngineFactory sslEngineFactory = sslFactory.sslEngineFactory();
assertNotNull(sslEngineFactory, "SslEngineFactory not created");
props.put("some.config", "some.value");
sslConfig = new TestSecurityConfig(props);
sslFactory.reconfigure(sslConfig.values());
assertSame(sslEngineFactory, sslFactory.sslEngineFactory(), "SslEngineFactory recreated unnecessarily");
props.put(SslConfigs.SSL_KEYSTORE_KEY_CONFIG,
new Password(((Password) props.get(SslConfigs.SSL_KEYSTORE_KEY_CONFIG)).value() + " "));
sslConfig = new TestSecurityConfig(props);
sslFactory.reconfigure(sslConfig.values());
assertNotSame(sslEngineFactory, sslFactory.sslEngineFactory(), "SslEngineFactory not recreated");
sslEngineFactory = sslFactory.sslEngineFactory();
props.put(SslConfigs.SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG,
new Password(((Password) props.get(SslConfigs.SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG)).value() + " "));
sslConfig = new TestSecurityConfig(props);
sslFactory.reconfigure(sslConfig.values());
assertNotSame(sslEngineFactory, sslFactory.sslEngineFactory(), "SslEngineFactory not recreated");
sslEngineFactory = sslFactory.sslEngineFactory();
props.put(SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG,
new Password(((Password) props.get(SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG)).value() + " "));
sslConfig = new TestSecurityConfig(props);
sslFactory.reconfigure(sslConfig.values());
assertNotSame(sslEngineFactory, sslFactory.sslEngineFactory(), "SslEngineFactory not recreated");
}
@Test
public void testKeyStoreTrustStoreValidation() throws Exception {
File trustStoreFile = TestUtils.tempFile("truststore", ".jks");
Map<String, Object> serverSslConfig = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile)
.build();
SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER);
sslFactory.configure(serverSslConfig);
assertNotNull(sslFactory.sslEngineFactory(), "SslEngineFactory not created");
}
@Test
public void testUntrustedKeyStoreValidationFails() throws Exception {
File trustStoreFile1 = TestUtils.tempFile("truststore1", ".jks");
File trustStoreFile2 = TestUtils.tempFile("truststore2", ".jks");
Map<String, Object> sslConfig1 = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile1)
.build();
Map<String, Object> sslConfig2 = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile2)
.build();
SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER, null, true);
for (String key : Arrays.asList(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,
SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG,
SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG,
SslConfigs.SSL_TRUSTMANAGER_ALGORITHM_CONFIG)) {
sslConfig1.put(key, sslConfig2.get(key));
}
try {
sslFactory.configure(sslConfig1);
fail("Validation did not fail with untrusted truststore");
} catch (ConfigException e) {
// Expected exception
}
}
@Test
public void testKeystoreVerifiableUsingTruststore() throws Exception {
verifyKeystoreVerifiableUsingTruststore(false);
}
@Test
public void testPemKeystoreVerifiableUsingTruststore() throws Exception {
verifyKeystoreVerifiableUsingTruststore(true);
}
private void verifyKeystoreVerifiableUsingTruststore(boolean usePem) throws Exception {
File trustStoreFile1 = usePem ? null : TestUtils.tempFile("truststore1", ".jks");
Map<String, Object> sslConfig1 = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile1)
.usePem(usePem)
.build();
SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER, null, true);
sslFactory.configure(sslConfig1);
File trustStoreFile2 = usePem ? null : TestUtils.tempFile("truststore2", ".jks");
Map<String, Object> sslConfig2 = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile2)
.usePem(usePem)
.build();
// Verify that `createSSLContext` fails even if certificate from new keystore is trusted by
// the new truststore, if certificate is not trusted by the existing truststore on the `SslFactory`.
// This is to prevent both keystores and truststores to be modified simultaneously on an inter-broker
// listener to stores that may not work with other brokers where the update hasn't yet been performed.
try {
sslFactory.validateReconfiguration(sslConfig2);
fail("ValidateReconfiguration did not fail as expected");
} catch (ConfigException e) {
// Expected exception
}
}
@Test
public void testCertificateEntriesValidation() throws Exception {
verifyCertificateEntriesValidation(false);
}
@Test
public void testPemCertificateEntriesValidation() throws Exception {
verifyCertificateEntriesValidation(true);
}
private void verifyCertificateEntriesValidation(boolean usePem) throws Exception {
File trustStoreFile = usePem ? null : TestUtils.tempFile("truststore", ".jks");
Map<String, Object> serverSslConfig = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile)
.usePem(usePem)
.build();
File newTrustStoreFile = usePem ? null : TestUtils.tempFile("truststore", ".jks");
Map<String, Object> newCnConfig = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(newTrustStoreFile)
.cn("Another CN")
.usePem(usePem)
.build();
KeyStore ks1 = sslKeyStore(serverSslConfig);
KeyStore ks2 = sslKeyStore(serverSslConfig);
assertEquals(SslFactory.CertificateEntries.create(ks1), SslFactory.CertificateEntries.create(ks2));
// Use different alias name, validation should succeed
ks2.setCertificateEntry("another", ks1.getCertificate("localhost"));
assertEquals(SslFactory.CertificateEntries.create(ks1), SslFactory.CertificateEntries.create(ks2));
KeyStore ks3 = sslKeyStore(newCnConfig);
assertNotEquals(SslFactory.CertificateEntries.create(ks1), SslFactory.CertificateEntries.create(ks3));
}
/**
* Tests client side ssl.engine.factory configuration is used when specified
*/
@Test
public void testClientSpecifiedSslEngineFactoryUsed() throws Exception {
File trustStoreFile = TestUtils.tempFile("truststore", ".jks");
Map<String, Object> clientSslConfig = sslConfigsBuilder(ConnectionMode.CLIENT)
.createNewTrustStore(trustStoreFile)
.useClientCert(false)
.build();
clientSslConfig.put(SslConfigs.SSL_ENGINE_FACTORY_CLASS_CONFIG, TestSslUtils.TestSslEngineFactory.class);
SslFactory sslFactory = new SslFactory(ConnectionMode.CLIENT);
sslFactory.configure(clientSslConfig);
assertInstanceOf(TestSslUtils.TestSslEngineFactory.class, sslFactory.sslEngineFactory(),
"SslEngineFactory must be of expected type");
}
@Test
public void testEngineFactoryClosed() throws Exception {
File trustStoreFile = TestUtils.tempFile("truststore", ".jks");
Map<String, Object> clientSslConfig = sslConfigsBuilder(ConnectionMode.CLIENT)
.createNewTrustStore(trustStoreFile)
.useClientCert(false)
.build();
clientSslConfig.put(SslConfigs.SSL_ENGINE_FACTORY_CLASS_CONFIG, TestSslUtils.TestSslEngineFactory.class);
SslFactory sslFactory = new SslFactory(ConnectionMode.CLIENT);
sslFactory.configure(clientSslConfig);
TestSslUtils.TestSslEngineFactory engine = (TestSslUtils.TestSslEngineFactory) sslFactory.sslEngineFactory();
assertFalse(engine.closed);
sslFactory.close();
assertTrue(engine.closed);
}
/**
* Tests server side ssl.engine.factory configuration is used when specified
*/
@Test
public void testServerSpecifiedSslEngineFactoryUsed() throws Exception {
File trustStoreFile = TestUtils.tempFile("truststore", ".jks");
Map<String, Object> serverSslConfig = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(trustStoreFile)
.useClientCert(false)
.build();
serverSslConfig.put(SslConfigs.SSL_ENGINE_FACTORY_CLASS_CONFIG, TestSslUtils.TestSslEngineFactory.class);
SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER);
sslFactory.configure(serverSslConfig);
assertInstanceOf(TestSslUtils.TestSslEngineFactory.class, sslFactory.sslEngineFactory(),
"SslEngineFactory must be of expected type");
}
/**
* Tests invalid ssl.engine.factory configuration
*/
@Test
public void testInvalidSslEngineFactory() throws Exception {
File trustStoreFile = TestUtils.tempFile("truststore", ".jks");
Map<String, Object> clientSslConfig = sslConfigsBuilder(ConnectionMode.CLIENT)
.createNewTrustStore(trustStoreFile)
.useClientCert(false)
.build();
clientSslConfig.put(SslConfigs.SSL_ENGINE_FACTORY_CLASS_CONFIG, String.class);
SslFactory sslFactory = new SslFactory(ConnectionMode.CLIENT);
assertThrows(ClassCastException.class, () -> sslFactory.configure(clientSslConfig));
}
@Test
public void testUsedConfigs() throws IOException, GeneralSecurityException {
Map<String, Object> serverSslConfig = sslConfigsBuilder(ConnectionMode.SERVER)
.createNewTrustStore(TestUtils.tempFile("truststore", ".jks"))
.useClientCert(false)
.build();
serverSslConfig.put(SslConfigs.SSL_ENGINE_FACTORY_CLASS_CONFIG, TestSslUtils.TestSslEngineFactory.class);
TestSecurityConfig securityConfig = new TestSecurityConfig(serverSslConfig);
SslFactory sslFactory = new SslFactory(ConnectionMode.SERVER);
sslFactory.configure(securityConfig.values());
assertFalse(securityConfig.unused().contains(SslConfigs.SSL_ENGINE_FACTORY_CLASS_CONFIG));
}
@Test
public void testDynamicUpdateCompatibility() throws Exception {
KeyPair keyPair = TestSslUtils.generateKeyPair("RSA");
KeyStore ks = createKeyStore(keyPair, "*.example.com", "Kafka", true, "localhost", "*.example.com");
ensureCompatible(ks, ks, false, false);
ensureCompatible(ks, createKeyStore(keyPair, "*.example.com", "Kafka", true, "localhost", "*.example.com"), false, false);
ensureCompatible(ks, createKeyStore(keyPair, " *.example.com", " Kafka ", true, "localhost", "*.example.com"), false, false);
ensureCompatible(ks, createKeyStore(keyPair, "*.example.COM", "Kafka", true, "localhost", "*.example.com"), false, false);
ensureCompatible(ks, createKeyStore(keyPair, "*.EXAMPLE.COM", "KAFKA", true, "localhost", "*.example.com"), false, false);
ensureCompatible(ks, createKeyStore(keyPair, "*.EXAMPLE.COM", "Kafka", true, "*.example.com"), false, false);
ensureCompatible(ks, createKeyStore(keyPair, "*.EXAMPLE.COM", "Kafka", true, "localhost"), false, false);
ensureCompatible(ks, createKeyStore(keyPair, "*.example.com", "Kafka", false, "localhost", "*.example.com"), false, false);
ensureCompatible(ks, createKeyStore(keyPair, "*.example.COM", "Kafka", false, "localhost", "*.example.com"), false, false);
ensureCompatible(ks, createKeyStore(keyPair, "*.EXAMPLE.COM", "KAFKA", false, "localhost", "*.example.com"), false, false);
ensureCompatible(ks, createKeyStore(keyPair, "*.EXAMPLE.COM", "Kafka", false, "*.example.com"), false, false);
ensureCompatible(ks, createKeyStore(keyPair, "*.EXAMPLE.COM", "Kafka", false, "localhost"), false, false);
assertThrows(ConfigException.class, () ->
ensureCompatible(ks, createKeyStore(keyPair, " *.example.com", " Kafka ", false, "localhost", "*.example.com"), false, false));
assertThrows(ConfigException.class, () ->
ensureCompatible(ks, createKeyStore(keyPair, "*.another.example.com", "Kafka", true, "*.example.com"), false, false));
assertThrows(ConfigException.class, () ->
ensureCompatible(ks, createKeyStore(keyPair, "*.EXAMPLE.COM", "Kafka", true, "*.another.example.com"), false, false));
// Test disabling of validation
ensureCompatible(ks, createKeyStore(keyPair, " *.another.example.com", "Kafka ", true, "localhost", "*.another.example.com"), true, true);
ensureCompatible(ks, createKeyStore(keyPair, "*.example.com", "Kafka", true, "localhost", "*.another.example.com"), false, true);
assertThrows(ConfigException.class, () -> ensureCompatible(ks, createKeyStore(keyPair, "*.example.com", "Kafka", true, "localhost", "*.another.example.com"), true, false));
ensureCompatible(ks, createKeyStore(keyPair, "*.another.example.com", "Kafka", true, "localhost", "*.example.com"), true, false);
assertThrows(ConfigException.class, () -> ensureCompatible(ks, createKeyStore(keyPair, "*.another.example.com", "Kafka", true, "localhost", "*.example.com"), false, true));
}
private KeyStore createKeyStore(KeyPair keyPair, String commonName, String org, boolean utf8, String... dnsNames) throws Exception {
X509Certificate cert = new TestSslUtils.CertificateBuilder().sanDnsNames(dnsNames)
.generate(commonName, org, utf8, keyPair);
KeyStore ks = KeyStore.getInstance("PKCS12");
ks.load(null, null);
ks.setKeyEntry("kafka", keyPair.getPrivate(), null, new X509Certificate[] {cert});
return ks;
}
private KeyStore sslKeyStore(Map<String, Object> sslConfig) {
SecurityStore store;
if (sslConfig.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG) != null) {
store = new FileBasedStore(
(String) sslConfig.get(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG),
(String) sslConfig.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG),
(Password) sslConfig.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG),
(Password) sslConfig.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG),
true
);
} else {
store = new PemStore(
(Password) sslConfig.get(SslConfigs.SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG),
(Password) sslConfig.get(SslConfigs.SSL_KEYSTORE_KEY_CONFIG),
(Password) sslConfig.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG)
);
}
return store.get();
}
private TestSslUtils.SslConfigsBuilder sslConfigsBuilder(ConnectionMode connectionMode) {
return new TestSslUtils.SslConfigsBuilder(connectionMode).tlsProtocol(tlsProtocol);
}
}
| SslFactoryTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java | {
"start": 68466,
"end": 69150
} | class ____ extends DataInputBuffer {
private byte[] buffer;
private int start;
private int length;
public void reset(byte[] buffer, int start, int length) {
this.buffer = buffer;
this.start = start;
this.length = length;
if (start + length > bufvoid) {
this.buffer = new byte[this.length];
final int taillen = bufvoid - start;
System.arraycopy(buffer, start, this.buffer, 0, taillen);
System.arraycopy(buffer, 0, this.buffer, taillen, length-taillen);
this.start = 0;
}
super.reset(this.buffer, this.start, this.length);
}
}
protected | InMemValBytes |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/io/buffer/DataBufferUtils.java | {
"start": 2237,
"end": 28913
} | class ____ {
private static final Log logger = LogFactory.getLog(DataBufferUtils.class);
private static final Consumer<DataBuffer> RELEASE_CONSUMER = DataBufferUtils::release;
//---------------------------------------------------------------------
// Reading
//---------------------------------------------------------------------
/**
* Obtain an {@link InputStream} from the given supplier, and read it into a
* {@code Flux} of {@code DataBuffer}s. Closes the input stream when the
* Flux is terminated.
* @param inputStreamSupplier the supplier for the input stream to read from
* @param bufferFactory the factory to create data buffers with
* @param bufferSize the maximum size of the data buffers
* @return a Flux of data buffers read from the given channel
*/
public static Flux<DataBuffer> readInputStream(
Callable<InputStream> inputStreamSupplier, DataBufferFactory bufferFactory, int bufferSize) {
Assert.notNull(inputStreamSupplier, "'inputStreamSupplier' must not be null");
return readByteChannel(() -> Channels.newChannel(inputStreamSupplier.call()), bufferFactory, bufferSize);
}
/**
* Obtain a {@link ReadableByteChannel} from the given supplier, and read
* it into a {@code Flux} of {@code DataBuffer}s. Closes the channel when
* the Flux is terminated.
* @param channelSupplier the supplier for the channel to read from
* @param bufferFactory the factory to create data buffers with
* @param bufferSize the maximum size of the data buffers
* @return a Flux of data buffers read from the given channel
*/
public static Flux<DataBuffer> readByteChannel(
Callable<ReadableByteChannel> channelSupplier, DataBufferFactory bufferFactory, int bufferSize) {
Assert.notNull(channelSupplier, "'channelSupplier' must not be null");
Assert.notNull(bufferFactory, "'bufferFactory' must not be null");
Assert.isTrue(bufferSize > 0, "'bufferSize' must be > 0");
return Flux.using(channelSupplier,
channel -> Flux.generate(new ReadableByteChannelGenerator(channel, bufferFactory, bufferSize)),
DataBufferUtils::closeChannel);
// No doOnDiscard as operators used do not cache
}
/**
* Obtain a {@code AsynchronousFileChannel} from the given supplier, and read
* it into a {@code Flux} of {@code DataBuffer}s. Closes the channel when
* the Flux is terminated.
* @param channelSupplier the supplier for the channel to read from
* @param bufferFactory the factory to create data buffers with
* @param bufferSize the maximum size of the data buffers
* @return a Flux of data buffers read from the given channel
*/
public static Flux<DataBuffer> readAsynchronousFileChannel(
Callable<AsynchronousFileChannel> channelSupplier, DataBufferFactory bufferFactory, int bufferSize) {
return readAsynchronousFileChannel(channelSupplier, 0, bufferFactory, bufferSize);
}
/**
* Obtain an {@code AsynchronousFileChannel} from the given supplier, and
* read it into a {@code Flux} of {@code DataBuffer}s, starting at the given
* position. Closes the channel when the Flux is terminated.
* @param channelSupplier the supplier for the channel to read from
* @param position the position to start reading from
* @param bufferFactory the factory to create data buffers with
* @param bufferSize the maximum size of the data buffers
* @return a Flux of data buffers read from the given channel
*/
public static Flux<DataBuffer> readAsynchronousFileChannel(
Callable<AsynchronousFileChannel> channelSupplier, long position,
DataBufferFactory bufferFactory, int bufferSize) {
Assert.notNull(channelSupplier, "'channelSupplier' must not be null");
Assert.notNull(bufferFactory, "'bufferFactory' must not be null");
Assert.isTrue(position >= 0, "'position' must be >= 0");
Assert.isTrue(bufferSize > 0, "'bufferSize' must be > 0");
Flux<DataBuffer> flux = Flux.using(channelSupplier,
channel -> Flux.create(sink -> {
ReadCompletionHandler handler =
new ReadCompletionHandler(channel, sink, position, bufferFactory, bufferSize);
sink.onCancel(handler::cancel);
sink.onRequest(handler::request);
}),
channel -> {
// Do not close channel from here, rather wait for the current read callback
// and then complete after releasing the DataBuffer.
});
return flux.doOnDiscard(DataBuffer.class, DataBufferUtils::release);
}
/**
* Read bytes from the given file {@code Path} into a {@code Flux} of {@code DataBuffer}s.
* The method ensures that the file is closed when the flux is terminated.
* @param path the path to read bytes from
* @param bufferFactory the factory to create data buffers with
* @param bufferSize the maximum size of the data buffers
* @return a Flux of data buffers read from the given channel
* @since 5.2
*/
public static Flux<DataBuffer> read(
Path path, DataBufferFactory bufferFactory, int bufferSize, OpenOption... options) {
Assert.notNull(path, "Path must not be null");
Assert.notNull(bufferFactory, "DataBufferFactory must not be null");
Assert.isTrue(bufferSize > 0, "'bufferSize' must be > 0");
if (options.length > 0) {
for (OpenOption option : options) {
Assert.isTrue(!(option == StandardOpenOption.APPEND || option == StandardOpenOption.WRITE),
() -> "'" + option + "' not allowed");
}
}
return readAsynchronousFileChannel(() -> AsynchronousFileChannel.open(path, options),
bufferFactory, bufferSize);
}
/**
 * Read the given {@code Resource} into a {@code Flux} of {@code DataBuffer}s.
 * <p>If the resource is a file, it is read into an
 * {@code AsynchronousFileChannel} and turned to {@code Flux} via
 * {@link #readAsynchronousFileChannel(Callable, DataBufferFactory, int)} or else
 * fall back to {@link #readByteChannel(Callable, DataBufferFactory, int)}.
 * Closes the channel when the flux is terminated.
 * @param resource the resource to read from
 * @param bufferFactory the factory to create data buffers with
 * @param bufferSize the maximum size of the data buffers
 * @return a Flux of data buffers read from the given channel
 */
public static Flux<DataBuffer> read(Resource resource, DataBufferFactory bufferFactory, int bufferSize) {
	// Delegate to the positional variant, starting at the beginning of the resource.
	return read(resource, 0, bufferFactory, bufferSize);
}
/**
 * Read the given {@code Resource} into a {@code Flux} of {@code DataBuffer}s
 * starting at the given position.
 * <p>If the resource can be resolved to a file, it is read through an
 * {@code AsynchronousFileChannel} via
 * {@link #readAsynchronousFileChannel(Callable, DataBufferFactory, int)}; otherwise
 * this method falls back on {@link #readByteChannel(Callable, DataBufferFactory, int)}.
 * Closes the channel when the flux is terminated.
 * @param resource the resource to read from
 * @param position the position to start reading from
 * @param bufferFactory the factory to create data buffers with
 * @param bufferSize the maximum size of the data buffers
 * @return a Flux of data buffers read from the given channel
 */
public static Flux<DataBuffer> read(
		Resource resource, long position, DataBufferFactory bufferFactory, int bufferSize) {

	try {
		if (resource.isFile()) {
			Path file = resource.getFilePath();
			return readAsynchronousFileChannel(
					() -> AsynchronousFileChannel.open(file, StandardOpenOption.READ),
					position, bufferFactory, bufferSize);
		}
	}
	catch (IOException ignored) {
		// Not resolvable as a file path: fall back to resource.readableChannel() below.
	}
	Flux<DataBuffer> bytes = readByteChannel(resource::readableChannel, bufferFactory, bufferSize);
	if (position == 0) {
		return bytes;
	}
	// Byte channels cannot seek, so skip the leading bytes explicitly.
	return skipUntilByteCount(bytes, position);
}
//---------------------------------------------------------------------
// Writing
//---------------------------------------------------------------------
/**
 * Write the given stream of {@link DataBuffer DataBuffers} to the given
 * {@code OutputStream}. Does <strong>not</strong> close the output stream
 * when the flux is terminated, and does <strong>not</strong>
 * {@linkplain #release(DataBuffer) release} the data buffers in the source.
 * If releasing is required, then subscribe to the returned {@code Flux}
 * with a {@link #releaseConsumer()}.
 * <p>Note that the writing process does not start until the returned
 * {@code Flux} is subscribed to.
 * @param source the stream of data buffers to be written
 * @param outputStream the output stream to write to
 * @return a Flux containing the same buffers as in {@code source}, that
 * starts the writing process when subscribed to, and that publishes any
 * writing errors and the completion signal
 */
public static Flux<DataBuffer> write(Publisher<DataBuffer> source, OutputStream outputStream) {
	Assert.notNull(source, "'source' must not be null");
	Assert.notNull(outputStream, "'outputStream' must not be null");

	// Adapt the stream to a channel and reuse the channel-based variant.
	return write(source, Channels.newChannel(outputStream));
}
/**
 * Write the given stream of {@link DataBuffer DataBuffers} to the given
 * {@code WritableByteChannel}. Does <strong>not</strong> close the channel
 * when the flux is terminated, and does <strong>not</strong>
 * {@linkplain #release(DataBuffer) release} the data buffers in the source.
 * If releasing is required, then subscribe to the returned {@code Flux}
 * with a {@link #releaseConsumer()}.
 * <p>Note that the writing process does not start until the returned
 * {@code Flux} is subscribed to.
 * @param source the stream of data buffers to be written
 * @param channel the channel to write to
 * @return a Flux containing the same buffers as in {@code source}, that
 * starts the writing process when subscribed to, and that publishes any
 * writing errors and the completion signal
 */
public static Flux<DataBuffer> write(Publisher<DataBuffer> source, WritableByteChannel channel) {
	Assert.notNull(source, "'source' must not be null");
	Assert.notNull(channel, "'channel' must not be null");

	Flux<DataBuffer> buffers = Flux.from(source);
	return Flux.create(sink -> {
		WritableByteChannelSubscriber subscriber = new WritableByteChannelSubscriber(sink, channel);
		// Register disposal handling before subscribing so cancellation always reaches the subscriber.
		sink.onDispose(subscriber);
		buffers.subscribe(subscriber);
	});
}
/**
 * Write the given stream of {@link DataBuffer DataBuffers} to the given
 * {@code AsynchronousFileChannel}. Does <strong>not</strong> close the
 * channel when the flux is terminated, and does <strong>not</strong>
 * {@linkplain #release(DataBuffer) release} the data buffers in the source.
 * If releasing is required, then subscribe to the returned {@code Flux}
 * with a {@link #releaseConsumer()}.
 * <p>Note that the writing process does not start until the returned
 * {@code Flux} is subscribed to.
 * @param source the stream of data buffers to be written
 * @param channel the channel to write to
 * @return a Flux containing the same buffers as in {@code source}, that
 * starts the writing process when subscribed to, and that publishes any
 * writing errors and the completion signal
 * @since 5.0.10
 */
public static Flux<DataBuffer> write(Publisher<DataBuffer> source, AsynchronousFileChannel channel) {
	// Delegate to the positional variant, writing from the start of the file.
	return write(source, channel, 0);
}
/**
 * Write the given stream of {@link DataBuffer DataBuffers} to the given
 * {@code AsynchronousFileChannel}. Does <strong>not</strong> close the channel
 * when the flux is terminated, and does <strong>not</strong>
 * {@linkplain #release(DataBuffer) release} the data buffers in the source.
 * If releasing is required, then subscribe to the returned {@code Flux} with a
 * {@link #releaseConsumer()}.
 * <p>Note that the writing process does not start until the returned
 * {@code Flux} is subscribed to.
 * @param source the stream of data buffers to be written
 * @param channel the channel to write to
 * @param position the file position where writing is to begin; must be non-negative
 * @return a flux containing the same buffers as in {@code source}, that
 * starts the writing process when subscribed to, and that publishes any
 * writing errors and the completion signal
 */
public static Flux<DataBuffer> write(
		Publisher<? extends DataBuffer> source, AsynchronousFileChannel channel, long position) {

	Assert.notNull(source, "'source' must not be null");
	Assert.notNull(channel, "'channel' must not be null");
	Assert.isTrue(position >= 0, "'position' must be >= 0");

	Flux<DataBuffer> buffers = Flux.from(source);
	return Flux.create(sink -> {
		WriteCompletionHandler completionHandler = new WriteCompletionHandler(sink, channel, position);
		// Register disposal handling before subscribing so cancellation always reaches the handler.
		sink.onDispose(completionHandler);
		buffers.subscribe(completionHandler);
	});
}
/**
 * Write the given stream of {@link DataBuffer DataBuffers} to the given
 * file {@link Path}. The optional {@code options} parameter specifies
 * how the file is created or opened (defaults to
 * {@link StandardOpenOption#CREATE CREATE},
 * {@link StandardOpenOption#TRUNCATE_EXISTING TRUNCATE_EXISTING}, and
 * {@link StandardOpenOption#WRITE WRITE}).
 * @param source the stream of data buffers to be written
 * @param destination the path to the file
 * @param options the options specifying how the file is opened
 * @return a {@link Mono} that indicates completion or error
 * @since 5.2
 */
public static Mono<Void> write(Publisher<DataBuffer> source, Path destination, OpenOption... options) {
	Assert.notNull(source, "Source must not be null");
	Assert.notNull(destination, "Destination must not be null");

	Set<OpenOption> openOptions = checkWriteOptions(options);
	return Mono.create(sink -> {
		try {
			AsynchronousFileChannel channel = AsynchronousFileChannel.open(destination, openOptions, null);
			// Ensure the channel is closed whether the write completes, errors, or is cancelled.
			sink.onDispose(() -> closeChannel(channel));
			write(source, channel).subscribe(
					DataBufferUtils::release,
					sink::error,
					sink::success,
					Context.of(sink.contextView()));
		}
		catch (IOException ex) {
			sink.error(ex);
		}
	});
}
/**
 * Normalize open options for a file write: default to {@code CREATE} +
 * {@code TRUNCATE_EXISTING} when none are given, reject {@code READ},
 * and always include {@code WRITE}.
 * @param options the caller-supplied options (possibly empty)
 * @return the effective set of options to open the channel with
 * @throws IllegalArgumentException if {@code READ} is among the options
 */
private static Set<OpenOption> checkWriteOptions(OpenOption[] options) {
	int count = options.length;
	Set<OpenOption> writeOptions = CollectionUtils.newHashSet(count > 0 ? count : 2);
	if (count == 0) {
		writeOptions.add(StandardOpenOption.CREATE);
		writeOptions.add(StandardOpenOption.TRUNCATE_EXISTING);
	}
	else {
		for (OpenOption option : options) {
			if (option == StandardOpenOption.READ) {
				throw new IllegalArgumentException("READ not allowed");
			}
			writeOptions.add(option);
		}
	}
	writeOptions.add(StandardOpenOption.WRITE);
	return writeOptions;
}
/**
 * Close the given channel quietly, ignoring {@code null}, already-closed
 * channels, and any {@link IOException} raised while closing.
 */
static void closeChannel(@Nullable Channel channel) {
	if (channel == null || !channel.isOpen()) {
		return;
	}
	try {
		channel.close();
	}
	catch (IOException ignored) {
		// Best-effort close: nothing useful to do with the failure here.
	}
}
/**
 * Create a new {@code Publisher<DataBuffer>} based on bytes written to a
 * {@code OutputStream}.
 * <ul>
 * <li>The parameter {@code consumer} is invoked once per subscription of the
 * returned {@code Publisher}, when the first item is
 * {@linkplain Subscription#request(long) requested}.</li>
 * <li>{@link OutputStream#write(byte[], int, int) OutputStream.write()}
 * invocations made by {@code consumer} are buffered until they exceed the
 * default chunk size of 1024, or when the stream is
 * {@linkplain OutputStream#flush() flushed} and then result in a
 * {@linkplain Subscriber#onNext(Object) published} item
 * if there is {@linkplain Subscription#request(long) demand}.</li>
 * <li>If there is <em>no demand</em>, {@code OutputStream.write()} will block
 * until there is.</li>
 * <li>If the subscription is {@linkplain Subscription#cancel() cancelled},
 * {@code OutputStream.write()} will throw a {@code IOException}.</li>
 * <li>The subscription is {@linkplain Subscriber#onComplete() completed} when
 * {@code consumer} completes.</li>
 * <li>Any exceptions thrown from {@code consumer} will be dispatched to the
 * {@linkplain Subscriber#onError(Throwable) Subscriber}.</li>
 * </ul>
 * @param consumer invoked when the first buffer is requested
 * @param bufferFactory the factory to create data buffers with
 * @param executor used to invoke the {@code consumer}
 * @return a {@code Publisher<DataBuffer>} based on bytes written by
 * {@code consumer}
 * @since 6.1
 */
public static Publisher<DataBuffer> outputStreamPublisher(
		Consumer<OutputStream> consumer, DataBufferFactory bufferFactory, Executor executor) {

	// Passing null for the chunk size selects the publisher's default (1024 bytes).
	return new OutputStreamPublisher<>(consumer::accept, new DataBufferMapper(bufferFactory), executor, null);
}
/**
 * Variant of {@link #outputStreamPublisher(Consumer, DataBufferFactory, Executor)}
 * providing control over the chunk sizes to be produced by the publisher.
 * @param consumer invoked when the first buffer is requested
 * @param bufferFactory the factory to create data buffers with
 * @param executor used to invoke the {@code consumer}
 * @param chunkSize the maximum size of produced data buffers
 * @since 6.1
 */
public static Publisher<DataBuffer> outputStreamPublisher(
		Consumer<OutputStream> consumer, DataBufferFactory bufferFactory, Executor executor, int chunkSize) {

	return new OutputStreamPublisher<>(
			consumer::accept, new DataBufferMapper(bufferFactory), executor, chunkSize);
}
/**
 * Subscribe to given {@link Publisher} of {@code DataBuffer}s, and return an
 * {@link InputStream} to consume the byte content with.
 * <p>Byte buffers are stored in a queue. The {@code demand} constructor value
 * determines the number of buffers requested initially. When storage falls
 * below a {@code (demand - (demand >> 2))} limit, a request is made to refill
 * the queue.
 * <p>The {@code InputStream} terminates after an onError or onComplete signal,
 * and stored buffers are read. If the {@code InputStream} is closed,
 * the {@link Flow.Subscription} is cancelled, and stored buffers released.
 * @param publisher the source of {@code DataBuffer}s
 * @param demand the number of buffers to request initially, and buffer
 * internally on an ongoing basis.
 * @return an {@link InputStream} backed by the {@link Publisher}
 */
public static <T extends DataBuffer> InputStream subscriberInputStream(Publisher<T> publisher, int demand) {
	Assert.notNull(publisher, "Publisher must not be null");
	// Fixed: the failure message previously referred to "maxBufferCount",
	// a stale name that does not match the actual parameter ("demand").
	Assert.isTrue(demand > 0, "'demand' must be > 0");
	SubscriberInputStream subscriber = new SubscriberInputStream(demand);
	publisher.subscribe(subscriber);
	return subscriber;
}
//---------------------------------------------------------------------
// Various
//---------------------------------------------------------------------
/**
 * Relay buffers from the given {@link Publisher} until the total
 * {@linkplain DataBuffer#readableByteCount() byte count} reaches
 * the given maximum byte count, or until the publisher is complete.
 * @param publisher the publisher to filter
 * @param maxByteCount the maximum byte count
 * @return a flux whose maximum byte count is {@code maxByteCount}
 */
@SuppressWarnings("unchecked")
public static <T extends DataBuffer> Flux<T> takeUntilByteCount(Publisher<T> publisher, long maxByteCount) {
	Assert.notNull(publisher, "Publisher must not be null");
	Assert.isTrue(maxByteCount >= 0, "'maxByteCount' must be >= 0");
	// defer() gives each subscriber its own countdown state.
	return Flux.defer(() -> {
		AtomicLong countDown = new AtomicLong(maxByteCount);
		return Flux.from(publisher)
				.map(buffer -> {
					// remainder < 0 means this buffer straddles the byte limit.
					long remainder = countDown.addAndGet(-buffer.readableByteCount());
					if (remainder < 0) {
						// Keep only the bytes that still fit within the limit
						// (index = bytes of this buffer that are within budget),
						// then release the trailing remainder of the original buffer.
						int index = buffer.readableByteCount() + (int) remainder;
						DataBuffer split = buffer.split(index);
						release(buffer);
						return (T)split;
					}
					else {
						return buffer;
					}
				})
				// Stop after the buffer that exhausted the budget is emitted.
				.takeUntil(buffer -> countDown.get() <= 0);
	});
	// No doOnDiscard as operators used do not cache (and drop) buffers
}
/**
 * Skip buffers from the given {@link Publisher} until the total
 * {@linkplain DataBuffer#readableByteCount() byte count} reaches
 * the given maximum byte count, or until the publisher is complete.
 * @param publisher the publisher to filter
 * @param maxByteCount the maximum byte count
 * @return a flux with the remaining part of the given publisher
 */
public static <T extends DataBuffer> Flux<T> skipUntilByteCount(Publisher<T> publisher, long maxByteCount) {
	Assert.notNull(publisher, "Publisher must not be null");
	Assert.isTrue(maxByteCount >= 0, "'maxByteCount' must be >= 0");
	// defer() gives each subscriber its own countdown state.
	return Flux.defer(() -> {
		AtomicLong countDown = new AtomicLong(maxByteCount);
		return Flux.from(publisher)
				// Drop whole buffers while they still fall entirely within the skipped range.
				.skipUntil(buffer -> {
					long remainder = countDown.addAndGet(-buffer.readableByteCount());
					return remainder < 0;
				})
				.map(buffer -> {
					long remainder = countDown.get();
					if (remainder < 0) {
						// First buffer past the threshold straddles the boundary:
						// reset the countdown so later buffers pass through untouched,
						// then split off and release the leading bytes that were
						// still within the skipped range.
						countDown.set(0);
						int start = buffer.readableByteCount() + (int)remainder;
						DataBuffer split = buffer.split(start);
						release(split);
						return buffer;
					}
					else {
						return buffer;
					}
				});
	}).doOnDiscard(DataBuffer.class, DataBufferUtils::release);
}
/**
 * Retain the given data buffer, if it is a {@link PooledDataBuffer};
 * other buffer types are returned unchanged.
 * @param dataBuffer the data buffer to retain
 * @return the retained buffer
 */
@SuppressWarnings("unchecked")
public static <T extends DataBuffer> T retain(T dataBuffer) {
	return (dataBuffer instanceof PooledDataBuffer pooled ? (T) pooled.retain() : dataBuffer);
}
/**
 * Associate the given hint with the data buffer if it is a pooled buffer
 * and supports leak tracking; other buffer types are returned unchanged.
 * @param dataBuffer the data buffer to attach the hint to
 * @param hint the hint to attach
 * @return the input buffer
 * @since 5.3.2
 */
@SuppressWarnings("unchecked")
public static <T extends DataBuffer> T touch(T dataBuffer, Object hint) {
	return (dataBuffer instanceof TouchableDataBuffer touchable ? (T) touchable.touch(hint) : dataBuffer);
}
/**
 * Release the given data buffer. If it is a {@link PooledDataBuffer} and
 * has been {@linkplain PooledDataBuffer#isAllocated() allocated}, this
 * method will call {@link PooledDataBuffer#release()}. If it is a
 * {@link CloseableDataBuffer}, this method will call
 * {@link CloseableDataBuffer#close()}.
 * @param dataBuffer the data buffer to release
 * @return {@code true} if the buffer was released; {@code false} otherwise.
 */
public static boolean release(@Nullable DataBuffer dataBuffer) {
	if (dataBuffer instanceof PooledDataBuffer pooledDataBuffer) {
		// Releasing an unallocated pooled buffer is a no-op.
		if (!pooledDataBuffer.isAllocated()) {
			return false;
		}
		try {
			return pooledDataBuffer.release();
		}
		catch (IllegalStateException ex) {
			if (logger.isDebugEnabled()) {
				logger.debug("Failed to release PooledDataBuffer: " + dataBuffer, ex);
			}
			return false;
		}
	}
	if (dataBuffer instanceof CloseableDataBuffer closeableDataBuffer) {
		try {
			closeableDataBuffer.close();
			return true;
		}
		catch (IllegalStateException ex) {
			if (logger.isDebugEnabled()) {
				// Fixed: message now uses the same "<type>: <buffer>" form as the
				// PooledDataBuffer branch (a ':' separator was missing here).
				logger.debug("Failed to release CloseableDataBuffer: " + dataBuffer, ex);
			}
			return false;
		}
	}
	// null or a buffer type with no release semantics.
	return false;
}
/**
 * Return a consumer that calls {@link #release(DataBuffer)} on all
 * passed data buffers.
 */
public static Consumer<DataBuffer> releaseConsumer() {
	// Hands out a shared consumer constant; presumably declared at class level
	// (declaration not in view) and stateless, so sharing is safe — TODO confirm.
	return RELEASE_CONSUMER;
}
/**
 * Return a new {@code DataBuffer} composed of joining together the given
 * {@code dataBuffers} elements. Depending on the {@link DataBuffer} type,
 * the returned buffer may be a single buffer containing all data of the
 * provided buffers, or it may be a zero-copy, composite with references to
 * the given buffers.
 * <p>If {@code dataBuffers} produces an error or if there is a cancel
 * signal, then all accumulated buffers will be
 * {@linkplain #release(DataBuffer) released}.
 * <p>Note that the given data buffers do <strong>not</strong> have to be
 * released. They will be released as part of the returned composite.
 * @param dataBuffers the data buffers that are to be composed
 * @return a buffer that is composed of the {@code dataBuffers} argument
 * @since 5.0.3
 */
public static Mono<DataBuffer> join(Publisher<? extends DataBuffer> dataBuffers) {
	// -1 means no limit on the number of bytes to aggregate.
	return join(dataBuffers, -1);
}
/**
 * Variant of {@link #join(Publisher)} that behaves the same way up until
 * the specified max number of bytes to buffer. Once the limit is exceeded,
 * {@link DataBufferLimitException} is raised.
 * @param buffers the data buffers that are to be composed
 * @param maxByteCount the max number of bytes to buffer, or -1 for unlimited
 * @return a buffer with the aggregated content, possibly an empty Mono if
 * the max number of bytes to buffer is exceeded.
 * @throws DataBufferLimitException if maxByteCount is exceeded
 * @since 5.1.11
 */
@SuppressWarnings({"rawtypes", "unchecked"})
public static Mono<DataBuffer> join(Publisher<? extends DataBuffer> buffers, int maxByteCount) {
	Assert.notNull(buffers, "'buffers' must not be null");

	// A Mono emits at most one buffer, so there is nothing to join.
	if (buffers instanceof Mono mono) {
		return mono;
	}
	return Flux.from(buffers)
			// LimitedDataBufferList enforces maxByteCount while accumulating.
			.collect(() -> new LimitedDataBufferList(maxByteCount), LimitedDataBufferList::add)
			.filter(bufferList -> !bufferList.isEmpty())
			.map(bufferList -> bufferList.get(0).factory().join(bufferList))
			.doOnDiscard(DataBuffer.class, DataBufferUtils::release);
}
/**
 * Return a {@link Matcher} for the given delimiter.
 * The matcher can be used to find the delimiter in a stream of data buffers.
 * @param delimiter the delimiter bytes to find
 * @return the matcher
 * @since 5.2
 */
public static Matcher matcher(byte[] delimiter) {
	// Single-delimiter case: delegate straight to the factory.
	return createMatcher(delimiter);
}
/**
 * Return a {@link Matcher} for the given delimiters.
 * The matcher can be used to find the delimiters in a stream of data buffers.
 * @param delimiters the delimiters bytes to find
 * @return the matcher
 * @since 5.2
 */
public static Matcher matcher(byte[]... delimiters) {
	Assert.isTrue(delimiters.length > 0, "Delimiters must not be empty");
	if (delimiters.length == 1) {
		// Single delimiter: no need for the composite wrapper.
		return createMatcher(delimiters[0]);
	}
	return new CompositeMatcher(delimiters);
}
/**
 * Create a single-delimiter matcher, choosing the implementation by
 * delimiter length: 1 byte (with a shared instance for '\n'), 2 bytes,
 * or the general Knuth-Morris-Pratt matcher.
 */
private static NestedMatcher createMatcher(byte[] delimiter) {
	int length = delimiter.length;
	Assert.isTrue(length > 0, "Delimiter must not be empty");
	if (length == 1) {
		// Byte 10 is '\n': reuse the shared newline matcher.
		return (delimiter[0] == 10 ? SingleByteMatcher.NEWLINE_MATCHER : new SingleByteMatcher(delimiter));
	}
	return (length == 2 ? new TwoByteMatcher(delimiter) : new KnuthMorrisPrattMatcher(delimiter));
}
/**
* Contract to find delimiter(s) against one or more data buffers that can
* be passed one at a time to the {@link #match(DataBuffer)} method.
*
* @since 5.2
* @see #match(DataBuffer)
*/
public | DataBufferUtils |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/system/MockApplicationPid.java | {
"start": 788,
"end": 941
} | class ____ {
private MockApplicationPid() {
}
public static ApplicationPid of(long value) {
return new ApplicationPid(value);
}
}
| MockApplicationPid |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/arm-java/org/apache/hadoop/ipc/protobuf/TestProtosLegacy.java | {
"start": 47255,
"end": 58170
} | class ____ extends
com.google.protobuf.GeneratedMessage
implements EchoResponseProtoOrBuilder {
// Use EchoResponseProto.newBuilder() to construct.
private EchoResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private EchoResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final EchoResponseProto defaultInstance;
public static EchoResponseProto getDefaultInstance() {
return defaultInstance;
}
public EchoResponseProto getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private EchoResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
message_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.Builder.class);
}
public static com.google.protobuf.Parser<EchoResponseProto> PARSER =
new com.google.protobuf.AbstractParser<EchoResponseProto>() {
public EchoResponseProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new EchoResponseProto(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<EchoResponseProto> getParserForType() {
return PARSER;
}
private int bitField0_;
// required string message = 1;
public static final int MESSAGE_FIELD_NUMBER = 1;
private java.lang.Object message_;
/**
* <code>required string message = 1;</code>
*/
public boolean hasMessage() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string message = 1;</code>
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
message_ = s;
}
return s;
}
}
/**
* <code>required string message = 1;</code>
*/
public com.google.protobuf.ByteString
getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
message_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private void initFields() {
message_ = "";
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasMessage()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getMessageBytes());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getMessageBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) obj;
boolean result = true;
result = result && (hasMessage() == other.hasMessage());
if (hasMessage()) {
result = result && getMessage()
.equals(other.getMessage());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasMessage()) {
hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getMessage().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code hadoop.common.EchoResponseProto}
*/
public static final | EchoResponseProto |
java | elastic__elasticsearch | libs/x-content/src/main/java/org/elasticsearch/xcontent/support/filtering/FilterPath.java | {
"start": 5147,
"end": 8541
} | class ____ {
private final Map<String, BuildNode> children;
private final boolean isFinalNode;
BuildNode(boolean isFinalNode) {
children = new HashMap<>();
this.isFinalNode = isFinalNode;
}
}
private final BuildNode root = new BuildNode(false);
void insert(String filter) {
insertNode(filter, root, 0);
}
FilterPath build() {
return buildPath("", root);
}
static void insertNode(String filter, BuildNode node, int depth) {
if (depth > MAX_TREE_DEPTH) {
throw new IllegalArgumentException(
"Filter exceeds maximum depth at [" + (filter.length() > 100 ? filter.substring(0, 100) : filter) + "]"
);
}
int end = filter.length();
int splitPosition = -1;
boolean findEscapes = false;
for (int i = 0; i < end; i++) {
char c = filter.charAt(i);
if (c == '.') {
splitPosition = i;
break;
} else if ((c == '\\') && (i + 1 < end) && (filter.charAt(i + 1) == '.')) {
++i;
findEscapes = true;
}
}
if (splitPosition > 0) {
String field = findEscapes ? filter.substring(0, splitPosition).replace("\\.", ".") : filter.substring(0, splitPosition);
BuildNode child = node.children.computeIfAbsent(field, f -> new BuildNode(false));
if (false == child.isFinalNode) {
insertNode(filter.substring(splitPosition + 1), child, depth + 1);
}
} else {
String field = findEscapes ? filter.replace("\\.", ".") : filter;
node.children.put(field, new BuildNode(true));
}
}
static FilterPath buildPath(String segment, BuildNode node) {
Map<String, FilterPath> termsChildren = new HashMap<>();
List<FilterPath> wildcardChildren = new ArrayList<>();
for (Map.Entry<String, BuildNode> entry : node.children.entrySet()) {
String childName = entry.getKey();
BuildNode childNode = entry.getValue();
FilterPath childFilterPath = buildPath(childName, childNode);
if (childName.contains(WILDCARD)) {
wildcardChildren.add(childFilterPath);
} else {
termsChildren.put(childName, childFilterPath);
}
}
return new FilterPath(segment, node.isFinalNode, termsChildren, wildcardChildren.toArray(new FilterPath[0]));
}
}
public static FilterPath[] compile(Set<String> filters) {
if (filters == null || filters.isEmpty()) {
return null;
}
FilterPathBuilder builder = new FilterPathBuilder();
for (String filter : filters) {
if (filter != null) {
filter = filter.trim();
if (filter.length() > 0) {
builder.insert(filter);
}
}
}
FilterPath filterPath = builder.build();
return Collections.singletonList(filterPath).toArray(new FilterPath[0]);
}
}
| BuildNode |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/dynamic/ExecutableCommandLookupStrategySupport.java | {
"start": 1570,
"end": 2789
} | class ____ implements CommandFactoryResolver {
final AnnotationCommandSegmentFactory commandSegmentFactory = new AnnotationCommandSegmentFactory();
final AnnotationRedisCodecResolver codecResolver;
DefaultCommandFactoryResolver() {
codecResolver = new AnnotationRedisCodecResolver(redisCodecs);
}
@Override
public CommandFactory resolveRedisCommandFactory(CommandMethod commandMethod, RedisCommandsMetadata commandsMetadata) {
RedisCodec<?, ?> codec = codecResolver.resolve(commandMethod);
if (codec == null) {
throw new CommandCreationException(commandMethod, "Cannot resolve RedisCodec");
}
CodecAwareOutputFactoryResolver outputFactoryResolver = new CodecAwareOutputFactoryResolver(
commandOutputFactoryResolver, codec);
CommandSegments commandSegments = commandSegmentFactory.createCommandSegments(commandMethod);
commandMethodVerifier.validate(commandSegments, commandMethod);
return new CommandSegmentCommandFactory(commandSegments, commandMethod, codec, outputFactoryResolver);
}
}
}
| DefaultCommandFactoryResolver |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/MissingSuperCallTest.java | {
"start": 15755,
"end": 16351
} | class ____ extends Super {
@Override
// BUG: Diagnostic contains:
// This method overrides Super#doIt, which is annotated with @CallSuper,
// but does not call the super method
public void doIt() {
Runnable r = () -> super.doIt();
}
}
""")
.doTest();
}
@Test
public void methodReferences() {
compilationHelper
.addSourceLines(
"Super.java",
"""
import android.support.annotation.CallSuper;
public | Sub |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableAny.java | {
"start": 1420,
"end": 3273
} | class ____<T> implements Observer<T>, Disposable {
final Observer<? super Boolean> downstream;
final Predicate<? super T> predicate;
Disposable upstream;
boolean done;
AnyObserver(Observer<? super Boolean> actual, Predicate<? super T> predicate) {
this.downstream = actual;
this.predicate = predicate;
}
@Override
public void onSubscribe(Disposable d) {
if (DisposableHelper.validate(this.upstream, d)) {
this.upstream = d;
downstream.onSubscribe(this);
}
}
@Override
public void onNext(T t) {
if (done) {
return;
}
boolean b;
try {
b = predicate.test(t);
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
upstream.dispose();
onError(e);
return;
}
if (b) {
done = true;
upstream.dispose();
downstream.onNext(true);
downstream.onComplete();
}
}
@Override
public void onError(Throwable t) {
if (done) {
RxJavaPlugins.onError(t);
return;
}
done = true;
downstream.onError(t);
}
@Override
public void onComplete() {
if (!done) {
done = true;
downstream.onNext(false);
downstream.onComplete();
}
}
@Override
public void dispose() {
upstream.dispose();
}
@Override
public boolean isDisposed() {
return upstream.isDisposed();
}
}
}
| AnyObserver |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/Web3jEndpointBuilderFactory.java | {
"start": 39556,
"end": 47227
} | interface ____
extends
Web3jEndpointConsumerBuilder,
Web3jEndpointProducerBuilder {
default AdvancedWeb3jEndpointBuilder advanced() {
return (AdvancedWeb3jEndpointBuilder) this;
}
/**
* Contract address or a comma separated list of addresses.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param addresses the value to set
* @return the dsl builder
*/
default Web3jEndpointBuilder addresses(String addresses) {
doSetProperty("addresses", addresses);
return this;
}
/**
* The address the transaction is send from.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param fromAddress the value to set
* @return the dsl builder
*/
default Web3jEndpointBuilder fromAddress(String fromAddress) {
doSetProperty("fromAddress", fromAddress);
return this;
}
/**
* The block number, or the string latest for the last mined block or
* pending, earliest for not yet mined transactions.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: latest
* Group: common
*
* @param fromBlock the value to set
* @return the dsl builder
*/
default Web3jEndpointBuilder fromBlock(String fromBlock) {
doSetProperty("fromBlock", fromBlock);
return this;
}
/**
* If true it returns the full transaction objects, if false only the
* hashes of the transactions.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param fullTransactionObjects the value to set
* @return the dsl builder
*/
default Web3jEndpointBuilder fullTransactionObjects(boolean fullTransactionObjects) {
doSetProperty("fullTransactionObjects", fullTransactionObjects);
return this;
}
/**
* If true it returns the full transaction objects, if false only the
* hashes of the transactions.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param fullTransactionObjects the value to set
* @return the dsl builder
*/
default Web3jEndpointBuilder fullTransactionObjects(String fullTransactionObjects) {
doSetProperty("fullTransactionObjects", fullTransactionObjects);
return this;
}
/**
* The maximum gas allowed in this block.
*
* The option is a: <code>java.math.BigInteger</code> type.
*
* Group: common
*
* @param gasLimit the value to set
* @return the dsl builder
*/
default Web3jEndpointBuilder gasLimit(java.math.BigInteger gasLimit) {
doSetProperty("gasLimit", gasLimit);
return this;
}
/**
* The maximum gas allowed in this block.
*
* The option will be converted to a <code>java.math.BigInteger</code>
* type.
*
* Group: common
*
* @param gasLimit the value to set
* @return the dsl builder
*/
default Web3jEndpointBuilder gasLimit(String gasLimit) {
doSetProperty("gasLimit", gasLimit);
return this;
}
/**
* A comma separated transaction privateFor nodes with public keys in a
* Quorum network.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param privateFor the value to set
* @return the dsl builder
*/
default Web3jEndpointBuilder privateFor(String privateFor) {
doSetProperty("privateFor", privateFor);
return this;
}
/**
* If true, this will support Quorum API.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param quorumAPI the value to set
* @return the dsl builder
*/
default Web3jEndpointBuilder quorumAPI(boolean quorumAPI) {
doSetProperty("quorumAPI", quorumAPI);
return this;
}
/**
* If true, this will support Quorum API.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param quorumAPI the value to set
* @return the dsl builder
*/
default Web3jEndpointBuilder quorumAPI(String quorumAPI) {
doSetProperty("quorumAPI", quorumAPI);
return this;
}
/**
* The address the transaction is directed to.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param toAddress the value to set
* @return the dsl builder
*/
default Web3jEndpointBuilder toAddress(String toAddress) {
doSetProperty("toAddress", toAddress);
return this;
}
/**
* The block number, or the string latest for the last mined block or
* pending, earliest for not yet mined transactions.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: latest
* Group: common
*
* @param toBlock the value to set
* @return the dsl builder
*/
default Web3jEndpointBuilder toBlock(String toBlock) {
doSetProperty("toBlock", toBlock);
return this;
}
/**
* Topics are order-dependent. Each topic can also be a list of topics.
* Specify multiple topics separated by comma.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param topics the value to set
* @return the dsl builder
*/
default Web3jEndpointBuilder topics(String topics) {
doSetProperty("topics", topics);
return this;
}
/**
* The preconfigured Web3j object.
*
* The option is a: <code>org.web3j.protocol.Web3j</code> type.
*
* Group: common
*
* @param web3j the value to set
* @return the dsl builder
*/
default Web3jEndpointBuilder web3j(org.web3j.protocol.Web3j web3j) {
doSetProperty("web3j", web3j);
return this;
}
/**
* The preconfigured Web3j object.
*
* The option will be converted to a
* <code>org.web3j.protocol.Web3j</code> type.
*
* Group: common
*
* @param web3j the value to set
* @return the dsl builder
*/
default Web3jEndpointBuilder web3j(String web3j) {
doSetProperty("web3j", web3j);
return this;
}
}
/**
* Advanced builder for endpoint for the Web3j Ethereum Blockchain component.
*/
public | Web3jEndpointBuilder |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraOutputFormat.java | {
"start": 1692,
"end": 2388
} | class ____ extends FileOutputFormat<Text,Text> {
private static final Logger LOG =
LoggerFactory.getLogger(TeraOutputFormat.class);
/**
* Set the requirement for a final sync before the stream is closed.
*/
static void setFinalSync(JobContext job, boolean newValue) {
job.getConfiguration().setBoolean(
TeraSortConfigKeys.FINAL_SYNC_ATTRIBUTE.key(), newValue);
}
/**
* Does the user want a final sync at close?
*/
public static boolean getFinalSync(JobContext job) {
return job.getConfiguration().getBoolean(
TeraSortConfigKeys.FINAL_SYNC_ATTRIBUTE.key(),
TeraSortConfigKeys.DEFAULT_FINAL_SYNC_ATTRIBUTE);
}
static | TeraOutputFormat |
java | spring-projects__spring-boot | core/spring-boot-testcontainers/src/dockerTest/java/org/springframework/boot/testcontainers/service/connection/ServiceConnectionAutoConfigurationTests.java | {
"start": 6568,
"end": 6790
} | class ____ {
@Bean
@ServiceConnection
PostgreSQLContainer postgresContainer() {
return TestImage.container(PostgreSQLContainer.class);
}
}
@Configuration(proxyBeanMethods = false)
static | ContainerConfiguration |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/BigStringFieldTest.java | {
"start": 260,
"end": 2232
} | class ____ extends TestCase {
public void test_bigFieldString() throws Exception {
Model model = new Model();
model.f0 = random(1024);
model.f1 = random(1024);
model.f2 = random(1024);
model.f3 = random(1024);
model.f4 = random(1024);
String text = JSON.toJSONString(model);
Model model2 = JSON.parseObject(text, Model.class);
Assert.assertEquals(model2.f0, model.f0);
Assert.assertEquals(model2.f1, model.f1);
Assert.assertEquals(model2.f2, model.f2);
Assert.assertEquals(model2.f3, model.f3);
Assert.assertEquals(model2.f4, model.f4);
}
public void test_list() throws Exception {
List<Model> list = new ArrayList<Model>();
for (int i = 0; i < 1000; ++i) {
Model model = new Model();
model.f0 = random(64);
model.f1 = random(64);
model.f2 = random(64);
model.f3 = random(64);
model.f4 = random(64);
list.add(model);
}
String text = JSON.toJSONString(list);
List<Model> list2 = JSON.parseObject(text, new TypeReference<List<Model>>() {});
Assert.assertEquals(list.size(), list2.size());
for (int i = 0; i < 1000; ++i) {
Assert.assertEquals(list.get(i).f0, list2.get(i).f0);
Assert.assertEquals(list.get(i).f1, list2.get(i).f1);
Assert.assertEquals(list.get(i).f2, list2.get(i).f2);
Assert.assertEquals(list.get(i).f3, list2.get(i).f3);
Assert.assertEquals(list.get(i).f4, list2.get(i).f4);
}
}
public String random(int count) {
Random random = new Random();
char[] chars = new char[count];
for (int i = 0; i < count; ++i) {
chars[i] = (char) random.nextInt();
}
return new String(chars);
}
public static | BigStringFieldTest |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/TestContextAnnotationUtils.java | {
"start": 8224,
"end": 9100
} | class ____ which to search for annotations (never {@code null})
* @param annotationType the annotation type to find (never {@code null})
* @return the set of all merged repeatable annotations found, or an empty set
* if none were found
* @see AnnotatedElementUtils#getMergedRepeatableAnnotations(java.lang.reflect.AnnotatedElement, Class)
* @see #searchEnclosingClass(Class)
*/
public static <T extends Annotation> Set<T> getMergedRepeatableAnnotations(
Class<?> clazz, Class<T> annotationType) {
// Present (via @Inherited semantics), directly present, or meta-present?
Set<T> mergedAnnotations = MergedAnnotations.from(clazz, SearchStrategy.INHERITED_ANNOTATIONS)
.stream(annotationType)
.collect(MergedAnnotationCollectors.toAnnotationSet());
if (!mergedAnnotations.isEmpty()) {
return mergedAnnotations;
}
// Declared on an enclosing | on |
java | google__auto | value/src/test/java/com/google/auto/value/processor/ExtensionTest.java | {
"start": 37946,
"end": 39407
} | class ____ implements Parent<String> {",
"}");
ContextChecker checker =
context -> {
assertThat(context.builder()).isEmpty();
Map<String, TypeMirror> propertyTypes = context.propertyTypes();
assertThat(propertyTypes.keySet()).containsExactly("thing", "list");
TypeMirror thingType = propertyTypes.get("thing");
assertThat(thingType).isNotNull();
assertThat(thingType.getKind()).isEqualTo(TypeKind.DECLARED);
assertThat(MoreTypes.asTypeElement(thingType).getQualifiedName().toString())
.isEqualTo("java.lang.String");
TypeMirror listType = propertyTypes.get("list");
assertThat(listType).isNotNull();
assertThat(listType.toString()).isEqualTo("java.util.List<java.lang.String>");
};
ContextCheckingExtension extension = new ContextCheckingExtension(checker);
Compilation compilation =
javac()
.withProcessors(new AutoValueProcessor(ImmutableList.of(extension)))
.compile(autoValueClass, parent);
assertThat(compilation).succeededWithoutWarnings();
}
@Test
public void finalAutoValueClassName() {
JavaFileObject autoValueClass =
JavaFileObjects.forSourceLines(
"foo.bar.Baz",
"package foo.bar;",
"",
"import com.google.auto.value.AutoValue;",
"",
"@AutoValue",
"abstract | Baz |
java | apache__camel | components/camel-sql/src/test/java/org/apache/camel/component/sql/SqlProducerInsertNullTest.java | {
"start": 1529,
"end": 3502
} | class ____ extends CamelTestSupport {
private EmbeddedDatabase db;
@Override
public void doPreSetup() throws Exception {
db = new EmbeddedDatabaseBuilder()
.setName(getClass().getSimpleName())
.setType(EmbeddedDatabaseType.H2)
.addScript("sql/createAndPopulateDatabase2.sql").build();
}
@Override
public void doPostTearDown() throws Exception {
if (db != null) {
db.shutdown();
}
}
@Test
public void testInsertNull() throws Exception {
Map<String, Object> map = new HashMap<>();
map.put("id", 4);
map.put("project", "Foo");
map.put("lic", "ASF");
map.put("description", null);
template.sendBody("direct:insert", map);
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
template.sendBody("direct:start", "Foo");
mock.assertIsSatisfied();
List<?> received = assertIsInstanceOf(List.class, mock.getReceivedExchanges().get(0).getIn().getBody());
assertEquals(1, received.size());
Map<?, ?> row = assertIsInstanceOf(Map.class, received.get(0));
assertEquals("Foo", row.get("project"));
assertEquals("ASF", row.get("license"));
assertNull(row.get("description"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
getContext().getComponent("sql", SqlComponent.class).setDataSource(db);
from("direct:insert")
.to("sql:insert into projects (id, project, license, description) values (:#id, :#project, :#lic, :#description)");
from("direct:start")
.to("sql:select * from projects where project = #")
.to("mock:result");
}
};
}
}
| SqlProducerInsertNullTest |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/support/JacksonHandlerInstantiator.java | {
"start": 2415,
"end": 5127
} | class ____ extends HandlerInstantiator {
private final AutowireCapableBeanFactory beanFactory;
/**
* Create a new {@code JacksonHandlerInstantiator} for the given BeanFactory.
* @param beanFactory the target BeanFactory
*/
public JacksonHandlerInstantiator(AutowireCapableBeanFactory beanFactory) {
Assert.notNull(beanFactory, "BeanFactory must not be null");
this.beanFactory = beanFactory;
}
@Override
@Nullable
public ValueDeserializer<?> deserializerInstance(DeserializationConfig config, Annotated annotated, Class<?> deserClass) {
return (ValueDeserializer<?>) this.beanFactory.createBean(deserClass);
}
@Override
public KeyDeserializer keyDeserializerInstance(DeserializationConfig config, Annotated annotated, Class<?> keyDeserClass) {
return (KeyDeserializer) this.beanFactory.createBean(keyDeserClass);
}
@Override
public ValueSerializer<?> serializerInstance(SerializationConfig config, Annotated annotated, Class<?> serClass) {
return (ValueSerializer<?>) this.beanFactory.createBean(serClass);
}
@Override
public TypeResolverBuilder<?> typeResolverBuilderInstance(MapperConfig<?> config, Annotated annotated, Class<?> builderClass) {
return (TypeResolverBuilder<?>) this.beanFactory.createBean(builderClass);
}
@Override
public TypeIdResolver typeIdResolverInstance(MapperConfig<?> config, Annotated annotated, Class<?> resolverClass) {
return (TypeIdResolver) this.beanFactory.createBean(resolverClass);
}
@Override
public ValueInstantiator valueInstantiatorInstance(MapperConfig<?> config, Annotated annotated, Class<?> implClass) {
return (ValueInstantiator) this.beanFactory.createBean(implClass);
}
@Override
public ObjectIdGenerator<?> objectIdGeneratorInstance(MapperConfig<?> config, Annotated annotated, Class<?> implClass) {
return (ObjectIdGenerator<?>) this.beanFactory.createBean(implClass);
}
@Override
public ObjectIdResolver resolverIdGeneratorInstance(MapperConfig<?> config, Annotated annotated, Class<?> implClass) {
return (ObjectIdResolver) this.beanFactory.createBean(implClass);
}
@Override
public PropertyNamingStrategy namingStrategyInstance(MapperConfig<?> config, Annotated annotated, Class<?> implClass) {
return (PropertyNamingStrategy) this.beanFactory.createBean(implClass);
}
@Override
public Converter<?, ?> converterInstance(MapperConfig<?> config, Annotated annotated, Class<?> implClass) {
return (Converter<?, ?>) this.beanFactory.createBean(implClass);
}
@Override
public VirtualBeanPropertyWriter virtualPropertyWriterInstance(MapperConfig<?> config, Class<?> implClass) {
return (VirtualBeanPropertyWriter) this.beanFactory.createBean(implClass);
}
}
| JacksonHandlerInstantiator |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/module/TestTypeModifierNameResolution.java | {
"start": 1656,
"end": 2434
} | interface ____ { }
// Expect that the TypeModifier kicks in when the type id is written.
@Test
public void testTypeModiferNameResolution() throws Exception
{
final ObjectMapper mapper = jsonMapperBuilder()
.typeFactory(defaultTypeFactory().withModifier(new CustomTypeModifier()))
.addMixIn(MyType.class, Mixin.class)
.build();
MyType obj = new MyTypeImpl();
obj.setData("something");
String s = mapper.writer().writeValueAsString(obj);
assertNotNull(s);
/*
final String EXP = "{\"TestTypeModifierNameResolution$MyType\":";
if (!s.startsWith(EXP)) {
fail("Should start with ["+EXP+"], does not ["+s+"]");
}
*/
}
}
| Mixin |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/JoinedInheritanceTreatQueryTest.java | {
"start": 9843,
"end": 10207
} | class ____ extends ProductOwner {
@ManyToOne
private Description description;
public ProductOwner1() {
}
public ProductOwner1(Description description) {
this.description = description;
}
public Description getDescription() {
return description;
}
}
@SuppressWarnings("unused")
@Entity( name = "ProductOwner2" )
public static | ProductOwner1 |
java | processing__processing4 | java/src/processing/mode/java/runner/Runner.java | {
"start": 28093,
"end": 39031
} | interface ____'s listening for errors
* @return true if the error was purtified, false otherwise
*/
public static boolean handleCommonErrors(final String exceptionClass,
final String message,
final RunnerListener listener,
final PrintStream err) {
if (exceptionClass.equals("java.lang.OutOfMemoryError")) {
if (message.contains("exceeds VM budget")) {
// TODO this is a kludge for Android, since there is no memory preference
listener.statusError("OutOfMemoryError: This code attempts to use more memory than available.");
err.println("An OutOfMemoryError means that your code is either using up too much memory");
err.println("because of a bug (e.g. creating an array that's too large, or unintentionally");
err.println("loading thousands of images), or simply that it's trying to use more memory");
err.println("than what is supported by the current device.");
} else {
listener.statusError("OutOfMemoryError: You may need to increase the memory setting in Preferences.");
err.println("An OutOfMemoryError means that your code is either using up too much memory");
err.println("because of a bug (e.g. creating an array that's too large, or unintentionally");
err.println("loading thousands of images), or that your sketch may need more memory to run.");
err.println("If your sketch uses a lot of memory (for instance if it loads a lot of data files)");
err.println("you can increase the memory available to your sketch using the Preferences window.");
}
} else if (exceptionClass.equals("java.lang.UnsatisfiedLinkError")) {
err.println("A library used by this sketch relies on native code that is not available.");
err.println(message);
} else if (exceptionClass.equals("java.lang.StackOverflowError")) {
listener.statusError("StackOverflowError: This sketch is attempting too much recursion.");
err.println("A StackOverflowError means that you have a bug that's causing a function");
err.println("to be called recursively (it's calling itself and going in circles),");
err.println("or you're intentionally calling a recursive function too much,");
err.println("and your code should be rewritten in a more efficient manner.");
} else if (exceptionClass.equals("java.lang.UnsupportedClassVersionError")) {
int javaVersion = Runtime.version().feature();
listener.statusError("UnsupportedClassVersionError: A library is using code compiled with an unsupported version of Java.");
err.println("This version of Processing only supports libraries and JAR files compiled for Java " + javaVersion + " or earlier.");
err.println("A library used by this sketch was compiled for Java " + (javaVersion + 1) + " or later, ");
err.println("and needs to be recompiled to be compatible with Java " + javaVersion + ".");
} else if (exceptionClass.equals("java.lang.NoSuchMethodError") ||
exceptionClass.equals("java.lang.NoSuchFieldError")) {
listener.statusError(exceptionClass.substring(10) + ": " +
"You may be using a library that's incompatible " +
"with this version of Processing.");
} else {
return false;
}
return true;
}
// TODO This may be called more than one time per error in the VM,
// presumably because exceptions may be wrapped inside others,
// and this will fire for both.
protected void reportException(String message, ObjectReference or, ThreadReference thread) {
listener.statusError(findException(message, or, thread));
}
/**
* Move through a list of stack frames, searching for references to code
* found in the current sketch. Return with a RunnerException that contains
* the location of the error, or if nothing is found, just return with a
* RunnerException that wraps the error message itself.
*/
protected SketchException findException(String message, ObjectReference or, ThreadReference thread) {
try {
// use to dump the stack for debugging
// for (StackFrame frame : thread.frames()) {
// System.out.println("frame: " + frame);
// }
List<StackFrame> frames = thread.frames();
for (StackFrame frame : frames) {
try {
Location location = frame.location();
String filename = location.sourceName();
int lineNumber = location.lineNumber() - 1;
SketchException rex =
build.placeException(message, filename, lineNumber);
if (rex != null) {
return rex;
}
} catch (AbsentInformationException e) {
// Any of the thread.blah() methods can throw an AbsentInformationEx
// if that bit of data is missing. If so, just write out the error
// message to the console.
//e.printStackTrace(); // not useful
exception = new SketchException(message);
exception.hideStackTrace();
listener.statusError(exception);
}
}
} catch (IncompatibleThreadStateException e) {
// This shouldn't happen, but if it does, print the exception in case
// it's something that needs to be debugged separately.
e.printStackTrace(sketchErr);
} catch (Exception e) {
// stack overflows seem to trip in frame.location() above
// ignore this case so that the actual error gets reported to the user
if (!"StackOverflowError".equals(message)) {
e.printStackTrace(sketchErr);
}
}
// before giving up, try to extract from the throwable object itself
// since sometimes exceptions are re-thrown from a different context
try {
// assume object reference is Throwable, get stack trace
Method method = ((ClassType) or.referenceType()).concreteMethodByName("getStackTrace", "()[Ljava/lang/StackTraceElement;");
ArrayReference result = (ArrayReference) or.invokeMethod(thread, method, new ArrayList<>(), ObjectReference.INVOKE_SINGLE_THREADED);
// iterate through stack frames and pull filename and line number for each
for (Value val: result.getValues()) {
ObjectReference ref = (ObjectReference)val;
method = ((ClassType) ref.referenceType()).concreteMethodByName("getFileName", "()Ljava/lang/String;");
StringReference strref = (StringReference) ref.invokeMethod(thread, method, new ArrayList<>(), ObjectReference.INVOKE_SINGLE_THREADED);
String filename = strref == null ? "Unknown Source" : strref.value();
method = ((ClassType) ref.referenceType()).concreteMethodByName("getLineNumber", "()I");
IntegerValue intval = (IntegerValue) ref.invokeMethod(thread, method, new ArrayList<>(), ObjectReference.INVOKE_SINGLE_THREADED);
int lineNumber = intval.intValue() - 1;
SketchException rex =
build.placeException(message, filename, lineNumber);
if (rex != null) {
return rex;
}
}
// for (Method m : ((ClassType) or.referenceType()).allMethods()) {
// System.out.println(m + " | " + m.signature() + " | " + m.genericSignature());
// }
// Implemented for 2.0b9, writes a stack trace when there's an internal error inside core.
method = ((ClassType) or.referenceType()).concreteMethodByName("printStackTrace", "()V");
// System.err.println("got method " + method);
or.invokeMethod(thread, method, new ArrayList<>(), ObjectReference.INVOKE_SINGLE_THREADED);
} catch (Exception e) {
// stack overflows will make the exception handling above trip again
// ignore this case so that the actual error gets reported to the user
if (!"StackOverflowError".equals(message)) {
e.printStackTrace(sketchErr);
}
}
// Give up, nothing found inside the pile of stack frames
SketchException rex = new SketchException(message);
// exception is being created /here/, so stack trace is not useful
rex.hideStackTrace();
return rex;
}
public void close() {
synchronized (cancelLock) {
cancelled = true;
// TODO make sure stop() has already been called to exit the sketch
// TODO actually kill off the vm here
if (vm != null) {
try {
vm.exit(0);
} catch (com.sun.jdi.VMDisconnectedException vmde) {
// if the vm has disconnected on its own, ignore message
//System.out.println("harmless disconnect " + vmde.getMessage());
// TODO shouldn't need to do this, need to do more cleanup
}
}
}
}
// made synchronized for 0087
// attempted to remove synchronized for 0136 to fix bug #775 (no luck tho)
// https://download.processing.org/bugzilla/775.html
synchronized public void message(String s) {
// System.out.println("M" + s.length() + ":" + s.trim()); // + "MMM" + s.length());
// this eats the CRLFs on the lines.. oops.. do it later
//if (s.trim().length() == 0) return;
// this is PApplet sending a message (via System.out.println)
// that signals that the applet has been quit.
if (s.indexOf(PApplet.EXTERNAL_STOP) == 0) {
//System.out.println("external: quit");
if (editor != null) {
// editor.internalCloseRunner(); // [091124]
// editor.handleStop(); // prior to 0192
java.awt.EventQueue.invokeLater(() -> {
editor.internalCloseRunner(); // 0192
});
}
return;
}
// this is the PApplet sending us a message that the applet
// is being moved to a new window location
if (s.indexOf(PApplet.EXTERNAL_MOVE) == 0) {
String nums = s.substring(s.indexOf(' ') + 1).trim();
int space = nums.indexOf(' ');
int left = Integer.parseInt(nums.substring(0, space));
int top = Integer.parseInt(nums.substring(space + 1));
// this is only fired when connected to an editor
editor.setSketchLocation(new Point(left, top));
//System.out.println("external: move to " + left + " " + top);
return;
}
// these are used for debugging, in case there are concerns
// that some errors aren't coming through properly
// if (s.length() > 2) {
// System.err.println(newMessage);
// System.err.println("message " + s.length() + ":" + s);
// }
// Ignore the warnings on macOS Sequoia to prevent confusion: https://github.com/processing/processing4/issues/864
if(s.contains("+[IMKClient subclass]:") || s.contains("+[IMKInputSession subclass]:")){
return;
}
// always shove out the message, since it might not fall under
// the same setup as we're expecting
sketchErr.print(s);
//System.err.println("[" + s.length() + "] " + s);
sketchErr.flush();
}
}
| that |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java | {
"start": 1444,
"end": 8193
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(TimelineEntityConverterV2.class);
static final String JOB = "MAPREDUCE_JOB";
static final String TASK = "MAPREDUCE_TASK";
static final String TASK_ATTEMPT = "MAPREDUCE_TASK_ATTEMPT";
/**
* Creates job, task, and task attempt entities based on the job history info
* and configuration.
*
* Note: currently these are plan timeline entities created for mapreduce
* types. These are not meant to be the complete and accurate entity set-up
* for mapreduce jobs. We do not leverage hierarchical timeline entities. If
* we create canonical mapreduce hierarchical timeline entities with proper
* parent-child relationship, we could modify this to use that instead.
*
* Note that we also do not add info to the YARN application entity, which
* would be needed for aggregation.
*/
public List<TimelineEntity> createTimelineEntities(JobInfo jobInfo,
Configuration conf) {
List<TimelineEntity> entities = new ArrayList<>();
// create the job entity
TimelineEntity job = createJobEntity(jobInfo, conf);
entities.add(job);
// create the task and task attempt entities
List<TimelineEntity> tasksAndAttempts =
createTaskAndTaskAttemptEntities(jobInfo);
entities.addAll(tasksAndAttempts);
return entities;
}
private TimelineEntity createJobEntity(JobInfo jobInfo, Configuration conf) {
TimelineEntity job = new TimelineEntity();
job.setType(JOB);
job.setId(jobInfo.getJobId().toString());
job.setCreatedTime(jobInfo.getSubmitTime());
job.addInfo("JOBNAME", jobInfo.getJobname());
job.addInfo("USERNAME", jobInfo.getUsername());
job.addInfo("JOB_QUEUE_NAME", jobInfo.getJobQueueName());
job.addInfo("SUBMIT_TIME", jobInfo.getSubmitTime());
job.addInfo("LAUNCH_TIME", jobInfo.getLaunchTime());
job.addInfo("FINISH_TIME", jobInfo.getFinishTime());
job.addInfo("JOB_STATUS", jobInfo.getJobStatus());
job.addInfo("PRIORITY", jobInfo.getPriority());
job.addInfo("TOTAL_MAPS", jobInfo.getTotalMaps());
job.addInfo("TOTAL_REDUCES", jobInfo.getTotalReduces());
job.addInfo("UBERIZED", jobInfo.getUberized());
job.addInfo("ERROR_INFO", jobInfo.getErrorInfo());
// add metrics from total counters
// we omit the map counters and reduce counters for now as it's kind of
// awkward to put them (map/reduce/total counters are really a group of
// related counters)
Counters totalCounters = jobInfo.getTotalCounters();
if (totalCounters != null) {
addMetrics(job, totalCounters);
}
// finally add configuration to the job
addConfiguration(job, conf);
LOG.info("converted job " + jobInfo.getJobId() + " to a timeline entity");
return job;
}
private void addConfiguration(TimelineEntity job, Configuration conf) {
for (Map.Entry<String, String> e: conf) {
job.addConfig(e.getKey(), e.getValue());
}
}
private void addMetrics(TimelineEntity entity, Counters counters) {
for (CounterGroup g: counters) {
String groupName = g.getName();
for (Counter c: g) {
String name = groupName + ":" + c.getName();
TimelineMetric metric = new TimelineMetric();
metric.setId(name);
metric.addValue(System.currentTimeMillis(), c.getValue());
entity.addMetric(metric);
}
}
}
private List<TimelineEntity> createTaskAndTaskAttemptEntities(
JobInfo jobInfo) {
List<TimelineEntity> entities = new ArrayList<>();
Map<TaskID, TaskInfo> taskInfoMap = jobInfo.getAllTasks();
LOG.info("job " + jobInfo.getJobId()+ " has " + taskInfoMap.size() +
" tasks");
for (TaskInfo taskInfo: taskInfoMap.values()) {
TimelineEntity task = createTaskEntity(taskInfo);
entities.add(task);
// add the task attempts from this task
Set<TimelineEntity> taskAttempts = createTaskAttemptEntities(taskInfo);
entities.addAll(taskAttempts);
}
return entities;
}
private TimelineEntity createTaskEntity(TaskInfo taskInfo) {
TimelineEntity task = new TimelineEntity();
task.setType(TASK);
task.setId(taskInfo.getTaskId().toString());
task.setCreatedTime(taskInfo.getStartTime());
task.addInfo("START_TIME", taskInfo.getStartTime());
task.addInfo("FINISH_TIME", taskInfo.getFinishTime());
task.addInfo("TASK_TYPE", taskInfo.getTaskType());
task.addInfo("TASK_STATUS", taskInfo.getTaskStatus());
task.addInfo("ERROR_INFO", taskInfo.getError());
// add metrics from counters
Counters counters = taskInfo.getCounters();
if (counters != null) {
addMetrics(task, counters);
}
LOG.info("converted task " + taskInfo.getTaskId() +
" to a timeline entity");
return task;
}
private Set<TimelineEntity> createTaskAttemptEntities(TaskInfo taskInfo) {
Set<TimelineEntity> taskAttempts = new HashSet<TimelineEntity>();
Map<TaskAttemptID, TaskAttemptInfo> taskAttemptInfoMap =
taskInfo.getAllTaskAttempts();
LOG.info("task " + taskInfo.getTaskId() + " has " +
taskAttemptInfoMap.size() + " task attempts");
for (TaskAttemptInfo taskAttemptInfo: taskAttemptInfoMap.values()) {
TimelineEntity taskAttempt = createTaskAttemptEntity(taskAttemptInfo);
taskAttempts.add(taskAttempt);
}
return taskAttempts;
}
private TimelineEntity createTaskAttemptEntity(
TaskAttemptInfo taskAttemptInfo) {
TimelineEntity taskAttempt = new TimelineEntity();
taskAttempt.setType(TASK_ATTEMPT);
taskAttempt.setId(taskAttemptInfo.getAttemptId().toString());
taskAttempt.setCreatedTime(taskAttemptInfo.getStartTime());
taskAttempt.addInfo("START_TIME", taskAttemptInfo.getStartTime());
taskAttempt.addInfo("FINISH_TIME", taskAttemptInfo.getFinishTime());
taskAttempt.addInfo("MAP_FINISH_TIME",
taskAttemptInfo.getMapFinishTime());
taskAttempt.addInfo("SHUFFLE_FINISH_TIME",
taskAttemptInfo.getShuffleFinishTime());
taskAttempt.addInfo("SORT_FINISH_TIME",
taskAttemptInfo.getSortFinishTime());
taskAttempt.addInfo("TASK_STATUS", taskAttemptInfo.getTaskStatus());
taskAttempt.addInfo("STATE", taskAttemptInfo.getState());
taskAttempt.addInfo("ERROR", taskAttemptInfo.getError());
taskAttempt.addInfo("CONTAINER_ID",
taskAttemptInfo.getContainerId().toString());
// add metrics from counters
Counters counters = taskAttemptInfo.getCounters();
if (counters != null) {
addMetrics(taskAttempt, counters);
}
LOG.info("converted task attempt " + taskAttemptInfo.getAttemptId() +
" to a timeline entity");
return taskAttempt;
}
}
| TimelineEntityConverterV2 |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLForStatement.java | {
"start": 340,
"end": 1430
} | class ____ extends SQLStatementImpl {
protected SQLName index;
protected boolean isReverse;
protected SQLExpr range;
protected List<SQLStatement> statements = new ArrayList<SQLStatement>();
public SQLForStatement() {
}
public SQLName getIndex() {
return index;
}
public void setIndex(SQLName index) {
this.index = index;
}
public void setReverse(boolean isReverse) {
this.isReverse = isReverse;
}
public boolean isReverse() {
return isReverse;
}
public SQLExpr getRange() {
return range;
}
public void setRange(SQLExpr range) {
if (range != null) {
range.setParent(this);
}
this.range = range;
}
public List<SQLStatement> getStatements() {
return statements;
}
@Override
protected void accept0(SQLASTVisitor v) {
if (v.visit(this)) {
acceptChild(v, index);
acceptChild(v, range);
acceptChild(v, statements);
}
v.endVisit(this);
}
}
| SQLForStatement |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingVisitor.java | {
"start": 598,
"end": 3491
} | class ____ {
private MappingVisitor() {}
public static void visitMapping(Map<String, ?> mapping, BiConsumer<String, Map<String, ?>> fieldMappingConsumer) {
visitMapping(mapping, "", fieldMappingConsumer);
}
private static void visitMapping(
final Map<String, ?> mapping,
final String path,
final BiConsumer<String, Map<String, ?>> fieldMappingConsumer
) {
Object properties = mapping.get("properties");
if (properties instanceof Map) {
@SuppressWarnings("unchecked")
Map<String, ?> propertiesAsMap = (Map<String, ?>) properties;
for (Map.Entry<String, ?> entry : propertiesAsMap.entrySet()) {
final Object v = entry.getValue();
if (v instanceof Map) {
@SuppressWarnings("unchecked")
Map<String, ?> fieldMapping = (Map<String, ?>) v;
final String prefix = path + entry.getKey();
fieldMappingConsumer.accept(prefix, fieldMapping);
visitMapping(fieldMapping, prefix + ".", fieldMappingConsumer);
// Multi fields
Object fieldsO = fieldMapping.get("fields");
if (fieldsO instanceof Map) {
@SuppressWarnings("unchecked")
Map<String, ?> fields = (Map<String, ?>) fieldsO;
for (Map.Entry<String, ?> subfieldEntry : fields.entrySet()) {
Object v2 = subfieldEntry.getValue();
if (v2 instanceof Map) {
@SuppressWarnings("unchecked")
Map<String, ?> fieldMapping2 = (Map<String, ?>) v2;
fieldMappingConsumer.accept(prefix + "." + subfieldEntry.getKey(), fieldMapping2);
}
}
}
}
}
}
}
public static void visitRuntimeMapping(Map<String, ?> mapping, BiConsumer<String, Map<String, ?>> runtimeFieldMappingConsumer) {
Object runtimeObject = mapping.get("runtime");
if (runtimeObject instanceof Map == false) {
return;
}
@SuppressWarnings("unchecked")
Map<String, ?> runtimeMappings = (Map<String, ?>) runtimeObject;
for (Map.Entry<String, ?> entry : runtimeMappings.entrySet()) {
final Object runtimeFieldMappingObject = entry.getValue();
if (runtimeFieldMappingObject instanceof Map == false) {
continue;
}
@SuppressWarnings("unchecked")
Map<String, ?> runtimeFieldMapping = (Map<String, ?>) runtimeFieldMappingObject;
runtimeFieldMappingConsumer.accept(entry.getKey(), runtimeFieldMapping);
}
}
}
| MappingVisitor |
java | hibernate__hibernate-orm | hibernate-spatial/src/main/java/org/hibernate/spatial/criteria/GeolatteSpatialCriteriaBuilder.java | {
"start": 214,
"end": 303
} | interface ____ extends SpatialCriteriaBuilder<Geometry<?>> {
}
| GeolatteSpatialCriteriaBuilder |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/ClassTemplateInvocationTests.java | {
"start": 67097,
"end": 67416
} | class ____ implements ExtensionContext.Store.CloseableResource {
static boolean closed;
@Override
public void close() {
closed = true;
}
}
@SuppressWarnings("JUnitMalformedDeclaration")
@ExtendWith(TwoInvocationsClassTemplateInvocationContextProvider.class)
@ClassTemplate
static | CustomCloseableResource |
java | apache__thrift | lib/javame/src/org/apache/thrift/protocol/TField.java | {
"start": 856,
"end": 910
} | class ____ encapsulates field metadata.
*
*/
public | that |
java | apache__maven | impl/maven-core/src/test/java/org/apache/maven/internal/impl/DefaultSessionTest.java | {
"start": 1427,
"end": 2680
} | class ____ {
@Test
void testRootDirectoryWithNull() {
RepositorySystemSession rss = new DefaultRepositorySystemSession(h -> false);
DefaultMavenExecutionRequest mer = new DefaultMavenExecutionRequest();
MavenSession ms = new MavenSession(null, rss, mer, null);
DefaultSession session =
new DefaultSession(ms, mock(RepositorySystem.class), Collections.emptyList(), null, null, null);
assertEquals(
RootLocator.UNABLE_TO_FIND_ROOT_PROJECT_MESSAGE,
assertThrows(IllegalStateException.class, session::getRootDirectory)
.getMessage());
}
@Test
void testRootDirectory() {
RepositorySystemSession rss = new DefaultRepositorySystemSession(h -> false);
DefaultMavenExecutionRequest mer = new DefaultMavenExecutionRequest();
MavenSession ms = new MavenSession(null, rss, mer, null);
ms.getRequest().setRootDirectory(Paths.get("myRootDirectory"));
DefaultSession session =
new DefaultSession(ms, mock(RepositorySystem.class), Collections.emptyList(), null, null, null);
assertEquals(Paths.get("myRootDirectory"), session.getRootDirectory());
}
}
| DefaultSessionTest |
java | google__dagger | javatests/dagger/internal/codegen/kotlin/KspComponentProcessorTest.java | {
"start": 12384,
"end": 12612
} | class ____ {",
" @Provides @Named(\"key\") fun provideFoo(@Named(\"key\") bar: Bar) = Foo(bar)",
" @Provides @Named(\"key\") fun provideBar() = Bar()",
"}",
"",
" | MyModule |
java | bumptech__glide | annotation/compiler/src/main/java/com/bumptech/glide/annotation/compiler/ProcessorUtil.java | {
"start": 7698,
"end": 20569
} | class ____ the method
* without any generic types like {@literal <T>}.
* @param methodSimpleName The name of the method.
* @param methodParameters A maybe empty list of all the parameters for the method in question.
*/
CodeBlock generateSeeMethodJavadoc(
TypeName nameOfClassContainingMethod,
String methodSimpleName,
List<? extends VariableElement> methodParameters) {
return generateSeeMethodJavadocInternal(
nameOfClassContainingMethod,
methodSimpleName,
Lists.transform(
methodParameters,
new Function<VariableElement, Object>() {
@Override
public Object apply(VariableElement input) {
return getJavadocSafeName(input);
}
}));
}
CodeBlock generateSeeMethodJavadoc(TypeName nameOfClassContainingMethod, MethodSpec methodSpec) {
return generateSeeMethodJavadocInternal(
nameOfClassContainingMethod,
methodSpec.name,
Lists.transform(
methodSpec.parameters,
new Function<ParameterSpec, Object>() {
@Override
public Object apply(ParameterSpec input) {
return input.type;
}
}));
}
private CodeBlock generateSeeMethodJavadocInternal(
TypeName nameOfClassContainingMethod, String methodName, List<Object> safeParameterNames) {
StringBuilder javadocString = new StringBuilder("@see $T#$L(");
List<Object> javadocArgs = new ArrayList<>();
javadocArgs.add(nameOfClassContainingMethod);
javadocArgs.add(methodName);
for (Object param : safeParameterNames) {
javadocString.append("$T, ");
javadocArgs.add(param);
}
if (javadocArgs.size() > 2) {
javadocString = new StringBuilder(javadocString.substring(0, javadocString.length() - 2));
}
javadocString.append(")\n");
return CodeBlock.of(javadocString.toString(), javadocArgs.toArray(new Object[0]));
}
/**
* Returns a safe String to use in a Javadoc that will function in a link.
*
* <p>This method exists because by Javadoc doesn't handle type parameters({@literal <T>} in
* {@literal RequestOptions<T>} for example).
*/
private TypeName getJavadocSafeName(Element element) {
Types typeUtils = processingEnv.getTypeUtils();
TypeMirror type = element.asType();
if (typeUtils.asElement(type) == null) {
// If there is no Element, it's a primitive and can't have additional types, so we're done.
return ClassName.get(element.asType());
}
Name simpleName = typeUtils.asElement(type).getSimpleName();
return ClassName.bestGuess(simpleName.toString());
}
void debugLog(String toLog) {
if (DEBUG) {
infoLog(toLog);
}
}
void infoLog(String toLog) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE, "[" + round + "] " + toLog);
}
static CodeBlock generateCastingSuperCall(TypeName toReturn, MethodSpec method) {
return CodeBlock.builder()
.add("return ($T) super.$N(", toReturn, method.name)
.add(
FluentIterable.from(method.parameters)
.transform(
new Function<ParameterSpec, String>() {
@Override
public String apply(ParameterSpec input) {
return input.name;
}
})
.join(Joiner.on(",")))
.add(");\n")
.build();
}
MethodSpec.Builder overriding(ExecutableElement method) {
String methodName = method.getSimpleName().toString();
MethodSpec.Builder builder = MethodSpec.methodBuilder(methodName).addAnnotation(Override.class);
Set<Modifier> modifiers = method.getModifiers();
modifiers = new LinkedHashSet<>(modifiers);
modifiers.remove(Modifier.ABSTRACT);
Modifier defaultModifier = null;
// Modifier.DEFAULT doesn't exist until Java 8.
try {
defaultModifier = Modifier.valueOf("DEFAULT");
} catch (IllegalArgumentException e) {
// Ignored.
}
modifiers.remove(defaultModifier);
builder = builder.addModifiers(modifiers);
for (TypeParameterElement typeParameterElement : method.getTypeParameters()) {
TypeVariable var = (TypeVariable) typeParameterElement.asType();
builder = builder.addTypeVariable(TypeVariableName.get(var));
}
builder =
builder
.returns(TypeName.get(method.getReturnType()))
.addParameters(getParameters(method))
.varargs(method.isVarArgs());
for (TypeMirror thrownType : method.getThrownTypes()) {
builder = builder.addException(TypeName.get(thrownType));
}
return builder;
}
List<ParameterSpec> getParameters(ExecutableElement method) {
return getParameters(method.getParameters());
}
List<ParameterSpec> getParameters(List<? extends VariableElement> parameters) {
List<ParameterSpec> result = new ArrayList<>();
for (VariableElement parameter : parameters) {
result.add(getParameter(parameter));
}
return dedupedParameters(result);
}
private static List<ParameterSpec> dedupedParameters(List<ParameterSpec> parameters) {
boolean hasDupes = false;
Set<String> names = new HashSet<>();
for (ParameterSpec parameter : parameters) {
String name = parameter.name;
if (names.contains(name)) {
hasDupes = true;
} else {
names.add(name);
}
}
if (hasDupes) {
List<ParameterSpec> copy = parameters;
parameters = new ArrayList<>();
for (int i = 0; i < copy.size(); i++) {
ParameterSpec parameter = copy.get(i);
parameters.add(
ParameterSpec.builder(parameter.type, parameter.name + i)
.addModifiers(parameter.modifiers)
.addAnnotations(parameter.annotations)
.build());
}
}
return parameters;
}
private ParameterSpec getParameter(VariableElement parameter) {
TypeName type = TypeName.get(parameter.asType());
return ParameterSpec.builder(type, computeParameterName(parameter, type))
.addModifiers(parameter.getModifiers())
.addAnnotations(getAnnotations(parameter))
.build();
}
private static String computeParameterName(VariableElement parameter, TypeName type) {
String rawClassName = type.withoutAnnotations().toString();
String name;
if (type.isPrimitive() || type.isBoxedPrimitive()) {
name = getSmartPrimitiveParameterName(parameter);
} else {
if (rawClassName.contains("<") && rawClassName.contains(">")) {
String[] preGenericSplit = rawClassName.split("<");
String preGeneric = preGenericSplit[0];
String[] postGenericSplit = rawClassName.split(">");
String postGeneric = postGenericSplit[postGenericSplit.length - 1];
if (postGenericSplit.length > 1) {
rawClassName = preGeneric + postGeneric;
} else {
rawClassName = preGeneric;
}
}
String[] qualifiers = rawClassName.split("\\.");
rawClassName = qualifiers[qualifiers.length - 1];
rawClassName = applySmartParameterNameReplacements(rawClassName);
boolean allCaps = true;
for (char c : rawClassName.toCharArray()) {
if (Character.isLowerCase(c)) {
allCaps = false;
break;
}
}
if (allCaps) {
name = rawClassName.toLowerCase(Locale.ROOT);
} else {
int indexOfLastWordStart = 0;
char[] chars = rawClassName.toCharArray();
for (int i = 0, charArrayLength = chars.length; i < charArrayLength; i++) {
char c = chars[i];
if (Character.isUpperCase(c)) {
indexOfLastWordStart = i;
}
}
rawClassName = rawClassName.substring(indexOfLastWordStart, rawClassName.length());
name =
Character.toLowerCase(rawClassName.charAt(0))
+ rawClassName.substring(1, rawClassName.length());
}
}
return name;
}
private static String getSmartPrimitiveParameterName(VariableElement parameter) {
for (AnnotationMirror annotation : parameter.getAnnotationMirrors()) {
String annotationName = annotation.getAnnotationType().toString().toUpperCase(Locale.ROOT);
if (annotationName.endsWith("RES")) {
// Catch annotations like StringRes
return "id";
} else if (annotationName.endsWith("RANGE")) {
// Catch annotations like IntRange
return "value";
}
}
return parameter.getSimpleName().toString();
}
private static String applySmartParameterNameReplacements(String name) {
name = name.replace("[]", "s");
name = name.replace(Class.class.getSimpleName(), "clazz");
name = name.replace(Object.class.getSimpleName(), "o");
return name;
}
private List<AnnotationSpec> getAnnotations(VariableElement element) {
List<AnnotationSpec> result = new ArrayList<>();
for (AnnotationMirror mirror : element.getAnnotationMirrors()) {
result.add(maybeConvertSupportLibraryAnnotation(mirror));
}
return result;
}
private AnnotationSpec maybeConvertSupportLibraryAnnotation(AnnotationMirror mirror) {
String annotationName = mirror.getAnnotationType().asElement().toString();
boolean preferAndroidX = visibleForTesting().equals(ANDROIDX_VISIBLE_FOR_TESTING);
ImmutableBiMap<ClassName, ClassName> map =
ImmutableBiMap.<ClassName, ClassName>builder()
.put(SUPPORT_NONNULL_ANNOTATION, ANDROIDX_NONNULL_ANNOTATION)
.put(SUPPORT_CHECK_RESULT_ANNOTATION, ANDROIDX_CHECK_RESULT_ANNOTATION)
.put(SUPPORT_VISIBLE_FOR_TESTING, ANDROIDX_VISIBLE_FOR_TESTING)
.build();
ClassName remapped = null;
if (preferAndroidX && annotationName.startsWith("android.support.annotation")) {
remapped = ClassName.get((TypeElement) mirror.getAnnotationType().asElement());
} else if (!preferAndroidX && annotationName.startsWith("androidx.annotation")) {
remapped = ClassName.get((TypeElement) mirror.getAnnotationType().asElement());
}
if (remapped != null && map.containsKey(remapped)) {
return AnnotationSpec.builder(map.get(remapped)).build();
} else {
return AnnotationSpec.get(mirror);
}
}
ClassName visibleForTesting() {
return findAnnotationClassName(ANDROIDX_VISIBLE_FOR_TESTING, SUPPORT_VISIBLE_FOR_TESTING);
}
ClassName nonNull() {
return findAnnotationClassName(ANDROIDX_NONNULL_ANNOTATION, SUPPORT_NONNULL_ANNOTATION);
}
ClassName checkResult() {
return findAnnotationClassName(
ANDROIDX_CHECK_RESULT_ANNOTATION, SUPPORT_CHECK_RESULT_ANNOTATION);
}
static List<ClassName> nonNulls() {
return ImmutableList.of(
SUPPORT_NONNULL_ANNOTATION, JETBRAINS_NOTNULL_ANNOTATION, ANDROIDX_NONNULL_ANNOTATION);
}
private ClassName findAnnotationClassName(ClassName androidxName, ClassName supportName) {
Elements elements = processingEnv.getElementUtils();
TypeElement visibleForTestingTypeElement =
elements.getTypeElement(androidxName.reflectionName());
if (visibleForTestingTypeElement != null) {
return androidxName;
}
return supportName;
}
List<ExecutableElement> findInstanceMethodsReturning(TypeElement clazz, TypeMirror returnType) {
return FluentIterable.from(clazz.getEnclosedElements())
.filter(new FilterPublicMethods(returnType, MethodType.INSTANCE))
.transform(new ToMethod())
.toList();
}
List<ExecutableElement> findInstanceMethodsReturning(TypeElement clazz, TypeElement returnType) {
return FluentIterable.from(clazz.getEnclosedElements())
.filter(new FilterPublicMethods(returnType, MethodType.INSTANCE))
.transform(new ToMethod())
.toList();
}
List<ExecutableElement> findStaticMethodsReturning(TypeElement clazz, TypeElement returnType) {
return FluentIterable.from(clazz.getEnclosedElements())
.filter(new FilterPublicMethods(returnType, MethodType.STATIC))
.transform(new ToMethod())
.toList();
}
List<ExecutableElement> findStaticMethods(TypeElement clazz) {
return FluentIterable.from(clazz.getEnclosedElements())
.filter(new FilterPublicMethods((TypeMirror) null /*returnType*/, MethodType.STATIC))
.transform(new ToMethod())
.toList();
}
ImmutableSet<String> findClassValuesFromAnnotationOnClassAsNames(
Element clazz, Class<? extends Annotation> annotationClass) {
String annotationClassName = annotationClass.getName();
AnnotationValue excludedModuleAnnotationValue = null;
for (AnnotationMirror annotationMirror : clazz.getAnnotationMirrors()) {
// Two different AnnotationMirrors the same | containing |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/timer/TimerReferenceConfigurationTest.java | {
"start": 1172,
"end": 5697
} | class ____ extends ContextTestSupport {
/**
* reference params
*/
final String refExpectedTimeString = "1972-12-11 19:55:00";
final String refExpectedPattern = "yyyy-MM-dd HH:mm:ss";
final long refExpectedPeriod = 500;
final long refExpectedDelay = 100;
final boolean refExpectedFixedRate = true;
final boolean refExpectedDaemon = false;
final long refExpectedRepeatCount = 11;
/**
* value params
*/
final String valExpectedTimeString = "1970-04-17T18:07:41";
final String valExpectedPattern = "yyyy-MM-dd'T'HH:mm:ss";
final long valExpectedPeriod = 350;
final long valExpectedDelay = 123;
final boolean valExpectedFixedRate = false;
final boolean valExpectedDaemon = true;
final long valExpectedRepeatCount = 13;
final String refTimerUri = "timer://passByRefTimer?" + "time=#refExpectedTimeString" + "&pattern=#refExpectedPattern"
+ "&period=#refExpectedPeriod"
+ "&delay=#refExpectedDelay" + "&fixedRate=#refExpectedFixedRate" + "&daemon=#refExpectedDaemon"
+ "&repeatCount=#refExpectedRepeatCount";
final String valueTimerUri = "timer://passByValueTimer?" + "time=" + valExpectedTimeString + "&pattern="
+ valExpectedPattern + "&period=" + valExpectedPeriod + "&delay="
+ valExpectedDelay + "&fixedRate=" + valExpectedFixedRate + "&daemon=" + valExpectedDaemon
+ "&repeatCount=" + valExpectedRepeatCount;
final String mockEndpointUri = "mock:result";
@Override
protected Registry createCamelRegistry() throws Exception {
Registry reg = super.createCamelRegistry();
reg.bind("refExpectedTimeString", refExpectedTimeString);
reg.bind("refExpectedPattern", refExpectedPattern);
reg.bind("refExpectedPeriod", refExpectedPeriod);
reg.bind("refExpectedDelay", refExpectedDelay);
reg.bind("refExpectedFixedRate", refExpectedFixedRate);
reg.bind("refExpectedDaemon", refExpectedDaemon);
reg.bind("refExpectedRepeatCount", refExpectedRepeatCount);
return reg;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(refTimerUri).to(mockEndpointUri);
from(valueTimerUri).to(mockEndpointUri);
}
};
}
/**
* Test that the reference configuration params are correct
*/
@Test
public void testReferenceConfiguration() throws Exception {
Endpoint e = context.getEndpoint(refTimerUri);
TimerEndpoint timer = (TimerEndpoint) e;
final Date expectedTimeObject = new SimpleDateFormat(refExpectedPattern).parse(refExpectedTimeString);
final Date time = timer.getTime();
final long period = timer.getPeriod();
final long delay = timer.getDelay();
final boolean fixedRate = timer.isFixedRate();
final boolean daemon = timer.isDaemon();
final long repeatCount = timer.getRepeatCount();
assertEquals(refExpectedDelay, delay);
assertEquals(refExpectedPeriod, period);
assertEquals(expectedTimeObject, time);
assertEquals(refExpectedFixedRate, fixedRate);
assertEquals(refExpectedDaemon, daemon);
assertEquals(refExpectedRepeatCount, repeatCount);
}
/**
* Test that the 'value' configuration params are correct
*/
@Test
public void testValueConfiguration() throws Exception {
Endpoint e = context.getEndpoint(valueTimerUri);
TimerEndpoint timer = (TimerEndpoint) e;
final Date expectedTimeObject = new SimpleDateFormat(valExpectedPattern).parse(valExpectedTimeString);
final Date time = timer.getTime();
final long period = timer.getPeriod();
final long delay = timer.getDelay();
final boolean fixedRate = timer.isFixedRate();
final boolean daemon = timer.isDaemon();
final long repeatCount = timer.getRepeatCount();
assertEquals(valExpectedDelay, delay);
assertEquals(valExpectedPeriod, period);
assertEquals(expectedTimeObject, time);
assertEquals(valExpectedFixedRate, fixedRate);
assertEquals(valExpectedDaemon, daemon);
assertEquals(valExpectedRepeatCount, repeatCount);
}
}
| TimerReferenceConfigurationTest |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/NettyHttpEndpointBuilderFactory.java | {
"start": 28488,
"end": 29914
} | class ____ could be used to return an SSL Handler.
*
* The option will be converted to a
* <code>io.netty.handler.ssl.SslHandler</code> type.
*
* Group: security
*
* @param sslHandler the value to set
* @return the dsl builder
*/
default NettyHttpEndpointConsumerBuilder sslHandler(String sslHandler) {
doSetProperty("sslHandler", sslHandler);
return this;
}
/**
* Server side certificate keystore to be used for encryption. Is loaded
* by default from classpath, but you can prefix with classpath:, file:,
* or http: to load the resource from different systems.
*
* This option can also be loaded from an existing file, by prefixing
* with file: or classpath: followed by the location of the file.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param trustStoreResource the value to set
* @return the dsl builder
*/
default NettyHttpEndpointConsumerBuilder trustStoreResource(String trustStoreResource) {
doSetProperty("trustStoreResource", trustStoreResource);
return this;
}
}
/**
* Advanced builder for endpoint consumers for the Netty HTTP component.
*/
public | that |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/datasource/init/UncategorizedScriptException.java | {
"start": 981,
"end": 1457
} | class ____ extends ScriptException {
/**
* Create a new {@code UncategorizedScriptException}.
* @param message detailed message
*/
public UncategorizedScriptException(String message) {
super(message);
}
/**
* Create a new {@code UncategorizedScriptException}.
* @param message detailed message
* @param cause the root cause
*/
public UncategorizedScriptException(String message, Throwable cause) {
super(message, cause);
}
}
| UncategorizedScriptException |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/junit/jupiter/SpringExtension.java | {
"start": 7691,
"end": 9617
} | class ____
* {@code @SpringExtensionConfig(useTestClassScopedExtensionContext = true)}.
* See the {@linkplain SpringExtension class-level Javadoc} for further details.
* @since 7.0
* @see SpringExtensionConfig#useTestClassScopedExtensionContext()
*/
@Override
public ExtensionContextScope getTestInstantiationExtensionContextScope(ExtensionContext rootContext) {
return ExtensionContextScope.TEST_METHOD;
}
/**
* Delegates to {@link TestContextManager#beforeTestClass}.
*/
@Override
public void beforeAll(ExtensionContext context) throws Exception {
TestContextManager testContextManager = getTestContextManager(context);
registerMethodInvoker(testContextManager, context);
testContextManager.beforeTestClass();
}
/**
* Delegates to {@link TestContextManager#afterTestClass}.
*/
@Override
public void afterAll(ExtensionContext context) throws Exception {
try {
TestContextManager testContextManager = getTestContextManager(context);
registerMethodInvoker(testContextManager, context);
testContextManager.afterTestClass();
}
finally {
getStore(context).remove(context.getRequiredTestClass());
}
}
/**
* Delegates to {@link TestContextManager#prepareTestInstance}.
* <p>This method also validates that test methods and test lifecycle methods
* are not annotated with {@link Autowired @Autowired}.
*/
@Override
public void postProcessTestInstance(Object testInstance, ExtensionContext context) throws Exception {
context = findProperlyScopedExtensionContext(testInstance.getClass(), context);
validateAutowiredConfig(context);
validateRecordApplicationEventsConfig(context);
TestContextManager testContextManager = getTestContextManager(context);
registerMethodInvoker(testContextManager, context);
testContextManager.prepareTestInstance(testInstance);
}
/**
* Validate that test methods and test lifecycle methods in the supplied
* test | with |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/expression/ExpressionUsageTests.java | {
"start": 1191,
"end": 1957
} | class ____ {
@Autowired
@Qualifier("derived")
private Properties props;
@Autowired
@Qualifier("andy2")
private Foo andy2;
@Autowired
@Qualifier("andy")
private Foo andy;
@Test
void testSpr5906() {
// verify the property values have been evaluated as expressions
assertThat(props.getProperty("user.name")).isEqualTo("Dave");
assertThat(props.getProperty("username")).isEqualTo("Andy");
// verify the property keys have been evaluated as expressions
assertThat(props.getProperty("Dave")).isEqualTo("exists");
assertThat(props.getProperty("Andy")).isEqualTo("exists also");
}
@Test
void testSpr5847() {
assertThat(andy2.getName()).isEqualTo("Andy");
assertThat(andy.getName()).isEqualTo("Andy");
}
public static | ExpressionUsageTests |
java | apache__camel | test-infra/camel-test-infra-ollama/src/main/java/org/apache/camel/test/infra/ollama/services/OllamaLocalContainerInfraService.java | {
"start": 3087,
"end": 6870
} | class ____ extends OllamaContainer {
public TestInfraOllamaContainer(boolean fixedPort) {
super(DockerImageName.parse(CONTAINER_NAME)
.asCompatibleSubstituteFor("ollama/ollama"));
// Add file system bind for Ollama data persistence
String homeDir = System.getenv("HOME");
if (homeDir != null) {
try {
Path ollamaDir = Paths.get(homeDir, ".camel-test", "ollama");
Files.createDirectories(ollamaDir);
withFileSystemBind(ollamaDir.toString(), "/root/.ollama", BindMode.READ_WRITE);
LOG.info("Binding host directory {} to container path /root/.ollama", ollamaDir);
} catch (IOException e) {
LOG.warn("Failed to create Ollama data directory, continuing without bind mount", e);
}
}
// Conditionally enable GPU support based on configuration
String enableGpu = LocalPropertyResolver.getProperty(
OllamaLocalContainerInfraService.class, OllamaProperties.ENABLE_GPU);
if ("enabled".equalsIgnoreCase(enableGpu)) {
LOG.info("Enabling GPU support for Ollama container");
withCreateContainerCmdModifier(cmd -> cmd.getHostConfig()
.withDeviceRequests(
Arrays.asList(
new DeviceRequest()
.withCount(-1) // -1 means all GPUs
.withCapabilities(Arrays.asList(Arrays.asList("gpu"))))));
} else {
LOG.info("GPU support disabled");
}
if (fixedPort) {
addFixedExposedPort(11434, 11434);
}
String name = ContainerEnvironmentUtil.containerName(OllamaLocalContainerInfraService.this.getClass());
if (name != null) {
withCreateContainerCmdModifier(cmd -> cmd.withName(name));
}
}
}
return new TestInfraOllamaContainer(ContainerEnvironmentUtil.isFixedPort(this.getClass()));
}
@Override
public String getEndpoint() {
return container.getEndpoint();
}
@Override
public String getModel() {
return modelName();
}
@Override
public String modelName() {
return configuration.modelName();
}
@Override
public String baseUrl() {
return container.getEndpoint();
}
@Override
public String baseUrlV1() {
return container.getEndpoint() + "/v1";
}
@Override
public String apiKey() {
return configuration.apiKey();
}
@Override
public void registerProperties() {
System.setProperty(OllamaProperties.ENDPOINT, getEndpoint());
}
@Override
public void initialize() {
LOG.info("Trying to start the Ollama container");
container.start();
LOG.info("Pulling the model {}", getModel());
try {
container.execInContainer("ollama", "pull", getModel());
} catch (IOException | InterruptedException e) {
throw new RuntimeException(e);
}
registerProperties();
LOG.info("Ollama instance running at {}", getEndpoint());
}
@Override
public void shutdown() {
LOG.info("Stopping the Ollama container");
container.stop();
}
@Override
public OllamaContainer getContainer() {
return container;
}
}
| TestInfraOllamaContainer |
java | apache__camel | components/camel-twilio/src/generated/java/org/apache/camel/component/twilio/internal/IncomingPhoneNumberLocalApiMethod.java | {
"start": 686,
"end": 2176
} | enum ____ implements ApiMethod {
CREATOR(
com.twilio.rest.api.v2010.account.incomingphonenumber.LocalCreator.class,
"creator",
arg("phoneNumber", com.twilio.type.PhoneNumber.class)),
CREATOR_1(
com.twilio.rest.api.v2010.account.incomingphonenumber.LocalCreator.class,
"creator",
arg("pathAccountSid", String.class),
arg("phoneNumber", com.twilio.type.PhoneNumber.class)),
READER(
com.twilio.rest.api.v2010.account.incomingphonenumber.LocalReader.class,
"reader"),
READER_1(
com.twilio.rest.api.v2010.account.incomingphonenumber.LocalReader.class,
"reader",
arg("pathAccountSid", String.class));
private final ApiMethod apiMethod;
IncomingPhoneNumberLocalApiMethod(Class<?> resultType, String name, ApiMethodArg... args) {
this.apiMethod = new ApiMethodImpl(Local.class, resultType, name, args);
}
@Override
public String getName() { return apiMethod.getName(); }
@Override
public Class<?> getResultType() { return apiMethod.getResultType(); }
@Override
public List<String> getArgNames() { return apiMethod.getArgNames(); }
@Override
public List<String> getSetterArgNames() { return apiMethod.getSetterArgNames(); }
@Override
public List<Class<?>> getArgTypes() { return apiMethod.getArgTypes(); }
@Override
public Method getMethod() { return apiMethod.getMethod(); }
}
| IncomingPhoneNumberLocalApiMethod |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/schemaStat/SchemaStatTest21_issue3980.java | {
"start": 553,
"end": 2670
} | class ____ extends TestCase {
public void test_schemaStat() throws Exception {
SchemaRepository repository = new SchemaRepository(JdbcConstants.MYSQL);
String sql = "select n.user_name,\n" +
"n.user_passwd,\n" +
"n.user_project,\n" +
"n.start_date,\n" +
"n.end_date\n" +
"from (\n" +
"select t.name as user_name,\n" +
"t.passwd as user_passwd,\n" +
"cast(from_unixtime(t.from_time, \"yyyyMMdd\") as int) as start_date,\n" +
"cast(from_unixtime(t.to_time, \"yyyyMMdd\") as int) as end_date\n" +
"from tableA as t\n" +
"where t.user_id = 1\n" +
"union all\n" +
"select p.project as user_project\n" +
"from tableB as p\n" +
"where p.project_id = 10\n" +
") as n;";
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, JdbcConstants.MYSQL);
SQLStatement stmt = parser.parseStatementList().get(0);
SchemaStatVisitor statVisitor = SQLUtils.createSchemaStatVisitor(repository);
stmt.accept(statVisitor);
System.out.println("Tables : " + statVisitor.getTables());
System.out.println("columns : " + statVisitor.getColumns());
System.out.println(statVisitor.getGroupByColumns()); // group by
System.out.println("relationships : " + statVisitor.getRelationships()); // group by
System.out.println("conditions : " + statVisitor.getConditions());
System.out.println("functions : " + statVisitor.getFunctions());
assertEquals(7, statVisitor.getColumns().size());
assertEquals(2, statVisitor.getConditions().size());
assertEquals(2, statVisitor.getFunctions().size());
SQLPropertyExpr expr = (SQLPropertyExpr) statVisitor.getFunctions().get(0).getArguments().get(0);
SQLIdentifierExpr tableAlias = (SQLIdentifierExpr) expr.getOwner();
tableAlias.getResolvedTableSource();
}
}
| SchemaStatTest21_issue3980 |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java | {
"start": 42288,
"end": 77480
} | class ____ extends org.apache.hadoop.fs.HardLink {
// This is a stub to assist with coordinated change between
// COMMON and HDFS projects. It will be removed after the
// corresponding change is committed to HDFS.
}
/**
* Create a soft link between a src and destination
* only on a local disk. HDFS does not support this.
* On Windows, when symlink creation fails due to security
* setting, we will log a warning. The return code in this
* case is 2.
*
* @param target the target for symlink
* @param linkname the symlink
* @return 0 on success
* @throws IOException raised on errors performing I/O.
*/
public static int symLink(String target, String linkname) throws IOException{
if (target == null || linkname == null) {
LOG.warn("Can not create a symLink with a target = " + target
+ " and link =" + linkname);
return 1;
}
// Run the input paths through Java's File so that they are converted to the
// native OS form
File targetFile = new File(
Path.getPathWithoutSchemeAndAuthority(new Path(target)).toString());
File linkFile = new File(
Path.getPathWithoutSchemeAndAuthority(new Path(linkname)).toString());
String[] cmd = Shell.getSymlinkCommand(
targetFile.toString(),
linkFile.toString());
ShellCommandExecutor shExec;
try {
if (Shell.WINDOWS &&
linkFile.getParentFile() != null &&
!new Path(target).isAbsolute()) {
// Relative links on Windows must be resolvable at the time of
// creation. To ensure this we run the shell command in the directory
// of the link.
//
shExec = new ShellCommandExecutor(cmd, linkFile.getParentFile());
} else {
shExec = new ShellCommandExecutor(cmd);
}
shExec.execute();
} catch (Shell.ExitCodeException ec) {
int returnVal = ec.getExitCode();
if (Shell.WINDOWS && returnVal == SYMLINK_NO_PRIVILEGE) {
LOG.warn("Fail to create symbolic links on Windows. "
+ "The default security settings in Windows disallow non-elevated "
+ "administrators and all non-administrators from creating symbolic links. "
+ "This behavior can be changed in the Local Security Policy management console");
} else if (returnVal != 0) {
LOG.warn("Command '" + StringUtils.join(" ", cmd) + "' failed "
+ returnVal + " with: " + ec.getMessage());
}
return returnVal;
} catch (IOException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Error while create symlink " + linkname + " to " + target
+ "." + " Exception: " + StringUtils.stringifyException(e));
}
throw e;
}
return shExec.getExitCode();
}
/**
* Change the permissions on a filename.
* @param filename the name of the file to change
* @param perm the permission string
* @return the exit code from the command
* @throws IOException raised on errors performing I/O.
* @throws InterruptedException command interrupted.
*/
public static int chmod(String filename, String perm
) throws IOException, InterruptedException {
return chmod(filename, perm, false);
}
/**
* Change the permissions on a file / directory, recursively, if
* needed.
* @param filename name of the file whose permissions are to change
* @param perm permission string
* @param recursive true, if permissions should be changed recursively
* @return the exit code from the command.
* @throws IOException raised on errors performing I/O.
*/
public static int chmod(String filename, String perm, boolean recursive)
throws IOException {
String [] cmd = Shell.getSetPermissionCommand(perm, recursive);
String[] args = new String[cmd.length + 1];
System.arraycopy(cmd, 0, args, 0, cmd.length);
args[cmd.length] = new File(filename).getPath();
ShellCommandExecutor shExec = new ShellCommandExecutor(args);
try {
shExec.execute();
}catch(IOException e) {
if(LOG.isDebugEnabled()) {
LOG.debug("Error while changing permission : " + filename
+" Exception: " + StringUtils.stringifyException(e));
}
}
return shExec.getExitCode();
}
/**
* Set the ownership on a file / directory. User name and group name
* cannot both be null.
* @param file the file to change
* @param username the new user owner name
* @param groupname the new group owner name
* @throws IOException raised on errors performing I/O.
*/
public static void setOwner(File file, String username,
String groupname) throws IOException {
if (username == null && groupname == null) {
throw new IOException("username == null && groupname == null");
}
String arg = (username == null ? "" : username)
+ (groupname == null ? "" : ":" + groupname);
String [] cmd = Shell.getSetOwnerCommand(arg);
execCommand(file, cmd);
}
/**
* Platform independent implementation for {@link File#setReadable(boolean)}
* File#setReadable does not work as expected on Windows.
* @param f input file
* @param readable readable.
* @return true on success, false otherwise
*/
public static boolean setReadable(File f, boolean readable) {
if (Shell.WINDOWS) {
try {
String permission = readable ? "u+r" : "u-r";
FileUtil.chmod(f.getCanonicalPath(), permission, false);
return true;
} catch (IOException ex) {
return false;
}
} else {
return f.setReadable(readable);
}
}
/**
* Platform independent implementation for {@link File#setWritable(boolean)}
* File#setWritable does not work as expected on Windows.
* @param f input file
* @param writable writable.
* @return true on success, false otherwise
*/
public static boolean setWritable(File f, boolean writable) {
if (Shell.WINDOWS) {
try {
String permission = writable ? "u+w" : "u-w";
FileUtil.chmod(f.getCanonicalPath(), permission, false);
return true;
} catch (IOException ex) {
return false;
}
} else {
return f.setWritable(writable);
}
}
/**
* Platform independent implementation for {@link File#setExecutable(boolean)}
* File#setExecutable does not work as expected on Windows.
* Note: revoking execute permission on folders does not have the same
* behavior on Windows as on Unix platforms. Creating, deleting or renaming
* a file within that folder will still succeed on Windows.
* @param f input file
* @param executable executable.
* @return true on success, false otherwise
*/
public static boolean setExecutable(File f, boolean executable) {
if (Shell.WINDOWS) {
try {
String permission = executable ? "u+x" : "u-x";
FileUtil.chmod(f.getCanonicalPath(), permission, false);
return true;
} catch (IOException ex) {
return false;
}
} else {
return f.setExecutable(executable);
}
}
/**
* Platform independent implementation for {@link File#canRead()}
* @param f input file
* @return On Unix, same as {@link File#canRead()}
* On Windows, true if process has read access on the path
*/
public static boolean canRead(File f) {
if (Shell.WINDOWS) {
try {
return NativeIO.Windows.access(f.getCanonicalPath(),
NativeIO.Windows.AccessRight.ACCESS_READ);
} catch (IOException e) {
return false;
}
} else {
return f.canRead();
}
}
/**
* Platform independent implementation for {@link File#canWrite()}
* @param f input file
* @return On Unix, same as {@link File#canWrite()}
* On Windows, true if process has write access on the path
*/
public static boolean canWrite(File f) {
if (Shell.WINDOWS) {
try {
return NativeIO.Windows.access(f.getCanonicalPath(),
NativeIO.Windows.AccessRight.ACCESS_WRITE);
} catch (IOException e) {
return false;
}
} else {
return f.canWrite();
}
}
/**
* Platform independent implementation for {@link File#canExecute()}
* @param f input file
* @return On Unix, same as {@link File#canExecute()}
* On Windows, true if process has execute access on the path
*/
public static boolean canExecute(File f) {
if (Shell.WINDOWS) {
try {
return NativeIO.Windows.access(f.getCanonicalPath(),
NativeIO.Windows.AccessRight.ACCESS_EXECUTE);
} catch (IOException e) {
return false;
}
} else {
return f.canExecute();
}
}
/**
* Set permissions to the required value. Uses the java primitives instead
* of forking if group == other.
* @param f the file to change
* @param permission the new permissions
* @throws IOException raised on errors performing I/O.
*/
public static void setPermission(File f, FsPermission permission
) throws IOException {
FsAction user = permission.getUserAction();
FsAction group = permission.getGroupAction();
FsAction other = permission.getOtherAction();
// use the native/fork if the group/other permissions are different
// or if the native is available or on Windows
if (group != other || NativeIO.isAvailable() || Shell.WINDOWS) {
execSetPermission(f, permission);
return;
}
boolean rv = true;
// read perms
rv = f.setReadable(group.implies(FsAction.READ), false);
checkReturnValue(rv, f, permission);
if (group.implies(FsAction.READ) != user.implies(FsAction.READ)) {
rv = f.setReadable(user.implies(FsAction.READ), true);
checkReturnValue(rv, f, permission);
}
// write perms
rv = f.setWritable(group.implies(FsAction.WRITE), false);
checkReturnValue(rv, f, permission);
if (group.implies(FsAction.WRITE) != user.implies(FsAction.WRITE)) {
rv = f.setWritable(user.implies(FsAction.WRITE), true);
checkReturnValue(rv, f, permission);
}
// exec perms
rv = f.setExecutable(group.implies(FsAction.EXECUTE), false);
checkReturnValue(rv, f, permission);
if (group.implies(FsAction.EXECUTE) != user.implies(FsAction.EXECUTE)) {
rv = f.setExecutable(user.implies(FsAction.EXECUTE), true);
checkReturnValue(rv, f, permission);
}
}
private static void checkReturnValue(boolean rv, File p,
FsPermission permission
) throws IOException {
if (!rv) {
throw new IOException("Failed to set permissions of path: " + p +
" to " +
String.format("%04o", permission.toShort()));
}
}
private static void execSetPermission(File f,
FsPermission permission
) throws IOException {
if (NativeIO.isAvailable()) {
NativeIO.POSIX.chmod(f.getCanonicalPath(), permission.toShort());
} else {
execCommand(f, Shell.getSetPermissionCommand(
String.format("%04o", permission.toShort()), false));
}
}
static String execCommand(File f, String... cmd) throws IOException {
String[] args = new String[cmd.length + 1];
System.arraycopy(cmd, 0, args, 0, cmd.length);
args[cmd.length] = f.getCanonicalPath();
String output = Shell.execCommand(args);
return output;
}
/**
* Create a tmp file for a base file.
* @param basefile the base file of the tmp
* @param prefix file name prefix of tmp
* @param isDeleteOnExit if true, the tmp will be deleted when the VM exits
* @return a newly created tmp file
* @exception IOException If a tmp file cannot created
* @see java.io.File#createTempFile(String, String, File)
* @see java.io.File#deleteOnExit()
*/
public static final File createLocalTempFile(final File basefile,
final String prefix,
final boolean isDeleteOnExit)
throws IOException {
File tmp = File.createTempFile(prefix + basefile.getName(),
"", basefile.getParentFile());
if (isDeleteOnExit) {
tmp.deleteOnExit();
}
return tmp;
}
/**
* Move the src file to the name specified by target.
* @param src the source file
* @param target the target file
* @exception IOException If this operation fails
*/
public static void replaceFile(File src, File target) throws IOException {
/* renameTo() has two limitations on Windows platform.
* src.renameTo(target) fails if
* 1) If target already exists OR
* 2) If target is already open for reading/writing.
*/
if (!src.renameTo(target)) {
int retries = 5;
while (target.exists() && !target.delete() && retries-- >= 0) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
throw new IOException("replaceFile interrupted.");
}
}
if (!src.renameTo(target)) {
throw new IOException("Unable to rename " + src +
" to " + target);
}
}
}
/**
* A wrapper for {@link File#listFiles()}. This java.io API returns null
* when a dir is not a directory or for any I/O error. Instead of having
* null check everywhere File#listFiles() is used, we will add utility API
* to get around this problem. For the majority of cases where we prefer
* an IOException to be thrown.
* @param dir directory for which listing should be performed
* @return list of files or empty list
* @exception IOException for invalid directory or for a bad disk.
*/
public static File[] listFiles(File dir) throws IOException {
File[] files = dir.listFiles();
if(files == null) {
throw new IOException("Invalid directory or I/O error occurred for dir: "
+ dir.toString());
}
return files;
}
/**
* A wrapper for {@link File#list()}. This java.io API returns null
* when a dir is not a directory or for any I/O error. Instead of having
* null check everywhere File#list() is used, we will add utility API
* to get around this problem. For the majority of cases where we prefer
* an IOException to be thrown.
* @param dir directory for which listing should be performed
* @return list of file names or empty string list
* @exception AccessDeniedException for unreadable directory
* @exception IOException for invalid directory or for bad disk
*/
public static String[] list(File dir) throws IOException {
if (!canRead(dir)) {
throw new AccessDeniedException(dir.toString(), null,
FSExceptionMessages.PERMISSION_DENIED);
}
String[] fileNames = dir.list();
if(fileNames == null) {
throw new IOException("Invalid directory or I/O error occurred for dir: "
+ dir.toString());
}
return fileNames;
}
public static String[] createJarWithClassPath(String inputClassPath, Path pwd,
Map<String, String> callerEnv) throws IOException {
return createJarWithClassPath(inputClassPath, pwd, pwd, callerEnv);
}
/**
* Create a jar file at the given path, containing a manifest with a classpath
* that references all specified entries.
*
* Some platforms may have an upper limit on command line length. For example,
* the maximum command line length on Windows is 8191 characters, but the
* length of the classpath may exceed this. To work around this limitation,
* use this method to create a small intermediate jar with a manifest that
* contains the full classpath. It returns the absolute path to the new jar,
* which the caller may set as the classpath for a new process.
*
* Environment variable evaluation is not supported within a jar manifest, so
* this method expands environment variables before inserting classpath entries
* to the manifest. The method parses environment variables according to
* platform-specific syntax (%VAR% on Windows, or $VAR otherwise). On Windows,
* environment variables are case-insensitive. For example, %VAR% and %var%
* evaluate to the same value.
*
* Specifying the classpath in a jar manifest does not support wildcards, so
* this method expands wildcards internally. Any classpath entry that ends
* with * is translated to all files at that path with extension .jar or .JAR.
*
* @param inputClassPath String input classpath to bundle into the jar manifest
* @param pwd Path to working directory to save jar
* @param targetDir path to where the jar execution will have its working dir
* @param callerEnv Map {@literal <}String, String{@literal >} caller's
* environment variables to use for expansion
* @return String[] with absolute path to new jar in position 0 and
* unexpanded wild card entry path in position 1
* @throws IOException if there is an I/O error while writing the jar file
*/
public static String[] createJarWithClassPath(String inputClassPath, Path pwd,
Path targetDir,
Map<String, String> callerEnv) throws IOException {
// Replace environment variables, case-insensitive on Windows
@SuppressWarnings("unchecked")
Map<String, String> env = Shell.WINDOWS ? new CaseInsensitiveMap(callerEnv) :
callerEnv;
String[] classPathEntries = inputClassPath.split(File.pathSeparator);
for (int i = 0; i < classPathEntries.length; ++i) {
classPathEntries[i] = StringUtils.replaceTokens(classPathEntries[i],
StringUtils.ENV_VAR_PATTERN, env);
}
File workingDir = new File(pwd.toString());
if (!workingDir.mkdirs()) {
// If mkdirs returns false because the working directory already exists,
// then this is acceptable. If it returns false due to some other I/O
// error, then this method will fail later with an IOException while saving
// the jar.
LOG.debug("mkdirs false for " + workingDir + ", execution will continue");
}
StringBuilder unexpandedWildcardClasspath = new StringBuilder();
// Append all entries
List<String> classPathEntryList = new ArrayList<String>(
classPathEntries.length);
for (String classPathEntry: classPathEntries) {
if (classPathEntry.length() == 0) {
continue;
}
if (classPathEntry.endsWith("*")) {
// Append all jars that match the wildcard
List<Path> jars = getJarsInDirectory(classPathEntry);
if (!jars.isEmpty()) {
for (Path jar: jars) {
classPathEntryList.add(jar.toUri().toURL().toExternalForm());
}
} else {
unexpandedWildcardClasspath.append(File.pathSeparator)
.append(classPathEntry);
}
} else {
// Append just this entry
File fileCpEntry = null;
if(!new Path(classPathEntry).isAbsolute()) {
fileCpEntry = new File(targetDir.toString(), classPathEntry);
}
else {
fileCpEntry = new File(classPathEntry);
}
String classPathEntryUrl = fileCpEntry.toURI().toURL()
.toExternalForm();
// File.toURI only appends trailing '/' if it can determine that it is a
// directory that already exists. (See JavaDocs.) If this entry had a
// trailing '/' specified by the caller, then guarantee that the
// classpath entry in the manifest has a trailing '/', and thus refers to
// a directory instead of a file. This can happen if the caller is
// creating a classpath jar referencing a directory that hasn't been
// created yet, but will definitely be created before running.
if (classPathEntry.endsWith(Path.SEPARATOR) &&
!classPathEntryUrl.endsWith(Path.SEPARATOR)) {
classPathEntryUrl = classPathEntryUrl + Path.SEPARATOR;
}
classPathEntryList.add(classPathEntryUrl);
}
}
String jarClassPath = StringUtils.join(" ", classPathEntryList);
// Create the manifest
Manifest jarManifest = new Manifest();
jarManifest.getMainAttributes().putValue(
Attributes.Name.MANIFEST_VERSION.toString(), "1.0");
jarManifest.getMainAttributes().putValue(
Attributes.Name.CLASS_PATH.toString(), jarClassPath);
// Write the manifest to output JAR file
File classPathJar = File.createTempFile("classpath-", ".jar", workingDir);
try (OutputStream fos = Files.newOutputStream(classPathJar.toPath());
BufferedOutputStream bos = new BufferedOutputStream(fos)) {
JarOutputStream jos = new JarOutputStream(bos, jarManifest);
jos.close();
}
String[] jarCp = {classPathJar.getCanonicalPath(),
unexpandedWildcardClasspath.toString()};
return jarCp;
}
/**
* Returns all jars that are in the directory. It is useful in expanding a
* wildcard path to return all jars from the directory to use in a classpath.
* It operates only on local paths.
*
* @param path the path to the directory. The path may include the wildcard.
* @return the list of jars as URLs, or an empty list if there are no jars, or
* the directory does not exist locally
*/
public static List<Path> getJarsInDirectory(String path) {
return getJarsInDirectory(path, true);
}
/**
* Returns all jars that are in the directory. It is useful in expanding a
* wildcard path to return all jars from the directory to use in a classpath.
*
* @param path the path to the directory. The path may include the wildcard.
* @param useLocal use local.
* @return the list of jars as URLs, or an empty list if there are no jars, or
* the directory does not exist
*/
public static List<Path> getJarsInDirectory(String path, boolean useLocal) {
List<Path> paths = new ArrayList<>();
try {
// add the wildcard if it is not provided
if (!path.endsWith("*")) {
path += File.separator + "*";
}
Path globPath = new Path(path).suffix("{.jar,.JAR}");
FileContext context = useLocal ?
FileContext.getLocalFSFileContext() :
FileContext.getFileContext(globPath.toUri());
FileStatus[] files = context.util().globStatus(globPath);
if (files != null) {
for (FileStatus file: files) {
paths.add(file.getPath());
}
}
} catch (IOException ignore) {} // return the empty list
return paths;
}
public static boolean compareFs(FileSystem srcFs, FileSystem destFs) {
if (srcFs==null || destFs==null) {
return false;
}
URI srcUri = srcFs.getUri();
URI dstUri = destFs.getUri();
if (srcUri.getScheme()==null) {
return false;
}
if (!srcUri.getScheme().equals(dstUri.getScheme())) {
return false;
}
String srcHost = srcUri.getHost();
String dstHost = dstUri.getHost();
if ((srcHost!=null) && (dstHost!=null)) {
if (srcHost.equals(dstHost)) {
return srcUri.getPort()==dstUri.getPort();
}
try {
srcHost = InetAddress.getByName(srcHost).getCanonicalHostName();
dstHost = InetAddress.getByName(dstHost).getCanonicalHostName();
} catch (UnknownHostException ue) {
if (LOG.isDebugEnabled()) {
LOG.debug("Could not compare file-systems. Unknown host: ", ue);
}
return false;
}
if (!srcHost.equals(dstHost)) {
return false;
}
} else if (srcHost==null && dstHost!=null) {
return false;
} else if (srcHost!=null) {
return false;
}
// check for ports
return srcUri.getPort()==dstUri.getPort();
}
/**
* Writes bytes to a file. This utility method opens the file for writing,
* creating the file if it does not exist, or overwrites an existing file. All
* bytes in the byte array are written to the file.
*
* @param fs the file system with which to create the file
* @param path the path to the file
* @param bytes the byte array with the bytes to write
*
* @return the file system
*
* @throws NullPointerException if any of the arguments are {@code null}
* @throws IOException if an I/O error occurs creating or writing to the file
*/
public static FileSystem write(final FileSystem fs, final Path path,
final byte[] bytes) throws IOException {
Objects.requireNonNull(path);
Objects.requireNonNull(bytes);
try (FSDataOutputStream out = fs.createFile(path).overwrite(true).build()) {
out.write(bytes);
}
return fs;
}
/**
* Writes bytes to a file. This utility method opens the file for writing,
* creating the file if it does not exist, or overwrites an existing file. All
* bytes in the byte array are written to the file.
*
* @param fileContext the file context with which to create the file
* @param path the path to the file
* @param bytes the byte array with the bytes to write
*
* @return the file context
*
* @throws NullPointerException if any of the arguments are {@code null}
* @throws IOException if an I/O error occurs creating or writing to the file
*/
public static FileContext write(final FileContext fileContext,
final Path path, final byte[] bytes) throws IOException {
Objects.requireNonNull(path);
Objects.requireNonNull(bytes);
try (FSDataOutputStream out =
fileContext.create(path).overwrite(true).build()) {
out.write(bytes);
}
return fileContext;
}
/**
* Write lines of text to a file. Each line is a char sequence and is written
* to the file in sequence with each line terminated by the platform's line
* separator, as defined by the system property {@code
* line.separator}. Characters are encoded into bytes using the specified
* charset. This utility method opens the file for writing, creating the file
* if it does not exist, or overwrites an existing file.
*
* @param fs the file system with which to create the file
* @param path the path to the file
* @param lines a Collection to iterate over the char sequences
* @param cs the charset to use for encoding
*
* @return the file system
*
* @throws NullPointerException if any of the arguments are {@code null}
* @throws IOException if an I/O error occurs creating or writing to the file
*/
public static FileSystem write(final FileSystem fs, final Path path,
final Iterable<? extends CharSequence> lines, final Charset cs)
throws IOException {
Objects.requireNonNull(path);
Objects.requireNonNull(lines);
Objects.requireNonNull(cs);
CharsetEncoder encoder = cs.newEncoder();
try (FSDataOutputStream out = fs.createFile(path).overwrite(true).build();
BufferedWriter writer =
new BufferedWriter(new OutputStreamWriter(out, encoder))) {
for (CharSequence line : lines) {
writer.append(line);
writer.newLine();
}
}
return fs;
}
/**
* Write lines of text to a file. Each line is a char sequence and is written
* to the file in sequence with each line terminated by the platform's line
* separator, as defined by the system property {@code
* line.separator}. Characters are encoded into bytes using the specified
* charset. This utility method opens the file for writing, creating the file
* if it does not exist, or overwrites an existing file.
*
* @param fileContext the file context with which to create the file
* @param path the path to the file
* @param lines a Collection to iterate over the char sequences
* @param cs the charset to use for encoding
*
* @return the file context
*
* @throws NullPointerException if any of the arguments are {@code null}
* @throws IOException if an I/O error occurs creating or writing to the file
*/
public static FileContext write(final FileContext fileContext,
final Path path, final Iterable<? extends CharSequence> lines,
final Charset cs) throws IOException {
Objects.requireNonNull(path);
Objects.requireNonNull(lines);
Objects.requireNonNull(cs);
CharsetEncoder encoder = cs.newEncoder();
try (FSDataOutputStream out = fileContext.create(path).overwrite(true).build();
BufferedWriter writer =
new BufferedWriter(new OutputStreamWriter(out, encoder))) {
for (CharSequence line : lines) {
writer.append(line);
writer.newLine();
}
}
return fileContext;
}
/**
* Write a line of text to a file. Characters are encoded into bytes using the
* specified charset. This utility method opens the file for writing, creating
* the file if it does not exist, or overwrites an existing file.
*
* @param fs the file system with which to create the file
* @param path the path to the file
* @param charseq the char sequence to write to the file
* @param cs the charset to use for encoding
*
* @return the file system
*
* @throws NullPointerException if any of the arguments are {@code null}
* @throws IOException if an I/O error occurs creating or writing to the file
*/
public static FileSystem write(final FileSystem fs, final Path path,
final CharSequence charseq, final Charset cs) throws IOException {
Objects.requireNonNull(path);
Objects.requireNonNull(charseq);
Objects.requireNonNull(cs);
CharsetEncoder encoder = cs.newEncoder();
try (FSDataOutputStream out = fs.createFile(path).overwrite(true).build();
BufferedWriter writer =
new BufferedWriter(new OutputStreamWriter(out, encoder))) {
writer.append(charseq);
}
return fs;
}
/**
* Write a line of text to a file. Characters are encoded into bytes using the
* specified charset. This utility method opens the file for writing, creating
* the file if it does not exist, or overwrites an existing file.
*
* @param fs the file context with which to create the file
* @param path the path to the file
* @param charseq the char sequence to write to the file
* @param cs the charset to use for encoding
*
* @return the file context
*
* @throws NullPointerException if any of the arguments are {@code null}
* @throws IOException if an I/O error occurs creating or writing to the file
*/
public static FileContext write(final FileContext fs, final Path path,
final CharSequence charseq, final Charset cs) throws IOException {
Objects.requireNonNull(path);
Objects.requireNonNull(charseq);
Objects.requireNonNull(cs);
CharsetEncoder encoder = cs.newEncoder();
try (FSDataOutputStream out = fs.create(path).overwrite(true).build();
BufferedWriter writer =
new BufferedWriter(new OutputStreamWriter(out, encoder))) {
writer.append(charseq);
}
return fs;
}
/**
* Write a line of text to a file. Characters are encoded into bytes using
* UTF-8. This utility method opens the file for writing, creating the file if
* it does not exist, or overwrites an existing file.
*
* @param fs the files system with which to create the file
* @param path the path to the file
* @param charseq the char sequence to write to the file
*
* @return the file system
*
* @throws NullPointerException if any of the arguments are {@code null}
* @throws IOException if an I/O error occurs creating or writing to the file
*/
public static FileSystem write(final FileSystem fs, final Path path,
final CharSequence charseq) throws IOException {
return write(fs, path, charseq, StandardCharsets.UTF_8);
}
/**
* Write a line of text to a file. Characters are encoded into bytes using
* UTF-8. This utility method opens the file for writing, creating the file if
* it does not exist, or overwrites an existing file.
*
* @param fileContext the files system with which to create the file
* @param path the path to the file
* @param charseq the char sequence to write to the file
*
* @return the file context
*
* @throws NullPointerException if any of the arguments are {@code null}
* @throws IOException if an I/O error occurs creating or writing to the file
*/
public static FileContext write(final FileContext fileContext,
final Path path, final CharSequence charseq) throws IOException {
return write(fileContext, path, charseq, StandardCharsets.UTF_8);
}
@InterfaceAudience.LimitedPrivate({"ViewDistributedFileSystem"})
@InterfaceStability.Unstable
/**
* Used in ViewDistributedFileSystem rename API to get access to the protected
* API of FileSystem interface. Even though Rename with options API
* deprecated, we are still using as part of trash. If any filesystem provided
* implementation to this protected FileSystem API, we can't invoke it with
* out casting to the specific filesystem. This util method is proposed to get
* the access to FileSystem#rename with options.
*/
@SuppressWarnings("deprecation")
public static void rename(FileSystem srcFs, Path src, Path dst,
final Options.Rename... options) throws IOException {
srcFs.rename(src, dst, options);
}
/**
* Method to call after a FNFE has been raised on a treewalk, so as to
* decide whether to throw the exception (default), or, if the FS
* supports inconsistent directory listings, to log and ignore it.
* If this returns then the caller should ignore the failure and continue.
* @param fs filesystem
* @param path path
* @param e exception caught
* @throws FileNotFoundException the exception passed in, if rethrown.
*/
public static void maybeIgnoreMissingDirectory(FileSystem fs,
Path path,
FileNotFoundException e) throws FileNotFoundException {
final boolean b;
try {
b = !fs.hasPathCapability(path, DIRECTORY_LISTING_INCONSISTENT);
} catch (IOException ex) {
// something went wrong; rethrow the existing exception
e.addSuppressed(ex);
throw e;
}
if (b) {
throw e;
}
LOG.info("Ignoring missing directory {}", path);
LOG.debug("Directory missing", e);
}
/**
* Return true if the FS implements {@link WithErasureCoding} and
* supports EC_POLICY option in {@link Options.OpenFileOptions}.
* A message is logged when the filesystem does not support Erasure coding.
* @param fs filesystem
* @param path path
* @return true if the Filesystem supports EC
* @throws IOException if there is a failure in hasPathCapability call
*/
public static boolean checkFSSupportsEC(FileSystem fs, Path path) throws IOException {
if (fs instanceof WithErasureCoding &&
fs.hasPathCapability(path, Options.OpenFileOptions.FS_OPTION_OPENFILE_EC_POLICY)) {
return true;
}
LOG.warn("Filesystem with scheme {} does not support Erasure Coding" +
" at path {}", fs.getScheme(), path);
return false;
}
}
| HardLink |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/foreignkeys/disabled/DefaultConstraintModeTest.java | {
"start": 2867,
"end": 2939
} | class ____ extends TestEntity {
private String childName;
}
}
| ChildEntity |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/AdditionalMatchers.java | {
"start": 1484,
"end": 31261
} | class ____ {
/**
* argument greater than or equal the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>null</code>.
*/
public static <T extends Comparable<T>> T geq(T value) {
reportMatcher(new GreaterOrEqual<T>(value));
return null;
}
/**
* byte argument greater than or equal to the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static byte geq(byte value) {
reportMatcher(new GreaterOrEqual<Byte>(value));
return 0;
}
/**
* double argument greater than or equal to the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static double geq(double value) {
reportMatcher(new GreaterOrEqual<Double>(value));
return 0;
}
/**
* float argument greater than or equal to the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static float geq(float value) {
reportMatcher(new GreaterOrEqual<Float>(value));
return 0;
}
/**
* int argument greater than or equal to the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static int geq(int value) {
reportMatcher(new GreaterOrEqual<Integer>(value));
return 0;
}
/**
* long argument greater than or equal to the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static long geq(long value) {
reportMatcher(new GreaterOrEqual<Long>(value));
return 0;
}
/**
* short argument greater than or equal to the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static short geq(short value) {
reportMatcher(new GreaterOrEqual<Short>(value));
return 0;
}
/**
* comparable argument less than or equal the given value details.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>null</code>.
*/
public static <T extends Comparable<T>> T leq(T value) {
reportMatcher(new LessOrEqual<T>(value));
return null;
}
/**
* byte argument less than or equal to the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static byte leq(byte value) {
reportMatcher(new LessOrEqual<Byte>(value));
return 0;
}
/**
* double argument less than or equal to the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static double leq(double value) {
reportMatcher(new LessOrEqual<Double>(value));
return 0;
}
/**
* float argument less than or equal to the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static float leq(float value) {
reportMatcher(new LessOrEqual<Float>(value));
return 0;
}
/**
* int argument less than or equal to the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static int leq(int value) {
reportMatcher(new LessOrEqual<Integer>(value));
return 0;
}
/**
* long argument less than or equal to the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static long leq(long value) {
reportMatcher(new LessOrEqual<Long>(value));
return 0;
}
/**
* short argument less than or equal to the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static short leq(short value) {
reportMatcher(new LessOrEqual<Short>(value));
return 0;
}
/**
* comparable argument greater than the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>null</code>.
*/
public static <T extends Comparable<T>> T gt(T value) {
reportMatcher(new GreaterThan<T>(value));
return null;
}
/**
* byte argument greater than the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static byte gt(byte value) {
reportMatcher(new GreaterThan<Byte>(value));
return 0;
}
/**
* double argument greater than the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static double gt(double value) {
reportMatcher(new GreaterThan<Double>(value));
return 0;
}
/**
* float argument greater than the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static float gt(float value) {
reportMatcher(new GreaterThan<Float>(value));
return 0;
}
/**
* int argument greater than the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static int gt(int value) {
reportMatcher(new GreaterThan<Integer>(value));
return 0;
}
/**
* long argument greater than the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static long gt(long value) {
reportMatcher(new GreaterThan<Long>(value));
return 0;
}
/**
* short argument greater than the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static short gt(short value) {
reportMatcher(new GreaterThan<Short>(value));
return 0;
}
/**
* comparable argument less than the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>null</code>.
*/
public static <T extends Comparable<T>> T lt(T value) {
reportMatcher(new LessThan<T>(value));
return null;
}
/**
* byte argument less than the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static byte lt(byte value) {
reportMatcher(new LessThan<Byte>(value));
return 0;
}
/**
* double argument less than the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static double lt(double value) {
reportMatcher(new LessThan<Double>(value));
return 0;
}
/**
* float argument less than the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static float lt(float value) {
reportMatcher(new LessThan<Float>(value));
return 0;
}
/**
* int argument less than the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static int lt(int value) {
reportMatcher(new LessThan<Integer>(value));
return 0;
}
/**
* long argument less than the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static long lt(long value) {
reportMatcher(new LessThan<Long>(value));
return 0;
}
/**
* short argument less than the given value.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>0</code>.
*/
public static short lt(short value) {
reportMatcher(new LessThan<Short>(value));
return 0;
}
/**
* comparable argument equals to the given value according to their
* compareTo method.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @return <code>null</code>.
*/
public static <T extends Comparable<T>> T cmpEq(T value) {
reportMatcher(new CompareEqual<T>(value));
return null;
}
/**
* String argument that contains a substring that matches the given regular
* expression.
*
* @param regex
* the regular expression.
* @return <code>null</code>.
*/
public static String find(String regex) {
reportMatcher(new Find(regex));
return null;
}
/**
* Object array argument that is equal to the given array, i.e. it has to
* have the same type, length, and each element has to be equal.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param <T>
* the type of the array, it is passed through to prevent casts.
* @param value
* the given array.
* @return <code>null</code>.
*/
public static <T> T[] aryEq(T[] value) {
reportMatcher(new ArrayEquals(value));
return null;
}
/**
* short array argument that is equal to the given array, i.e. it has to
* have the same length, and each element has to be equal.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given array.
* @return <code>null</code>.
*/
public static short[] aryEq(short[] value) {
reportMatcher(new ArrayEquals(value));
return null;
}
/**
* long array argument that is equal to the given array, i.e. it has to have
* the same length, and each element has to be equal.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given array.
* @return <code>null</code>.
*/
public static long[] aryEq(long[] value) {
reportMatcher(new ArrayEquals(value));
return null;
}
/**
* int array argument that is equal to the given array, i.e. it has to have
* the same length, and each element has to be equal.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given array.
* @return <code>null</code>.
*/
public static int[] aryEq(int[] value) {
reportMatcher(new ArrayEquals(value));
return null;
}
/**
* float array argument that is equal to the given array, i.e. it has to
* have the same length, and each element has to be equal.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given array.
* @return <code>null</code>.
*/
public static float[] aryEq(float[] value) {
reportMatcher(new ArrayEquals(value));
return null;
}
/**
* double array argument that is equal to the given array, i.e. it has to
* have the same length, and each element has to be equal.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given array.
* @return <code>null</code>.
*/
public static double[] aryEq(double[] value) {
reportMatcher(new ArrayEquals(value));
return null;
}
/**
* char array argument that is equal to the given array, i.e. it has to have
* the same length, and each element has to be equal.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given array.
* @return <code>null</code>.
*/
public static char[] aryEq(char[] value) {
reportMatcher(new ArrayEquals(value));
return null;
}
/**
* byte array argument that is equal to the given array, i.e. it has to have
* the same length, and each element has to be equal.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given array.
* @return <code>null</code>.
*/
public static byte[] aryEq(byte[] value) {
reportMatcher(new ArrayEquals(value));
return null;
}
/**
* boolean array argument that is equal to the given array, i.e. it has to
* have the same length, and each element has to be equal.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given array.
* @return <code>null</code>.
*/
public static boolean[] aryEq(boolean[] value) {
reportMatcher(new ArrayEquals(value));
return null;
}
/**
* boolean argument that matches both given matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>false</code>.
*/
public static boolean and(boolean first, boolean second) {
mockingProgress().getArgumentMatcherStorage().reportAnd();
return false;
}
/**
* byte argument that matches both given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>0</code>.
*/
public static byte and(byte first, byte second) {
mockingProgress().getArgumentMatcherStorage().reportAnd();
return 0;
}
/**
* char argument that matches both given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>0</code>.
*/
public static char and(char first, char second) {
mockingProgress().getArgumentMatcherStorage().reportAnd();
return 0;
}
/**
* double argument that matches both given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>0</code>.
*/
public static double and(double first, double second) {
mockingProgress().getArgumentMatcherStorage().reportAnd();
return 0;
}
/**
* float argument that matches both given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>0</code>.
*/
public static float and(float first, float second) {
mockingProgress().getArgumentMatcherStorage().reportAnd();
return 0;
}
/**
* int argument that matches both given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>0</code>.
*/
public static int and(int first, int second) {
mockingProgress().getArgumentMatcherStorage().reportAnd();
return 0;
}
/**
* long argument that matches both given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>0</code>.
*/
public static long and(long first, long second) {
mockingProgress().getArgumentMatcherStorage().reportAnd();
return 0;
}
/**
* short argument that matches both given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>0</code>.
*/
public static short and(short first, short second) {
mockingProgress().getArgumentMatcherStorage().reportAnd();
return 0;
}
/**
* Object argument that matches both given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param <T>
* the type of the object, it is passed through to prevent casts.
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>null</code>.
*/
public static <T> T and(T first, T second) {
mockingProgress().getArgumentMatcherStorage().reportAnd();
return null;
}
/**
* boolean argument that matches any of the given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>false</code>.
*/
public static boolean or(boolean first, boolean second) {
mockingProgress().getArgumentMatcherStorage().reportOr();
return false;
}
/**
* Object argument that matches any of the given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param <T>
* the type of the object, it is passed through to prevent casts.
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>null</code>.
*/
public static <T> T or(T first, T second) {
mockingProgress().getArgumentMatcherStorage().reportOr();
return null;
}
/**
* short argument that matches any of the given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>0</code>.
*/
public static short or(short first, short second) {
mockingProgress().getArgumentMatcherStorage().reportOr();
return 0;
}
/**
* long argument that matches any of the given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>0</code>.
*/
public static long or(long first, long second) {
mockingProgress().getArgumentMatcherStorage().reportOr();
return 0;
}
/**
* int argument that matches any of the given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>0</code>.
*/
public static int or(int first, int second) {
mockingProgress().getArgumentMatcherStorage().reportOr();
return 0;
}
/**
* float argument that matches any of the given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>0</code>.
*/
public static float or(float first, float second) {
mockingProgress().getArgumentMatcherStorage().reportOr();
return 0;
}
/**
* double argument that matches any of the given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>0</code>.
*/
public static double or(double first, double second) {
mockingProgress().getArgumentMatcherStorage().reportOr();
return 0;
}
/**
* char argument that matches any of the given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>0</code>.
*/
public static char or(char first, char second) {
mockingProgress().getArgumentMatcherStorage().reportOr();
return 0;
}
/**
* byte argument that matches any of the given argument matchers.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the first argument matcher.
* @param second
* placeholder for the second argument matcher.
* @return <code>0</code>.
*/
public static byte or(byte first, byte second) {
mockingProgress().getArgumentMatcherStorage().reportOr();
return 0;
}
/**
* Object argument that does not match the given argument matcher.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param <T>
* the type of the object, it is passed through to prevent casts.
* @param first
* placeholder for the argument matcher.
* @return <code>null</code>.
*/
public static <T> T not(T first) {
mockingProgress().getArgumentMatcherStorage().reportNot();
return null;
}
/**
* short argument that does not match the given argument matcher.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the argument matcher.
* @return <code>0</code>.
*/
public static short not(short first) {
mockingProgress().getArgumentMatcherStorage().reportNot();
return 0;
}
/**
* int argument that does not match the given argument matcher.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the argument matcher.
* @return <code>0</code>.
*/
public static int not(int first) {
mockingProgress().getArgumentMatcherStorage().reportNot();
return 0;
}
/**
* long argument that does not match the given argument matcher.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the argument matcher.
* @return <code>0</code>.
*/
public static long not(long first) {
mockingProgress().getArgumentMatcherStorage().reportNot();
return 0;
}
/**
* float argument that does not match the given argument matcher.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the argument matcher.
* @return <code>0</code>.
*/
public static float not(float first) {
mockingProgress().getArgumentMatcherStorage().reportNot();
return 0;
}
/**
* double argument that does not match the given argument matcher.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the argument matcher.
* @return <code>0</code>.
*/
public static double not(double first) {
mockingProgress().getArgumentMatcherStorage().reportNot();
return 0;
}
/**
* char argument that does not match the given argument matcher.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the argument matcher.
* @return <code>0</code>.
*/
public static char not(char first) {
mockingProgress().getArgumentMatcherStorage().reportNot();
return 0;
}
/**
* boolean argument that does not match the given argument matcher.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the argument matcher.
* @return <code>false</code>.
*/
public static boolean not(boolean first) {
mockingProgress().getArgumentMatcherStorage().reportNot();
return false;
}
/**
* byte argument that does not match the given argument matcher.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param first
* placeholder for the argument matcher.
* @return <code>0</code>.
*/
public static byte not(byte first) {
mockingProgress().getArgumentMatcherStorage().reportNot();
return 0;
}
/**
* double argument that has an absolute difference to the given value that
* is less than the given delta details.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @param delta
* the given delta.
* @return <code>0</code>.
*/
public static double eq(double value, double delta) {
reportMatcher(new EqualsWithDelta(value, delta));
return 0;
}
/**
* float argument that has an absolute difference to the given value that is
* less than the given delta details.
* <p>
* See examples in javadoc for {@link AdditionalMatchers} class
*
* @param value
* the given value.
* @param delta
* the given delta.
* @return <code>0</code>.
*/
public static float eq(float value, float delta) {
reportMatcher(new EqualsWithDelta(value, delta));
return 0;
}
private static void reportMatcher(ArgumentMatcher<?> matcher) {
mockingProgress().getArgumentMatcherStorage().reportMatcher(matcher);
}
private AdditionalMatchers() {}
}
| AdditionalMatchers |
java | spring-projects__spring-security | ldap/src/test/java/org/springframework/security/ldap/authentication/MockUserSearch.java | {
"start": 848,
"end": 1133
} | class ____ implements LdapUserSearch {
DirContextOperations user;
public MockUserSearch() {
}
public MockUserSearch(DirContextOperations user) {
this.user = user;
}
@Override
public DirContextOperations searchForUser(String username) {
return this.user;
}
}
| MockUserSearch |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/ClientManagerImpl.java | {
"start": 2426,
"end": 9885
} | class ____
extends AbstractService
implements ClientManager {
public static final Logger LOG = LoggerFactory.getLogger(ClientManagerImpl.class);
/**
* Client factory to invoke.
*/
private final S3ClientFactory clientFactory;
/**
* Client factory to invoke for unencrypted client.
*/
private final S3ClientFactory unencryptedClientFactory;
/**
* Parameters to create sync/async clients.
*/
private final S3ClientFactory.S3ClientCreationParameters clientCreationParameters;
/**
* Duration tracker factory for creation.
*/
private final DurationTrackerFactory durationTrackerFactory;
/**
* Core S3 client.
*/
private final LazyAutoCloseableReference<S3Client> s3Client;
/** Async client is used for transfer manager. */
private final LazyAutoCloseableReference<S3AsyncClient> s3AsyncClient;
/**
* Unencrypted S3 client.
* This is used for unencrypted operations when CSE is enabled with V1 compatibility.
*/
private final LazyAutoCloseableReference<S3Client> unencryptedS3Client;
/** Transfer manager. */
private final LazyAutoCloseableReference<S3TransferManager> transferManager;
/**
* Constructor.
* <p>
* This does not create any clients.
* <p>
* It does disable noisy logging from the S3 Transfer Manager.
* @param clientFactory client factory to invoke
* @param unencryptedClientFactory client factory to invoke
* @param clientCreationParameters creation parameters.
* @param durationTrackerFactory duration tracker.
*/
public ClientManagerImpl(
final S3ClientFactory clientFactory,
final S3ClientFactory unencryptedClientFactory,
final S3ClientFactory.S3ClientCreationParameters clientCreationParameters,
final DurationTrackerFactory durationTrackerFactory) {
super("ClientManager");
this.clientFactory = requireNonNull(clientFactory);
this.unencryptedClientFactory = unencryptedClientFactory;
this.clientCreationParameters = requireNonNull(clientCreationParameters);
this.durationTrackerFactory = requireNonNull(durationTrackerFactory);
this.s3Client = new LazyAutoCloseableReference<>(createS3Client());
this.s3AsyncClient = new LazyAutoCloseableReference<>(createAsyncClient());
this.unencryptedS3Client = new LazyAutoCloseableReference<>(createUnencryptedS3Client());
this.transferManager = new LazyAutoCloseableReference<>(createTransferManager());
// fix up SDK logging.
AwsSdkWorkarounds.prepareLogging();
}
/**
* Create the function to create the S3 client.
* @return a callable which will create the client.
*/
private CallableRaisingIOE<S3Client> createS3Client() {
return trackDurationOfOperation(
durationTrackerFactory,
STORE_CLIENT_CREATION.getSymbol(),
() -> clientFactory.createS3Client(getUri(), clientCreationParameters));
}
/**
* Create the function to create the S3 Async client.
* @return a callable which will create the client.
*/
private CallableRaisingIOE<S3AsyncClient> createAsyncClient() {
return trackDurationOfOperation(
durationTrackerFactory,
STORE_CLIENT_CREATION.getSymbol(),
() -> clientFactory.createS3AsyncClient(getUri(), clientCreationParameters));
}
/**
* Create the function to create the unencrypted S3 client.
* @return a callable which will create the client.
*/
private CallableRaisingIOE<S3Client> createUnencryptedS3Client() {
return trackDurationOfOperation(
durationTrackerFactory,
STORE_CLIENT_CREATION.getSymbol(),
() -> unencryptedClientFactory.createS3Client(getUri(), clientCreationParameters));
}
/**
* Create the function to create the Transfer Manager.
* @return a callable which will create the component.
*/
private CallableRaisingIOE<S3TransferManager> createTransferManager() {
return () -> {
final S3AsyncClient asyncClient = s3AsyncClient.eval();
return trackDuration(durationTrackerFactory,
STORE_CLIENT_CREATION.getSymbol(), () ->
clientFactory.createS3TransferManager(asyncClient));
};
}
@Override
public synchronized S3Client getOrCreateS3Client() throws IOException {
checkNotClosed();
return s3Client.eval();
}
/**
* Get the S3Client, raising a failure to create as an UncheckedIOException.
* @return the S3 client
* @throws UncheckedIOException failure to create the client.
*/
@Override
public synchronized S3Client getOrCreateS3ClientUnchecked() throws UncheckedIOException {
checkNotClosed();
return s3Client.get();
}
@Override
public synchronized S3AsyncClient getOrCreateAsyncClient() throws IOException {
checkNotClosed();
return s3AsyncClient.eval();
}
/**
* Get the AsyncS3Client, raising a failure to create as an UncheckedIOException.
* @return the S3 client
* @throws UncheckedIOException failure to create the client.
*/
@Override
public synchronized S3Client getOrCreateAsyncS3ClientUnchecked() throws UncheckedIOException {
checkNotClosed();
return s3Client.get();
}
/**
* Get or create an unencrypted S3 client.
* This is used for unencrypted operations when CSE is enabled with V1 compatibility.
* @return unencrypted S3 client
* @throws IOException on any failure
*/
@Override
public synchronized S3Client getOrCreateUnencryptedS3Client() throws IOException {
checkNotClosed();
return unencryptedS3Client.eval();
}
@Override
public synchronized S3TransferManager getOrCreateTransferManager() throws IOException {
checkNotClosed();
return transferManager.eval();
}
@Override
protected void serviceStop() throws Exception {
// queue the closures.
List<Future<Object>> l = new ArrayList<>();
l.add(closeAsync(transferManager));
l.add(closeAsync(s3AsyncClient));
l.add(closeAsync(s3Client));
l.add(closeAsync(unencryptedS3Client));
// once all are queued, await their completion;
// exceptions will be swallowed.
awaitAllFutures(l);
super.serviceStop();
}
/**
* Check that the client manager is not closed.
* @throws IllegalStateException if it is closed.
*/
private void checkNotClosed() {
checkState(!isInState(STATE.STOPPED), "Client manager is closed");
}
/**
* Get the URI of the filesystem.
* @return URI to use when creating clients.
*/
public URI getUri() {
return clientCreationParameters.getPathUri();
}
/**
* Queue closing a closeable, logging any exception, and returning null
* to use in when awaiting a result.
* @param reference closeable.
* @param <T> type of closeable
* @return null
*/
private <T extends AutoCloseable> CompletableFuture<Object> closeAsync(
LazyAutoCloseableReference<T> reference) {
if (!reference.isSet()) {
// no-op
return completedFuture(null);
}
return supplyAsync(() -> {
try {
reference.close();
} catch (Exception e) {
LOG.warn("Failed to close {}", reference, e);
}
return null;
});
}
@Override
public String toString() {
return "ClientManagerImpl{" +
"state=" + getServiceState() +
", s3Client=" + s3Client +
", s3AsyncClient=" + s3AsyncClient +
", unencryptedS3Client=" + unencryptedS3Client +
", transferManager=" + transferManager +
'}';
}
}
| ClientManagerImpl |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/typelevel/MockitoBeansByTypeIntegrationTests.java | {
"start": 1415,
"end": 1875
} | class ____, as a
* repeatable annotation, and via a custom composed annotation.
*
* @author Sam Brannen
* @since 6.2.2
* @see <a href="https://github.com/spring-projects/spring-framework/issues/33925">gh-33925</a>
* @see MockitoBeansByNameIntegrationTests
*/
@SpringJUnitConfig
@MockitoBean(types = {Service04.class, Service05.class})
@SharedMocks // Intentionally declared between local @MockitoBean declarations
@MockitoBean(types = Service06.class)
| level |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/method/ControllerAdviceBeanTests.java | {
"start": 12005,
"end": 12139
} | class ____ {
}
@ControllerAdvice(assignableTypes = {ControllerInterface.class,
AbstractController.class})
static | MarkerClassSupport |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java | {
"start": 132589,
"end": 137597
} | class ____ implements Comparable {
long segmentOffset; //the start of the segment in the file
long segmentLength; //the length of the segment
Path segmentPathName; //the path name of the file containing the segment
boolean ignoreSync = true; //set to true for temp files
private Reader in = null;
private DataOutputBuffer rawKey = null; //this will hold the current key
private boolean preserveInput = false; //delete input segment files?
/** Constructs a segment
* @param segmentOffset the offset of the segment in the file
* @param segmentLength the length of the segment
* @param segmentPathName the path name of the file containing the segment
*/
public SegmentDescriptor (long segmentOffset, long segmentLength,
Path segmentPathName) {
this.segmentOffset = segmentOffset;
this.segmentLength = segmentLength;
this.segmentPathName = segmentPathName;
}
/** Do the sync checks. */
public void doSync() {ignoreSync = false;}
/**
* Whether to delete the files when no longer needed.
* @param preserve input boolean preserve.
*/
public void preserveInput(boolean preserve) {
preserveInput = preserve;
}
public boolean shouldPreserveInput() {
return preserveInput;
}
@Override
public int compareTo(Object o) {
SegmentDescriptor that = (SegmentDescriptor)o;
if (this.segmentLength != that.segmentLength) {
return (this.segmentLength < that.segmentLength ? -1 : 1);
}
if (this.segmentOffset != that.segmentOffset) {
return (this.segmentOffset < that.segmentOffset ? -1 : 1);
}
return (this.segmentPathName.toString()).
compareTo(that.segmentPathName.toString());
}
@Override
public boolean equals(Object o) {
if (!(o instanceof SegmentDescriptor)) {
return false;
}
SegmentDescriptor that = (SegmentDescriptor)o;
if (this.segmentLength == that.segmentLength &&
this.segmentOffset == that.segmentOffset &&
this.segmentPathName.toString().equals(
that.segmentPathName.toString())) {
return true;
}
return false;
}
@Override
public int hashCode() {
return 37 * 17 + (int) (segmentOffset^(segmentOffset>>>32));
}
/**
* Fills up the rawKey object with the key returned by the Reader.
* @return true if there is a key returned; false, otherwise
* @throws IOException raised on errors performing I/O.
*/
public boolean nextRawKey() throws IOException {
if (in == null) {
int bufferSize = getBufferSize(conf);
Reader reader = new Reader(conf,
Reader.file(segmentPathName),
Reader.bufferSize(bufferSize),
Reader.start(segmentOffset),
Reader.length(segmentLength));
//sometimes we ignore syncs especially for temp merge files
if (ignoreSync) reader.ignoreSync();
if (reader.getKeyClass() != keyClass)
throw new IOException("wrong key class: " + reader.getKeyClass() +
" is not " + keyClass);
if (reader.getValueClass() != valClass)
throw new IOException("wrong value class: "+reader.getValueClass()+
" is not " + valClass);
this.in = reader;
rawKey = new DataOutputBuffer();
}
rawKey.reset();
int keyLength =
in.nextRawKey(rawKey);
return (keyLength >= 0);
}
/**
* Fills up the passed rawValue with the value corresponding to the key
* read earlier.
* @param rawValue input ValueBytes rawValue.
* @return the length of the value
* @throws IOException raised on errors performing I/O.
*/
public int nextRawValue(ValueBytes rawValue) throws IOException {
int valLength = in.nextRawValue(rawValue);
return valLength;
}
/** @return Returns the stored rawKey */
public DataOutputBuffer getKey() {
return rawKey;
}
/** closes the underlying reader */
private void close() throws IOException {
this.in.close();
this.in = null;
}
/**
* The default cleanup. Subclasses can override this with a custom
* cleanup.
* @throws IOException raised on errors performing I/O.
*/
public void cleanup() throws IOException {
close();
if (!preserveInput) {
fs.delete(segmentPathName, true);
}
}
} // SequenceFile.Sorter.SegmentDescriptor
/** This | SegmentDescriptor |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/utils/FloatConversionUtils.java | {
"start": 306,
"end": 612
} | class ____ {
public static float[] floatArrayOf(double[] doublesArray) {
var floatArray = new float[doublesArray.length];
for (int i = 0; i < doublesArray.length; i++) {
floatArray[i] = (float) doublesArray[i];
}
return floatArray;
}
}
| FloatConversionUtils |
java | google__dagger | javatests/dagger/internal/codegen/HjarTest.java | {
"start": 2137,
"end": 3039
} | interface ____ {",
" @Provides static int provideInt() { return 0; }",
" @Provides static String provideString() { return null; }",
" @Provides static String[] provideStringArray() { return null; }",
" @Provides static int[] provideIntArray() { return null; }",
" @Provides static boolean provideBoolean() { return false; }",
"}");
daggerCompiler(module).compile(subject -> subject.hasErrorCount(0));
}
@Test
public void producerModuleTest() {
Source module =
CompilerTests.javaSource(
"test.MyModule",
"package test;",
"",
"import com.google.common.util.concurrent.ListenableFuture;",
"import dagger.producers.ProducerModule;",
"import dagger.producers.Produces;",
"",
"@ProducerModule",
" | MyModule |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/query/SlowRunningQueryBuilder.java | {
"start": 1544,
"end": 4277
} | class ____ extends AbstractQueryBuilder<SlowRunningQueryBuilder> {
public static final String NAME = "slow";
private long sleepTime;
private String index;
/**
* Sleep for sleepTime millis on all indexes
* @param sleepTime
*/
public SlowRunningQueryBuilder(long sleepTime) {
this.sleepTime = sleepTime;
}
/**
* Sleep for sleepTime millis but only on the specified index
* @param sleepTime
*/
public SlowRunningQueryBuilder(long sleepTime, String index) {
this.sleepTime = sleepTime;
this.index = index;
}
public SlowRunningQueryBuilder(StreamInput in) throws IOException {
super(in);
this.sleepTime = in.readLong();
this.index = in.readOptionalString();
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.zero();
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeLong(sleepTime);
out.writeOptionalString(index);
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
printBoostAndQueryName(builder);
builder.endObject();
}
private void sleep() {
try {
Thread.sleep(sleepTime);
} catch (InterruptedException e) {}
}
@Override
protected Query doToQuery(SearchExecutionContext context) throws IOException {
final Query delegate = Queries.newMatchAllQuery();
return new Query() {
@Override
public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
if (index == null || context.index().getName().equals(index)) {
sleep();
}
return delegate.createWeight(searcher, scoreMode, boost);
}
@Override
public String toString(String field) {
return delegate.toString(field);
}
@Override
public boolean equals(Object obj) {
return false;
}
@Override
public int hashCode() {
return 0;
}
@Override
public void visit(QueryVisitor visitor) {
visitor.visitLeaf(this);
}
};
}
@Override
protected boolean doEquals(SlowRunningQueryBuilder other) {
return false;
}
@Override
protected int doHashCode() {
return 0;
}
}
| SlowRunningQueryBuilder |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/sql/FunctionITCase.java | {
"start": 75611,
"end": 76321
} | class ____ extends ScalarFunction {
public String eval(
@DataTypeHint(inputGroup = InputGroup.ANY) Object o, java.sql.Timestamp t) {
return StringUtils.arrayAwareToString(o) + "+" + t.toString();
}
public @DataTypeHint("DECIMAL(5, 2)") BigDecimal eval() {
return new BigDecimal("123.4"); // 1 digit is missing
}
public @DataTypeHint("RAW") ByteBuffer eval(byte[] bytes) {
if (bytes == null) {
return null;
}
return ByteBuffer.wrap(bytes);
}
}
/** A function that returns either STRING or RAW type depending on a literal. */
public static | ComplexScalarFunction |
java | quarkusio__quarkus | extensions/websockets-next/runtime-dev/src/test/java/io/quarkus/websockets/next/runtime/dev/ui/WebSocketNextJsonRPCServiceTest.java | {
"start": 215,
"end": 1175
} | class ____ {
@Test
public void testIsInvalidPath() {
assertFalse(WebSocketNextJsonRPCService.isInvalidPath("/echo", "/echo"));
assertFalse(WebSocketNextJsonRPCService.isInvalidPath("/echo?foo=1", "/echo"));
assertFalse(WebSocketNextJsonRPCService.isInvalidPath("/echo/alpha", "/echo/alpha"));
assertTrue(WebSocketNextJsonRPCService.isInvalidPath("/echo", "/echo/alpha"));
assertTrue(WebSocketNextJsonRPCService.isInvalidPath("/echo", "/echo/{alpha}"));
assertTrue(WebSocketNextJsonRPCService.isInvalidPath("/echo/1/baz", "/echo/{alpha}_1/baz"));
assertFalse(WebSocketNextJsonRPCService.isInvalidPath("/echo/joe_1/baz", "/echo/{alpha}_1/baz"));
assertFalse(WebSocketNextJsonRPCService.isInvalidPath("/echo/joe_1foo/baz", "/echo/{alpha}_1{bravo}/baz"));
assertTrue(WebSocketNextJsonRPCService.isInvalidPath("/echos/1/baz", "/echo/{alpha}/baz"));
}
}
| WebSocketNextJsonRPCServiceTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/where/hbm/EagerManyToOneFetchModeSelectWhereTest.java | {
"start": 1177,
"end": 2958
} | class ____ {
@AfterEach
void dropTestData(SessionFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
@JiraKey( value = "HHH-12104" )
public void testAssociatedWhereClause(SessionFactoryScope factoryScope) {
var product = new Product();
var category = new Category();
category.name = "flowers";
product.category = category;
product.containedCategory = new ContainedCategory();
product.containedCategory.category = category;
product.containedCategories.add( new ContainedCategory( category ) );
factoryScope.inTransaction( (session) -> {
session.persist( product );
} );
factoryScope.inTransaction( (session) -> {
var p = session.find( Product.class, product.id );
assertNotNull( p );
assertNotNull( p.category );
assertNotNull( p.containedCategory.category );
assertEquals( 1, p.containedCategories.size() );
assertSame( p.category, p.containedCategory.category );
assertSame( p.category, p.containedCategories.iterator().next().category );
} );
factoryScope.inTransaction( (session) -> {
var c = session.find( Category.class, category.id );
assertNotNull( c );
c.inactive = 1;
} );
factoryScope.inTransaction( (session) -> {
var c = session.find( Category.class, category.id );
assertNull( c );
} );
factoryScope.inTransaction( (session) -> {
// Entity-level where clause is taken into account when to-one associations
// to that entity is loaded eagerly using FetchMode.SELECT, so Category
// associations will be null.
var p = session.find( Product.class, product.id );
assertNotNull( p );
assertNull( p.category );
assertNull( p.containedCategory );
assertEquals( 0, p.containedCategories.size() );
} );
}
public static | EagerManyToOneFetchModeSelectWhereTest |
java | google__guice | core/test/com/google/inject/matcher/MatcherTest.java | {
"start": 6958,
"end": 6992
} | interface ____ {}
@Foo
static | Foo |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnusedVariableTest.java | {
"start": 17592,
"end": 18050
} | class ____ {
// BUG: Diagnostic contains: is never read
private static final String NOT_USED_CONST_STR = "unused_test";
static final String CONST_STR = "test";
}
""")
.doTest();
}
@Test
public void unusedTryResource() {
helper
.addSourceLines(
"UnusedTryResource.java",
"""
package unusedvars;
public | UnusedStaticPrivate |
java | apache__camel | components/camel-ibm/camel-ibm-watson-text-to-speech/src/main/java/org/apache/camel/component/ibm/watson/tts/WatsonTextToSpeechProducer.java | {
"start": 1768,
"end": 10253
} | class ____ extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(WatsonTextToSpeechProducer.class);
public WatsonTextToSpeechProducer(WatsonTextToSpeechEndpoint endpoint) {
super(endpoint);
}
@Override
public void process(Exchange exchange) throws Exception {
WatsonTextToSpeechOperations operation = determineOperation(exchange);
switch (operation) {
case synthesize:
synthesize(exchange);
break;
case listVoices:
listVoices(exchange);
break;
case getVoice:
getVoice(exchange);
break;
case listCustomModels:
listCustomModels(exchange);
break;
case getCustomModel:
getCustomModel(exchange);
break;
case getPronunciation:
getPronunciation(exchange);
break;
default:
throw new IllegalArgumentException("Unsupported operation: " + operation);
}
}
@Override
public WatsonTextToSpeechEndpoint getEndpoint() {
return (WatsonTextToSpeechEndpoint) super.getEndpoint();
}
private WatsonTextToSpeechOperations determineOperation(Exchange exchange) {
WatsonTextToSpeechOperations operation
= exchange.getIn().getHeader(WatsonTextToSpeechConstants.OPERATION, WatsonTextToSpeechOperations.class);
if (operation == null) {
operation = getEndpoint().getConfiguration().getOperation();
}
if (operation == null) {
throw new IllegalArgumentException("Operation must be specified");
}
return operation;
}
private void synthesize(Exchange exchange) {
TextToSpeech tts = getEndpoint().getTtsClient();
if (tts == null) {
throw new IllegalStateException("TTS client not initialized");
}
String text = exchange.getIn().getHeader(WatsonTextToSpeechConstants.TEXT, String.class);
if (text == null) {
text = exchange.getIn().getBody(String.class);
}
if (text == null || text.isBlank()) {
throw new IllegalArgumentException("Text to synthesize must be specified");
}
String voice = exchange.getIn().getHeader(WatsonTextToSpeechConstants.VOICE,
getEndpoint().getConfiguration().getVoice(), String.class);
String accept = exchange.getIn().getHeader(WatsonTextToSpeechConstants.ACCEPT,
getEndpoint().getConfiguration().getAccept(), String.class);
String customizationId = exchange.getIn().getHeader(WatsonTextToSpeechConstants.CUSTOMIZATION_ID,
getEndpoint().getConfiguration().getCustomizationId(), String.class);
LOG.trace("Synthesizing text with TTS: voice={}, accept={}", voice, accept);
SynthesizeOptions.Builder builder = new SynthesizeOptions.Builder()
.text(text)
.voice(voice)
.accept(accept);
if (customizationId != null && !customizationId.isBlank()) {
builder.customizationId(customizationId);
}
SynthesizeOptions options = builder.build();
InputStream audioStream = tts.synthesize(options).execute().getResult();
Message message = getMessageForResponse(exchange);
message.setBody(audioStream);
message.setHeader(WatsonTextToSpeechConstants.VOICE, voice);
message.setHeader(WatsonTextToSpeechConstants.ACCEPT, accept);
}
private void listVoices(Exchange exchange) {
TextToSpeech tts = getEndpoint().getTtsClient();
if (tts == null) {
throw new IllegalStateException("TTS client not initialized");
}
LOG.trace("Listing available voices");
Voices voices = tts.listVoices().execute().getResult();
Message message = getMessageForResponse(exchange);
message.setBody(voices.getVoices());
}
private void getVoice(Exchange exchange) {
TextToSpeech tts = getEndpoint().getTtsClient();
if (tts == null) {
throw new IllegalStateException("TTS client not initialized");
}
String voiceName = exchange.getIn().getHeader(WatsonTextToSpeechConstants.VOICE_NAME, String.class);
if (voiceName == null) {
voiceName = exchange.getIn().getBody(String.class);
}
if (voiceName == null || voiceName.isBlank()) {
throw new IllegalArgumentException("Voice name must be specified");
}
LOG.trace("Getting voice information for: {}", voiceName);
GetVoiceOptions options = new GetVoiceOptions.Builder()
.voice(voiceName)
.build();
Voice voice = tts.getVoice(options).execute().getResult();
Message message = getMessageForResponse(exchange);
message.setBody(voice);
}
private void listCustomModels(Exchange exchange) {
TextToSpeech tts = getEndpoint().getTtsClient();
if (tts == null) {
throw new IllegalStateException("TTS client not initialized");
}
String language = exchange.getIn().getHeader(WatsonTextToSpeechConstants.LANGUAGE, String.class);
LOG.trace("Listing custom models, language filter: {}", language);
ListCustomModelsOptions.Builder builder = new ListCustomModelsOptions.Builder();
if (language != null && !language.isBlank()) {
builder.language(language);
}
ListCustomModelsOptions options = builder.build();
CustomModels customModels = tts.listCustomModels(options).execute().getResult();
Message message = getMessageForResponse(exchange);
List<CustomModel> models = customModels.getCustomizations();
message.setBody(models != null ? models : List.of());
}
private void getCustomModel(Exchange exchange) {
TextToSpeech tts = getEndpoint().getTtsClient();
if (tts == null) {
throw new IllegalStateException("TTS client not initialized");
}
String modelId = exchange.getIn().getHeader(WatsonTextToSpeechConstants.MODEL_ID, String.class);
if (modelId == null) {
modelId = exchange.getIn().getBody(String.class);
}
if (modelId == null || modelId.isBlank()) {
throw new IllegalArgumentException("Model ID must be specified");
}
LOG.trace("Getting custom model: {}", modelId);
GetCustomModelOptions options = new GetCustomModelOptions.Builder()
.customizationId(modelId)
.build();
CustomModel customModel = tts.getCustomModel(options).execute().getResult();
Message message = getMessageForResponse(exchange);
message.setBody(customModel);
}
private void getPronunciation(Exchange exchange) {
TextToSpeech tts = getEndpoint().getTtsClient();
if (tts == null) {
throw new IllegalStateException("TTS client not initialized");
}
String word = exchange.getIn().getHeader(WatsonTextToSpeechConstants.WORD, String.class);
if (word == null) {
word = exchange.getIn().getBody(String.class);
}
if (word == null || word.isBlank()) {
throw new IllegalArgumentException("Word must be specified");
}
String voice = exchange.getIn().getHeader(WatsonTextToSpeechConstants.VOICE,
getEndpoint().getConfiguration().getVoice(), String.class);
String format = exchange.getIn().getHeader(WatsonTextToSpeechConstants.FORMAT, String.class);
LOG.trace("Getting pronunciation for word: {}, voice: {}, format: {}", word, voice, format);
GetPronunciationOptions.Builder builder = new GetPronunciationOptions.Builder()
.text(word)
.voice(voice);
if (format != null && !format.isBlank()) {
builder.format(format);
}
GetPronunciationOptions options = builder.build();
Pronunciation pronunciation = tts.getPronunciation(options).execute().getResult();
Message message = getMessageForResponse(exchange);
message.setBody(pronunciation);
}
private Message getMessageForResponse(Exchange exchange) {
return exchange.getMessage();
}
}
| WatsonTextToSpeechProducer |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIngestPipelineTests.java | {
"start": 1175,
"end": 3444
} | class ____ extends ESTestCase {
private NamedWriteableRegistry namedWriteableRegistry;
@Before
public void registerNamedObjects() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, emptyList());
List<NamedWriteableRegistry.Entry> namedWriteables = searchModule.getNamedWriteables();
namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables);
}
public final void testRandomSerialization() throws IOException {
for (int runs = 0; runs < 10; runs++) {
ConnectorIngestPipeline testInstance = ConnectorTestUtils.getRandomConnectorIngestPipeline();
assertTransportSerialization(testInstance);
}
}
public void testToXContent() throws IOException {
String content = XContentHelper.stripWhitespace("""
{
"extract_binary_content": true,
"name": "search-default-ingestion",
"reduce_whitespace": true,
"run_ml_inference": false
}
""");
ConnectorIngestPipeline pipeline = ConnectorIngestPipeline.fromXContentBytes(new BytesArray(content), XContentType.JSON);
boolean humanReadable = true;
BytesReference originalBytes = toShuffledXContent(pipeline, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable);
ConnectorIngestPipeline parsed;
try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) {
parsed = ConnectorIngestPipeline.fromXContent(parser);
}
assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON);
}
private void assertTransportSerialization(ConnectorIngestPipeline testInstance) throws IOException {
ConnectorIngestPipeline deserializedInstance = copyInstance(testInstance);
assertNotSame(testInstance, deserializedInstance);
assertThat(testInstance, equalTo(deserializedInstance));
}
private ConnectorIngestPipeline copyInstance(ConnectorIngestPipeline instance) throws IOException {
return copyWriteable(instance, namedWriteableRegistry, ConnectorIngestPipeline::new);
}
}
| ConnectorIngestPipelineTests |
java | spring-projects__spring-boot | module/spring-boot-micrometer-tracing-opentelemetry/src/dockerTest/java/org/springframework/boot/micrometer/tracing/opentelemetry/testcontainers/otlp/OpenTelemetryTracingContainerConnectionDetailsFactoryIntegrationTests.java | {
"start": 1890,
"end": 2694
} | class ____ {
@Container
@ServiceConnection
static final GenericContainer<?> container = TestImage.OTEL_COLLECTOR.genericContainer()
.withExposedPorts(4317, 4318);
@Autowired
private OtlpTracingConnectionDetails connectionDetails;
@Test
void connectionCanBeMadeToOpenTelemetryContainer() {
assertThat(this.connectionDetails.getUrl(Transport.HTTP))
.isEqualTo("http://" + container.getHost() + ":" + container.getMappedPort(4318) + "/v1/traces");
assertThat(this.connectionDetails.getUrl(Transport.GRPC))
.isEqualTo("http://" + container.getHost() + ":" + container.getMappedPort(4317) + "/v1/traces");
}
@Configuration(proxyBeanMethods = false)
@ImportAutoConfiguration(OtlpTracingAutoConfiguration.class)
static | OpenTelemetryTracingContainerConnectionDetailsFactoryIntegrationTests |
java | google__dagger | java/dagger/example/atm/Command.java | {
"start": 721,
"end": 1089
} | interface ____ {
/**
* Processes and optionally acts upon the given {@code input}.
*
* @return a {@link Result} indicating how the input was handled
*/
Result handleInput(List<String> input);
/**
* A command result, which has a {@link Status} and optionally a new {@link CommandRouter} that
* will handle subsequent commands.
*/
final | Command |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/internal/InvalidStateDetectionTest.java | {
"start": 6251,
"end": 6497
} | class ____ implements DetectsInvalidState {
@SuppressWarnings({"CheckReturnValue", "MockitoUsage"})
public void detect(IMethods mock) {
verifyNoInteractions(mock);
}
}
private static | OnVerifyNoInteractions |
java | quarkusio__quarkus | test-framework/junit5-internal/src/main/java/io/quarkus/test/DisabledOnSemeru.java | {
"start": 274,
"end": 583
} | class ____ method should be disabled if Semeru is used as the JVM runtime.
* <p>
* We cannot test for Semeru exactly but we check the java.vendor is IBM Corporation.
*/
@Target({ ElementType.TYPE, ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
@ExtendWith(DisabledOnSemeruCondition.class)
public @ | or |
java | apache__camel | components/camel-cxf/camel-cxf-common/src/main/java/org/apache/camel/component/cxf/common/AbstractTLSClientParameterConfigurer.java | {
"start": 971,
"end": 1294
} | class ____ {
protected TLSClientParameters tryToGetTLSClientParametersFromConduit(HTTPConduit httpConduit) {
if (httpConduit.getTlsClientParameters() != null) {
return httpConduit.getTlsClientParameters();
}
return new TLSClientParameters();
}
}
| AbstractTLSClientParameterConfigurer |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/LuceneComponentBuilderFactory.java | {
"start": 1387,
"end": 1858
} | interface ____ {
/**
* Lucene (camel-lucene)
* Perform inserts or queries against Apache Lucene databases.
*
* Category: database,search
* Since: 2.2
* Maven coordinates: org.apache.camel:camel-lucene
*
* @return the dsl builder
*/
static LuceneComponentBuilder lucene() {
return new LuceneComponentBuilderImpl();
}
/**
* Builder for the Lucene component.
*/
| LuceneComponentBuilderFactory |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/processor/internals/PunctuationSchedule.java | {
"start": 971,
"end": 3607
} | class ____ extends Stamped<ProcessorNode<?, ?, ?, ?>> {
private final long interval;
private final Punctuator punctuator;
private boolean isCancelled = false;
// this Cancellable will be re-pointed at the successor schedule in next()
private final RepointableCancellable cancellable;
PunctuationSchedule(final ProcessorNode<?, ?, ?, ?> node,
final long time,
final long interval,
final Punctuator punctuator) {
this(node, time, interval, punctuator, new RepointableCancellable());
cancellable.setSchedule(this);
}
private PunctuationSchedule(final ProcessorNode<?, ?, ?, ?> node,
final long time,
final long interval,
final Punctuator punctuator,
final RepointableCancellable cancellable) {
super(node, time);
this.interval = interval;
this.punctuator = punctuator;
this.cancellable = cancellable;
}
public ProcessorNode<?, ?, ?, ?> node() {
return value;
}
public Punctuator punctuator() {
return punctuator;
}
public Cancellable cancellable() {
return cancellable;
}
void markCancelled() {
isCancelled = true;
}
boolean isCancelled() {
return isCancelled;
}
public PunctuationSchedule next(final long currTimestamp) {
long nextPunctuationTime = timestamp + interval;
if (currTimestamp >= nextPunctuationTime) {
// we missed one or more punctuations
// avoid scheduling a new punctuations immediately, this can happen:
// - when using STREAM_TIME punctuation and there was a gap i.e., no data was
// received for at least 2*interval
// - when using WALL_CLOCK_TIME and there was a gap i.e., punctuation was delayed for at least 2*interval (GC pause, overload, ...)
final long intervalsMissed = (currTimestamp - timestamp) / interval;
nextPunctuationTime = timestamp + (intervalsMissed + 1) * interval;
}
final PunctuationSchedule nextSchedule = new PunctuationSchedule(value, nextPunctuationTime, interval, punctuator, cancellable);
cancellable.setSchedule(nextSchedule);
return nextSchedule;
}
@Override
public boolean equals(final Object other) {
return super.equals(other);
}
@Override
public int hashCode() {
return super.hashCode();
}
private static | PunctuationSchedule |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableInfo.java | {
"start": 1110,
"end": 1956
} | class ____ {
private final Column[] columns;
private final Map<String, String> properties;
private final Transform[] partitions;
private final Constraint[] constraints;
/**
* Constructor for TableInfo used by the builder.
* @param builder Builder.
*/
private TableInfo(Builder builder) {
this.columns = builder.columns;
this.properties = builder.properties;
this.partitions = builder.partitions;
this.constraints = builder.constraints;
}
public Column[] columns() {
return columns;
}
public StructType schema() {
return CatalogV2Util.v2ColumnsToStructType(columns);
}
public Map<String, String> properties() {
return properties;
}
public Transform[] partitions() {
return partitions;
}
public Constraint[] constraints() { return constraints; }
public static | TableInfo |
java | google__dagger | javatests/dagger/functional/factory/FactoryRequiredModulesTest.java | {
"start": 1099,
"end": 1197
} | interface ____ {
long getLong();
@Component.Factory
| UninstantiableConcreteModuleComponent |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/ast/tree/predicate/InListPredicate.java | {
"start": 456,
"end": 1827
} | class ____ extends AbstractPredicate {
private final Expression testExpression;
private final List<Expression> listExpressions;
public InListPredicate(Expression testExpression) {
this( testExpression, new ArrayList<>() );
}
public InListPredicate(Expression testExpression, boolean negated, JdbcMappingContainer expressionType) {
this( testExpression, new ArrayList<>(), negated, expressionType );
}
public InListPredicate(Expression testExpression, Expression... listExpressions) {
this( testExpression, ArrayHelper.toExpandableList( listExpressions ) );
}
public InListPredicate(
Expression testExpression,
List<Expression> listExpressions) {
this( testExpression, listExpressions, false, null );
}
public InListPredicate(
Expression testExpression,
List<Expression> listExpressions,
boolean negated,
JdbcMappingContainer expressionType) {
super( expressionType, negated );
this.testExpression = testExpression;
this.listExpressions = listExpressions;
}
public Expression getTestExpression() {
return testExpression;
}
public List<Expression> getListExpressions() {
return listExpressions;
}
public void addExpression(Expression expression) {
listExpressions.add( expression );
}
@Override
public void accept(SqlAstWalker sqlTreeWalker) {
sqlTreeWalker.visitInListPredicate( this );
}
}
| InListPredicate |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/ser/impl/PropertyBasedObjectIdGenerator.java | {
"start": 262,
"end": 2684
} | class ____
extends ObjectIdGenerators.PropertyGenerator
{
private static final long serialVersionUID = 1L;
protected final BeanPropertyWriter _property;
public PropertyBasedObjectIdGenerator(ObjectIdInfo oid, BeanPropertyWriter prop)
{
this(oid.getScope(), prop);
}
protected PropertyBasedObjectIdGenerator(Class<?> scope, BeanPropertyWriter prop)
{
super(scope);
_property = prop;
}
/**
* We must override this method, to prevent errors when scopes are the same,
* but underlying class (on which to access property) is different.
*/
@Override
public boolean canUseFor(ObjectIdGenerator<?> gen) {
if (gen.getClass() == getClass()) {
PropertyBasedObjectIdGenerator other = (PropertyBasedObjectIdGenerator) gen;
if (other.getScope() == _scope) {
/* 26-Jul-2012, tatu: This is actually not enough, because the property
* accessor within BeanPropertyWriter won't work for other property fields
* (see [https://github.com/FasterXML/jackson-module-jaxb-annotations/issues/9]
* for details).
* So we need to verify that underlying property is actually the same.
*/
return (other._property == _property);
}
}
return false;
}
@Override
public Object generateId(Object forPojo) {
try {
return _property.get(forPojo);
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new IllegalStateException("Problem accessing property '"
+_property.getName()+"': "+e.getMessage(), e);
}
}
@Override
public ObjectIdGenerator<Object> forScope(Class<?> scope) {
return (scope == _scope) ? this : new PropertyBasedObjectIdGenerator(scope, _property);
}
@Override
public ObjectIdGenerator<Object> newForSerialization(Object context) {
// No state, can return this
return this;
}
@Override
public com.fasterxml.jackson.annotation.ObjectIdGenerator.IdKey key(Object key) {
if (key == null) {
return null;
}
// should we use general type for all; or type of property itself?
return new IdKey(getClass(), _scope, key);
}
}
| PropertyBasedObjectIdGenerator |
java | elastic__elasticsearch | x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/core/CheckedConsumer.java | {
"start": 528,
"end": 613
} | interface ____<T, E extends Exception> {
void accept(T t) throws E;
}
| CheckedConsumer |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/logging/LoggingSystem.java | {
"start": 1270,
"end": 6494
} | class ____ {
/**
* A System property that can be used to indicate the {@link LoggingSystem} to use.
*/
public static final String SYSTEM_PROPERTY = LoggingSystem.class.getName();
/**
* The value of the {@link #SYSTEM_PROPERTY} that can be used to indicate that no
* {@link LoggingSystem} should be used.
*/
public static final String NONE = "none";
/**
* The name used for the root logger. LoggingSystem implementations should ensure that
* this is the name used to represent the root logger, regardless of the underlying
* implementation.
*/
public static final String ROOT_LOGGER_NAME = "ROOT";
private static final LoggingSystemFactory SYSTEM_FACTORY = LoggingSystemFactory.fromSpringFactories();
/**
* The name of an {@link Environment} property used to indicate that a correlation ID
* is expected to be logged at some point.
* @since 3.2.0
*/
public static final String EXPECT_CORRELATION_ID_PROPERTY = "logging.expect-correlation-id";
/**
* Return the {@link LoggingSystemProperties} that should be applied.
* @param environment the {@link ConfigurableEnvironment} used to obtain value
* @return the {@link LoggingSystemProperties} to apply
* @since 2.4.0
*/
public LoggingSystemProperties getSystemProperties(ConfigurableEnvironment environment) {
return new LoggingSystemProperties(environment);
}
/**
* Reset the logging system to be limit output. This method may be called before
* {@link #initialize(LoggingInitializationContext, String, LogFile)} to reduce
* logging noise until the system has been fully initialized.
*/
public abstract void beforeInitialize();
/**
* Fully initialize the logging system.
* @param initializationContext the logging initialization context
* @param configLocation a log configuration location or {@code null} if default
* initialization is required
* @param logFile the log output file that should be written or {@code null} for
* console only output
*/
public void initialize(LoggingInitializationContext initializationContext, @Nullable String configLocation,
@Nullable LogFile logFile) {
}
/**
* Clean up the logging system. The default implementation does nothing. Subclasses
* should override this method to perform any logging system-specific cleanup.
*/
public void cleanUp() {
}
/**
* Returns a {@link Runnable} that can handle shutdown of this logging system when the
* JVM exits. The default implementation returns {@code null}, indicating that no
* shutdown is required.
* @return the shutdown handler, or {@code null}
*/
public @Nullable Runnable getShutdownHandler() {
return null;
}
/**
* Returns a set of the {@link LogLevel LogLevels} that are actually supported by the
* logging system.
* @return the supported levels
*/
public Set<LogLevel> getSupportedLogLevels() {
return EnumSet.allOf(LogLevel.class);
}
/**
* Sets the logging level for a given logger.
* @param loggerName the name of the logger to set ({@code null} can be used for the
* root logger).
* @param level the log level ({@code null} can be used to remove any custom level for
* the logger and use the default configuration instead)
*/
public void setLogLevel(@Nullable String loggerName, @Nullable LogLevel level) {
throw new UnsupportedOperationException("Unable to set log level");
}
/**
* Returns a collection of the current configuration for all a {@link LoggingSystem}'s
* loggers.
* @return the current configurations
* @since 1.5.0
*/
public List<LoggerConfiguration> getLoggerConfigurations() {
throw new UnsupportedOperationException("Unable to get logger configurations");
}
/**
* Returns the current configuration for a {@link LoggingSystem}'s logger.
* @param loggerName the name of the logger
* @return the current configuration
* @since 1.5.0
*/
public @Nullable LoggerConfiguration getLoggerConfiguration(String loggerName) {
throw new UnsupportedOperationException("Unable to get logger configuration");
}
/**
* Detect and return the logging system in use. Supports Logback and Java Logging.
* @param classLoader the classloader
* @return the logging system
*/
public static LoggingSystem get(ClassLoader classLoader) {
String loggingSystemClassName = System.getProperty(SYSTEM_PROPERTY);
if (StringUtils.hasLength(loggingSystemClassName)) {
if (NONE.equals(loggingSystemClassName)) {
return new NoOpLoggingSystem();
}
return get(classLoader, loggingSystemClassName);
}
LoggingSystem loggingSystem = SYSTEM_FACTORY.getLoggingSystem(classLoader);
Assert.state(loggingSystem != null, "No suitable logging system located");
return loggingSystem;
}
private static LoggingSystem get(ClassLoader classLoader, String loggingSystemClassName) {
try {
Class<?> systemClass = ClassUtils.forName(loggingSystemClassName, classLoader);
Constructor<?> constructor = systemClass.getDeclaredConstructor(ClassLoader.class);
constructor.setAccessible(true);
return (LoggingSystem) constructor.newInstance(classLoader);
}
catch (Exception ex) {
throw new IllegalStateException(ex);
}
}
/**
* {@link LoggingSystem} that does nothing.
*/
static | LoggingSystem |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/context/config/StandardConfigDataResource.java | {
"start": 1159,
"end": 4233
} | class ____ extends ConfigDataResource {
private final StandardConfigDataReference reference;
private final Resource resource;
private final boolean emptyDirectory;
/**
* Create a new {@link StandardConfigDataResource} instance.
* @param reference the resource reference
* @param resource the underlying resource
*/
StandardConfigDataResource(StandardConfigDataReference reference, Resource resource) {
this(reference, resource, false);
}
/**
* Create a new {@link StandardConfigDataResource} instance.
* @param reference the resource reference
* @param resource the underlying resource
* @param emptyDirectory if the resource is an empty directory that we know exists
*/
StandardConfigDataResource(StandardConfigDataReference reference, Resource resource, boolean emptyDirectory) {
Assert.notNull(reference, "'reference' must not be null");
Assert.notNull(resource, "'resource' must not be null");
this.reference = reference;
this.resource = resource;
this.emptyDirectory = emptyDirectory;
}
StandardConfigDataReference getReference() {
return this.reference;
}
/**
* Return the underlying Spring {@link Resource} being loaded.
* @return the underlying resource
* @since 2.4.2
*/
public Resource getResource() {
return this.resource;
}
/**
* Return the profile or {@code null} if the resource is not profile specific.
* @return the profile or {@code null}
* @since 2.4.6
*/
public @Nullable String getProfile() {
return this.reference.getProfile();
}
boolean isEmptyDirectory() {
return this.emptyDirectory;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
StandardConfigDataResource other = (StandardConfigDataResource) obj;
return (this.emptyDirectory == other.emptyDirectory) && isSameUnderlyingResource(this.resource, other.resource);
}
private boolean isSameUnderlyingResource(Resource ours, Resource other) {
return ours.equals(other) || isSameFile(getUnderlyingFile(ours), getUnderlyingFile(other));
}
private boolean isSameFile(@Nullable File ours, @Nullable File other) {
return (ours != null) && ours.equals(other);
}
@Override
public int hashCode() {
File underlyingFile = getUnderlyingFile(this.resource);
return (underlyingFile != null) ? underlyingFile.hashCode() : this.resource.hashCode();
}
@Override
public String toString() {
if (this.resource instanceof FileSystemResource || this.resource instanceof FileUrlResource) {
try {
return "file [" + this.resource.getFile() + "]";
}
catch (IOException ex) {
// Ignore
}
}
return this.resource.toString();
}
private @Nullable File getUnderlyingFile(Resource resource) {
try {
if (resource instanceof ClassPathResource || resource instanceof FileSystemResource
|| resource instanceof FileUrlResource) {
return resource.getFile().getAbsoluteFile();
}
}
catch (IOException ex) {
// Ignore
}
return null;
}
}
| StandardConfigDataResource |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/StaticEndpointBuilders.java | {
"start": 549696,
"end": 550301
} | class ____ for the ContentHandler implementation to use.
*
* @param path contentHandlerClass
* @return the dsl builder
*/
public static StAXEndpointBuilderFactory.StAXEndpointBuilder stax(String path) {
return stax("stax", path);
}
/**
* StAX (camel-stax)
* Process XML payloads by a SAX ContentHandler.
*
* Category: transformation
* Since: 2.9
* Maven coordinates: org.apache.camel:camel-stax
*
* Syntax: <code>stax:contentHandlerClass</code>
*
* Path parameter: contentHandlerClass (required)
* The FQN | name |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/immutable/entitywithmutablecollection/Plan.java | {
"start": 276,
"end": 2255
} | class ____ implements Serializable {
private long id;
private long version;
private String description;
private Set contracts;
private Set infos;
private Owner owner;
public Plan() {
this( null );
}
public Plan(String description) {
this.description = description;
contracts = new HashSet();
infos = new HashSet();
}
public long getVersion() {
return version;
}
public void setVersion(long version) {
this.version = version;
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Set getContracts() {
return contracts;
}
public void setContracts(Set contracts) {
this.contracts = contracts;
}
public void addContract(Contract contract) {
if ( ! contracts.add( contract ) ) {
return;
}
if ( contract.getParent() != null ) {
addContract( contract.getParent() );
}
contract.getPlans().add( this );
for ( Iterator it=contract.getSubcontracts().iterator(); it.hasNext(); ) {
Contract sub = ( Contract ) it.next();
addContract( sub );
}
}
public void removeContract(Contract contract) {
if ( contract.getParent() != null ) {
contract.getParent().getSubcontracts().remove( contract );
contract.setParent( null );
}
removeSubcontracts( contract );
contract.getPlans().remove( this );
contracts.remove( contract );
}
public void removeSubcontracts(Contract contract) {
for ( Iterator it=contract.getSubcontracts().iterator(); it.hasNext(); ) {
Contract sub = ( Contract ) it.next();
removeSubcontracts( sub );
sub.getPlans().remove( this );
contracts.remove( sub );
}
}
public Set getInfos() {
return infos;
}
public void setInfos(Set infos) {
this.infos = infos;
}
public Owner getOwner() {
return owner;
}
public void setOwner(Owner owner) {
this.owner = owner;
}
}
| Plan |
java | apache__camel | components/camel-telegram/src/main/java/org/apache/camel/component/telegram/model/EditMessageTextMessage.java | {
"start": 3500,
"end": 5014
} | class ____ {
protected String chatId;
private Integer messageId;
private String inlineMessageId;
private String text;
private String parseMode;
private Boolean disableWebPagePreview;
private InlineKeyboardMarkup replyMarkup;
private Builder() {
}
public Builder messageId(Integer messageId) {
this.messageId = messageId;
return this;
}
public Builder inlineMessageId(String inlineMessageId) {
this.inlineMessageId = inlineMessageId;
return this;
}
public Builder text(String text) {
this.text = text;
return this;
}
public Builder parseMode(String parseMode) {
this.parseMode = parseMode;
return this;
}
public Builder disableWebPagePreview(Boolean disableWebPagePreview) {
this.disableWebPagePreview = disableWebPagePreview;
return this;
}
public Builder replyMarkup(InlineKeyboardMarkup replyMarkup) {
this.replyMarkup = replyMarkup;
return this;
}
public Builder chatId(String chatId) {
this.chatId = chatId;
return this;
}
public EditMessageTextMessage build() {
return new EditMessageTextMessage(
chatId, messageId, inlineMessageId, text, parseMode, disableWebPagePreview, replyMarkup);
}
}
}
| Builder |
java | quarkusio__quarkus | extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionInitSqlTest.java | {
"start": 461,
"end": 1350
} | class ____ {
// Quarkus built object
@Inject
DataSource datasource;
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addAsResource("db/migration/V1.0.0__Quarkus.sql")
.addAsResource("init-sql-config.properties", "application.properties"));
@Test
@DisplayName("Check if initSql is invoked")
public void testFlywayInitSql() throws SQLException {
int var = 0;
try (Connection con = datasource.getConnection();
PreparedStatement ps = con.prepareStatement("SELECT ONE_HUNDRED");
ResultSet rs = ps.executeQuery()) {
if (rs.next()) {
var = rs.getInt(1);
}
}
assertEquals(100, var, "Init SQL was not executed");
}
}
| FlywayExtensionInitSqlTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/function/array/OracleArrayGetFunction.java | {
"start": 467,
"end": 1151
} | class ____ extends ArrayGetUnnestFunction {
public OracleArrayGetFunction() {
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> walker) {
final String arrayTypeName = DdlTypeHelper.getTypeName(
( (Expression) sqlAstArguments.get( 0 ) ).getExpressionType(),
walker.getSessionFactory().getTypeConfiguration()
);
sqlAppender.append( arrayTypeName );
sqlAppender.append( "_get(" );
sqlAstArguments.get( 0 ).accept( walker );
sqlAppender.append( ',' );
sqlAstArguments.get( 1 ).accept( walker );
sqlAppender.append( ')' );
}
}
| OracleArrayGetFunction |
java | dropwizard__dropwizard | dropwizard-e2e/src/test/java/com/example/request_log/ClassicRequestLogIntegrationTest.java | {
"start": 499,
"end": 1712
} | class ____ extends AbstractRequestLogPatternIntegrationTest {
private static final Pattern REQUEST_LOG_PATTERN = Pattern.compile(
"127\\.0\\.0\\.1 - - \\[.+\\] \"GET /greet HTTP/1\\.1\" 200 15 \"\" \"TestApplication \\(test-request-logs\\)\" \\d+"
);
@Override
protected List<ConfigOverride> configOverrides() {
final List<ConfigOverride> configOverrides = new ArrayList<>(super.configOverrides());
configOverrides.add(ConfigOverride.config("server.requestLog.type", "classic"));
return configOverrides;
}
@Test
void testDefaultPattern() throws Exception {
String url = String.format("http://localhost:%d/greet?name=Charley", dropwizardAppRule.getLocalPort());
for (int i = 0; i < 100; i++) {
client.target(url).request().get();
}
dropwizardAppRule.getConfiguration().getLoggingFactory().stop();
dropwizardAppRule.getConfiguration().getLoggingFactory().reset();
Thread.sleep(100L);
List<String> logs = Files.readAllLines(requestLogFile, UTF_8);
assertThat(logs).hasSize(100).allMatch(s -> REQUEST_LOG_PATTERN.matcher(s).matches());
}
}
| ClassicRequestLogIntegrationTest |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/jdk/AtomicBooleanDeserializer.java | {
"start": 386,
"end": 1367
} | class ____ extends StdScalarDeserializer<AtomicBoolean>
{
public AtomicBooleanDeserializer() { super(AtomicBoolean.class); }
@Override
public LogicalType logicalType() { return LogicalType.Boolean; }
@Override
public Object getEmptyValue(DeserializationContext ctxt) {
return new AtomicBoolean(false);
}
@Override
public AtomicBoolean deserialize(JsonParser p, DeserializationContext ctxt)
throws JacksonException
{
JsonToken t = p.currentToken();
if (t == JsonToken.VALUE_TRUE) {
return new AtomicBoolean(true);
}
if (t == JsonToken.VALUE_FALSE) {
return new AtomicBoolean(false);
}
// 12-Jun-2020, tatu: May look convoluted, but need to work correctly with
// CoercionConfig
Boolean b = _parseBoolean(p, ctxt, AtomicBoolean.class);
return (b == null) ? null : new AtomicBoolean(b.booleanValue());
}
}
| AtomicBooleanDeserializer |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableSkipLastTimed.java | {
"start": 984,
"end": 1752
} | class ____<T> extends AbstractObservableWithUpstream<T, T> {
final long time;
final TimeUnit unit;
final Scheduler scheduler;
final int bufferSize;
final boolean delayError;
public ObservableSkipLastTimed(ObservableSource<T> source,
long time, TimeUnit unit, Scheduler scheduler, int bufferSize, boolean delayError) {
super(source);
this.time = time;
this.unit = unit;
this.scheduler = scheduler;
this.bufferSize = bufferSize;
this.delayError = delayError;
}
@Override
public void subscribeActual(Observer<? super T> t) {
source.subscribe(new SkipLastTimedObserver<>(t, time, unit, scheduler, bufferSize, delayError));
}
static final | ObservableSkipLastTimed |
java | apache__camel | components/camel-bean-validator/src/main/java/org/apache/camel/component/bean/validator/ValidationProviderResolverFactory.java | {
"start": 956,
"end": 1097
} | interface ____ {
ValidationProviderResolver createValidationProviderResolver(CamelContext camelContext);
}
| ValidationProviderResolverFactory |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/builder/DiffResult.java | {
"start": 1444,
"end": 6467
} | class ____<T> implements Iterable<Diff<?>> {
/**
* The {@link String} returned when the objects have no differences:
* {@value}
*/
public static final String OBJECTS_SAME_STRING = StringUtils.EMPTY;
private final List<Diff<?>> diffList;
private final T lhs;
private final T rhs;
private final ToStringStyle style;
private final String toStringFormat;
/**
* Creates a {@link DiffResult} containing the differences between two
* objects.
*
* @param lhs
* the left-hand side object
* @param rhs
* the right-hand side object
* @param diffList
* the list of differences, may be empty
* @param style
* the style to use for the {@link #toString()} method. May be
* {@code null}, in which case
* {@link ToStringStyle#DEFAULT_STYLE} is used
* @param toStringFormat
* Two-argument format string for {@link String#format(String, Object...)}, for example {@code "%s differs from %s"}.
* @throws NullPointerException if {@code lhs}, {@code rhs} or {@code diffs} are {@code null}.
*/
DiffResult(final T lhs, final T rhs, final List<Diff<?>> diffList, final ToStringStyle style, final String toStringFormat) {
this.diffList = Objects.requireNonNull(diffList, "diffList");
this.lhs = Objects.requireNonNull(lhs, "lhs");
this.rhs = Objects.requireNonNull(rhs, "rhs");
this.style = Objects.requireNonNull(style, "style");
this.toStringFormat = Objects.requireNonNull(toStringFormat, "toStringFormat");
}
/**
* Gets an unmodifiable list of {@link Diff}s. The list may be empty if
* there were no differences between the objects.
*
* @return an unmodifiable list of {@link Diff}s
*/
public List<Diff<?>> getDiffs() {
return Collections.unmodifiableList(diffList);
}
/**
* Gets the object the right object has been compared to.
*
* @return the left object of the diff
* @since 3.10
*/
public T getLeft() {
return this.lhs;
}
/**
* Gets the number of differences between the two objects.
*
* @return the number of differences
*/
public int getNumberOfDiffs() {
return diffList.size();
}
/**
* Gets the object the left object has been compared to.
*
* @return the right object of the diff
* @since 3.10
*/
public T getRight() {
return this.rhs;
}
/**
* Gets the style used by the {@link #toString()} method.
*
* @return the style
*/
public ToStringStyle getToStringStyle() {
return style;
}
/**
* Returns an iterator over the {@link Diff} objects contained in this list.
*
* @return the iterator
*/
@Override
public Iterator<Diff<?>> iterator() {
return diffList.iterator();
}
/**
* Builds a {@link String} description of the differences contained within
* this {@link DiffResult}. A {@link ToStringBuilder} is used for each object
* and the style of the output is governed by the {@link ToStringStyle}
* passed to the constructor.
*
* <p>
* If there are no differences stored in this list, the method will return
* {@link #OBJECTS_SAME_STRING}. Otherwise, using the example given in
* {@link Diffable} and {@link ToStringStyle#SHORT_PREFIX_STYLE}, an output
* might be:
* </p>
*
* <pre>
* Person[name=John Doe,age=32] differs from Person[name=Joe Bloggs,age=26]
* </pre>
*
* <p>
* This indicates that the objects differ in name and age, but not in
* smoking status.
* </p>
*
* <p>
* To use a different {@link ToStringStyle} for an instance of this class,
* use {@link #toString(ToStringStyle)}.
* </p>
*
* @return a {@link String} description of the differences.
*/
@Override
public String toString() {
return toString(style);
}
/**
* Builds a {@link String} description of the differences contained within
* this {@link DiffResult}, using the supplied {@link ToStringStyle}.
*
* @param style
* the {@link ToStringStyle} to use when outputting the objects
*
* @return a {@link String} description of the differences.
*/
public String toString(final ToStringStyle style) {
if (diffList.isEmpty()) {
return OBJECTS_SAME_STRING;
}
final ToStringBuilder lhsBuilder = new ToStringBuilder(lhs, style);
final ToStringBuilder rhsBuilder = new ToStringBuilder(rhs, style);
diffList.forEach(diff -> {
lhsBuilder.append(diff.getFieldName(), diff.getLeft());
rhsBuilder.append(diff.getFieldName(), diff.getRight());
});
return String.format(toStringFormat, lhsBuilder.build(), rhsBuilder.build());
}
}
| DiffResult |
java | quarkusio__quarkus | extensions/jaxb/deployment/src/main/java/io/quarkus/jaxb/deployment/FilteredJaxbClassesToBeBoundBuildItem.java | {
"start": 592,
"end": 1003
} | class ____ extends SimpleBuildItem {
private final List<Class<?>> classes;
public static Builder builder() {
return new Builder();
}
private FilteredJaxbClassesToBeBoundBuildItem(List<Class<?>> classes) {
this.classes = classes;
}
public List<Class<?>> getClasses() {
return new ArrayList<>(classes);
}
public static | FilteredJaxbClassesToBeBoundBuildItem |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/ssl/FixedTrustManagerFactory.java | {
"start": 1040,
"end": 1562
} | class ____ extends TrustManagerFactory {
private static final Provider PROVIDER = new FixedTrustManagerFactoryProvider();
private FixedTrustManagerFactory(FixedTrustManagersSpi spi, String algorithm) {
super(spi, PROVIDER, algorithm);
}
static FixedTrustManagerFactory of(TrustManagerFactory trustManagerFactory, TrustManager... trustManagers) {
return new FixedTrustManagerFactory(new FixedTrustManagersSpi(trustManagers),
trustManagerFactory.getAlgorithm());
}
private static final | FixedTrustManagerFactory |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/metadata/ShutdownPersistentTasksStatus.java | {
"start": 859,
"end": 2192
} | class ____ implements Writeable, ToXContentObject {
private final SingleNodeShutdownMetadata.Status status;
public ShutdownPersistentTasksStatus() {
this.status = SingleNodeShutdownMetadata.Status.COMPLETE;
}
public ShutdownPersistentTasksStatus(StreamInput in) throws IOException {
this.status = SingleNodeShutdownMetadata.Status.COMPLETE;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("status", status);
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
}
public SingleNodeShutdownMetadata.Status getStatus() {
return status;
}
@Override
public int hashCode() {
return status.hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
ShutdownPersistentTasksStatus other = (ShutdownPersistentTasksStatus) obj;
return status.equals(other.status);
}
@Override
public String toString() {
return Strings.toString(this);
}
}
| ShutdownPersistentTasksStatus |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/method/annotation/HandlerMethodValidationException.java | {
"start": 2298,
"end": 6781
} | class ____ extends ResponseStatusException implements MethodValidationResult {
private final MethodValidationResult validationResult;
private final Predicate<MethodParameter> modelAttributePredicate;
private final Predicate<MethodParameter> requestParamPredicate;
public HandlerMethodValidationException(MethodValidationResult validationResult) {
this(validationResult,
param -> param.hasParameterAnnotation(ModelAttribute.class),
param -> param.hasParameterAnnotation(RequestParam.class));
}
public HandlerMethodValidationException(MethodValidationResult validationResult,
Predicate<MethodParameter> modelAttributePredicate, Predicate<MethodParameter> requestParamPredicate) {
super(initHttpStatus(validationResult), "Validation failure", null, null, null);
this.validationResult = validationResult;
this.modelAttributePredicate = modelAttributePredicate;
this.requestParamPredicate = requestParamPredicate;
}
private static HttpStatus initHttpStatus(MethodValidationResult validationResult) {
return (validationResult.isForReturnValue() ? HttpStatus.INTERNAL_SERVER_ERROR : HttpStatus.BAD_REQUEST);
}
@Override
public Object[] getDetailMessageArguments(MessageSource messageSource, Locale locale) {
return new Object[] { BindErrorUtils.resolveAndJoin(getAllErrors(), messageSource, locale) };
}
@Override
public Object[] getDetailMessageArguments() {
return new Object[] { BindErrorUtils.resolveAndJoin(getAllErrors()) };
}
@Override
public Object getTarget() {
return this.validationResult.getTarget();
}
@Override
public Method getMethod() {
return this.validationResult.getMethod();
}
@Override
public boolean isForReturnValue() {
return this.validationResult.isForReturnValue();
}
@Override
public List<ParameterValidationResult> getParameterValidationResults() {
return this.validationResult.getParameterValidationResults();
}
@Override
public List<MessageSourceResolvable> getCrossParameterValidationResults() {
return this.validationResult.getCrossParameterValidationResults();
}
/**
* Provide a {@link Visitor Visitor} to handle {@link ParameterValidationResult}s
* through callback methods organized by controller method parameter type.
*/
public void visitResults(Visitor visitor) {
for (ParameterValidationResult result : getParameterValidationResults()) {
MethodParameter param = result.getMethodParameter();
CookieValue cookieValue = param.getParameterAnnotation(CookieValue.class);
if (cookieValue != null) {
visitor.cookieValue(cookieValue, result);
continue;
}
MatrixVariable matrixVariable = param.getParameterAnnotation(MatrixVariable.class);
if (matrixVariable != null) {
visitor.matrixVariable(matrixVariable, result);
continue;
}
if (this.modelAttributePredicate.test(param)) {
ModelAttribute modelAttribute = param.getParameterAnnotation(ModelAttribute.class);
visitor.modelAttribute(modelAttribute, asErrors(result));
continue;
}
PathVariable pathVariable = param.getParameterAnnotation(PathVariable.class);
if (pathVariable != null) {
visitor.pathVariable(pathVariable, result);
continue;
}
RequestBody requestBody = param.getParameterAnnotation(RequestBody.class);
if (requestBody != null) {
if (result instanceof ParameterErrors errors) {
visitor.requestBody(requestBody, errors);
}
else {
visitor.requestBodyValidationResult(requestBody, result);
}
continue;
}
RequestHeader requestHeader = param.getParameterAnnotation(RequestHeader.class);
if (requestHeader != null) {
visitor.requestHeader(requestHeader, result);
continue;
}
if (this.requestParamPredicate.test(param)) {
RequestParam requestParam = param.getParameterAnnotation(RequestParam.class);
visitor.requestParam(requestParam, result);
continue;
}
RequestPart requestPart = param.getParameterAnnotation(RequestPart.class);
if (requestPart != null) {
visitor.requestPart(requestPart, asErrors(result));
continue;
}
visitor.other(result);
}
}
private static ParameterErrors asErrors(ParameterValidationResult result) {
Assert.state(result instanceof ParameterErrors, "Expected ParameterErrors");
return (ParameterErrors) result;
}
/**
* Contract to handle validation results with callbacks by controller method
* parameter type, with {@link #other} serving as the fallthrough.
*/
public | HandlerMethodValidationException |
java | apache__thrift | lib/java/src/main/java/org/apache/thrift/server/ServerContext.java | {
"start": 937,
"end": 1019
} | interface ____ {
/**
* Returns an object that implements the given | ServerContext |
java | apache__camel | components/camel-google/camel-google-drive/src/test/java/org/apache/camel/component/google/drive/FileConverterIT.java | {
"start": 1741,
"end": 3145
} | class ____ extends AbstractGoogleDriveTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(FileConverterIT.class);
private static final String PATH_PREFIX
= GoogleDriveApiCollection.getCollection().getApiName(DriveFilesApiMethod.class).getName();
@Override
@BeforeEach
public void doPreSetup() {
deleteDirectory("target/convertertest");
}
@Test
public void testFileConverter() throws Exception {
template.sendBodyAndHeader("file://target/convertertest", "Hello!", "CamelFileName", "greeting.txt");
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
MockEndpoint.assertIsSatisfied(context);
File file = mock.getReceivedExchanges().get(0).getIn().getBody(com.google.api.services.drive.model.File.class);
assertEquals("Hello!", context.getTypeConverter().convertTo(String.class, mock.getReceivedExchanges().get(0), file));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("file://target/convertertest?noop=true")
.convertBodyTo(File.class)
.to("google-drive://drive-files/insert?inBody=content")
.to("mock:result");
}
};
}
}
| FileConverterIT |
java | grpc__grpc-java | util/src/main/java/io/grpc/util/RoundRobinLoadBalancer.java | {
"start": 1508,
"end": 4005
} | class ____ extends MultiChildLoadBalancer {
private final AtomicInteger sequence = new AtomicInteger(new Random().nextInt());
private SubchannelPicker currentPicker = new FixedResultPicker(PickResult.withNoResult());
public RoundRobinLoadBalancer(Helper helper) {
super(helper);
}
/**
* Updates picker with the list of active subchannels (state == READY).
*/
@Override
protected void updateOverallBalancingState() {
List<ChildLbState> activeList = getReadyChildren();
if (activeList.isEmpty()) {
// No READY subchannels
// RRLB will request connection immediately on subchannel IDLE.
boolean isConnecting = false;
for (ChildLbState childLbState : getChildLbStates()) {
ConnectivityState state = childLbState.getCurrentState();
if (state == CONNECTING || state == IDLE) {
isConnecting = true;
break;
}
}
if (isConnecting) {
updateBalancingState(CONNECTING, new FixedResultPicker(PickResult.withNoResult()));
} else {
updateBalancingState(TRANSIENT_FAILURE, createReadyPicker(getChildLbStates()));
}
} else {
updateBalancingState(READY, createReadyPicker(activeList));
}
}
private void updateBalancingState(ConnectivityState state, SubchannelPicker picker) {
if (state != currentConnectivityState || !picker.equals(currentPicker)) {
getHelper().updateBalancingState(state, picker);
currentConnectivityState = state;
currentPicker = picker;
}
}
private SubchannelPicker createReadyPicker(Collection<ChildLbState> children) {
List<SubchannelPicker> pickerList = new ArrayList<>();
for (ChildLbState child : children) {
SubchannelPicker picker = child.getCurrentPicker();
pickerList.add(picker);
}
return new ReadyPicker(pickerList, sequence);
}
@Override
protected ChildLbState createChildLbState(Object key) {
return new ChildLbState(key, pickFirstLbProvider) {
@Override
protected ChildLbStateHelper createChildHelper() {
return new ChildLbStateHelper() {
@Override
public void updateBalancingState(ConnectivityState newState, SubchannelPicker newPicker) {
super.updateBalancingState(newState, newPicker);
if (!resolvingAddresses && newState == IDLE) {
getLb().requestConnection();
}
}
};
}
};
}
@VisibleForTesting
static | RoundRobinLoadBalancer |
java | apache__kafka | connect/runtime/src/test/resources/test-plugins/subclass-of-classpath/test/plugins/SubclassOfClasspathConverter.java | {
"start": 906,
"end": 1046
} | class ____ testing classloading isolation.
* See {@link org.apache.kafka.connect.runtime.isolation.TestPlugins}.
* <p>Subclasses a non-API | for |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/ClassUtils.java | {
"start": 41484,
"end": 41673
} | class ____ the package name
*/
public static String getShortName(Class<?> clazz) {
return getShortName(getQualifiedName(clazz));
}
/**
* Return the short string name of a Java | without |
java | google__guice | extensions/assistedinject/test/com/google/inject/assistedinject/FactoryProvider2Test.java | {
"start": 15491,
"end": 16294
} | interface ____ {
Car createCar() throws FireException, ExplosionException;
}
@Test
public void testConstructorExceptionsAreThrownByFactory() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(CorrectDefectiveCarFactory.class)
.toProvider(
FactoryProvider.newFactory(
CorrectDefectiveCarFactory.class, DefectiveCar.class));
}
});
try {
injector.getInstance(CorrectDefectiveCarFactory.class).createCar();
fail();
} catch (FireException e) {
fail();
} catch (ExplosionException expected) {
}
}
public static | CorrectDefectiveCarFactory |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/idclass/NestedIdClassTest.java | {
"start": 5607,
"end": 6453
} | class ____ implements Serializable {
private Long assetId;
private Long assetTypeAttributeId;
private Long tenantId;
public AssetAttributeId() {}
public AssetAttributeId(Long assetId, Long assetTypeAttributeId, Long tenantId) {
this.assetId = assetId;
this.assetTypeAttributeId = assetTypeAttributeId;
this.tenantId = tenantId;
}
public Long getAssetId() {
return assetId;
}
public void setAssetId(Long assetId) {
this.assetId = assetId;
}
public Long getAssetTypeAttributeId() {
return assetTypeAttributeId;
}
public void setAssetTypeAttributeId(Long assetTypeAttributeId) {
this.assetTypeAttributeId = assetTypeAttributeId;
}
public Long getTenantId() {
return tenantId;
}
public void setTenantId(Long tenantId) {
this.tenantId = tenantId;
}
}
public static | AssetAttributeId |
java | grpc__grpc-java | util/src/test/java/io/grpc/util/ForwardingClientStreamTracerTest.java | {
"start": 978,
"end": 1435
} | class ____ {
private final ClientStreamTracer mockDelegate = mock(ClientStreamTracer.class);
@Test
public void allMethodsForwarded() throws Exception {
ForwardingTestUtil.testMethodsForwarded(
ClientStreamTracer.class,
mockDelegate,
new ForwardingClientStreamTracerTest.TestClientStreamTracer(),
Collections.<Method>emptyList());
}
@SuppressWarnings("deprecation")
private final | ForwardingClientStreamTracerTest |
java | quarkusio__quarkus | extensions/elytron-security-jdbc/runtime/src/main/java/io/quarkus/elytron/security/jdbc/JdbcRecorder.java | {
"start": 658,
"end": 3393
} | class ____ {
private static final Provider[] PROVIDERS = new Provider[] { new WildFlyElytronPasswordProvider() };
private final RuntimeValue<JdbcSecurityRealmRuntimeConfig> runtimeConfig;
public JdbcRecorder(final RuntimeValue<JdbcSecurityRealmRuntimeConfig> runtimeConfig) {
this.runtimeConfig = runtimeConfig;
}
/**
* Create a runtime value for a {@linkplain JdbcSecurityRealm}
*
* @return - runtime value wrapper for the SecurityRealm
*/
public RuntimeValue<SecurityRealm> createRealm() {
Supplier<Provider[]> providers = new Supplier<Provider[]>() {
@Override
public Provider[] get() {
return PROVIDERS;
}
};
JdbcSecurityRealmBuilder builder = JdbcSecurityRealm.builder().setProviders(providers);
PrincipalQueriesConfig principalQueries = runtimeConfig.getValue().principalQueries();
registerPrincipalQuery(principalQueries.defaultPrincipalQuery(), builder);
principalQueries.namedPrincipalQueries()
.forEach((name, principalQuery) -> registerPrincipalQuery(principalQuery, builder));
return new RuntimeValue<>(builder.build());
}
private void registerPrincipalQuery(PrincipalQueryConfig principalQuery, JdbcSecurityRealmBuilder builder) {
QueryBuilder queryBuilder = builder.principalQuery(principalQuery.sql().orElseThrow(
() -> new IllegalStateException("quarkus.security.jdbc.principal-query.sql property must be set")))
.from(getDataSource(principalQuery));
AttributeMapper[] mappers = principalQuery.attributeMappings().entrySet()
.stream()
.map(entry -> new AttributeMapper(entry.getValue().index(), entry.getValue().to()))
.toArray(size -> new AttributeMapper[size]);
queryBuilder.withMapper(mappers);
if (principalQuery.clearPasswordMapperConfig().enabled()) {
queryBuilder.withMapper(principalQuery.clearPasswordMapperConfig().toPasswordKeyMapper());
}
if (principalQuery.bcryptPasswordKeyMapperConfig().enabled()) {
queryBuilder.withMapper(principalQuery.bcryptPasswordKeyMapperConfig().toPasswordKeyMapper());
}
}
private DataSource getDataSource(PrincipalQueryConfig principalQuery) {
if (principalQuery.datasource().isPresent()) {
return Arc.container()
.instance(DataSource.class,
new io.quarkus.agroal.DataSource.DataSourceLiteral(principalQuery.datasource().get()))
.get();
}
return Arc.container().instance(DataSource.class).get();
}
}
| JdbcRecorder |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/jdk/CollectionSerializationTest.java | {
"start": 1415,
"end": 1809
} | class ____ extends StdSerializer<List<String>>
{
public ListSerializer() { super(List.class); }
@Override
public void serialize(List<String> value, JsonGenerator gen, SerializationContext provider)
{
// just use standard List.toString(), output as JSON String
gen.writeString(value.toString());
}
}
static | ListSerializer |
java | spring-projects__spring-boot | module/spring-boot-data-cassandra/src/test/java/org/springframework/boot/data/cassandra/autoconfigure/DataCassandraRepositoriesAutoConfigurationTests.java | {
"start": 4609,
"end": 4852
} | class ____ {
}
@Configuration(proxyBeanMethods = false)
@TestAutoConfigurationPackage(City.class)
@EnableCassandraRepositories(basePackageClasses = CityRepository.class)
@Import(CassandraMockConfiguration.class)
static | DefaultConfiguration |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/std/DelegatingDeserializer.java | {
"start": 761,
"end": 5636
} | class ____
extends StdDeserializer<Object>
{
protected final ValueDeserializer<?> _delegatee;
/*
/**********************************************************************
/* Construction
/**********************************************************************
*/
public DelegatingDeserializer(ValueDeserializer<?> d)
{
super(d.handledType());
_delegatee = d;
}
/*
/**********************************************************************
/* Abstract methods to implement
/**********************************************************************
*/
protected abstract ValueDeserializer<?> newDelegatingInstance(ValueDeserializer<?> newDelegatee);
/*
/**********************************************************************
/* Overridden methods for contextualization, resolving
/**********************************************************************
*/
@Override
public void resolve(DeserializationContext ctxt) {
if (_delegatee != null) {
_delegatee.resolve(ctxt);
}
}
@Override
public ValueDeserializer<?> createContextual(DeserializationContext ctxt,
BeanProperty property)
{
JavaType vt = ctxt.constructType(_delegatee.handledType());
ValueDeserializer<?> del = ctxt.handleSecondaryContextualization(_delegatee,
property, vt);
if (del == _delegatee) {
return this;
}
return newDelegatingInstance(del);
}
@SuppressWarnings("unchecked")
@Override
public ValueDeserializer<Object> unwrappingDeserializer(DeserializationContext ctxt,
NameTransformer unwrapper)
{
ValueDeserializer<?> unwrapping = _delegatee.unwrappingDeserializer(ctxt, unwrapper);
if (unwrapping == _delegatee) {
return this;
}
return (ValueDeserializer<Object>) newDelegatingInstance(unwrapping);
}
@Override
public ValueDeserializer<?> replaceDelegatee(ValueDeserializer<?> delegatee)
{
if (delegatee == _delegatee) {
return this;
}
return newDelegatingInstance(delegatee);
}
/*
/**********************************************************************
/* Overridden deserialization methods
/**********************************************************************
*/
@Override
public Object deserialize(JsonParser p, DeserializationContext ctxt)
throws JacksonException
{
return _delegatee.deserialize(p, ctxt);
}
@SuppressWarnings("unchecked")
@Override
public Object deserialize(JsonParser p, DeserializationContext ctxt,
Object intoValue)
throws JacksonException
{
return ((ValueDeserializer<Object>)_delegatee).deserialize(p, ctxt, intoValue);
}
@Override
public Object deserializeWithType(JsonParser p, DeserializationContext ctxt,
TypeDeserializer typeDeserializer)
throws JacksonException
{
return _delegatee.deserializeWithType(p, ctxt, typeDeserializer);
}
/*
/**********************************************************************
/* Overridden other methods
/**********************************************************************
*/
@Override
public ValueDeserializer<?> getDelegatee() {
return _delegatee;
}
@Override
public AccessPattern getNullAccessPattern() {
return _delegatee.getNullAccessPattern();
}
@Override
public Object getNullValue(DeserializationContext ctxt) {
return _delegatee.getNullValue(ctxt);
}
@Override
public Object getAbsentValue(DeserializationContext ctxt) {
return _delegatee.getAbsentValue(ctxt);
}
@Override
public Object getEmptyValue(DeserializationContext ctxt) {
return _delegatee.getEmptyValue(ctxt);
}
@Override
public AccessPattern getEmptyAccessPattern() {
return _delegatee.getEmptyAccessPattern();
}
@Override
public LogicalType logicalType() {
return _delegatee.logicalType();
}
@Override
public boolean isCachable() {
return (_delegatee != null) && _delegatee.isCachable();
}
@Override
public Collection<Object> getKnownPropertyNames() { return _delegatee.getKnownPropertyNames(); }
@Override
public ObjectIdReader getObjectIdReader(DeserializationContext ctxt) {
return _delegatee.getObjectIdReader(ctxt);
}
@Override
public SettableBeanProperty findBackReference(String logicalName) {
return _delegatee.findBackReference(logicalName);
}
@Override
public Boolean supportsUpdate(DeserializationConfig config) {
return _delegatee.supportsUpdate(config);
}
}
| DelegatingDeserializer |
java | apache__kafka | tools/src/test/java/org/apache/kafka/tools/consumer/group/ResetConsumerGroupOffsetTest.java | {
"start": 5449,
"end": 45391
} | class ____ {
private static final String TOPIC_PREFIX = "foo-";
private static final String GROUP_PREFIX = "test.group-";
private String[] basicArgs(ClusterInstance cluster) {
return new String[]{"--reset-offsets",
"--bootstrap-server", cluster.bootstrapServers(),
"--timeout", Long.toString(DEFAULT_MAX_WAIT_MS)};
}
private String[] buildArgsForGroups(ClusterInstance cluster, List<String> groups, String... args) {
List<String> res = new ArrayList<>(List.of(basicArgs(cluster)));
for (String group : groups) {
res.add("--group");
res.add(group);
}
res.addAll(List.of(args));
return res.toArray(new String[0]);
}
private String[] buildArgsForGroup(ClusterInstance cluster, String group, String... args) {
return buildArgsForGroups(cluster, List.of(group), args);
}
private String[] buildArgsForAllGroups(ClusterInstance cluster, String... args) {
List<String> res = new ArrayList<>(List.of(basicArgs(cluster)));
res.add("--all-groups");
res.addAll(List.of(args));
return res.toArray(new String[0]);
}
@ClusterTest
public void testResetOffsetsNotExistingGroup(ClusterInstance cluster) throws Exception {
String topic = generateRandomTopic();
String group = "missing.group";
String[] args = buildArgsForGroup(cluster, group, "--all-topics", "--to-current", "--execute");
try (ConsumerGroupCommand.ConsumerGroupService service = getConsumerGroupService(args)) {
Map<TopicPartition, OffsetAndMetadata> resetOffsets = service.resetOffsets().get(group);
assertTrue(resetOffsets.isEmpty());
assertTrue(committedOffsets(cluster, topic, group).isEmpty());
}
}
@ClusterTest(
brokers = 2,
serverProperties = {
@ClusterConfigProperty(key = OFFSETS_TOPIC_REPLICATION_FACTOR_CONFIG, value = "2"),
}
)
public void testResetOffsetsWithOfflinePartitionNotInResetTarget(ClusterInstance cluster) throws Exception {
String topic = generateRandomTopic();
String group = "new.group";
String[] args = buildArgsForGroup(cluster, group, "--to-earliest", "--execute", "--topic", topic + ":0");
try (Admin admin = cluster.admin(); ConsumerGroupCommand.ConsumerGroupService service = getConsumerGroupService(args)) {
admin.createTopics(List.of(new NewTopic(topic, Map.of(0, List.of(0), 1, List.of(1)))));
cluster.waitTopicCreation(topic, 2);
cluster.shutdownBroker(1);
Map<TopicPartition, OffsetAndMetadata> resetOffsets = service.resetOffsets().get(group);
assertEquals(Set.of(new TopicPartition(topic, 0)), resetOffsets.keySet());
}
}
@ClusterTest
public void testResetOffsetsExistingTopic(ClusterInstance cluster) {
String topic = generateRandomTopic();
String group = "new.group";
String[] args = buildArgsForGroup(cluster, group, "--topic", topic, "--to-offset", "50");
produceMessages(cluster, topic, 100);
resetAndAssertOffsets(cluster, args, 50, true, List.of(topic));
resetAndAssertOffsets(cluster, addTo(args, "--dry-run"),
50, true, List.of(topic));
resetAndAssertOffsets(cluster, addTo(args, "--execute"),
50, false, List.of(topic));
}
@ClusterTest
public void testResetOffsetsExistingTopicSelectedGroups(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String topic = generateRandomTopic();
produceMessages(cluster, topic, 100);
List<String> groups = generateIds(topic);
for (String group : groups) {
try (AutoCloseable consumerGroupCloseable =
consumerGroupClosable(cluster, 1, topic, group, groupProtocol)) {
awaitConsumerProgress(cluster, topic, group, 100L);
}
}
String[] args = buildArgsForGroups(cluster, groups, "--topic", topic, "--to-offset", "50");
resetAndAssertOffsets(cluster, args, 50, true, List.of(topic));
resetAndAssertOffsets(cluster, addTo(args, "--dry-run"),
50, true, List.of(topic));
resetAndAssertOffsets(cluster, addTo(args, "--execute"),
50, false, List.of(topic));
}
}
@ClusterTest
public void testResetOffsetsExistingTopicAllGroups(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String topic = generateRandomTopic();
String[] args = buildArgsForAllGroups(cluster, "--topic", topic, "--to-offset", "50");
produceMessages(cluster, topic, 100);
for (int i = 1; i <= 3; i++) {
String group = generateRandomGroupId();
try (AutoCloseable consumerGroupCloseable =
consumerGroupClosable(cluster, 1, topic, group, groupProtocol)) {
awaitConsumerProgress(cluster, topic, group, 100L);
}
}
resetAndAssertOffsets(cluster, args, 50, true, List.of(topic));
resetAndAssertOffsets(cluster, addTo(args, "--dry-run"),
50, true, List.of(topic));
resetAndAssertOffsets(cluster, addTo(args, "--execute"),
50, false, List.of(topic));
}
}
@ClusterTest
public void testResetOffsetsAllTopicsAllGroups(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String groupId = generateRandomGroupId();
String topicId = generateRandomTopic();
String[] args = buildArgsForAllGroups(cluster, "--all-topics", "--to-offset", "50");
List<String> topics = generateIds(groupId);
List<String> groups = generateIds(topicId);
topics.forEach(topic -> produceMessages(cluster, topic, 100));
for (String topic : topics) {
for (String group : groups) {
try (AutoCloseable consumerGroupCloseable =
consumerGroupClosable(cluster, 3, topic, group, groupProtocol)) {
awaitConsumerProgress(cluster, topic, group, 100);
}
}
}
resetAndAssertOffsets(cluster, args, 50, true, topics);
resetAndAssertOffsets(cluster, addTo(args, "--dry-run"),
50, true, topics);
resetAndAssertOffsets(cluster, addTo(args, "--execute"),
50, false, topics);
try (Admin admin = cluster.admin()) {
admin.deleteConsumerGroups(groups).all().get();
}
}
}
@ClusterTest
public void testResetOffsetsToLocalDateTime(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
DateTimeFormatter format = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS");
LocalDateTime dateTime = now().minusDays(1);
String[] args = buildArgsForGroup(cluster, group,
"--all-topics", "--to-datetime",
format.format(dateTime), "--execute");
produceMessages(cluster, topic, 100);
try (AutoCloseable consumerGroupCloseable =
consumerGroupClosable(cluster, 1, topic, group, groupProtocol)) {
awaitConsumerProgress(cluster, topic, group, 100L);
}
resetAndAssertOffsets(cluster, topic, args, 0);
}
}
@ClusterTest
public void testResetOffsetsToZonedDateTime(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
DateTimeFormatter format = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
produceMessages(cluster, topic, 50);
ZonedDateTime checkpoint = now().atZone(ZoneId.systemDefault());
produceMessages(cluster, topic, 50);
String[] args = buildArgsForGroup(cluster, group,
"--all-topics", "--to-datetime", format.format(checkpoint),
"--execute");
try (AutoCloseable consumerGroupCloseable =
consumerGroupClosable(cluster, 1, topic, group, groupProtocol)) {
awaitConsumerProgress(cluster, topic, group, 100L);
}
resetAndAssertOffsets(cluster, topic, args, 50);
}
}
@ClusterTest
public void testResetOffsetsByDuration(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
String[] args = buildArgsForGroup(cluster, group, "--all-topics", "--by-duration", "PT1M", "--execute");
produceConsumeAndShutdown(cluster, topic, group, 1, groupProtocol);
resetAndAssertOffsets(cluster, topic, args, 0);
}
}
@ClusterTest
public void testResetOffsetsByDurationToEarliest(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
String[] args = buildArgsForGroup(cluster, group, "--all-topics", "--by-duration", "PT0.1S", "--execute");
produceConsumeAndShutdown(cluster, topic, group, 1, groupProtocol);
resetAndAssertOffsets(cluster, topic, args, 100);
}
}
@ClusterTest
public void testResetOffsetsByDurationFallbackToLatestWhenNoRecords(ClusterInstance cluster) throws ExecutionException, InterruptedException {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
String[] args = buildArgsForGroup(cluster, group, "--topic", topic, "--by-duration", "PT1M", "--execute");
try (Admin admin = cluster.admin()) {
admin.createTopics(Set.of(new NewTopic(topic, 1, (short) 1))).all().get();
resetAndAssertOffsets(cluster, args, 0, false, List.of(topic));
admin.deleteTopics(Set.of(topic)).all().get();
}
}
@ClusterTest
public void testResetOffsetsToEarliest(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
String[] args = buildArgsForGroup(cluster, group, "--all-topics", "--to-earliest", "--execute");
produceConsumeAndShutdown(cluster, topic, group, 1, groupProtocol);
resetAndAssertOffsets(cluster, topic, args, 0);
}
}
@ClusterTest
public void testResetOffsetsToLatest(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
String[] args = buildArgsForGroup(cluster, group, "--all-topics", "--to-latest", "--execute");
produceConsumeAndShutdown(cluster, topic, group, 1, groupProtocol);
produceMessages(cluster, topic, 100);
resetAndAssertOffsets(cluster, topic, args, 200);
}
}
@ClusterTest
public void testResetOffsetsToCurrentOffset(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
String[] args = buildArgsForGroup(cluster, group, "--all-topics", "--to-current", "--execute");
produceConsumeAndShutdown(cluster, topic, group, 1, groupProtocol);
produceMessages(cluster, topic, 100);
resetAndAssertOffsets(cluster, topic, args, 100);
}
}
@ClusterTest
public void testResetOffsetsToSpecificOffset(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
String[] args = buildArgsForGroup(cluster, group, "--all-topics", "--to-offset", "1", "--execute");
produceConsumeAndShutdown(cluster, topic, group, 1, groupProtocol);
resetAndAssertOffsets(cluster, topic, args, 1);
}
}
@ClusterTest
public void testResetOffsetsShiftPlus(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
String[] args = buildArgsForGroup(cluster, group, "--all-topics", "--shift-by", "50", "--execute");
produceConsumeAndShutdown(cluster, topic, group, 1, groupProtocol);
produceMessages(cluster, topic, 100);
resetAndAssertOffsets(cluster, topic, args, 150);
}
}
@ClusterTest
public void testResetOffsetsShiftMinus(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
String[] args = buildArgsForGroup(cluster, group, "--all-topics", "--shift-by", "-50", "--execute");
produceConsumeAndShutdown(cluster, topic, group, 1, groupProtocol);
produceMessages(cluster, topic, 100);
resetAndAssertOffsets(cluster, topic, args, 50);
}
}
@ClusterTest
public void testResetOffsetsShiftByLowerThanEarliest(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
String[] args = buildArgsForGroup(cluster, group, "--all-topics", "--shift-by", "-150", "--execute");
produceConsumeAndShutdown(cluster, topic, group, 1, groupProtocol);
produceMessages(cluster, topic, 100);
resetAndAssertOffsets(cluster, topic, args, 0);
}
}
@ClusterTest
public void testResetOffsetsShiftByHigherThanLatest(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
String[] args = buildArgsForGroup(cluster, group, "--all-topics", "--shift-by", "150", "--execute");
produceConsumeAndShutdown(cluster, topic, group, 1, groupProtocol);
produceMessages(cluster, topic, 100);
resetAndAssertOffsets(cluster, topic, args, 200);
}
}
@ClusterTest
public void testResetOffsetsToEarliestOnOneTopic(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
String[] args = buildArgsForGroup(cluster, group, "--topic", topic, "--to-earliest", "--execute");
produceConsumeAndShutdown(cluster, topic, group, 1, groupProtocol);
resetAndAssertOffsets(cluster, topic, args, 0);
}
}
@ClusterTest
public void testResetOffsetsToEarliestOnOneTopicAndPartition(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
String[] args = buildArgsForGroup(cluster, group, "--topic", topic + ":1",
"--to-earliest", "--execute");
try (Admin admin = cluster.admin();
ConsumerGroupCommand.ConsumerGroupService service = getConsumerGroupService(args)) {
admin.createTopics(Set.of(new NewTopic(topic, 2, (short) 1))).all().get();
produceConsumeAndShutdown(cluster, topic, group, 2, groupProtocol);
Map<TopicPartition, Long> priorCommittedOffsets = committedOffsets(cluster, topic, group);
TopicPartition tp0 = new TopicPartition(topic, 0);
TopicPartition tp1 = new TopicPartition(topic, 1);
Map<TopicPartition, Long> expectedOffsets = new HashMap<>();
expectedOffsets.put(tp0, priorCommittedOffsets.get(tp0));
expectedOffsets.put(tp1, 0L);
resetAndAssertOffsetsCommitted(cluster, service, expectedOffsets, topic);
admin.deleteTopics(Set.of(topic)).all().get();
}
}
}
@ClusterTest
public void testResetOffsetsToEarliestOnTopics(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic1 = generateRandomTopic();
String topic2 = generateRandomTopic();
String[] args = buildArgsForGroup(cluster, group,
"--topic", topic1,
"--topic", topic2,
"--to-earliest", "--execute");
try (Admin admin = cluster.admin();
ConsumerGroupCommand.ConsumerGroupService service = getConsumerGroupService(args)) {
admin.createTopics(List.of(new NewTopic(topic1, 1, (short) 1),
new NewTopic(topic2, 1, (short) 1))).all().get();
produceConsumeAndShutdown(cluster, topic1, group, 1, groupProtocol);
produceConsumeAndShutdown(cluster, topic2, group, 1, groupProtocol);
TopicPartition tp1 = new TopicPartition(topic1, 0);
TopicPartition tp2 = new TopicPartition(topic2, 0);
Map<TopicPartition, Long> allResetOffsets = toOffsetMap(resetOffsets(service).get(group));
Map<TopicPartition, Long> expMap = new HashMap<>();
expMap.put(tp1, 0L);
expMap.put(tp2, 0L);
assertEquals(expMap, allResetOffsets);
assertEquals(Map.of(tp1, 0L), committedOffsets(cluster, topic1, group));
assertEquals(Map.of(tp2, 0L), committedOffsets(cluster, topic2, group));
admin.deleteTopics(List.of(topic1, topic2)).all().get();
}
}
}
@ClusterTest
public void testResetOffsetsToEarliestOnTopicsAndPartitions(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic1 = generateRandomTopic();
String topic2 = generateRandomTopic();
String[] args = buildArgsForGroup(cluster, group,
"--topic", topic1 + ":1",
"--topic", topic2 + ":1",
"--to-earliest", "--execute");
try (Admin admin = cluster.admin();
ConsumerGroupCommand.ConsumerGroupService service = getConsumerGroupService(args)) {
admin.createTopics(List.of(new NewTopic(topic1, 2, (short) 1),
new NewTopic(topic2, 2, (short) 1))).all().get();
produceConsumeAndShutdown(cluster, topic1, group, 2, groupProtocol);
produceConsumeAndShutdown(cluster, topic2, group, 2, groupProtocol);
Map<TopicPartition, Long> priorCommittedOffsets1 =
committedOffsets(cluster, topic1, group);
Map<TopicPartition, Long> priorCommittedOffsets2 =
committedOffsets(cluster, topic2, group);
TopicPartition tp1 = new TopicPartition(topic1, 1);
TopicPartition tp2 = new TopicPartition(topic2, 1);
Map<TopicPartition, Long> allResetOffsets = toOffsetMap(resetOffsets(service).get(group));
Map<TopicPartition, Long> expMap = new HashMap<>();
expMap.put(tp1, 0L);
expMap.put(tp2, 0L);
assertEquals(expMap, allResetOffsets);
priorCommittedOffsets1.put(tp1, 0L);
assertEquals(priorCommittedOffsets1, committedOffsets(cluster, topic1, group));
priorCommittedOffsets2.put(tp2, 0L);
assertEquals(priorCommittedOffsets2, committedOffsets(cluster, topic2, group));
admin.deleteTopics(List.of(topic1, topic2)).all().get();
}
}
}
@ClusterTest
// This one deals with old CSV export/import format for a single --group arg:
// "topic,partition,offset" to support old behavior
public void testResetOffsetsExportImportPlanSingleGroupArg(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group = generateRandomGroupId();
String topic = generateRandomTopic();
TopicPartition tp0 = new TopicPartition(topic, 0);
TopicPartition tp1 = new TopicPartition(topic, 1);
String[] cgcArgs = buildArgsForGroup(cluster, group, "--all-topics", "--to-offset", "2", "--export");
File file = TestUtils.tempFile("reset", ".csv");
try (Admin admin = cluster.admin();
ConsumerGroupCommand.ConsumerGroupService service = getConsumerGroupService(cgcArgs)) {
admin.createTopics(Set.of(new NewTopic(topic, 2, (short) 1))).all().get();
produceConsumeAndShutdown(cluster, topic, group, 2, groupProtocol);
Map<String, Map<TopicPartition, OffsetAndMetadata>> exportedOffsets = service.resetOffsets();
writeContentToFile(file, service.exportOffsetsToCsv(exportedOffsets));
Map<TopicPartition, Long> exp1 = new HashMap<>();
exp1.put(tp0, 2L);
exp1.put(tp1, 2L);
assertEquals(exp1, toOffsetMap(exportedOffsets.get(group)));
String[] cgcArgsExec = buildArgsForGroup(cluster, group, "--all-topics",
"--from-file", file.getCanonicalPath(), "--dry-run");
try (ConsumerGroupCommand.ConsumerGroupService serviceExec = getConsumerGroupService(cgcArgsExec)) {
Map<String, Map<TopicPartition, OffsetAndMetadata>> importedOffsets = serviceExec.resetOffsets();
assertEquals(exp1, toOffsetMap(importedOffsets.get(group)));
}
admin.deleteTopics(Set.of(topic));
}
}
}
@ClusterTest
// This one deals with universal CSV export/import file format "group,topic,partition,offset",
// supporting multiple --group args or --all-groups arg
public void testResetOffsetsExportImportPlan(ClusterInstance cluster) throws Exception {
for (GroupProtocol groupProtocol : cluster.supportedGroupProtocols()) {
String group1 = generateRandomGroupId();
String group2 = generateRandomGroupId();
String topic1 = generateRandomTopic();
String topic2 = generateRandomTopic();
TopicPartition t1p0 = new TopicPartition(topic1, 0);
TopicPartition t1p1 = new TopicPartition(topic1, 1);
TopicPartition t2p0 = new TopicPartition(topic2, 0);
TopicPartition t2p1 = new TopicPartition(topic2, 1);
String[] cgcArgs = buildArgsForGroups(cluster, List.of(group1, group2),
"--all-topics", "--to-offset", "2", "--export");
File file = TestUtils.tempFile("reset", ".csv");
try (Admin admin = cluster.admin();
ConsumerGroupCommand.ConsumerGroupService service = getConsumerGroupService(cgcArgs)) {
admin.createTopics(List.of(new NewTopic(topic1, 2, (short) 1),
new NewTopic(topic2, 2, (short) 1))).all().get();
produceConsumeAndShutdown(cluster, topic1, group1, 1, groupProtocol);
produceConsumeAndShutdown(cluster, topic2, group2, 1, groupProtocol);
awaitConsumerGroupInactive(service, group1);
awaitConsumerGroupInactive(service, group2);
Map<String, Map<TopicPartition, OffsetAndMetadata>> exportedOffsets = service.resetOffsets();
writeContentToFile(file, service.exportOffsetsToCsv(exportedOffsets));
Map<TopicPartition, Long> exp1 = new HashMap<>();
exp1.put(t1p0, 2L);
exp1.put(t1p1, 2L);
Map<TopicPartition, Long> exp2 = new HashMap<>();
exp2.put(t2p0, 2L);
exp2.put(t2p1, 2L);
assertEquals(exp1, toOffsetMap(exportedOffsets.get(group1)));
assertEquals(exp2, toOffsetMap(exportedOffsets.get(group2)));
// Multiple --group's offset import
String[] cgcArgsExec = buildArgsForGroups(cluster, List.of(group1, group2),
"--all-topics",
"--from-file", file.getCanonicalPath(), "--dry-run");
try (ConsumerGroupCommand.ConsumerGroupService serviceExec = getConsumerGroupService(cgcArgsExec)) {
Map<String, Map<TopicPartition, OffsetAndMetadata>> importedOffsets = serviceExec.resetOffsets();
assertEquals(exp1, toOffsetMap(importedOffsets.get(group1)));
assertEquals(exp2, toOffsetMap(importedOffsets.get(group2)));
}
// Single --group offset import using "group,topic,partition,offset" csv format
String[] cgcArgsExec2 = buildArgsForGroup(cluster, group1, "--all-topics",
"--from-file", file.getCanonicalPath(), "--dry-run");
try (ConsumerGroupCommand.ConsumerGroupService serviceExec2 = getConsumerGroupService(cgcArgsExec2)) {
Map<String, Map<TopicPartition, OffsetAndMetadata>> importedOffsets2 = serviceExec2.resetOffsets();
assertEquals(exp1, toOffsetMap(importedOffsets2.get(group1)));
}
admin.deleteTopics(List.of(topic1, topic2));
}
}
}
@ClusterTest
public void testResetWithUnrecognizedNewConsumerOption(ClusterInstance cluster) {
String group = generateRandomGroupId();
String[] cgcArgs = new String[]{"--new-consumer",
"--bootstrap-server", cluster.bootstrapServers(),
"--reset-offsets", "--group", group, "--all-topics",
"--to-offset", "2", "--export"};
assertThrows(OptionException.class, () -> getConsumerGroupService(cgcArgs));
}
    /**
     * Resetting offsets must fail with {@link LeaderNotAvailableException} once a targeted
     * partition loses its leader. The topic has replication factor 1, so shutting down one
     * of the three brokers leaves some of its partitions leaderless.
     */
    @ClusterTest(brokers = 3, serverProperties = {@ClusterConfigProperty(key = OFFSETS_TOPIC_REPLICATION_FACTOR_CONFIG, value = "2")})
    public void testResetOffsetsWithPartitionNoneLeader(ClusterInstance cluster) throws Exception {
        String group = generateRandomGroupId();
        String topic = generateRandomTopic();
        String[] args = buildArgsForGroup(cluster, group, "--topic", topic + ":0,1,2",
            "--to-earliest", "--execute");
        try (Admin admin = cluster.admin();
             ConsumerGroupCommand.ConsumerGroupService service = getConsumerGroupService(args)) {
            admin.createTopics(Set.of(new NewTopic(topic, 3, (short) 1))).all().get();
            produceConsumeAndShutdown(cluster, topic, group, 2, GroupProtocol.CLASSIC);
            // Sanity check: the reset succeeds while all partition leaders are available.
            assertDoesNotThrow(() -> resetOffsets(service));
            // shutdown a broker to make some partitions missing leader
            cluster.shutdownBroker(0);
            assertThrows(LeaderNotAvailableException.class, () -> resetOffsets(service));
        }
    }
    /**
     * Resetting offsets for partitions that do not exist must fail with
     * {@link UnknownTopicOrPartitionException}: the topic is created with a single
     * partition (0) while the reset targets partitions 2 and 3.
     */
    @ClusterTest
    public void testResetOffsetsWithPartitionNotExist(ClusterInstance cluster) throws Exception {
        String group = generateRandomGroupId();
        String topic = generateRandomTopic();
        String[] args = buildArgsForGroup(cluster, group, "--topic", topic + ":2,3",
            "--to-earliest", "--execute");
        try (Admin admin = cluster.admin();
             ConsumerGroupCommand.ConsumerGroupService service = getConsumerGroupService(args)) {
            admin.createTopics(Set.of(new NewTopic(topic, 1, (short) 1))).all().get();
            produceConsumeAndShutdown(cluster, topic, group, 2, GroupProtocol.CLASSIC);
            assertThrows(UnknownTopicOrPartitionException.class, () -> resetOffsets(service));
        }
    }
    // Returns a fresh topic name so concurrent/repeated test runs do not collide.
    private String generateRandomTopic() {
        return TOPIC_PREFIX + TestUtils.randomString(10);
    }
    // Returns a fresh consumer group id so concurrent/repeated test runs do not collide.
    private String generateRandomGroupId() {
        return GROUP_PREFIX + TestUtils.randomString(10);
    }
private Map<TopicPartition, Long> committedOffsets(ClusterInstance cluster,
String topic,
String group) {
try (Admin admin = Admin.create(Map.of(BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers()))) {
return admin.listConsumerGroupOffsets(group)
.all().get()
.get(group).entrySet()
.stream()
.filter(e -> e.getKey().topic().equals(topic))
.collect(toMap(Map.Entry::getKey, e -> e.getValue().offset()));
} catch (ExecutionException | InterruptedException e) {
throw new RuntimeException(e);
}
}
    // Builds the service under test from CLI args. Retries are effectively unlimited so
    // transient cluster instability does not fail the test.
    private ConsumerGroupCommand.ConsumerGroupService getConsumerGroupService(String[] args) {
        return new ConsumerGroupCommand.ConsumerGroupService(
            ConsumerGroupCommandOptions.fromArgs(args),
            Map.of(RETRIES_CONFIG, Integer.toString(Integer.MAX_VALUE)));
    }
private void produceMessages(ClusterInstance cluster, String topic, int numMessages) {
List<ProducerRecord<byte[], byte[]>> records = IntStream.range(0, numMessages)
.mapToObj(i -> new ProducerRecord<byte[], byte[]>(topic, new byte[100 * 1000]))
.toList();
produceMessages(cluster, records);
}
private void produceMessages(ClusterInstance cluster, List<ProducerRecord<byte[], byte[]>> records) {
try (Producer<byte[], byte[]> producer = createProducer(cluster)) {
records.forEach(producer::send);
}
}
private Producer<byte[], byte[]> createProducer(ClusterInstance cluster) {
Properties props = new Properties();
props.put(BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers());
props.put(ACKS_CONFIG, "1");
props.put(KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
props.put(VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
return new KafkaProducer<>(props);
}
    // Convenience overload for a single topic with a non-dry-run reset; see the
    // five-argument variant for the full assertion semantics.
    private void resetAndAssertOffsets(ClusterInstance cluster,
                                       String topic,
                                       String[] args,
                                       long expectedOffset) {
        resetAndAssertOffsets(cluster, args, expectedOffset, false, List.of(topic));
    }
    /**
     * Runs the reset command built from {@code args} and verifies, for every topic and every
     * group in the result, both the returned plan and the broker-side committed offsets.
     *
     * @param expectedOffset offset each topic's partition 0 is expected to be reset to
     *                       (only partition 0 per topic is asserted — see getTopicExceptOffsets)
     * @param dryRun         when true, broker-side committed offsets must remain unchanged
     * @param topics         topics covered by the reset
     */
    private void resetAndAssertOffsets(ClusterInstance cluster,
                                       String[] args,
                                       long expectedOffset,
                                       boolean dryRun,
                                       List<String> topics) {
        try (ConsumerGroupCommand.ConsumerGroupService service = getConsumerGroupService(args)) {
            Map<String, Map<TopicPartition, Long>> topicToExpectedOffsets = getTopicExceptOffsets(topics, expectedOffset);
            Map<String, Map<TopicPartition, OffsetAndMetadata>> resetOffsetsResultByGroup =
                resetOffsets(service);
            for (final String topic : topics) {
                resetOffsetsResultByGroup.forEach((group, partitionInfo) -> {
                    // Committed offsets captured BEFORE asserting: in a dry run they must be
                    // exactly what remains committed afterwards.
                    Map<TopicPartition, Long> priorOffsets = committedOffsets(cluster, topic, group);
                    assertEquals(topicToExpectedOffsets.get(topic), partitionToOffsets(topic, partitionInfo));
                    assertEquals(dryRun ? priorOffsets : topicToExpectedOffsets.get(topic),
                        committedOffsets(cluster, topic, group));
                });
            }
        }
    }
    // Maps each topic to its expected post-reset offsets: {partition 0 -> expectedOffset}.
    // NOTE(review): the name looks like a typo for "getTopicExpectedOffsets"; left unchanged
    // here because renaming would also require touching the caller.
    private Map<String, Map<TopicPartition, Long>> getTopicExceptOffsets(List<String> topics,
                                                                         long expectedOffset) {
        return topics.stream()
            .collect(toMap(Function.identity(),
                topic -> Map.of(new TopicPartition(topic, 0),
                    expectedOffset)));
    }
    // Thin wrapper kept so the tests read uniformly; returns
    // group id -> (partition -> planned/applied offset).
    private Map<String, Map<TopicPartition, OffsetAndMetadata>> resetOffsets(
        ConsumerGroupCommand.ConsumerGroupService consumerGroupService) {
        return consumerGroupService.resetOffsets();
    }
private Map<TopicPartition, Long> partitionToOffsets(String topic,
Map<TopicPartition, OffsetAndMetadata> partitionInfo) {
return partitionInfo.entrySet()
.stream()
.filter(entry -> Objects.equals(entry.getKey().topic(), topic))
.collect(toMap(Map.Entry::getKey, e -> e.getValue().offset()));
}
private static List<String> generateIds(String name) {
return IntStream.rangeClosed(1, 2)
.mapToObj(id -> name + id)
.toList();
}
    /**
     * Produces 100 records to {@code topic}, runs {@code numConsumers} consumers in
     * {@code group} until the group's committed offsets for the topic sum to 100, then
     * closes the consumers so the group goes inactive.
     */
    private void produceConsumeAndShutdown(ClusterInstance cluster,
                                           String topic,
                                           String group,
                                           int numConsumers,
                                           GroupProtocol groupProtocol) throws Exception {
        produceMessages(cluster, topic, 100);
        try (AutoCloseable consumerGroupCloseable =
                 consumerGroupClosable(cluster, numConsumers, topic, group, groupProtocol)) {
            // Block until all 100 records have been consumed and their offsets committed.
            awaitConsumerProgress(cluster, topic, group, 100);
        }
    }
private void writeContentToFile(File file, String content) throws IOException {
try (BufferedWriter bw = new BufferedWriter(new FileWriter(file))) {
bw.write(content);
}
}
    // Starts numConsumers consumers in the group subscribed to the topic and returns a
    // handle that presumably stops them when closed — behavior defined by
    // ConsumerGroupCommandTestUtils.buildConsumers (the meaning of the 'false' flag is
    // also defined there; TODO confirm against that utility).
    private AutoCloseable consumerGroupClosable(ClusterInstance cluster,
                                                int numConsumers,
                                                String topic,
                                                String group,
                                                GroupProtocol groupProtocol) {
        Map<String, Object> configs = composeConsumerConfigs(cluster, group, groupProtocol);
        return ConsumerGroupCommandTestUtils.buildConsumers(
            numConsumers,
            false,
            topic,
            () -> new KafkaConsumer<String, String>(configs));
    }
private Map<String, Object> composeConsumerConfigs(ClusterInstance cluster,
String group,
GroupProtocol groupProtocol) {
HashMap<String, Object> configs = new HashMap<>();
configs.put(BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers());
configs.put(GROUP_ID_CONFIG, group);
configs.put(GROUP_PROTOCOL_CONFIG, groupProtocol.name);
configs.put(KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
configs.put(VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
configs.put(AUTO_COMMIT_INTERVAL_MS_CONFIG, 1000);
configs.put(GROUP_INITIAL_REBALANCE_DELAY_MS_CONFIG, 1000);
if (GroupProtocol.CLASSIC == groupProtocol) {
configs.put(PARTITION_ASSIGNMENT_STRATEGY_CONFIG, RangeAssignor.class.getName());
}
return configs;
}
private void awaitConsumerProgress(ClusterInstance cluster,
String topic,
String group,
long count) throws Exception {
try (Admin admin = Admin.create(Map.of(BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers()))) {
Supplier<Long> offsets = () -> {
try {
return admin.listConsumerGroupOffsets(group)
.all().get().get(group)
.entrySet()
.stream()
.filter(e -> e.getKey().topic().equals(topic))
.mapToLong(e -> e.getValue().offset())
.sum();
} catch (InterruptedException | ExecutionException e) {
throw new RuntimeException(e);
}
};
TestUtils.waitForCondition(() -> offsets.get() == count,
"Expected that consumer group has consumed all messages from topic/partition. " +
"Expected offset: " + count +
". Actual offset: " + offsets.get());
}
}
private void awaitConsumerGroupInactive(ConsumerGroupCommand.ConsumerGroupService service,
String group) throws Exception {
TestUtils.waitForCondition(() -> {
GroupState state = service.collectGroupState(group).groupState();
return Objects.equals(state, GroupState.EMPTY) || Objects.equals(state, GroupState.DEAD);
}, "Expected that consumer group is inactive. Actual state: " +
service.collectGroupState(group).groupState());
}
    // Executes the reset and asserts, for every group and partition in the result, that the
    // returned offset matches expectedOffsets and that the broker-side committed offsets for
    // the topic now equal expectedOffsets as well.
    private void resetAndAssertOffsetsCommitted(ClusterInstance cluster,
                                                ConsumerGroupCommand.ConsumerGroupService service,
                                                Map<TopicPartition, Long> expectedOffsets,
                                                String topic) {
        Map<String, Map<TopicPartition, OffsetAndMetadata>> allResetOffsets = resetOffsets(service);
        allResetOffsets.forEach((group, offsetsInfo) -> offsetsInfo.forEach((tp, offsetMetadata) -> {
            assertEquals(expectedOffsets.get(tp), offsetMetadata.offset());
            assertEquals(expectedOffsets, committedOffsets(cluster, topic, group));
        }));
    }
private Map<TopicPartition, Long> toOffsetMap(Map<TopicPartition, OffsetAndMetadata> map) {
return map.entrySet()
.stream()
.collect(toMap(Map.Entry::getKey, e -> e.getValue().offset()));
}
private String[] addTo(String[] args, String... extra) {
List<String> res = new ArrayList<>(List.of(args));
res.addAll(List.of(extra));
return res.toArray(new String[0]);
}
}
| ResetConsumerGroupOffsetTest |
java | elastic__elasticsearch | x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichUsageTransportAction.java | {
"start": 1043,
"end": 1757
} | class ____ extends XPackUsageFeatureTransportAction {
@Inject
public EnrichUsageTransportAction(
TransportService transportService,
ClusterService clusterService,
ThreadPool threadPool,
ActionFilters actionFilters
) {
super(XPackUsageFeatureAction.ENRICH.name(), transportService, clusterService, threadPool, actionFilters);
}
@Override
protected void localClusterStateOperation(
Task task,
XPackUsageRequest request,
ClusterState state,
ActionListener<XPackUsageFeatureResponse> listener
) {
listener.onResponse(new XPackUsageFeatureResponse(new EnrichFeatureSetUsage()));
}
}
| EnrichUsageTransportAction |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/elastic/ElasticCompletionPayloadTests.java | {
"start": 809,
"end": 5366
} | class ____ extends ElasticPayloadTestCase<ElasticCompletionPayload> {
@Override
protected ElasticCompletionPayload payload() {
return new ElasticCompletionPayload();
}
@Override
protected Set<TaskType> expectedSupportedTaskTypes() {
return Set.of(TaskType.CHAT_COMPLETION, TaskType.COMPLETION);
}
public void testNonStreamingResponse() throws Exception {
var responseJson = """
{
"completion": [
{
"result": "hello"
}
]
}
""";
var chatCompletionResults = payload.responseBody(mockModel(), invokeEndpointResponse(responseJson));
assertThat(chatCompletionResults.getResults().size(), is(1));
assertThat(chatCompletionResults.getResults().get(0).content(), is("hello"));
}
public void testStreamingResponse() throws Exception {
var responseJson = """
{
"completion": [
{
"delta": "hola"
}
]
}
""";
var chatCompletionResults = payload.streamResponseBody(mockModel(), SdkBytes.fromUtf8String(responseJson));
assertThat(chatCompletionResults.results().size(), is(1));
assertThat(chatCompletionResults.results().iterator().next().delta(), is("hola"));
}
public void testChatCompletionRequest() throws Exception {
var message = new UnifiedCompletionRequest.Message(new UnifiedCompletionRequest.ContentString("Hello, world!"), "user", null, null);
var unifiedRequest = new UnifiedCompletionRequest(
List.of(message),
"i am ignored",
10L,
List.of("right meow"),
1.0F,
null,
null,
null
);
var sdkBytes = payload.chatCompletionRequestBytes(mockModel(), unifiedRequest);
assertJsonSdkBytes(sdkBytes, """
{
"messages": [
{
"content": "Hello, world!",
"role": "user"
}
],
"stop": [
"right meow"
],
"temperature": 1.0,
"max_completion_tokens": 10
}
""");
}
public void testChatCompletionResponse() throws Exception {
var responseJson = """
{
"id": "chunk1",
"choices": [
{
"delta": {
"content": "example_content",
"refusal": "example_refusal",
"role": "assistant",
"tool_calls": [
{
"index": 1,
"id": "tool1",
"function": {
"arguments": "example_arguments",
"name": "example_function"
},
"type": "function"
}
]
},
"finish_reason": "example_reason",
"index": 0
}
],
"model": "example_model",
"object": "example_object",
"usage": {
"completion_tokens": 10,
"prompt_tokens": 5,
"total_tokens": 15
}
}
""";
var chatCompletionResponse = payload.chatCompletionResponseBody(mockModel(), SdkBytes.fromUtf8String(responseJson));
XContentBuilder builder = JsonXContent.contentBuilder();
chatCompletionResponse.toXContentChunked(null).forEachRemaining(xContent -> {
try {
xContent.toXContent(builder, null);
} catch (IOException e) {
throw new RuntimeException(e);
}
});
assertEquals(XContentHelper.stripWhitespace(responseJson), Strings.toString(builder).trim());
}
}
| ElasticCompletionPayloadTests |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/eval/EvalSelectTest.java | {
"start": 237,
"end": 585
} | class ____ extends TestCase {
public void test_select() throws Exception {
List<List<Object>> rows = new ArrayList<List<Object>>();
List<Object> row = new ArrayList<Object>();
row.add(1);
rows.add(row);
assertEquals(rows, SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "SELECT 1"));
}
}
| EvalSelectTest |
java | hibernate__hibernate-orm | hibernate-spatial/src/test/java/org/hibernate/spatial/testing/dialects/db2/DB2ExpectationsFactory.java | {
"start": 722,
"end": 9951
} | class ____ extends AbstractExpectationsFactory {
private final DB2GeometryType desc = new DB2GeometryType( 4326 );
public DB2ExpectationsFactory() {
super();
}
/**
* Returns the expected extent of all testsuite-suite geometries.
*
* @return map of identifier, extent
*
* @throws SQLException
*/
public Map<Integer, Geometry> getExtent() throws SQLException {
throw new UnsupportedOperationException();
}
public NativeSQLStatement createNativeExtentStatement() {
return createNativeSQLStatement(
"select max(t.id), db2gse.ST_GetAggrResult(MAX(db2gse.st_BuildMBRAggr(t.geom))) from GeomTest t where db2gse.st_srid(t.geom) = 4326" );
}
@Override
public NativeSQLStatement createNativeTouchesStatement(Geometry geom) {
return createNativeSQLStatementAllWKTParams(
"select t.id, DB2GSE.ST_touches(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) from GeomTest t where DB2GSE.ST_touches(t.geom, DB2GSE.ST_geomFromText(?, 4326)) = 1 and db2gse.st_srid(t.geom) = 4326",
geom.toText()
);
}
@Override
public NativeSQLStatement createNativeOverlapsStatement(Geometry geom) {
return createNativeSQLStatementAllWKTParams(
"select t.id, DB2GSE.ST_overlaps(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) from GeomTest t where DB2GSE.ST_overlaps(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) = 1 and db2gse.st_srid(t.geom) = 4326",
geom.toText()
);
}
@Override
public NativeSQLStatement createNativeRelateStatement(Geometry geom, String matrix) {
String sql = "select t.id, DB2GSE.ST_relate(t.geom, DB2GSE.ST_GeomFromText(?, 4326), '" + matrix + "' ) from GeomTest t where DB2GSE.ST_relate(t.geom, DB2GSE.ST_GeomFromText(?, 4326), '" + matrix + "') = 1 and db2gse.st_srid(t.geom) = 4326";
return createNativeSQLStatementAllWKTParams( sql, geom.toText() );
}
@Override
public NativeSQLStatement createNativeDwithinStatement(Point geom, double distance) {
String sql = "select t.id, DB2GSE.ST_dwithin(DB2GSE.ST_GeomFromText(?, 4326), t.geom, " + distance + " , 'METER') from GeomTest t where DB2GSE.ST_dwithin(DB2GSE.ST_GeomFromText(?, 4326), t.geom, " + distance + ", 'METER') = 1 and db2gse.st_srid(t.geom) = 4326";
return createNativeSQLStatementAllWKTParams( sql, geom.toText() );
}
@Override
public NativeSQLStatement createNativeIntersectsStatement(Geometry geom) {
return createNativeSQLStatementAllWKTParams(
"select t.id, DB2GSE.ST_intersects(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) from GeomTest t where DB2GSE.ST_intersects(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) = 1 and db2gse.st_srid(t.geom) = 4326",
geom.toText()
);
}
@Override
public NativeSQLStatement createNativeFilterStatement(Geometry geom) {
return createNativeSQLStatementAllWKTParams(
"select t.id, t.geom && ST_GeomFromText(?, 4326) from GeomTest t where DB2GSE.ST_intersects(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) = 1 and db2gse.st_srid(t.geom) = 4326",
geom.toText()
);
}
@Override
public NativeSQLStatement createNativeDistanceStatement(Geometry geom) {
return createNativeSQLStatementAllWKTParams(
"select t.id, DB2GSE.ST_distance(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) from GeomTest t where DB2GSE.ST_SRID(t.geom) = 4326",
geom.toText()
);
}
@Override
public NativeSQLStatement createNativeDimensionSQL() {
return createNativeSQLStatement( "select id, DB2GSE.ST_dimension(geom) from geomtest" );
}
@Override
public NativeSQLStatement createNativeBufferStatement(Double distance) {
return createNativeSQLStatement(
"select t.id, DB2GSE.ST_buffer(t.geom,?) from GeomTest t where DB2GSE.ST_SRID(t.geom) = 4326",
new Object[] { distance }
);
}
@Override
public NativeSQLStatement createNativeConvexHullStatement(Geometry geom) {
return createNativeSQLStatementAllWKTParams(
"select t.id, DB2GSE.ST_convexhull(DB2GSE.ST_Union(t.geom, DB2GSE.ST_GeomFromText(?, 4326))) from GeomTest t where DB2GSE.ST_SRID(t.geom) = 4326",
geom.toText()
);
}
@Override
public NativeSQLStatement createNativeIntersectionStatement(Geometry geom) {
return createNativeSQLStatementAllWKTParams(
"select t.id, DB2GSE.ST_intersection(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) from GeomTest t where DB2GSE.ST_SRID(t.geom) = 4326",
geom.toText()
);
}
@Override
public NativeSQLStatement createNativeDifferenceStatement(Geometry geom) {
return createNativeSQLStatementAllWKTParams(
"select t.id, DB2GSE.ST_difference(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) from GeomTest t where DB2GSE.ST_SRID(t.geom) = 4326",
geom.toText()
);
}
@Override
public NativeSQLStatement createNativeSymDifferenceStatement(Geometry geom) {
return createNativeSQLStatementAllWKTParams(
"select t.id, DB2GSE.ST_symdifference(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) from GeomTest t where DB2GSE.ST_SRID(t.geom) = 4326",
geom.toText()
);
}
@Override
public NativeSQLStatement createNativeGeomUnionStatement(Geometry geom) {
return createNativeSQLStatementAllWKTParams(
"select t.id, DB2GSE.ST_union(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) from GeomTest t where DB2GSE.ST_SRID(t.geom) = 4326",
geom.toText()
);
}
@Override
public NativeSQLStatement createNativeTransformStatement(int epsg) {
return createNativeSQLStatement(
"select t.id, DB2GSE.ST_transform(t.geom," + epsg + ") from GeomTest t where DB2GSE.ST_SRID(t.geom) = 4326"
);
}
@Override
public NativeSQLStatement createNativeHavingSRIDStatement(int srid) {
return createNativeSQLStatement(
"select t.id, DB2GSE.st_srid(t.geom) from GeomTest t where DB2GSE.ST_SRID(t.geom) = " + srid );
}
@Override
public NativeSQLStatement createNativeAsTextStatement() {
return createNativeSQLStatement( "select id, DB2GSE.st_astext(geom) from geomtest" );
}
@Override
public NativeSQLStatement createNativeSridStatement() {
return createNativeSQLStatement( "select id, DB2GSE.ST_SRID(geom) from geomtest" );
}
@Override
public NativeSQLStatement createNativeIsSimpleStatement() {
return createNativeSQLStatement( "select id, DB2GSE.ST_issimple(geom) from geomtest" );
}
@Override
public NativeSQLStatement createNativeIsEmptyStatement() {
return createNativeSQLStatement(
"select id, DB2GSE.ST_isempty(geom) from geomtest where db2gse.ST_IsEmpty(geom) = 1" );
}
@Override
public NativeSQLStatement createNativeIsNotEmptyStatement() { // return 'not ST_IsEmpty', 'not' is not supported by DB2
return createNativeSQLStatement(
"select id, case when DB2GSE.ST_isempty(geom) = 0 then 1 else 0 end from geomtest where db2gse.ST_IsEmpty(geom) = 0" );
}
@Override
public NativeSQLStatement createNativeBoundaryStatement() {
return createNativeSQLStatement( "select id, DB2GSE.ST_boundary(geom) from geomtest" );
}
@Override
public NativeSQLStatement createNativeEnvelopeStatement() {
return createNativeSQLStatement( "select id, DB2GSE.ST_envelope(geom) from geomtest" );
}
@Override
public NativeSQLStatement createNativeAsBinaryStatement() {
return createNativeSQLStatement( "select id, DB2GSE.ST_asbinary(geom) from geomtest" );
}
@Override
public NativeSQLStatement createNativeGeometryTypeStatement() {
return createNativeSQLStatement( "select id, DB2GSE.ST_GeometryType(geom) from geomtest" );
}
@Override
public NativeSQLStatement createNativeWithinStatement(Geometry geom) {
return createNativeSQLStatementAllWKTParams(
"select t.id, DB2GSE.ST_within(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) from GeomTest t where DB2GSE.ST_within(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) = 1 and db2gse.st_srid(t.geom) = 4326",
geom.toText()
);
}
@Override
public NativeSQLStatement createNativeEqualsStatement(Geometry geom) {
return createNativeSQLStatementAllWKTParams(
"select t.id, DB2GSE.ST_equals(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) from GeomTest t where DB2GSE.ST_equals(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) = 1 and db2gse.st_srid(t.geom) = 4326",
geom.toText()
);
}
@Override
public NativeSQLStatement createNativeCrossesStatement(Geometry geom) {
return createNativeSQLStatementAllWKTParams(
"select t.id, DB2GSE.ST_crosses(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) from GeomTest t where DB2GSE.ST_crosses(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) = 1 and db2gse.st_srid(t.geom) = 4326",
geom.toText()
);
}
@Override
public NativeSQLStatement createNativeContainsStatement(Geometry geom) {
return createNativeSQLStatementAllWKTParams(
"select t.id, DB2GSE.ST_contains(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) from GeomTest t where DB2GSE.ST_contains(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) = 1 and db2gse.st_srid(t.geom) = 4326",
geom.toText()
);
}
@Override
public NativeSQLStatement createNativeDisjointStatement(Geometry geom) {
return createNativeSQLStatementAllWKTParams(
"select t.id, DB2GSE.ST_disjoint(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) from GeomTest t where DB2GSE.ST_disjoint(t.geom, DB2GSE.ST_GeomFromText(?, 4326)) = 1 and db2gse.st_srid(t.geom) = 4326",
geom.toText()
);
}
@Override
protected Geometry decode(Object o) {
org.geolatte.geom.Geometry<?> geometry = desc.toGeometry( o );
return geometry == null ? null : JTS.to( geometry );
}
}
| DB2ExpectationsFactory |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.