language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/AnnotationUtilsTests.java | {
"start": 53667,
"end": 53765
} | class ____ extends MyRepeatableClass {
}
static | SubMyRepeatableWithAdditionalLocalDeclarationsClass |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/api/operators/KeyedProcessOperatorTest.java | {
"start": 17912,
"end": 19241
} | class ____
extends KeyedProcessFunction<Integer, Integer, String> {
private static final long serialVersionUID = 1L;
private final TimeDomain expectedTimeDomain;
public QueryingFlatMapFunction(TimeDomain timeDomain) {
this.expectedTimeDomain = timeDomain;
}
@Override
public void processElement(Integer value, Context ctx, Collector<String> out)
throws Exception {
if (expectedTimeDomain.equals(TimeDomain.EVENT_TIME)) {
out.collect(
value
+ "TIME:"
+ ctx.timerService().currentWatermark()
+ " TS:"
+ ctx.timestamp());
} else {
out.collect(
value
+ "TIME:"
+ ctx.timerService().currentProcessingTime()
+ " TS:"
+ ctx.timestamp());
}
}
@Override
public void onTimer(long timestamp, OnTimerContext ctx, Collector<String> out)
throws Exception {
// Do nothing
}
}
private static | QueryingFlatMapFunction |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/service/NullServiceException.java | {
"start": 219,
"end": 536
} | class ____ extends ServiceException {
public final Class<?> serviceRole;
public NullServiceException(Class<?> serviceRole) {
super( "Unknown service requested [" + serviceRole.getName() + "]" );
this.serviceRole = serviceRole;
}
public Class<?> getServiceRole() {
return serviceRole;
}
}
| NullServiceException |
java | spring-projects__spring-security | saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/provider/service/metadata/RequestMatcherMetadataResponseResolver.java | {
"start": 1347,
"end": 1988
} | class ____ extends
org.springframework.security.saml2.provider.service.web.metadata.RequestMatcherMetadataResponseResolver {
/**
* Construct a
* {@link org.springframework.security.saml2.provider.service.web.metadata.RequestMatcherMetadataResponseResolver}
* @param registrations the source for relying party metadata
* @param metadata the strategy for converting {@link RelyingPartyRegistration}s into
* metadata
*/
public RequestMatcherMetadataResponseResolver(RelyingPartyRegistrationRepository registrations,
Saml2MetadataResolver metadata) {
super(registrations, metadata);
}
}
| RequestMatcherMetadataResponseResolver |
java | quarkusio__quarkus | independent-projects/arc/processor/src/test/java/io/quarkus/arc/processor/DotNamesTest.java | {
"start": 359,
"end": 1262
} | class ____ {
@Test
public void testCreate() {
DotName nested = DotNames.create(Nested.class);
assertTrue(nested.isComponentized());
assertEquals("io.quarkus.arc.processor.DotNamesTest$Nested", nested.toString());
assertEquals("DotNamesTest$Nested", nested.local());
assertEquals("DotNamesTest$Nested", nested.withoutPackagePrefix());
assertFalse(nested.isInner());
DotName nestedNested = DotNames.create(NestedNested.class);
assertTrue(nestedNested.isComponentized());
assertEquals("io.quarkus.arc.processor.DotNamesTest$Nested$NestedNested", nestedNested.toString());
assertEquals("DotNamesTest$Nested$NestedNested", nestedNested.local());
assertEquals("DotNamesTest$Nested$NestedNested", nestedNested.withoutPackagePrefix());
assertFalse(nestedNested.isInner());
}
static final | DotNamesTest |
java | elastic__elasticsearch | x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/WatcherRestartIT.java | {
"start": 710,
"end": 3621
} | class ____ extends AbstractUpgradeTestCase {
public void testWatcherRestart() throws Exception {
client().performRequest(new Request("POST", "/_watcher/_stop"));
ensureWatcherStopped();
client().performRequest(new Request("POST", "/_watcher/_start"));
ensureWatcherStarted();
}
public void testEnsureWatcherDeletesLegacyTemplates() throws Exception {
if (CLUSTER_TYPE.equals(ClusterType.UPGRADED)) {
// legacy index template created in previous releases should not be present anymore
assertBusy(() -> {
Request request = new Request("GET", "/_template/*watch*");
try {
Response response = client().performRequest(request);
Map<String, Object> responseLevel = entityAsMap(response);
assertNotNull(responseLevel);
assertThat(responseLevel.containsKey(".watches"), is(false));
assertThat(responseLevel.containsKey(".triggered_watches"), is(false));
assertThat(responseLevel.containsKey(".watch-history-9"), is(false));
} catch (ResponseException e) {
// Not found is fine
assertThat(
"Unexpected failure getting templates: " + e.getResponse().getStatusLine(),
e.getResponse().getStatusLine().getStatusCode(),
is(404)
);
}
}, 30, TimeUnit.SECONDS);
}
}
private void ensureWatcherStopped() throws Exception {
assertBusy(() -> {
Response stats = client().performRequest(new Request("GET", "_watcher/stats"));
String responseBody = EntityUtils.toString(stats.getEntity(), StandardCharsets.UTF_8);
assertThat(responseBody, containsString("\"watcher_state\":\"stopped\""));
assertThat(responseBody, not(containsString("\"watcher_state\":\"starting\"")));
assertThat(responseBody, not(containsString("\"watcher_state\":\"started\"")));
assertThat(responseBody, not(containsString("\"watcher_state\":\"stopping\"")));
});
}
private void ensureWatcherStarted() throws Exception {
assertBusy(() -> {
Response response = client().performRequest(new Request("GET", "_watcher/stats"));
String responseBody = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8);
assertThat(responseBody, containsString("\"watcher_state\":\"started\""));
assertThat(responseBody, not(containsString("\"watcher_state\":\"starting\"")));
assertThat(responseBody, not(containsString("\"watcher_state\":\"stopping\"")));
assertThat(responseBody, not(containsString("\"watcher_state\":\"stopped\"")));
});
}
}
| WatcherRestartIT |
java | elastic__elasticsearch | modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/direct/TransportPutDatabaseConfigurationActionTests.java | {
"start": 896,
"end": 3910
} | class ____ extends ESTestCase {
public void testValidatePrerequisites() {
ProjectId projectId = randomProjectIdOrDefault();
// Test that we reject two configurations with the same database name but different ids:
String name = randomAlphaOfLengthBetween(1, 50);
IngestGeoIpMetadata ingestGeoIpMetadata = randomIngestGeoIpMetadata(name);
ClusterState state = ClusterState.builder(DEFAULT)
.putProjectMetadata(ProjectMetadata.builder(projectId).putCustom(IngestGeoIpMetadata.TYPE, ingestGeoIpMetadata).build())
.build();
DatabaseConfiguration databaseConfiguration = randomDatabaseConfiguration(randomIdentifier(), name);
expectThrows(
IllegalArgumentException.class,
() -> TransportPutDatabaseConfigurationAction.validatePrerequisites(projectId, databaseConfiguration, state)
);
// Test that we do not reject two configurations with different database names:
String differentName = randomValueOtherThan(name, () -> randomAlphaOfLengthBetween(1, 50));
DatabaseConfiguration databaseConfigurationForDifferentName = randomDatabaseConfiguration(randomIdentifier(), differentName);
TransportPutDatabaseConfigurationAction.validatePrerequisites(projectId, databaseConfigurationForDifferentName, state);
// Test that we do not reject a configuration if none already exists:
TransportPutDatabaseConfigurationAction.validatePrerequisites(
projectId,
databaseConfiguration,
ClusterState.builder(DEFAULT).putProjectMetadata(ProjectMetadata.builder(projectId)).build()
);
// Test that we do not reject a configuration if one with the same database name AND id already exists:
DatabaseConfiguration databaseConfigurationSameNameSameId = ingestGeoIpMetadata.getDatabases()
.values()
.iterator()
.next()
.database();
TransportPutDatabaseConfigurationAction.validatePrerequisites(projectId, databaseConfigurationSameNameSameId, state);
}
private IngestGeoIpMetadata randomIngestGeoIpMetadata(String name) {
Map<String, DatabaseConfigurationMetadata> databases = new HashMap<>();
String databaseId = randomIdentifier();
databases.put(databaseId, randomDatabaseConfigurationMetadata(databaseId, name));
return new IngestGeoIpMetadata(databases);
}
private DatabaseConfigurationMetadata randomDatabaseConfigurationMetadata(String id, String name) {
return new DatabaseConfigurationMetadata(
randomDatabaseConfiguration(id, name),
randomNonNegativeLong(),
randomPositiveTimeValue().millis()
);
}
private DatabaseConfiguration randomDatabaseConfiguration(String id, String name) {
return new DatabaseConfiguration(id, name, new DatabaseConfiguration.Maxmind(randomAlphaOfLength(10)));
}
}
| TransportPutDatabaseConfigurationActionTests |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/Aws2RedshiftDataComponentBuilderFactory.java | {
"start": 16371,
"end": 20812
} | class ____
extends AbstractComponentBuilder<RedshiftData2Component>
implements Aws2RedshiftDataComponentBuilder {
@Override
protected RedshiftData2Component buildConcreteComponent() {
return new RedshiftData2Component();
}
private org.apache.camel.component.aws2.redshift.data.RedshiftData2Configuration getOrCreateConfiguration(RedshiftData2Component component) {
if (component.getConfiguration() == null) {
component.setConfiguration(new org.apache.camel.component.aws2.redshift.data.RedshiftData2Configuration());
}
return component.getConfiguration();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "configuration": ((RedshiftData2Component) component).setConfiguration((org.apache.camel.component.aws2.redshift.data.RedshiftData2Configuration) value); return true;
case "lazyStartProducer": ((RedshiftData2Component) component).setLazyStartProducer((boolean) value); return true;
case "operation": getOrCreateConfiguration((RedshiftData2Component) component).setOperation((org.apache.camel.component.aws2.redshift.data.RedshiftData2Operations) value); return true;
case "overrideEndpoint": getOrCreateConfiguration((RedshiftData2Component) component).setOverrideEndpoint((boolean) value); return true;
case "pojoRequest": getOrCreateConfiguration((RedshiftData2Component) component).setPojoRequest((boolean) value); return true;
case "profileCredentialsName": getOrCreateConfiguration((RedshiftData2Component) component).setProfileCredentialsName((java.lang.String) value); return true;
case "region": getOrCreateConfiguration((RedshiftData2Component) component).setRegion((java.lang.String) value); return true;
case "trustAllCertificates": getOrCreateConfiguration((RedshiftData2Component) component).setTrustAllCertificates((boolean) value); return true;
case "uriEndpointOverride": getOrCreateConfiguration((RedshiftData2Component) component).setUriEndpointOverride((java.lang.String) value); return true;
case "useDefaultCredentialsProvider": getOrCreateConfiguration((RedshiftData2Component) component).setUseDefaultCredentialsProvider((boolean) value); return true;
case "useProfileCredentialsProvider": getOrCreateConfiguration((RedshiftData2Component) component).setUseProfileCredentialsProvider((boolean) value); return true;
case "autowiredEnabled": ((RedshiftData2Component) component).setAutowiredEnabled((boolean) value); return true;
case "awsRedshiftDataClient": getOrCreateConfiguration((RedshiftData2Component) component).setAwsRedshiftDataClient((software.amazon.awssdk.services.redshiftdata.RedshiftDataClient) value); return true;
case "healthCheckConsumerEnabled": ((RedshiftData2Component) component).setHealthCheckConsumerEnabled((boolean) value); return true;
case "healthCheckProducerEnabled": ((RedshiftData2Component) component).setHealthCheckProducerEnabled((boolean) value); return true;
case "proxyHost": getOrCreateConfiguration((RedshiftData2Component) component).setProxyHost((java.lang.String) value); return true;
case "proxyPort": getOrCreateConfiguration((RedshiftData2Component) component).setProxyPort((java.lang.Integer) value); return true;
case "proxyProtocol": getOrCreateConfiguration((RedshiftData2Component) component).setProxyProtocol((software.amazon.awssdk.core.Protocol) value); return true;
case "accessKey": getOrCreateConfiguration((RedshiftData2Component) component).setAccessKey((java.lang.String) value); return true;
case "secretKey": getOrCreateConfiguration((RedshiftData2Component) component).setSecretKey((java.lang.String) value); return true;
case "sessionToken": getOrCreateConfiguration((RedshiftData2Component) component).setSessionToken((java.lang.String) value); return true;
case "useSessionCredentials": getOrCreateConfiguration((RedshiftData2Component) component).setUseSessionCredentials((boolean) value); return true;
default: return false;
}
}
}
} | Aws2RedshiftDataComponentBuilderImpl |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/event/spi/ReplicateEvent.java | {
"start": 180,
"end": 314
} | class ____ {@link org.hibernate.Session#replicate}.
*
* @author Steve Ebersole
*
* @see org.hibernate.Session#replicate
*/
public | for |
java | elastic__elasticsearch | x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/processor/Processors.java | {
"start": 1788,
"end": 3575
} | class ____ {
private Processors() {}
/**
* All of the named writeables needed to deserialize the instances of
* {@linkplain Processors}.
*/
public static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
// base
entries.add(new Entry(Converter.class, DefaultConverter.NAME, DefaultConverter::read));
entries.add(new Entry(Processor.class, ConstantProcessor.NAME, ConstantProcessor::new));
entries.add(new Entry(Processor.class, HitExtractorProcessor.NAME, HitExtractorProcessor::new));
entries.add(new Entry(Processor.class, BucketExtractorProcessor.NAME, BucketExtractorProcessor::new));
entries.add(new Entry(Processor.class, ChainingProcessor.NAME, ChainingProcessor::new));
// logical
entries.add(new Entry(Processor.class, BinaryLogicProcessor.NAME, BinaryLogicProcessor::new));
entries.add(new Entry(Processor.class, NotProcessor.NAME, NotProcessor::new));
// arithmetic
// binary arithmetics are pluggable
entries.add(
new Entry(BinaryArithmeticOperation.class, DefaultBinaryArithmeticOperation.NAME, DefaultBinaryArithmeticOperation::read)
);
entries.add(new Entry(Processor.class, BinaryArithmeticProcessor.NAME, BinaryArithmeticProcessor::new));
entries.add(new Entry(Processor.class, UnaryArithmeticProcessor.NAME, UnaryArithmeticProcessor::new));
// comparators
entries.add(new Entry(Processor.class, BinaryComparisonProcessor.NAME, BinaryComparisonProcessor::new));
// regex
entries.add(new Entry(Processor.class, RegexProcessor.NAME, RegexProcessor::new));
return entries;
}
}
| Processors |
java | spring-projects__spring-framework | spring-messaging/src/test/java/org/springframework/messaging/handler/annotation/support/HeaderMethodArgumentResolverTests.java | {
"start": 1962,
"end": 7971
} | class ____ {
private HeaderMethodArgumentResolver resolver;
private final ResolvableMethod resolvable = ResolvableMethod.on(getClass()).named("handleMessage").build();
@BeforeEach
public void setup() {
GenericApplicationContext context = new GenericApplicationContext();
context.refresh();
this.resolver = new HeaderMethodArgumentResolver(new DefaultConversionService(), context.getBeanFactory());
}
@Test
void supportsParameter() {
assertThat(this.resolver.supportsParameter(this.resolvable.annot(headerPlain()).arg())).isTrue();
assertThat(this.resolver.supportsParameter(this.resolvable.annotNotPresent(Header.class).arg())).isFalse();
}
@Test
void resolveArgument() throws Exception {
Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).setHeader("param1", "foo").build();
Object result = this.resolver.resolveArgument(this.resolvable.annot(headerPlain()).arg(), message);
assertThat(result).isEqualTo("foo");
}
@Test // SPR-11326
public void resolveArgumentNativeHeader() throws Exception {
TestMessageHeaderAccessor headers = new TestMessageHeaderAccessor();
headers.setNativeHeader("param1", "foo");
Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
assertThat(this.resolver.resolveArgument(this.resolvable.annot(headerPlain()).arg(), message)).isEqualTo("foo");
}
@Test
void resolveArgumentNativeHeaderAmbiguity() throws Exception {
TestMessageHeaderAccessor headers = new TestMessageHeaderAccessor();
headers.setHeader("param1", "foo");
headers.setNativeHeader("param1", "native-foo");
Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
assertThat(this.resolver.resolveArgument(
this.resolvable.annot(headerPlain()).arg(), message)).isEqualTo("foo");
assertThat(this.resolver.resolveArgument(
this.resolvable.annot(header("nativeHeaders.param1")).arg(), message)).isEqualTo("native-foo");
}
@Test
void resolveArgumentNotFound() {
Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).build();
assertThatExceptionOfType(MessageHandlingException.class).isThrownBy(() ->
this.resolver.resolveArgument(this.resolvable.annot(headerPlain()).arg(), message));
}
@Test
void resolveArgumentDefaultValue() throws Exception {
Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).build();
Object result = this.resolver.resolveArgument(this.resolvable.annot(header("name", "bar")).arg(), message);
assertThat(result).isEqualTo("bar");
}
@Test
void resolveDefaultValueSystemProperty() throws Exception {
try {
System.setProperty("systemProperty", "sysbar");
Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).build();
MethodParameter param = this.resolvable.annot(header("name", "#{systemProperties.systemProperty}")).arg();
Object result = resolver.resolveArgument(param, message);
assertThat(result).isEqualTo("sysbar");
}
finally {
System.clearProperty("systemProperty");
}
}
@Test
void resolveNameFromSystemProperty() throws Exception {
try {
System.setProperty("systemProperty", "sysbar");
Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).setHeader("sysbar", "foo").build();
MethodParameter param = this.resolvable.annot(header("#{systemProperties.systemProperty}")).arg();
Object result = resolver.resolveArgument(param, message);
assertThat(result).isEqualTo("foo");
}
finally {
System.clearProperty("systemProperty");
}
}
@Test
void missingParameterFromSystemPropertyThroughPlaceholder() {
try {
String expected = "sysbar";
System.setProperty("systemProperty", expected);
Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).build();
MethodParameter param = this.resolvable.annot(header("#{systemProperties.systemProperty}")).arg();
assertThatExceptionOfType(MessageHandlingException.class)
.isThrownBy(() -> resolver.resolveArgument(param, message))
.withMessageContaining(expected);
}
finally {
System.clearProperty("systemProperty");
}
}
@Test
void notNullablePrimitiveParameterFromSystemPropertyThroughPlaceholder() {
try {
String expected = "sysbar";
System.setProperty("systemProperty", expected);
Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).build();
MethodParameter param = this.resolvable.annot(header("${systemProperty}").required(false)).arg();
assertThatIllegalStateException()
.isThrownBy(() -> resolver.resolveArgument(param, message))
.withMessageContaining(expected);
}
finally {
System.clearProperty("systemProperty");
}
}
@Test
void resolveOptionalHeaderWithValue() throws Exception {
Message<String> message = MessageBuilder.withPayload("foo").setHeader("foo", "bar").build();
MethodParameter param = this.resolvable.annot(header("foo")).arg(Optional.class, String.class);
Object result = resolver.resolveArgument(param, message);
assertThat(result).isEqualTo(Optional.of("bar"));
}
@Test
void resolveOptionalHeaderAsEmpty() throws Exception {
Message<String> message = MessageBuilder.withPayload("foo").build();
MethodParameter param = this.resolvable.annot(header("foo")).arg(Optional.class, String.class);
Object result = resolver.resolveArgument(param, message);
assertThat(result).isEqualTo(Optional.empty());
}
@SuppressWarnings({"unused", "OptionalUsedAsFieldOrParameterType"})
public void handleMessage(
@Header String param1,
@Header(name = "name", defaultValue = "bar") String param2,
@Header(name = "name", defaultValue = "#{systemProperties.systemProperty}") String param3,
@Header(name = "#{systemProperties.systemProperty}") String param4,
String param5,
@Header("foo") Optional<String> param6,
@Header("nativeHeaders.param1") String nativeHeaderParam1,
@Header(name = "${systemProperty}", required = false) int primitivePlaceholderParam) {
}
public static | HeaderMethodArgumentResolverTests |
java | redisson__redisson | redisson-hibernate/redisson-hibernate-5/src/main/java/org/redisson/hibernate/strategy/TransactionalEntityRegionAccessStrategy.java | {
"start": 1275,
"end": 3863
} | class ____ extends BaseRegionAccessStrategy implements EntityRegionAccessStrategy {
public TransactionalEntityRegionAccessStrategy(Settings settings, GeneralDataRegion region) {
super(settings, region);
}
@Override
public Object get(SessionImplementor session, Object key, long txTimestamp) throws CacheException {
return region.get(session, key);
}
@Override
public boolean putFromLoad(SessionImplementor session, Object key, Object value, long txTimestamp, Object version, boolean minimalPutOverride)
throws CacheException {
if (minimalPutOverride && region.contains(key)) {
return false;
}
region.put(session, key, value);
return true;
}
@Override
public SoftLock lockItem(SessionImplementor session, Object key, Object version) throws CacheException {
return null;
}
@Override
public void unlockItem(SessionImplementor session, Object key, SoftLock lock) throws CacheException {
}
@Override
public EntityRegion getRegion() {
return (EntityRegion) region;
}
@Override
public boolean insert(SessionImplementor session, Object key, Object value, Object version) throws CacheException {
region.put(session, key, value);
return true;
}
@Override
public boolean afterInsert(SessionImplementor session, Object key, Object value, Object version) throws CacheException {
return false;
}
@Override
public void remove(SessionImplementor session, Object key) throws CacheException {
region.evict(key);
}
@Override
public boolean update(SessionImplementor session, Object key, Object value, Object currentVersion, Object previousVersion)
throws CacheException {
return insert(session, key, value, currentVersion);
}
@Override
public boolean afterUpdate(SessionImplementor session, Object key, Object value, Object currentVersion, Object previousVersion, SoftLock lock)
throws CacheException {
return false;
}
@Override
public Object generateCacheKey(Object id, EntityPersister persister, SessionFactoryImplementor factory, String tenantIdentifier) {
return ((RedissonEntityRegion)region).getCacheKeysFactory().createEntityKey(id, persister, factory, tenantIdentifier);
}
@Override
public Object getCacheKeyId(Object cacheKey) {
return ((RedissonEntityRegion)region).getCacheKeysFactory().getEntityId(cacheKey);
}
}
| TransactionalEntityRegionAccessStrategy |
java | square__moshi | moshi/src/test/java/com/squareup/moshi/MoshiTest.java | {
"start": 49267,
"end": 49743
} | class ____ {
final Pizza pizza;
final String drink;
MealDeal(Pizza pizza, String drink) {
this.pizza = pizza;
this.drink = drink;
}
@Override
public boolean equals(Object o) {
return o instanceof MealDeal
&& ((MealDeal) o).pizza.equals(pizza)
&& ((MealDeal) o).drink.equals(drink);
}
@Override
public int hashCode() {
return pizza.hashCode() + (31 * drink.hashCode());
}
}
static | MealDeal |
java | redisson__redisson | redisson-hibernate/redisson-hibernate-4/src/test/java/org/redisson/hibernate/ReadWriteTest.java | {
"start": 533,
"end": 7915
} | class ____ extends BaseCoreFunctionalTestCase {
@Override
protected Class<?>[] getAnnotatedClasses() {
return new Class[] { ItemReadWrite.class};
}
@Override
protected void configure(Configuration cfg) {
super.configure(cfg);
cfg.setProperty(Environment.DRIVER, org.h2.Driver.class.getName());
cfg.setProperty(Environment.URL, "jdbc:h2:mem:db1;DB_CLOSE_DELAY=-1");
cfg.setProperty(Environment.USER, "sa");
cfg.setProperty(Environment.PASS, "");
cfg.setProperty(Environment.CACHE_REGION_PREFIX, "");
cfg.setProperty(Environment.GENERATE_STATISTICS, "true");
cfg.setProperty(Environment.USE_SECOND_LEVEL_CACHE, "true");
cfg.setProperty(Environment.USE_QUERY_CACHE, "true");
cfg.setProperty(Environment.CACHE_REGION_FACTORY, RedissonRegionFactory.class.getName());
cfg.setProperty("hibernate.cache.redisson.item.eviction.max_entries", "100");
cfg.setProperty("hibernate.cache.redisson.item.expiration.time_to_live", "1500");
cfg.setProperty("hibernate.cache.redisson.item.expiration.max_idle_time", "1000");
}
@Before
public void before() {
sessionFactory().getCache().evictEntityRegions();
sessionFactory().getStatistics().clear();
}
@Test
public void testTimeToLive() throws InterruptedException {
Statistics stats = sessionFactory().getStatistics();
Long id = null;
Session s = openSession();
s.beginTransaction();
ItemReadWrite item = new ItemReadWrite( "data" );
id = (Long) s.save( item );
s.flush();
s.getTransaction().commit();
s.close();
Thread.sleep(900);
s = openSession();
s.beginTransaction();
item = (ItemReadWrite) s.get(ItemReadWrite.class, id);
Assert.assertEquals("data", item.getName());
s.getTransaction().commit();
s.close();
Assert.assertEquals(1, stats.getSecondLevelCacheStatistics("item").getHitCount());
Assert.assertEquals(0, stats.getSecondLevelCacheStatistics("item").getMissCount());
Thread.sleep(600);
s = openSession();
s.beginTransaction();
item = (ItemReadWrite) s.get(ItemReadWrite.class, id);
Assert.assertEquals("data", item.getName());
s.delete(item);
s.getTransaction().commit();
s.close();
Assert.assertEquals(1, stats.getSecondLevelCacheStatistics("item").getHitCount());
Assert.assertEquals(1, stats.getSecondLevelCacheStatistics("item").getMissCount());
}
@Test
public void testQuery() {
Statistics stats = sessionFactory().getStatistics();
Session s = openSession();
s.beginTransaction();
ItemReadWrite item = new ItemReadWrite("data");
item.getEntries().addAll(Arrays.asList("a", "b", "c"));
s.save(item);
s.flush();
s.getTransaction().commit();
s = openSession();
s.beginTransaction();
Query query = s.getNamedQuery("testQuery");
query.setCacheable(true);
query.setCacheRegion("myTestQuery");
query.setParameter("name", "data");
item = (ItemReadWrite) query.uniqueResult();
s.getTransaction().commit();
s.close();
Assert.assertEquals(1, stats.getSecondLevelCacheStatistics("myTestQuery").getPutCount());
s = openSession();
s.beginTransaction();
Query query2 = s.getNamedQuery("testQuery");
query2.setCacheable(true);
query2.setCacheRegion("myTestQuery");
query2.setParameter("name", "data");
item = (ItemReadWrite) query2.uniqueResult();
s.delete(item);
s.getTransaction().commit();
s.close();
Assert.assertEquals(1, stats.getSecondLevelCacheStatistics("myTestQuery").getHitCount());
stats.logSummary();
}
@Test
public void testCollection() {
Long id = null;
Statistics stats = sessionFactory().getStatistics();
Session s = openSession();
s.beginTransaction();
ItemReadWrite item = new ItemReadWrite("data");
item.getEntries().addAll(Arrays.asList("a", "b", "c"));
id = (Long) s.save(item);
s.flush();
s.getTransaction().commit();
s = openSession();
s.beginTransaction();
item = (ItemReadWrite) s.get(ItemReadWrite.class, id);
assertThat(item.getEntries()).containsExactly("a", "b", "c");
s.getTransaction().commit();
s.close();
Assert.assertEquals(1, stats.getSecondLevelCacheStatistics("item_entries").getPutCount());
s = openSession();
s.beginTransaction();
item = (ItemReadWrite) s.get(ItemReadWrite.class, id);
assertThat(item.getEntries()).containsExactly("a", "b", "c");
s.delete(item);
s.getTransaction().commit();
s.close();
Assert.assertEquals(1, stats.getSecondLevelCacheStatistics("item_entries").getHitCount());
}
@Test
public void testNaturalId() {
Statistics stats = sessionFactory().getStatistics();
Session s = openSession();
s.beginTransaction();
ItemReadWrite item = new ItemReadWrite("data");
item.setNid("123");
s.save(item);
s.flush();
s.getTransaction().commit();
Assert.assertEquals(1, stats.getSecondLevelCacheStatistics("item").getPutCount());
Assert.assertEquals(1, stats.getNaturalIdCacheStatistics("item##NaturalId").getPutCount());
s = openSession();
s.beginTransaction();
item = (ItemReadWrite) s.bySimpleNaturalId(ItemReadWrite.class).load("123");
assertThat(item).isNotNull();
s.delete(item);
s.getTransaction().commit();
s.close();
Assert.assertEquals(1, stats.getSecondLevelCacheStatistics("item").getHitCount());
Assert.assertEquals(1, stats.getNaturalIdCacheStatistics("item##NaturalId").getHitCount());
sessionFactory().getStatistics().logSummary();
}
@Test
public void testUpdateWithRefreshThenRollback() {
Statistics stats = sessionFactory().getStatistics();
Long id = null;
Session s = openSession();
s.beginTransaction();
ItemReadWrite item = new ItemReadWrite( "data" );
id = (Long) s.save( item );
s.flush();
s.getTransaction().commit();
Assert.assertEquals(1, stats.getSecondLevelCacheStatistics("item").getPutCount());
s = openSession();
s.beginTransaction();
item = (ItemReadWrite) s.get(ItemReadWrite.class, id);
item.setName("newdata");
s.update(item);
s.flush();
s.refresh(item);
s.getTransaction().rollback();
s.clear();
s.close();
s = openSession();
s.beginTransaction();
item = (ItemReadWrite) s.get(ItemReadWrite.class, id);
Assert.assertEquals("data", item.getName());
s.delete(item);
s.getTransaction().commit();
s.close();
Assert.assertEquals(1, stats.getSecondLevelCacheStatistics("item").getHitCount());
}
}
| ReadWriteTest |
java | alibaba__nacos | api/src/main/java/com/alibaba/nacos/api/ai/constant/AiConstants.java | {
"start": 2448,
"end": 3138
} | class ____ {
public static final String A2A_DEFAULT_NAMESPACE = "public";
/**
* Default endpoint type using `url` field of agent card directly when discovery
* a2a agent.
*/
public static final String A2A_ENDPOINT_TYPE_URL = "URL";
/**
* Default endpoint type using `backend` service of agent when discovery a2a
* agent.
*/
public static final String A2A_ENDPOINT_TYPE_SERVICE = "SERVICE";
public static final String A2A_ENDPOINT_DEFAULT_TRANSPORT = "JSONRPC";
public static final String A2A_ENDPOINT_DEFAULT_PROTOCOL = "HTTP";
}
}
| A2a |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/jackson2/AnonymousAuthenticationTokenMixinTests.java | {
"start": 1338,
"end": 3750
} | class ____ extends AbstractMixinTests {
private static final String HASH_KEY = "key";
// @formatter:off
private static final String ANONYMOUS_JSON = "{"
+ "\"@class\": \"org.springframework.security.authentication.AnonymousAuthenticationToken\", "
+ "\"details\": null,"
+ "\"principal\": " + UserDeserializerTests.USER_JSON + ","
+ "\"authenticated\": true, "
+ "\"keyHash\": " + HASH_KEY.hashCode() + ","
+ "\"authorities\": " + SimpleGrantedAuthorityMixinTests.AUTHORITIES_ARRAYLIST_JSON
+ "}";
// @formatter:on
@Test
public void serializeAnonymousAuthenticationTokenTest() throws JsonProcessingException, JSONException {
User user = createDefaultUser();
AnonymousAuthenticationToken token = new AnonymousAuthenticationToken(HASH_KEY, user, user.getAuthorities());
String actualJson = this.mapper.writeValueAsString(token);
JSONAssert.assertEquals(ANONYMOUS_JSON, actualJson, true);
}
@Test
public void deserializeAnonymousAuthenticationTokenTest() throws IOException {
AnonymousAuthenticationToken token = this.mapper.readValue(ANONYMOUS_JSON, AnonymousAuthenticationToken.class);
assertThat(token).isNotNull();
assertThat(token.getKeyHash()).isEqualTo(HASH_KEY.hashCode());
assertThat(token.getAuthorities()).isNotNull().hasSize(1).contains(new SimpleGrantedAuthority("ROLE_USER"));
}
@Test
public void deserializeAnonymousAuthenticationTokenWithoutAuthoritiesTest() throws IOException {
String jsonString = "{\"@class\": \"org.springframework.security.authentication.AnonymousAuthenticationToken\", \"details\": null,"
+ "\"principal\": \"user\", \"authenticated\": true, \"keyHash\": " + HASH_KEY.hashCode() + ","
+ "\"authorities\": [\"java.util.ArrayList\", []]}";
assertThatExceptionOfType(JsonMappingException.class)
.isThrownBy(() -> this.mapper.readValue(jsonString, AnonymousAuthenticationToken.class));
}
@Test
public void serializeAnonymousAuthenticationTokenMixinAfterEraseCredentialTest()
throws JsonProcessingException, JSONException {
User user = createDefaultUser();
AnonymousAuthenticationToken token = new AnonymousAuthenticationToken(HASH_KEY, user, user.getAuthorities());
token.eraseCredentials();
String actualJson = this.mapper.writeValueAsString(token);
JSONAssert.assertEquals(ANONYMOUS_JSON.replace(UserDeserializerTests.USER_PASSWORD, "null"), actualJson, true);
}
}
| AnonymousAuthenticationTokenMixinTests |
java | google__guice | core/test/com/google/inject/BinderTest.java | {
"start": 5122,
"end": 11225
} | class ____ {
@Inject Runnable runnable;
}
public void testDanglingConstantBinding() {
try {
Guice.createInjector(
new AbstractModule() {
@Override
public void configure() {
bindConstant();
}
});
fail();
} catch (CreationException expected) {
assertContains(
expected.getMessage(),
"Missing constant value. Please call to(...).",
"at BinderTest$8.configure");
}
}
public void testRecursiveBinding() {
try {
Guice.createInjector(
new AbstractModule() {
@Override
public void configure() {
bind(Runnable.class).to(Runnable.class);
}
});
fail();
} catch (CreationException expected) {
assertContains(
expected.getMessage(), "Binding points to itself.", "at BinderTest$9.configure");
}
}
public void testBindingNullConstant() {
try {
Guice.createInjector(
new AbstractModule() {
@Override
public void configure() {
String none = null;
bindConstant().annotatedWith(Names.named("nullOne")).to(none);
bind(String.class).annotatedWith(Names.named("nullTwo")).toInstance(none);
}
});
fail();
} catch (CreationException expected) {
assertContains(
expected.getMessage(),
"1) Binding to null instances is not allowed. Use toProvider(() -> null)",
"2) Binding to null instances is not allowed. Use toProvider(() -> null)");
}
}
public void testToStringOnBinderApi() {
try {
Guice.createInjector(
new AbstractModule() {
@Override
public void configure() {
assertEquals("Binder", binder().toString());
assertEquals("Provider<java.lang.Integer>", getProvider(Integer.class).toString());
assertEquals(
"Provider<java.util.List<java.lang.String>>",
getProvider(Key.get(new TypeLiteral<List<String>>() {})).toString());
assertEquals("BindingBuilder<java.lang.Integer>", bind(Integer.class).toString());
assertEquals(
"BindingBuilder<java.lang.Integer>",
bind(Integer.class).annotatedWith(Names.named("a")).toString());
assertEquals("ConstantBindingBuilder", bindConstant().toString());
assertEquals(
"ConstantBindingBuilder",
bindConstant().annotatedWith(Names.named("b")).toString());
assertEquals(
"AnnotatedElementBuilder",
binder().newPrivateBinder().expose(Integer.class).toString());
}
});
fail();
} catch (CreationException ignored) {
}
}
public void testNothingIsSerializableInBinderApi() {
try {
Guice.createInjector(
new AbstractModule() {
@Override
public void configure() {
try {
assertNotSerializable(binder());
assertNotSerializable(getProvider(Integer.class));
assertNotSerializable(getProvider(Key.get(new TypeLiteral<List<String>>() {})));
assertNotSerializable(bind(Integer.class));
assertNotSerializable(bind(Integer.class).annotatedWith(Names.named("a")));
assertNotSerializable(bindConstant());
assertNotSerializable(bindConstant().annotatedWith(Names.named("b")));
} catch (IOException e) {
fail(e.getMessage());
}
}
});
fail();
} catch (CreationException ignored) {
}
}
/**
* Although {@code String[].class} isn't equal to {@code new GenericArrayTypeImpl(String.class)},
* Guice should treat these two types interchangeably.
*/
public void testArrayTypeCanonicalization() {
final String[] strings = new String[] {"A"};
final Integer[] integers = new Integer[] {1};
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(String[].class).toInstance(strings);
bind(new TypeLiteral<Integer[]>() {}).toInstance(integers);
}
});
assertSame(integers, injector.getInstance(Key.get(new TypeLiteral<Integer[]>() {})));
assertSame(integers, injector.getInstance(new Key<Integer[]>() {}));
assertSame(integers, injector.getInstance(Integer[].class));
assertSame(strings, injector.getInstance(Key.get(new TypeLiteral<String[]>() {})));
assertSame(strings, injector.getInstance(new Key<String[]>() {}));
assertSame(strings, injector.getInstance(String[].class));
try {
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(String[].class).toInstance(new String[] {"A"});
bind(new TypeLiteral<String[]>() {}).toInstance(new String[] {"B"});
}
});
fail();
} catch (CreationException expected) {
assertContains(
expected.getMessage(),
"String[] was bound multiple times.",
"1 : BinderTest$18.configure",
"2 : BinderTest$18.configure");
assertContains(expected.getMessage(), "1 error");
}
// passes because duplicates are ignored
injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(String[].class).toInstance(strings);
bind(new TypeLiteral<String[]>() {}).toInstance(strings);
}
});
assertSame(strings, injector.getInstance(Key.get(new TypeLiteral<String[]>() {})));
assertSame(strings, injector.getInstance(new Key<String[]>() {}));
assertSame(strings, injector.getInstance(String[].class));
}
static | NeedsRunnable |
java | redisson__redisson | redisson/src/test/java/org/redisson/RedissonReactiveListMultimapTest.java | {
"start": 1944,
"end": 16262
} | class ____ implements Serializable {
private String value;
public SimpleValue() {
}
public SimpleValue(String field) {
this.value = field;
}
public void setValue(String field) {
this.value = field;
}
public String getValue() {
return value;
}
@Override
public String toString() {
return "value: " + value;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((value == null) ? 0 : value.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
SimpleValue other = (SimpleValue) obj;
if (value == null) {
if (other.value != null)
return false;
} else if (!value.equals(other.value))
return false;
return true;
}
}
@Test
public void testSizeInMemory() {
RListMultimapReactive<Object, Object> list = redisson.getListMultimap("test");
sync(list.put("1", "2"));
assertThat(sync(list.sizeInMemory())).isEqualTo(32);
sync(list.put("1", "3"));
assertThat(sync(list.sizeInMemory())).isEqualTo(40 );
}
@Test
public void testDelete() {
RListMultimapReactive<Object, Object> testList = redisson.getListMultimap("test");
sync(testList.put("1", "01"));
sync(testList.put("1", "02"));
sync(testList.put("1", "03"));
RListReactive<Object> list = testList.get("1");
sync(list.delete());
assertThat(sync(testList.size())).isZero();
assertThat(sync(testList.get("1").size())).isZero();
}
@Test
public void testReadAllKeySet() {
RListMultimapReactive<Object, Object> map = redisson.getListMultimap("test1");
sync(map.put("1", "4"));
sync(map.put("2", "5"));
sync(map.put("3", "6"));
assertThat(sync(map.readAllKeySet())).containsExactly("1", "2", "3");
}
@Test
public void testSize() {
RListMultimapReactive<SimpleKey, SimpleValue> map = redisson.getListMultimap("test1");
sync(map.put(new SimpleKey("0"), new SimpleValue("1")));
sync(map.put(new SimpleKey("0"), new SimpleValue("2")));
sync(map.put(new SimpleKey("1"), new SimpleValue("4")));
assertThat(sync(map.size())).isEqualTo(3);
assertThat(sync(map.fastRemove(new SimpleKey("0")))).isEqualTo(1);
RListReactive<SimpleValue> s = map.get(new SimpleKey("0"));
assertThat(sync(s.size())).isZero();
assertThat(sync(map.size())).isOne();
}
@Test
public void testKeySize() {
RListMultimapReactive<SimpleKey, SimpleValue> map = redisson.getListMultimap("test1");
sync(map.put(new SimpleKey("0"), new SimpleValue("1")));
sync(map.put(new SimpleKey("0"), new SimpleValue("2")));
sync(map.put(new SimpleKey("1"), new SimpleValue("4")));
assertThat(sync(map.keySize())).isEqualTo(2);
assertThat(sync(map.fastRemove(new SimpleKey("0")))).isOne();
RListReactive<SimpleValue> s = map.get(new SimpleKey("0"));
assertThat(sync(s.size())).isZero();
assertThat(sync(map.size())).isOne();
}
@Test
public void testPut() {
RListMultimapReactive<SimpleKey, SimpleValue> map = redisson.getListMultimap("{multi.map}.some.key");
sync(map.put(new SimpleKey("0"), new SimpleValue("1")));
sync(map.put(new SimpleKey("0"), new SimpleValue("2")));
sync(map.put(new SimpleKey("0"), new SimpleValue("3")));
sync(map.put(new SimpleKey("0"), new SimpleValue("3")));
sync(map.put(new SimpleKey("3"), new SimpleValue("4")));
assertThat(sync(map.size())).isEqualTo(5);
RListReactive<SimpleValue> s1 = map.get(new SimpleKey("0"));
assertThat(sync(s1)).containsExactly(new SimpleValue("1"), new SimpleValue("2"), new SimpleValue("3"), new SimpleValue("3"));
Mono<List<SimpleValue>> allValues = map.getAll(new SimpleKey("0"));
assertThat(sync(allValues)).containsExactly(new SimpleValue("1"), new SimpleValue("2"), new SimpleValue("3"),
new SimpleValue("3"));
RListReactive<SimpleValue> s2 = map.get(new SimpleKey("3"));
assertThat(sync(s2)).containsExactly(new SimpleValue("4"));
}
@Test
public void testRemoveAllFromCollection() {
RListMultimapReactive<SimpleKey, SimpleValue> map = redisson.getListMultimap("test1");
sync(map.put(new SimpleKey("0"), new SimpleValue("1")));
sync(map.put(new SimpleKey("0"), new SimpleValue("2")));
sync(map.put(new SimpleKey("0"), new SimpleValue("3")));
Collection<SimpleValue> values = Arrays.asList(new SimpleValue("1"), new SimpleValue("2"));
assertThat(sync(map.get(new SimpleKey("0")).removeAll(values))).isTrue();
assertThat(sync(map.get(new SimpleKey("0")).size())).isOne();
assertThat(sync(map.get(new SimpleKey("0")).removeAll(Arrays.asList(new SimpleValue("3"))))).isTrue();
assertThat(sync(map.get(new SimpleKey("0")).size())).isZero();
assertThat(sync(map.get(new SimpleKey("0")).removeAll(Arrays.asList(new SimpleValue("3"))))).isFalse();
}
@Test
public void testRemoveAll() {
RListMultimapReactive<String, String> map = redisson.getListMultimap("test1");
sync(map.put("0", "1"));
sync(map.put("0", "1"));
sync(map.put("0", "2"));
sync(map.put("0", "3"));
RListReactive<String> set = map.get("0");
sync(set.removeAll(Arrays.asList("4", "5")));
assertThat(sync(map.size())).isEqualTo(4);
sync(set.removeAll(Arrays.asList("3")));
assertThat(sync(map.size())).isEqualTo(3);
List<String> values = sync(map.removeAll("0"));
assertThat(values).containsExactly("1", "1", "2");
assertThat(sync(map.size())).isZero();
List<String> values2 = sync(map.removeAll("0"));
assertThat(values2).isEmpty();
}
@Test
public void testFastRemove() {
RListMultimapReactive<SimpleKey, SimpleValue> map = redisson.getListMultimap("test1");
assertThat(sync(map.put(new SimpleKey("0"), new SimpleValue("1")))).isTrue();
assertThat(sync(map.put(new SimpleKey("0"), new SimpleValue("2")))).isTrue();
assertThat(sync(map.put(new SimpleKey("0"), new SimpleValue("2")))).isTrue();
assertThat(sync(map.put(new SimpleKey("0"), new SimpleValue("3")))).isTrue();
long removed = sync(map.fastRemove(new SimpleKey("0"), new SimpleKey("1")));
assertThat(removed).isOne();
assertThat(sync(map.size())).isZero();
}
@Test
public void testContainsKey() {
RListMultimapReactive<SimpleKey, SimpleValue> map = redisson.getListMultimap("test1");
sync(map.put(new SimpleKey("0"), new SimpleValue("1")));
assertThat(sync(map.containsKey(new SimpleKey("0")))).isTrue();
assertThat(sync(map.containsKey(new SimpleKey("1")))).isFalse();
}
@Test
public void testContainsValue() {
RListMultimapReactive<SimpleKey, SimpleValue> map = redisson.getListMultimap("{1}test1");
sync(map.put(new SimpleKey("0"), new SimpleValue("1")));
assertThat(sync(map.containsValue(new SimpleValue("1")))).isTrue();
assertThat(sync(map.containsValue(new SimpleValue("0")))).isFalse();
}
@Test
public void testContainsEntry() {
RListMultimapReactive<SimpleKey, SimpleValue> map = redisson.getListMultimap("test1");
sync(map.put(new SimpleKey("0"), new SimpleValue("1")));
assertThat(sync(map.containsEntry(new SimpleKey("0"), new SimpleValue("1")))).isTrue();
assertThat(sync(map.containsEntry(new SimpleKey("0"), new SimpleValue("2")))).isFalse();
}
@Test
public void testRange() {
RListMultimapReactive<Object, Object> map = redisson.getListMultimap("test1");
sync(map.put(1, 1));
sync(map.put(1, 2));
sync(map.put(1, 3));
sync(map.put(1, 4));
sync(map.put(1, 5));
assertThat(sync(map.get(1).range(1))).containsExactly(1, 2);
assertThat(sync(map.get(1).range(1, 3))).containsExactly(2, 3, 4);
}
@Test
public void testListener() {
testWithParams(redisson -> {
Queue<Integer> nfs = new ConcurrentLinkedQueue<>();
RListMultimapReactive<Object, Object> map = redisson.reactive().getListMultimap("test1");
sync(map.addListener((MapPutListener) name -> nfs.add(1)));
sync(map.addListener((MapRemoveListener) name -> nfs.add(2)));
sync(map.addListener((ListAddListener) name -> nfs.add(3)));
sync(map.addListener((ListRemoveListener) name -> nfs.add(4)));
sync(map.put(1, 5));
sync(map.put(1, 8));
sync(map.remove(1, 5));
sync(map.remove(1, 8));
Awaitility.waitAtMost(Duration.ofSeconds(1))
.untilAsserted(() -> assertThat(nfs).containsExactlyInAnyOrder(1, 3, 3, 2, 4, 4));
}, NOTIFY_KEYSPACE_EVENTS, "Ehl");
}
@Test
public void testRemove() {
RListMultimapReactive<SimpleKey, SimpleValue> map = redisson.getListMultimap("test1");
sync(map.put(new SimpleKey("0"), new SimpleValue("1")));
sync(map.put(new SimpleKey("0"), new SimpleValue("2")));
sync(map.put(new SimpleKey("0"), new SimpleValue("3")));
assertThat(sync(map.remove(new SimpleKey("0"), new SimpleValue("2")))).isTrue();
assertThat(sync(map.remove(new SimpleKey("0"), new SimpleValue("5")))).isFalse();
assertThat(sync(map.get(new SimpleKey("0")).size())).isEqualTo(2);
assertThat(sync(map.getAll(new SimpleKey("0"))).size()).isEqualTo(2);
}
@Test
public void testPutAll() {
RListMultimapReactive<SimpleKey, SimpleValue> map = redisson.getListMultimap("test1");
List<SimpleValue> values =
Arrays.asList(new SimpleValue("1"), new SimpleValue("2"), new SimpleValue("3"), new SimpleValue("3"));
assertThat(sync(map.putAll(new SimpleKey("0"), values))).isTrue();
assertThat(sync(map.putAll(new SimpleKey("0"), Arrays.asList(new SimpleValue("1"))))).isTrue();
List<SimpleValue> testValues =
Arrays.asList(new SimpleValue("1"), new SimpleValue("2"), new SimpleValue("3"), new SimpleValue("3"),
new SimpleValue("1"));
assertThat(sync(map.get(new SimpleKey("0")))).containsExactlyElementsOf(testValues);
}
@Test
public void testKeySet() {
RListMultimapReactive<SimpleKey, SimpleValue> map = redisson.getListMultimap("test1");
sync(map.put(new SimpleKey("0"), new SimpleValue("1")));
sync(map.put(new SimpleKey("3"), new SimpleValue("4")));
assertThat(sync(map.readAllKeySet())).containsExactlyInAnyOrder(new SimpleKey("0"), new SimpleKey("3"));
}
@Test
public void testReplaceValues() {
RListMultimapReactive<SimpleKey, SimpleValue> map = redisson.getListMultimap("test1");
sync(map.put(new SimpleKey("0"), new SimpleValue("1")));
sync(map.put(new SimpleKey("3"), new SimpleValue("4")));
List<SimpleValue> values = Arrays.asList(new SimpleValue("11"), new SimpleValue("12"), new SimpleValue("12"));
List<SimpleValue> oldValues = sync(map.replaceValues(new SimpleKey("0"), values));
assertThat(oldValues).containsExactly(new SimpleValue("1"));
List<SimpleValue> allValues = sync(map.getAll(new SimpleKey("0")));
assertThat(allValues).containsExactlyElementsOf(values);
List<SimpleValue> oldValues2 = sync(map.replaceValues(new SimpleKey("0"), Collections.emptyList()));
assertThat(oldValues2).containsExactlyElementsOf(values);
List<SimpleValue> vals = sync(map.getAll(new SimpleKey("0")));
assertThat(vals).isEmpty();
}
@Test
public void testFastReplaceValues() {
RListMultimapReactive<SimpleKey, SimpleValue> map = redisson.getListMultimap("testFastReplace");
sync(map.put(new SimpleKey("0"), new SimpleValue("1")));
sync(map.put(new SimpleKey("3"), new SimpleValue("4")));
List<SimpleValue> values = Arrays.asList(new SimpleValue("11"), new SimpleValue("12"), new SimpleValue("12"));
sync(map.fastReplaceValues(new SimpleKey("0"), values));
List<SimpleValue> allValues = sync(map.getAll(new SimpleKey("0")));
assertThat(allValues).containsExactlyElementsOf(values);
sync(map.fastReplaceValues(new SimpleKey("0"), Collections.emptyList()));
List<SimpleValue> vals = sync(map.getAll(new SimpleKey("0")));
assertThat(vals).isEmpty();
}
@Test
void testAddAllUsingCollection() {
RListMultimapReactive<String, String> map = redisson.getListMultimap("testAddAllUsingCollection");
sync(map.get("1").addAll(List.of("2", "3", "4")));
assertThat(sync(map.get("1").size())).isEqualTo(3);
assertThat(sync(map.get("1").readAll())).containsExactly("2", "3", "4");
}
@Test
void testAddAllUsingCollectionWithIndex() {
RListMultimapReactive<String, String> map = redisson.getListMultimap("testAddAllUsingCollection");
sync(map.get("1").addAll(List.of("2", "3", "4")));
sync(map.get("1").addAll(2, List.of("5", "6", "7")));
assertThat(sync(map.get("1").size())).isEqualTo(6);
assertThat(sync(map.get("1").readAll())).containsExactly("2", "3", "5", "6", "7", "4");
}
@Test
void testAddAllUsingPublisher() {
RListMultimapReactive<String, String> map = redisson.getListMultimap("testAddAllUsingPublisher");
sync(map.get("1").addAll(Flux.just("2", "3", "4")));
assertThat(sync(map.get("1").size())).isEqualTo(3);
assertThat(sync(map.get("1").readAll())).containsExactly("2", "3", "4");
}
}
| SimpleValue |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/api/operators/co/CoBroadcastWithKeyedOperatorTest.java | {
"start": 3049,
"end": 7415
} | class ____
extends KeyedBroadcastProcessFunction<
Integer, Tuple2<Integer, String>, String, String> {
@Override
public void processElement(
Tuple2<Integer, String> value, ReadOnlyContext ctx, Collector<String> out)
throws Exception {
assertThat(ctx.getCurrentKey()).isEqualTo(value.f0);
// we check that we receive this output, to ensure that the assert was actually
// checked
out.collect(value.f1);
}
@Override
public void processBroadcastElement(String value, Context ctx, Collector<String> out)
throws Exception {}
}
CoBroadcastWithKeyedOperator<Integer, Tuple2<Integer, String>, String, String> operator =
new CoBroadcastWithKeyedOperator<>(
new KeyQueryingProcessFunction(), Collections.emptyList());
try (TwoInputStreamOperatorTestHarness<Tuple2<Integer, String>, String, String>
testHarness =
new KeyedTwoInputStreamOperatorTestHarness<>(
operator, (in) -> in.f0, null, BasicTypeInfo.INT_TYPE_INFO)) {
testHarness.setup();
testHarness.open();
testHarness.processElement1(new StreamRecord<>(Tuple2.of(5, "5"), 12L));
testHarness.processElement1(new StreamRecord<>(Tuple2.of(42, "42"), 13L));
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(new StreamRecord<>("5", 12L));
expectedOutput.add(new StreamRecord<>("42", 13L));
TestHarnessUtil.assertOutputEquals(
"Output was not correct.", expectedOutput, testHarness.getOutput());
}
}
/** Test the iteration over the keyed state on the broadcast side. */
@Test
void testAccessToKeyedStateIt() throws Exception {
final List<String> test1content = new ArrayList<>();
test1content.add("test1");
test1content.add("test1");
final List<String> test2content = new ArrayList<>();
test2content.add("test2");
test2content.add("test2");
test2content.add("test2");
test2content.add("test2");
final List<String> test3content = new ArrayList<>();
test3content.add("test3");
test3content.add("test3");
test3content.add("test3");
final Map<String, List<String>> expectedState = new HashMap<>();
expectedState.put("test1", test1content);
expectedState.put("test2", test2content);
expectedState.put("test3", test3content);
try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness =
getInitializedTestHarness(
BasicTypeInfo.STRING_TYPE_INFO,
new IdentityKeySelector<>(),
new StatefulFunctionWithKeyedStateAccessedOnBroadcast(expectedState))) {
// send elements to the keyed state
testHarness.processElement1(new StreamRecord<>("test1", 12L));
testHarness.processElement1(new StreamRecord<>("test1", 12L));
testHarness.processElement1(new StreamRecord<>("test2", 13L));
testHarness.processElement1(new StreamRecord<>("test2", 13L));
testHarness.processElement1(new StreamRecord<>("test2", 13L));
testHarness.processElement1(new StreamRecord<>("test3", 14L));
testHarness.processElement1(new StreamRecord<>("test3", 14L));
testHarness.processElement1(new StreamRecord<>("test3", 14L));
testHarness.processElement1(new StreamRecord<>("test2", 13L));
// this is the element on the broadcast side that will trigger the verification
// check the StatefulFunctionWithKeyedStateAccessedOnBroadcast#processBroadcastElement()
testHarness.processElement2(new StreamRecord<>(1, 13L));
}
}
/**
* Simple {@link KeyedBroadcastProcessFunction} that adds all incoming elements in the
* non-broadcast side to a listState and at the broadcast side it verifies if the stored data is
* the expected ones.
*/
private static | KeyQueryingProcessFunction |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/RMHATestBase.java | {
"start": 5967,
"end": 8525
} | class ____ extends RMAppManager {
private Configuration conf;
private RMContext rmContext;
public MyRMAppManager(RMContext context, YarnScheduler scheduler,
ApplicationMasterService masterService,
ApplicationACLsManager applicationACLsManager, Configuration conf) {
super(context, scheduler, masterService, applicationACLsManager, conf);
this.conf = conf;
this.rmContext = context;
}
@Override
protected void submitApplication(
ApplicationSubmissionContext submissionContext, long submitTime,
UserGroupInformation userUgi) throws YarnException {
String user = userUgi.getShortUserName();
//Do nothing, just add the application to RMContext
RMAppImpl application =
new RMAppImpl(submissionContext.getApplicationId(), this.rmContext,
this.conf, submissionContext.getApplicationName(), user,
submissionContext.getQueue(), submissionContext,
this.rmContext.getScheduler(),
this.rmContext.getApplicationMasterService(),
submitTime, submissionContext.getApplicationType(),
submissionContext.getApplicationTags(), null);
this.rmContext.getRMApps().put(submissionContext.getApplicationId(),
application);
//Do not send RMAppEventType.START event
//so the state of Application will not reach to NEW_SAVING state.
}
}
protected boolean isFinalState(RMAppState state) {
return state.equals(RMAppState.FINISHING)
|| state.equals(RMAppState.FINISHED) || state.equals(RMAppState.FAILED)
|| state.equals(RMAppState.KILLED);
}
protected void explicitFailover() throws IOException {
rm1.adminService.transitionToStandby(requestInfo);
rm2.adminService.transitionToActive(requestInfo);
assertTrue(rm1.getRMContext().getHAServiceState()
== HAServiceState.STANDBY);
assertTrue(rm2.getRMContext().getHAServiceState()
== HAServiceState.ACTIVE);
}
protected void startRMs(MockRM rm1, Configuration confForRM1, MockRM rm2,
Configuration confForRM2) throws IOException {
rm1.init(confForRM1);
rm1.start();
assertTrue(rm1.getRMContext().getHAServiceState()
== HAServiceState.STANDBY);
rm2.init(confForRM2);
rm2.start();
assertTrue(rm2.getRMContext().getHAServiceState()
== HAServiceState.STANDBY);
rm1.adminService.transitionToActive(requestInfo);
assertTrue(rm1.getRMContext().getHAServiceState()
== HAServiceState.ACTIVE);
}
}
| MyRMAppManager |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/Chain.java | {
"start": 2486,
"end": 2865
} | class ____ the chain job's JobConf.
* <p/>
* The configuration properties of the chain job have precedence over the
* configuration properties of the Mapper.
*
* @param isMap indicates if the Chain is for a Mapper or for a
* Reducer.
* @param jobConf chain job's JobConf to add the Mapper class.
* @param klass the Mapper | to |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/json/JsonArray.java | {
"start": 21141,
"end": 21554
} | class ____ implements Iterator<Object> {
final Iterator<Object> listIter;
Iter(Iterator<Object> listIter) {
this.listIter = listIter;
}
@Override
public boolean hasNext() {
return listIter.hasNext();
}
@Override
public Object next() {
return wrapJsonValue(listIter.next());
}
@Override
public void remove() {
listIter.remove();
}
}
}
| Iter |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/CharType.java | {
"start": 1737,
"end": 5337
} | class ____ extends LogicalType {
private static final long serialVersionUID = 1L;
public static final int EMPTY_LITERAL_LENGTH = 0;
public static final int MIN_LENGTH = 1;
public static final int MAX_LENGTH = Integer.MAX_VALUE;
public static final int DEFAULT_LENGTH = 1;
private static final String FORMAT = "CHAR(%d)";
private static final Set<String> INPUT_OUTPUT_CONVERSION =
conversionSet(
String.class.getName(), byte[].class.getName(), StringData.class.getName());
private static final Class<?> DEFAULT_CONVERSION = String.class;
private final int length;
public CharType(boolean isNullable, int length) {
super(isNullable, LogicalTypeRoot.CHAR);
if (length < MIN_LENGTH) {
throw new ValidationException(
String.format(
"Character string length must be between %d and %d (both inclusive).",
MIN_LENGTH, MAX_LENGTH));
}
this.length = length;
}
public CharType(int length) {
this(true, length);
}
public CharType() {
this(DEFAULT_LENGTH);
}
/** Helper constructor for {@link #ofEmptyLiteral()} and {@link #copy(boolean)}. */
private CharType(int length, boolean isNullable) {
super(isNullable, LogicalTypeRoot.CHAR);
this.length = length;
}
/**
* The SQL standard defines that character string literals are allowed to be zero-length strings
* (i.e., to contain no characters) even though it is not permitted to declare a type that is
* zero.
*
* <p>This method enables this special kind of character string.
*
* <p>Zero-length character strings have no serializable string representation.
*/
public static CharType ofEmptyLiteral() {
return new CharType(EMPTY_LITERAL_LENGTH, false);
}
public int getLength() {
return length;
}
@Override
public LogicalType copy(boolean isNullable) {
return new CharType(length, isNullable);
}
@Override
public String asSerializableString() {
if (length == EMPTY_LITERAL_LENGTH) {
throw new TableException(
"Zero-length character strings have no serializable string representation.");
}
return withNullability(FORMAT, length);
}
@Override
public String asSummaryString() {
return withNullability(FORMAT, length);
}
@Override
public boolean supportsInputConversion(Class<?> clazz) {
return INPUT_OUTPUT_CONVERSION.contains(clazz.getName());
}
@Override
public boolean supportsOutputConversion(Class<?> clazz) {
return INPUT_OUTPUT_CONVERSION.contains(clazz.getName());
}
@Override
public Class<?> getDefaultConversion() {
return DEFAULT_CONVERSION;
}
@Override
public List<LogicalType> getChildren() {
return Collections.emptyList();
}
@Override
public <R> R accept(LogicalTypeVisitor<R> visitor) {
return visitor.visit(this);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
CharType charType = (CharType) o;
return length == charType.length;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), length);
}
}
| CharType |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cache/CacheOneToManyAndIdClassTest.java | {
"start": 1512,
"end": 4729
} | class ____ {
public static final Long CONTENT_ID = 200l;
public static final String CONTENT_NAME = "Important";
@BeforeEach
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Content content = new Content( CONTENT_ID, CONTENT_NAME );
Detail detail = new Detail( 300l, 400l, "detail" );
content.addDetail( detail );
session.persist( content );
session.persist( detail );
}
);
}
@AfterEach
public void tearDown(SessionFactoryScope scope){
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void testIt(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Content content = session.get( Content.class, PkComposite.withId( CONTENT_ID ) );
assertThat( content ).isNotNull();
assertThat( content.getName() ).isEqualTo( CONTENT_NAME );
List<Detail> details = content.getDetailList();
assertThat( details.size() ).isEqualTo( 1 );
}
);
scope.inTransaction(
session -> {
Content content = session.get( Content.class, PkComposite.withId( CONTENT_ID ) );
assertThat( content ).isNotNull();
assertThat( content.getName() ).isEqualTo( CONTENT_NAME );
List<Detail> details = content.getDetailList();
assertThat( details.size() ).isEqualTo( 1 );
details.remove( details.get( 0 ) );
Detail newDetail = new Detail( 301l, 901l, "New detail" );
content.addDetail( newDetail );
}
);
scope.inTransaction(
session -> {
Content content = session.get( Content.class, PkComposite.withId( CONTENT_ID ) );
assertThat( content ).isNotNull();
assertThat( content.getName() ).isEqualTo( CONTENT_NAME );
List<Detail> details = content.getDetailList();
assertThat( details.size() ).isEqualTo( 1 );
assertThat( details.get( 0 ).getId() ).isEqualTo( 301l );
assertThat( details.get( 0 ).getId2() ).isEqualTo( 901l );
assertThat( details.get( 0 ).getName() ).isEqualTo( "New detail" );
}
);
}
@Test
public void testIt2(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Content content = session.get( Content.class, PkComposite.withId( CONTENT_ID ) );
assertThat( content ).isNotNull();
assertThat( content.getName() ).isEqualTo( CONTENT_NAME );
List<Detail> details = content.getDetailList();
assertThat( details.size() ).isEqualTo( 1 );
details.remove( details.get( 0 ) );
Detail newDetail = new Detail( 301l, 901l, "New detail" );
content.addDetail( newDetail );
}
);
scope.inTransaction(
session -> {
Content content = session.get( Content.class, PkComposite.withId( CONTENT_ID ) );
assertThat( content ).isNotNull();
assertThat( content.getName() ).isEqualTo( CONTENT_NAME );
List<Detail> details = content.getDetailList();
assertThat( details.size() ).isEqualTo( 1 );
assertThat( details.get( 0 ).getId() ).isEqualTo( 301l );
assertThat( details.get( 0 ).getId2() ).isEqualTo( 901l );
assertThat( details.get( 0 ).getName() ).isEqualTo( "New detail" );
}
);
}
@Entity(name = "Content")
@Table(name = "CONTENT_TABLE")
public static | CacheOneToManyAndIdClassTest |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Executable.java | {
"start": 492,
"end": 615
} | interface ____ {
List<Attribute> output();
void execute(Session session, ActionListener<Page> listener);
}
| Executable |
java | alibaba__nacos | persistence/src/test/java/com/alibaba/nacos/persistence/datasource/ClusterExternalStorageTest.java | {
"start": 1620,
"end": 5479
} | class ____ {
DatasourceConfiguration datasourceConfig;
@InjectMocks
private DynamicDataSource dataSource;
private MockEnvironment environment;
@Mock
private LocalDataSourceServiceImpl localDataSourceService;
@Mock
private ExternalDataSourceServiceImpl basicDataSourceService;
@BeforeEach
void setUp() throws Exception {
environment = new MockEnvironment();
EnvUtil.setEnvironment(environment);
datasourceConfig = new DatasourceConfiguration();
dataSource = DynamicDataSource.getInstance();
ReflectionTestUtils.setField(dataSource, "localDataSourceService", localDataSourceService);
ReflectionTestUtils.setField(dataSource, "basicDataSourceService", basicDataSourceService);
}
@Test
void test005WithClusterAndNullDatabase() {
// 模拟设置环境05:指定集群,未指定数据库,UseExternalDB是true,数据库类型是""
System.setProperty(Constants.STANDALONE_MODE_PROPERTY_NAME, "false");
environment.setProperty(PersistenceConstant.DATASOURCE_PLATFORM_PROPERTY_OLD, "");
EnvUtil.setIsStandalone(Boolean.getBoolean(Constants.STANDALONE_MODE_PROPERTY_NAME));
DatasourceConfiguration.setEmbeddedStorage(EnvUtil.getStandaloneMode());
// 模拟初始化
datasourceConfig.initialize(null);
assertFalse(EnvUtil.getStandaloneMode());
assertTrue(DatasourceConfiguration.isUseExternalDb());
assertTrue(dataSource.getDataSource() instanceof ExternalDataSourceServiceImpl);
}
@Test
void test006WithClusterAndMysqlDatabase() {
// 模拟设置环境06:指定集群,指定数据库mysql,UseExternalDB是true,数据库类型是mysql
System.setProperty(Constants.STANDALONE_MODE_PROPERTY_NAME, "false");
environment.setProperty(PersistenceConstant.DATASOURCE_PLATFORM_PROPERTY_OLD, "mysql");
EnvUtil.setIsStandalone(Boolean.getBoolean(Constants.STANDALONE_MODE_PROPERTY_NAME));
DatasourceConfiguration.setEmbeddedStorage(EnvUtil.getStandaloneMode());
// 模拟初始化
datasourceConfig.initialize(null);
assertFalse(EnvUtil.getStandaloneMode());
assertTrue(DatasourceConfiguration.isUseExternalDb());
assertTrue(dataSource.getDataSource() instanceof ExternalDataSourceServiceImpl);
}
@Test
void test007WithClusterAndDerbyDatabase() {
// 模拟设置环境07:指定集群,指定数据库derby,UseExternalDB是false,数据库类型是derby
System.setProperty(Constants.STANDALONE_MODE_PROPERTY_NAME, "false");
environment.setProperty(PersistenceConstant.DATASOURCE_PLATFORM_PROPERTY_OLD, "derby");
EnvUtil.setIsStandalone(Boolean.getBoolean(Constants.STANDALONE_MODE_PROPERTY_NAME));
DatasourceConfiguration.setEmbeddedStorage(true);
// 模拟初始化
datasourceConfig.initialize(null);
assertFalse(EnvUtil.getStandaloneMode());
assertFalse(DatasourceConfiguration.isUseExternalDb());
assertTrue(dataSource.getDataSource() instanceof LocalDataSourceServiceImpl);
}
@Test
void test008WithClusterAndOtherDatabase() {
// 模拟设置环境08: 指定集群,指定数据库其他,UseExternalDB是true,数据库类型是其他
System.setProperty(Constants.STANDALONE_MODE_PROPERTY_NAME, "false");
environment.setProperty(PersistenceConstant.DATASOURCE_PLATFORM_PROPERTY_OLD, "postgresql");
EnvUtil.setIsStandalone(Boolean.getBoolean(Constants.STANDALONE_MODE_PROPERTY_NAME));
DatasourceConfiguration.setEmbeddedStorage(EnvUtil.getStandaloneMode());
// 模拟初始化
datasourceConfig.initialize(null);
assertFalse(EnvUtil.getStandaloneMode());
assertTrue(DatasourceConfiguration.isUseExternalDb());
assertTrue(dataSource.getDataSource() instanceof ExternalDataSourceServiceImpl);
}
}
| ClusterExternalStorageTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DuplicateDateFormatFieldTest.java | {
"start": 4087,
"end": 4505
} | class ____ {
// BUG: Diagnostic contains: uses the field 'W' more than once
SimpleDateFormat format = new SimpleDateFormat("Week W ' of ' L");
}
""")
.doTest();
}
@Test
public void withOptionalGroup() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import java.text.SimpleDateFormat;
| Test |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/QuestionAnsweringProcessor.java | {
"start": 1424,
"end": 13638
} | class ____ extends NlpTask.Processor {
QuestionAnsweringProcessor(NlpTokenizer tokenizer) {
super(tokenizer);
}
@Override
public void validateInputs(List<String> inputs) {
// nothing to validate
}
@Override
public NlpTask.RequestBuilder getRequestBuilder(NlpConfig nlpConfig) {
if (nlpConfig instanceof QuestionAnsweringConfig questionAnsweringConfig) {
return new RequestBuilder(tokenizer, questionAnsweringConfig.getQuestion());
}
throw ExceptionsHelper.badRequestException(
"please provide configuration update for question_answering task including the desired [question]"
);
}
@Override
public NlpTask.ResultProcessor getResultProcessor(NlpConfig nlpConfig) {
if (nlpConfig instanceof QuestionAnsweringConfig questionAnsweringConfig) {
int maxAnswerLength = questionAnsweringConfig.getMaxAnswerLength();
int numTopClasses = questionAnsweringConfig.getNumTopClasses();
String resultsFieldValue = questionAnsweringConfig.getResultsField();
return new ResultProcessor(questionAnsweringConfig.getQuestion(), maxAnswerLength, numTopClasses, resultsFieldValue);
}
throw ExceptionsHelper.badRequestException(
"please provide configuration update for question_answering task including the desired [question]"
);
}
record RequestBuilder(NlpTokenizer tokenizer, String question) implements NlpTask.RequestBuilder {
@Override
public NlpTask.Request buildRequest(
List<String> inputs,
String requestId,
Tokenization.Truncate truncate,
int span,
Integer windowSize
) throws IOException {
if (inputs.size() > 1) {
throw ExceptionsHelper.badRequestException("Unable to do question answering on more than one text input at a time");
}
if (question == null) {
throw ExceptionsHelper.badRequestException("Question is required for question answering");
}
String context = inputs.get(0);
List<TokenizationResult.Tokens> tokenizations = tokenizer.tokenize(question, context, truncate, span, 0);
TokenizationResult result = tokenizer.buildTokenizationResult(tokenizations);
return result.buildRequest(requestId, truncate);
}
}
record ResultProcessor(String question, int maxAnswerLength, int numTopClasses, String resultsField)
implements
NlpTask.ResultProcessor {
@Override
public InferenceResults processResult(TokenizationResult tokenization, PyTorchInferenceResult pyTorchResult, boolean chunkResult) {
if (chunkResult) {
throw chunkingNotSupportedException(TaskType.NER);
}
if (pyTorchResult.getInferenceResult().length < 1) {
throw new ElasticsearchStatusException("question answering result has no data", RestStatus.INTERNAL_SERVER_ERROR);
}
// The result format is pairs of 'start' and 'end' logits,
// one pair for each span.
// Multiple spans occur where the context text is longer than
// the max sequence length, so the input must be windowed with
// overlap and evaluated in multiple calls.
// Note the response format changed in 8.9 due to the change in
// pytorch_inference to not process requests in batches.
// The output tensor is a 3d array of doubles.
// 1. The 1st index is the pairs of start and end for each span.
// If there is 1 span there will be 2 elements in this dimension,
// for 2 spans 4 elements
// 2. The 2nd index is the number results per span.
// This dimension is always equal to 1.
// 3. The 3rd index is the actual scores.
// This is an array of doubles equal in size to the number of
// input tokens plus and delimiters (e.g. SEP and CLS tokens)
// added by the tokenizer.
//
// inferenceResult[span_index_start_end][0][scores]
// Should be a collection of "starts" and "ends"
if (pyTorchResult.getInferenceResult().length % 2 != 0) {
throw new ElasticsearchStatusException(
"question answering result has invalid dimension, number of dimensions must be a multiple of 2 found [{}]",
RestStatus.CONFLICT,
pyTorchResult.getInferenceResult().length
);
}
final int numAnswersToGather = Math.max(numTopClasses, 1);
ScoreAndIndicesPriorityQueue finalEntries = new ScoreAndIndicesPriorityQueue(numAnswersToGather);
List<TokenizationResult.Tokens> tokensList = tokenization.getTokensBySequenceId().get(0);
int numberOfSpans = pyTorchResult.getInferenceResult().length / 2;
if (numberOfSpans != tokensList.size()) {
throw new ElasticsearchStatusException(
"question answering result has invalid dimensions; the number of spans [{}] does not match batched token size [{}]",
RestStatus.CONFLICT,
numberOfSpans,
tokensList.size()
);
}
for (int spanIndex = 0; spanIndex < numberOfSpans; spanIndex++) {
double[][] starts = pyTorchResult.getInferenceResult()[spanIndex * 2];
double[][] ends = pyTorchResult.getInferenceResult()[(spanIndex * 2) + 1];
assert starts.length == 1;
assert ends.length == 1;
if (starts.length != ends.length) {
throw new ElasticsearchStatusException(
"question answering result has invalid dimensions; start positions [{}] must equal potential end [{}]",
RestStatus.CONFLICT,
starts.length,
ends.length
);
}
topScores(
starts[0], // always 1 element in this dimension
ends[0],
numAnswersToGather,
finalEntries::insertWithOverflow,
tokensList.get(spanIndex).seqPairOffset(),
tokensList.get(spanIndex).tokenIds().length,
maxAnswerLength,
spanIndex
);
}
QuestionAnsweringInferenceResults.TopAnswerEntry[] topAnswerList =
new QuestionAnsweringInferenceResults.TopAnswerEntry[numAnswersToGather];
for (int i = numAnswersToGather - 1; i >= 0; i--) {
ScoreAndIndices scoreAndIndices = finalEntries.pop();
TokenizationResult.Tokens tokens = tokensList.get(scoreAndIndices.spanIndex());
int startOffset = tokens.tokens().get(1).get(scoreAndIndices.startToken).startOffset();
int endOffset = tokens.tokens().get(1).get(scoreAndIndices.endToken).endOffset();
topAnswerList[i] = new QuestionAnsweringInferenceResults.TopAnswerEntry(
tokens.input().get(1).substring(startOffset, endOffset),
scoreAndIndices.score(),
startOffset,
endOffset
);
}
QuestionAnsweringInferenceResults.TopAnswerEntry finalAnswer = topAnswerList[0];
return new QuestionAnsweringInferenceResults(
finalAnswer.answer(),
finalAnswer.startOffset(),
finalAnswer.endOffset(),
numTopClasses > 0 ? Arrays.asList(topAnswerList) : List.of(),
Optional.ofNullable(resultsField).orElse(DEFAULT_RESULTS_FIELD),
finalAnswer.score(),
tokenization.anyTruncated()
);
}
}
/**
*
* @param start The starting token index scores. May include padded tokens.
* @param end The ending token index scores. May include padded tokens.
* @param numAnswersToGather How many top answers to return
* @param topScoresCollector Called when a score is collected. May be called many more times than numAnswersToGather
* @param seq2Start The token position of where the context sequence starts. This is AFTER the sequence separation special tokens.
* @param tokenSize The true total tokenization size. This should NOT include padded tokens.
* @param maxAnswerLength The maximum answer length to consider.
* @param spanIndex Which sequence span is this.
*/
static void topScores(
double[] start,
double[] end,
int numAnswersToGather,
Consumer<ScoreAndIndices> topScoresCollector,
int seq2Start,
int tokenSize,
int maxAnswerLength,
int spanIndex
) {
if (start.length != end.length) {
throw new ElasticsearchStatusException(
"question answering result has invalid dimensions; possible start tokens [{}] must equal possible end tokens [{}]",
RestStatus.CONFLICT,
start.length,
end.length
);
}
// This needs to be the start of the second sequence skipping the separator tokens
// Example seq1 </s> </s> seq2, seq2Start should be (len(seq1) + 2)
// This predicate ensures the following
// - we include the cls token
// - we exclude the first sequence, which is always the question
// - we exclude the final token, which is a sep token
double[] startNormalized = normalizeWith(start, i -> {
if (i == 0) {
return true;
}
return i >= seq2Start && i < tokenSize - 1;
}, -10000.0);
double[] endNormalized = normalizeWith(end, i -> {
if (i == 0) {
return true;
}
return i >= seq2Start && i < tokenSize - 1;
}, -10000.0);
// We use CLS in the softmax, but then remove it from being considered a possible position
endNormalized[0] = startNormalized[0] = 0.0;
if (numAnswersToGather == 1) {
ScoreAndIndices toReturn = new ScoreAndIndices(0, 0, 0.0, spanIndex);
double maxScore = 0.0;
for (int i = seq2Start; i < tokenSize; i++) {
if (startNormalized[i] == 0) {
continue;
}
for (int j = i; j < (maxAnswerLength + i) && j < tokenSize; j++) {
double score = startNormalized[i] * endNormalized[j];
if (score > maxScore) {
maxScore = score;
toReturn = new ScoreAndIndices(i - seq2Start, j - seq2Start, score, spanIndex);
}
}
}
topScoresCollector.accept(toReturn);
return;
}
for (int i = seq2Start; i < tokenSize; i++) {
for (int j = i; j < (maxAnswerLength + i) && j < tokenSize; j++) {
topScoresCollector.accept(
new ScoreAndIndices(i - seq2Start, j - seq2Start, startNormalized[i] * endNormalized[j], spanIndex)
);
}
}
}
static double[] normalizeWith(double[] values, IntPredicate mutateIndex, double predicateValue) {
double[] toReturn = new double[values.length];
for (int i = 0; i < values.length; i++) {
toReturn[i] = values[i];
if (mutateIndex.test(i) == false) {
toReturn[i] = predicateValue;
}
}
double expSum = 0.0;
for (double v : toReturn) {
expSum += Math.exp(v);
}
double diff = Math.log(expSum);
for (int i = 0; i < toReturn.length; i++) {
toReturn[i] = Math.exp(toReturn[i] - diff);
}
return toReturn;
}
static | QuestionAnsweringProcessor |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/AbstractConfiguredSecurityBuilderTests.java | {
"start": 7651,
"end": 7915
} | class ____ extends SecurityConfigurerAdapter<Object, TestConfiguredSecurityBuilder> {
@Override
public void init(TestConfiguredSecurityBuilder http) {
http.with(new DoubleNestedConfigurer(), Customizer.withDefaults());
}
}
private static | NestedConfigurer |
java | elastic__elasticsearch | x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/SslConfig.java | {
"start": 942,
"end": 7571
} | class ____ {
public static final String SSL = "ssl";
private static final String SSL_DEFAULT = "false";
public static final String SSL_PROTOCOL = "ssl.protocol";
private static final String SSL_PROTOCOL_DEFAULT = "TLS"; // SSL alternative
public static final String SSL_KEYSTORE_LOCATION = "ssl.keystore.location";
private static final String SSL_KEYSTORE_LOCATION_DEFAULT = "";
public static final String SSL_KEYSTORE_PASS = "ssl.keystore.pass";
private static final String SSL_KEYSTORE_PASS_DEFAULT = "";
public static final String SSL_KEYSTORE_TYPE = "ssl.keystore.type";
private static final String SSL_KEYSTORE_TYPE_DEFAULT = "JKS"; // PCKS12
public static final String SSL_TRUSTSTORE_LOCATION = "ssl.truststore.location";
private static final String SSL_TRUSTSTORE_LOCATION_DEFAULT = "";
public static final String SSL_TRUSTSTORE_PASS = "ssl.truststore.pass";
private static final String SSL_TRUSTSTORE_PASS_DEFAULT = "";
public static final String SSL_TRUSTSTORE_TYPE = "ssl.truststore.type";
private static final String SSL_TRUSTSTORE_TYPE_DEFAULT = "JKS";
static final Set<String> OPTION_NAMES = new LinkedHashSet<>(
Arrays.asList(
SSL,
SSL_PROTOCOL,
SSL_KEYSTORE_LOCATION,
SSL_KEYSTORE_PASS,
SSL_KEYSTORE_TYPE,
SSL_TRUSTSTORE_LOCATION,
SSL_TRUSTSTORE_PASS,
SSL_TRUSTSTORE_TYPE
)
);
private final boolean enabled;
private final String protocol, keystoreLocation, keystorePass, keystoreType;
private final String truststoreLocation, truststorePass, truststoreType;
private final SSLContext sslContext;
public SslConfig(Properties settings, URI baseURI) {
boolean isSchemaPresent = baseURI.getScheme() != null;
boolean isSSLPropertyPresent = settings.getProperty(SSL) != null;
boolean isHttpsScheme = "https".equals(baseURI.getScheme());
if (isSSLPropertyPresent == false && isSchemaPresent == false) {
enabled = StringUtils.parseBoolean(SSL_DEFAULT);
} else {
if (isSSLPropertyPresent && isHttpsScheme && StringUtils.parseBoolean(settings.getProperty(SSL)) == false) {
throw new ClientException("Cannot enable SSL: HTTPS protocol being used in the URL and SSL disabled in properties");
}
enabled = isHttpsScheme || StringUtils.parseBoolean(settings.getProperty(SSL, SSL_DEFAULT));
}
protocol = settings.getProperty(SSL_PROTOCOL, SSL_PROTOCOL_DEFAULT);
keystoreLocation = settings.getProperty(SSL_KEYSTORE_LOCATION, SSL_KEYSTORE_LOCATION_DEFAULT);
keystorePass = settings.getProperty(SSL_KEYSTORE_PASS, SSL_KEYSTORE_PASS_DEFAULT);
keystoreType = settings.getProperty(SSL_KEYSTORE_TYPE, SSL_KEYSTORE_TYPE_DEFAULT);
truststoreLocation = settings.getProperty(SSL_TRUSTSTORE_LOCATION, SSL_TRUSTSTORE_LOCATION_DEFAULT);
truststorePass = settings.getProperty(SSL_TRUSTSTORE_PASS, SSL_TRUSTSTORE_PASS_DEFAULT);
truststoreType = settings.getProperty(SSL_TRUSTSTORE_TYPE, SSL_TRUSTSTORE_TYPE_DEFAULT);
sslContext = enabled ? createSSLContext() : null;
}
// ssl
boolean isEnabled() {
return enabled;
}
SSLSocketFactory sslSocketFactory() {
return sslContext.getSocketFactory();
}
private SSLContext createSSLContext() {
SSLContext ctx;
try {
ctx = SSLContext.getInstance(protocol);
ctx.init(loadKeyManagers(), loadTrustManagers(), null);
} catch (Exception ex) {
throw new ClientException("Failed to initialize SSL - " + ex.getMessage(), ex);
}
return ctx;
}
private KeyManager[] loadKeyManagers() throws GeneralSecurityException, IOException {
if (StringUtils.hasText(keystoreLocation) == false) {
return null;
}
char[] pass = (StringUtils.hasText(keystorePass) ? keystorePass.trim().toCharArray() : null);
KeyStore keyStore = loadKeyStore(keystoreLocation, pass, keystoreType);
KeyManagerFactory kmFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
kmFactory.init(keyStore, pass);
return kmFactory.getKeyManagers();
}
private static KeyStore loadKeyStore(String source, char[] pass, String keyStoreType) throws GeneralSecurityException, IOException {
KeyStore keyStore = KeyStore.getInstance(keyStoreType);
Path path = Paths.get(source);
if (Files.exists(path) == false) {
throw new ClientException(
"Expected to find keystore file at [" + source + "] but was unable to. Make sure you have specified a valid URI."
);
}
try (InputStream in = Files.newInputStream(Paths.get(source), StandardOpenOption.READ)) {
keyStore.load(in, pass);
} catch (Exception ex) {
throw new ClientException("Cannot open keystore [" + source + "] - " + ex.getMessage(), ex);
} finally {
}
return keyStore;
}
private TrustManager[] loadTrustManagers() throws GeneralSecurityException, IOException {
KeyStore keyStore = null;
if (StringUtils.hasText(truststoreLocation)) {
char[] pass = (StringUtils.hasText(truststorePass) ? truststorePass.trim().toCharArray() : null);
keyStore = loadKeyStore(truststoreLocation, pass, truststoreType);
}
TrustManagerFactory tmFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
tmFactory.init(keyStore);
return tmFactory.getTrustManagers();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
SslConfig other = (SslConfig) obj;
return Objects.equals(enabled, other.enabled)
&& Objects.equals(protocol, other.protocol)
&& Objects.equals(keystoreLocation, other.keystoreLocation)
&& Objects.equals(keystorePass, other.keystorePass)
&& Objects.equals(keystoreType, other.keystoreType)
&& Objects.equals(truststoreLocation, other.truststoreLocation)
&& Objects.equals(truststorePass, other.truststorePass)
&& Objects.equals(truststoreType, other.truststoreType);
}
@Override
public int hashCode() {
return getClass().hashCode();
}
}
| SslConfig |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/customtype/ObjectUserType.java | {
"start": 653,
"end": 2099
} | class ____ implements CompositeUserType<Object> {
@Override
public Object getPropertyValue(Object component, int property) throws HibernateException {
return switch ( property ) {
case 0 -> component;
case 1 -> component.getClass().getName();
default -> null;
};
}
@Override
public Object instantiate(ValueAccess valueAccess) {
return valueAccess.getValue( 0, Object.class );
}
@Override
public Class<?> embeddable() {
return TaggedObject.class;
}
@Override
public Class<Object> returnedClass() {
return Object.class;
}
@Override
public boolean equals(Object x, Object y) throws HibernateException {
if ( x == y ) {
return true;
}
if ( x == null || y == null ) {
return false;
}
return x.equals( y );
}
@Override
public int hashCode(Object x) throws HibernateException {
return x.hashCode();
}
@Override
public Object deepCopy(Object value) throws HibernateException {
return value; // Persisting only immutable types.
}
@Override
public boolean isMutable() {
return false;
}
@Override
public Serializable disassemble(Object value) throws HibernateException {
return (Serializable) value;
}
@Override
public Object assemble(Serializable cached, Object owner) throws HibernateException {
return cached;
}
@Override
public Object replace(Object original, Object target, Object owner) throws HibernateException {
return original;
}
public static | ObjectUserType |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/generics/PricedStuff.java | {
"start": 254,
"end": 297
} | class ____ extends Stuff<Price> {
}
| PricedStuff |
java | elastic__elasticsearch | build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/JarUtils.java | {
"start": 3600,
"end": 3687
} | interface ____<T, R> {
R apply(T t) throws IOException;
}
}
| UncheckedIOFunction |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredTimestampedKeyValueStoreTest.java | {
"start": 3569,
"end": 14748
} | class ____ {
private static final String APPLICATION_ID = "test-app";
private static final String STORE_NAME = "store-name";
private static final String STORE_TYPE = "scope";
private static final String STORE_LEVEL_GROUP = "stream-state-metrics";
private static final String CHANGELOG_TOPIC = "changelog-topic-name";
private static final String THREAD_ID_TAG_KEY = "thread-id";
private static final String KEY = "key";
private static final Bytes KEY_BYTES = Bytes.wrap(KEY.getBytes());
private static final ValueAndTimestamp<String> VALUE_AND_TIMESTAMP =
ValueAndTimestamp.make("value", 97L);
// timestamp is 97 what is ASCII of 'a'
private static final byte[] VALUE_AND_TIMESTAMP_BYTES = "\0\0\0\0\0\0\0avalue".getBytes();
private final String threadId = Thread.currentThread().getName();
private final TaskId taskId = new TaskId(0, 0, "My-Topology");
@Mock
private KeyValueStore<Bytes, byte[]> inner;
@Mock
private InternalProcessorContext<?, ?> context;
private MockTime mockTime;
private static final Map<String, Object> CONFIGS = mkMap(mkEntry(StreamsConfig.InternalConfig.TOPIC_PREFIX_ALTERNATIVE, APPLICATION_ID));
private MeteredTimestampedKeyValueStore<String, String> metered;
private final KeyValue<Bytes, byte[]> byteKeyValueTimestampPair = KeyValue.pair(KEY_BYTES,
VALUE_AND_TIMESTAMP_BYTES
);
private final Metrics metrics = new Metrics();
private Map<String, String> tags;
private void setUpWithoutContext() {
mockTime = new MockTime();
metered = new MeteredTimestampedKeyValueStore<>(
inner,
"scope",
mockTime,
Serdes.String(),
new ValueAndTimestampSerde<>(Serdes.String())
);
metrics.config().recordLevel(Sensor.RecordingLevel.DEBUG);
tags = mkMap(
mkEntry(THREAD_ID_TAG_KEY, threadId),
mkEntry("task-id", taskId.toString()),
mkEntry(STORE_TYPE + "-state-id", STORE_NAME)
);
}
private void setUp() {
setUpWithoutContext();
when(context.applicationId()).thenReturn(APPLICATION_ID);
when(context.metrics())
.thenReturn(new StreamsMetricsImpl(metrics, "test", mockTime));
when(context.taskId()).thenReturn(taskId);
when(context.changelogFor(STORE_NAME)).thenReturn(CHANGELOG_TOPIC);
when(inner.name()).thenReturn(STORE_NAME);
when(context.appConfigs()).thenReturn(CONFIGS);
}
@SuppressWarnings({"unchecked", "rawtypes"})
private void setUpWithExpectSerdes() {
setUp();
when(context.keySerde()).thenReturn((Serde) Serdes.String());
when(context.valueSerde()).thenReturn((Serde) Serdes.Long());
}
private void init() {
metered.init(context, metered);
}
@Test
public void shouldDelegateInit() {
setUp();
final MeteredTimestampedKeyValueStore<String, String> outer = new MeteredTimestampedKeyValueStore<>(
inner,
STORE_TYPE,
new MockTime(),
Serdes.String(),
new ValueAndTimestampSerde<>(Serdes.String())
);
doNothing().when(inner).init(context, outer);
outer.init(context, outer);
}
@Test
public void shouldPassChangelogTopicNameToStateStoreSerde() {
setUp();
doShouldPassChangelogTopicNameToStateStoreSerde(CHANGELOG_TOPIC);
}
@Test
public void shouldPassDefaultChangelogTopicNameToStateStoreSerdeIfLoggingDisabled() {
setUp();
final String defaultChangelogTopicName = ProcessorStateManager.storeChangelogTopic(APPLICATION_ID, STORE_NAME, taskId.topologyName());
when(context.changelogFor(STORE_NAME)).thenReturn(null);
doShouldPassChangelogTopicNameToStateStoreSerde(defaultChangelogTopicName);
}
@SuppressWarnings("unchecked")
private void doShouldPassChangelogTopicNameToStateStoreSerde(final String topic) {
final Serde<String> keySerde = mock(Serde.class);
final Serializer<String> keySerializer = mock(Serializer.class);
final Serde<ValueAndTimestamp<String>> valueSerde = mock(Serde.class);
final Deserializer<ValueAndTimestamp<String>> valueDeserializer = mock(Deserializer.class);
final Serializer<ValueAndTimestamp<String>> valueSerializer = mock(Serializer.class);
when(keySerde.serializer()).thenReturn(keySerializer);
when(keySerializer.serialize(topic, KEY)).thenReturn(KEY.getBytes());
when(valueSerde.deserializer()).thenReturn(valueDeserializer);
when(valueDeserializer.deserialize(topic, VALUE_AND_TIMESTAMP_BYTES)).thenReturn(VALUE_AND_TIMESTAMP);
when(valueSerde.serializer()).thenReturn(valueSerializer);
when(valueSerializer.serialize(topic, VALUE_AND_TIMESTAMP)).thenReturn(VALUE_AND_TIMESTAMP_BYTES);
when(inner.get(KEY_BYTES)).thenReturn(VALUE_AND_TIMESTAMP_BYTES);
metered = new MeteredTimestampedKeyValueStore<>(
inner,
STORE_TYPE,
new MockTime(),
keySerde,
valueSerde
);
metered.init(context, metered);
metered.get(KEY);
metered.put(KEY, VALUE_AND_TIMESTAMP);
}
@Test
public void testMetrics() {
setUp();
init();
final JmxReporter reporter = new JmxReporter();
final MetricsContext metricsContext = new KafkaMetricsContext("kafka.streams");
reporter.contextChange(metricsContext);
metrics.addReporter(reporter);
assertTrue(reporter.containsMbean(String.format(
"kafka.streams:type=%s,%s=%s,task-id=%s,%s-state-id=%s",
STORE_LEVEL_GROUP,
THREAD_ID_TAG_KEY,
threadId,
taskId,
STORE_TYPE,
STORE_NAME
)));
}
@Test
public void shouldWriteBytesToInnerStoreAndRecordPutMetric() {
setUp();
doNothing().when(inner).put(KEY_BYTES, VALUE_AND_TIMESTAMP_BYTES);
init();
metered.put(KEY, VALUE_AND_TIMESTAMP);
final KafkaMetric metric = metric("put-rate");
assertTrue((Double) metric.metricValue() > 0);
}
@Test
public void shouldGetWithBinary() {
setUp();
when(inner.get(KEY_BYTES)).thenReturn(VALUE_AND_TIMESTAMP_BYTES);
init();
final RawAndDeserializedValue<String> valueWithBinary = metered.getWithBinary(KEY);
assertEquals(VALUE_AND_TIMESTAMP, valueWithBinary.value);
assertArrayEquals(VALUE_AND_TIMESTAMP_BYTES, valueWithBinary.serializedValue);
}
@Test
public void shouldNotPutIfSameValuesAndGreaterTimestamp() {
setUp();
init();
metered.put(KEY, VALUE_AND_TIMESTAMP);
final ValueAndTimestampSerde<String> stringSerde = new ValueAndTimestampSerde<>(Serdes.String());
final byte[] encodedOldValue = stringSerde.serializer().serialize("TOPIC", VALUE_AND_TIMESTAMP);
final ValueAndTimestamp<String> newValueAndTimestamp = ValueAndTimestamp.make("value", 98L);
assertFalse(metered.putIfDifferentValues(KEY, newValueAndTimestamp, encodedOldValue));
}
@Test
public void shouldPutIfOutOfOrder() {
setUp();
doNothing().when(inner).put(KEY_BYTES, VALUE_AND_TIMESTAMP_BYTES);
init();
metered.put(KEY, VALUE_AND_TIMESTAMP);
final ValueAndTimestampSerde<String> stringSerde = new ValueAndTimestampSerde<>(Serdes.String());
final byte[] encodedOldValue = stringSerde.serializer().serialize("TOPIC", VALUE_AND_TIMESTAMP);
final ValueAndTimestamp<String> outOfOrderValueAndTimestamp = ValueAndTimestamp.make("value", 95L);
assertTrue(metered.putIfDifferentValues(KEY, outOfOrderValueAndTimestamp, encodedOldValue));
}
@Test
public void shouldGetBytesFromInnerStoreAndReturnGetMetric() {
setUp();
when(inner.get(KEY_BYTES)).thenReturn(VALUE_AND_TIMESTAMP_BYTES);
init();
assertThat(metered.get(KEY), equalTo(VALUE_AND_TIMESTAMP));
final KafkaMetric metric = metric("get-rate");
assertTrue((Double) metric.metricValue() > 0);
}
@Test
public void shouldPutIfAbsentAndRecordPutIfAbsentMetric() {
setUp();
when(inner.putIfAbsent(KEY_BYTES, VALUE_AND_TIMESTAMP_BYTES)).thenReturn(null);
init();
metered.putIfAbsent(KEY, VALUE_AND_TIMESTAMP);
final KafkaMetric metric = metric("put-if-absent-rate");
assertTrue((Double) metric.metricValue() > 0);
}
private KafkaMetric metric(final String name) {
return this.metrics.metric(new MetricName(name, STORE_LEVEL_GROUP, "", tags));
}
@SuppressWarnings("unchecked")
@Test
public void shouldPutAllToInnerStoreAndRecordPutAllMetric() {
setUp();
doNothing().when(inner).putAll(any(List.class));
init();
metered.putAll(Collections.singletonList(KeyValue.pair(KEY, VALUE_AND_TIMESTAMP)));
final KafkaMetric metric = metric("put-all-rate");
assertTrue((Double) metric.metricValue() > 0);
}
@Test
public void shouldDeleteFromInnerStoreAndRecordDeleteMetric() {
setUp();
when(inner.delete(KEY_BYTES)).thenReturn(VALUE_AND_TIMESTAMP_BYTES);
init();
metered.delete(KEY);
final KafkaMetric metric = metric("delete-rate");
assertTrue((Double) metric.metricValue() > 0);
}
@Test
public void shouldGetRangeFromInnerStoreAndRecordRangeMetric() {
setUp();
when(inner.range(KEY_BYTES, KEY_BYTES)).thenReturn(
new KeyValueIteratorStub<>(Collections.singletonList(byteKeyValueTimestampPair).iterator()));
init();
final KeyValueIterator<String, ValueAndTimestamp<String>> iterator = metered.range(KEY, KEY);
assertThat(iterator.next().value, equalTo(VALUE_AND_TIMESTAMP));
assertFalse(iterator.hasNext());
iterator.close();
final KafkaMetric metric = metric("range-rate");
assertTrue((Double) metric.metricValue() > 0);
}
@Test
public void shouldGetAllFromInnerStoreAndRecordAllMetric() {
setUp();
when(inner.all())
.thenReturn(new KeyValueIteratorStub<>(Collections.singletonList(byteKeyValueTimestampPair).iterator()));
init();
final KeyValueIterator<String, ValueAndTimestamp<String>> iterator = metered.all();
assertThat(iterator.next().value, equalTo(VALUE_AND_TIMESTAMP));
assertFalse(iterator.hasNext());
iterator.close();
final KafkaMetric metric = metric(new MetricName("all-rate", STORE_LEVEL_GROUP, "", tags));
assertTrue((Double) metric.metricValue() > 0);
}
@Test
public void shouldFlushInnerWhenFlushTimeRecords() {
setUp();
doNothing().when(inner).flush();
init();
metered.flush();
final KafkaMetric metric = metric("flush-rate");
assertTrue((Double) metric.metricValue() > 0);
}
private | MeteredTimestampedKeyValueStoreTest |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java | {
"start": 972,
"end": 7864
} | class ____ extends InternalTermsTestCase {
@Override
protected InternalTerms<?, ?> createTestInstance(
String name,
Map<String, Object> metadata,
InternalAggregations aggregations,
boolean showTermDocCountError,
long docCountError
) {
return createTestInstance(generateRandomDict(), name, metadata, aggregations, showTermDocCountError, docCountError);
}
@Override
protected BuilderAndToReduce<InternalTerms<?, ?>> randomResultsToReduce(String name, int size) {
List<InternalTerms<?, ?>> inputs = new ArrayList<>();
BytesRef[] dict = generateRandomDict();
for (int i = 0; i < size; i++) {
InternalTerms<?, ?> t = randomBoolean() ? createUnmappedInstance(name) : createTestInstance(dict, name);
inputs.add(t);
}
return new BuilderAndToReduce<>(mockBuilder(inputs), inputs);
}
@Override
protected InternalTerms<?, ?> mutateInstance(InternalTerms<?, ?> instance) {
if (instance instanceof StringTerms stringTerms) {
String name = stringTerms.getName();
BucketOrder order = stringTerms.order;
int requiredSize = stringTerms.requiredSize;
long minDocCount = stringTerms.minDocCount;
DocValueFormat format = stringTerms.format;
int shardSize = stringTerms.getShardSize();
boolean showTermDocCountError = stringTerms.showTermDocCountError;
long otherDocCount = stringTerms.getSumOfOtherDocCounts();
List<StringTerms.Bucket> buckets = stringTerms.getBuckets();
long docCountError = stringTerms.getDocCountError();
Map<String, Object> metadata = stringTerms.getMetadata();
switch (between(0, 8)) {
case 0 -> name += randomAlphaOfLength(5);
case 1 -> requiredSize += between(1, 100);
case 2 -> minDocCount += between(1, 100);
case 3 -> shardSize += between(1, 100);
case 4 -> showTermDocCountError = showTermDocCountError == false;
case 5 -> otherDocCount += between(1, 100);
case 6 -> docCountError += between(1, 100);
case 7 -> {
buckets = new ArrayList<>(buckets);
buckets.add(
new StringTerms.Bucket(
new BytesRef(randomAlphaOfLengthBetween(1, 10)),
randomNonNegativeLong(),
InternalAggregations.EMPTY,
showTermDocCountError,
docCountError,
format
)
);
}
case 8 -> {
if (metadata == null) {
metadata = Maps.newMapWithExpectedSize(1);
} else {
metadata = new HashMap<>(instance.getMetadata());
}
metadata.put(randomAlphaOfLength(15), randomInt());
}
default -> throw new AssertionError("Illegal randomisation branch");
}
Collections.sort(buckets, stringTerms.reduceOrder.comparator());
return new StringTerms(
name,
stringTerms.reduceOrder,
order,
requiredSize,
minDocCount,
metadata,
format,
shardSize,
showTermDocCountError,
otherDocCount,
buckets,
docCountError
);
} else {
String name = instance.getName();
BucketOrder order = instance.order;
int requiredSize = instance.requiredSize;
long minDocCount = instance.minDocCount;
Map<String, Object> metadata = instance.getMetadata();
switch (between(0, 3)) {
case 0 -> name += randomAlphaOfLength(5);
case 1 -> requiredSize += between(1, 100);
case 2 -> minDocCount += between(1, 100);
case 3 -> {
if (metadata == null) {
metadata = Maps.newMapWithExpectedSize(1);
} else {
metadata = new HashMap<>(instance.getMetadata());
}
metadata.put(randomAlphaOfLength(15), randomInt());
}
default -> throw new AssertionError("Illegal randomisation branch");
}
return new UnmappedTerms(name, order, requiredSize, minDocCount, metadata);
}
}
private BytesRef[] generateRandomDict() {
Set<BytesRef> terms = new HashSet<>();
int numTerms = randomIntBetween(2, 100);
for (int i = 0; i < numTerms; i++) {
terms.add(new BytesRef(randomAlphaOfLength(10)));
}
return terms.stream().toArray(BytesRef[]::new);
}
private InternalTerms<?, ?> createTestInstance(BytesRef[] dict, String name) {
return createTestInstance(dict, name, createTestMetadata(), createSubAggregations(), showDocCount, docCountError);
}
private InternalTerms<?, ?> createTestInstance(
BytesRef[] dict,
String name,
Map<String, Object> metadata,
InternalAggregations aggregations,
boolean showTermDocCountError,
long docCountError
) {
BucketOrder order = BucketOrder.count(false);
long minDocCount = 1;
int requiredSize = 3;
int shardSize = requiredSize + 2;
DocValueFormat format = DocValueFormat.RAW;
long otherDocCount = 0;
List<StringTerms.Bucket> buckets = new ArrayList<>();
final int numBuckets = randomNumberOfBuckets();
Set<BytesRef> terms = new HashSet<>();
for (int i = 0; i < numBuckets; ++i) {
BytesRef term = dict[randomIntBetween(0, dict.length - 1)];
if (terms.add(term)) {
int docCount = randomIntBetween(1, 100);
buckets.add(new StringTerms.Bucket(term, docCount, aggregations, showTermDocCountError, docCountError, format));
}
}
BucketOrder reduceOrder = randomBoolean()
? BucketOrder.compound(BucketOrder.key(true), BucketOrder.count(false))
: BucketOrder.key(true);
Collections.sort(buckets, reduceOrder.comparator());
return new StringTerms(
name,
reduceOrder,
order,
requiredSize,
minDocCount,
metadata,
format,
shardSize,
showTermDocCountError,
otherDocCount,
buckets,
docCountError
);
}
}
| StringTermsTests |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/aot/hint/ReflectionHintsTests.java | {
"start": 10418,
"end": 10466
} | interface ____ {
void first();
}
| FirstInterface |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicyDefault.java | {
"start": 2296,
"end": 3062
} | class ____ responsible for choosing the desired number of targets
* for placing block replicas.
* The replica placement strategy is that if the writer is on a datanode,
* the 1st replica is placed on the local machine by default
* (By passing the {@link org.apache.hadoop.fs.CreateFlag#NO_LOCAL_WRITE} flag
* the client can request not to put a block replica on the local datanode.
* Subsequent replicas will still follow default block placement policy.).
* If the writer is not on a datanode, the 1st replica is placed on a random
* node.
* The 2nd replica is placed on a datanode that is on a different rack.
* The 3rd replica is placed on a datanode which is on a different node of the
* rack as the second replica.
*/
@InterfaceAudience.Private
public | is |
java | elastic__elasticsearch | x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/LocalStateSearchableSnapshots.java | {
"start": 652,
"end": 1597
} | class ____ extends LocalStateCompositeXPackPlugin implements SystemIndexPlugin {
private final SearchableSnapshots plugin;
public LocalStateSearchableSnapshots(final Settings settings, final Path configPath) {
super(settings, configPath);
this.plugin = new SearchableSnapshots(settings) {
@Override
protected XPackLicenseState getLicenseState() {
return LocalStateSearchableSnapshots.this.getLicenseState();
}
};
plugins.add(plugin);
}
@Override
public Collection<SystemIndexDescriptor> getSystemIndexDescriptors(Settings settings) {
return plugin.getSystemIndexDescriptors(settings);
}
@Override
public String getFeatureName() {
return plugin.getFeatureName();
}
@Override
public String getFeatureDescription() {
return plugin.getFeatureDescription();
}
}
| LocalStateSearchableSnapshots |
java | apache__flink | flink-formats/flink-hadoop-bulk/src/main/java/org/apache/flink/formats/hadoop/bulk/HadoopPathBasedPartFileWriter.java | {
"start": 9242,
"end": 10475
} | class ____
implements SimpleVersionedSerializer<InProgressFileRecoverable> {
static final UnsupportedInProgressFileRecoverableSerializable INSTANCE =
new UnsupportedInProgressFileRecoverableSerializable();
@Override
public int getVersion() {
throw new UnsupportedOperationException(
"Persists the path-based part file write is not supported");
}
@Override
public byte[] serialize(InProgressFileRecoverable obj) {
throw new UnsupportedOperationException(
"Persists the path-based part file write is not supported");
}
@Override
public InProgressFileRecoverable deserialize(int version, byte[] serialized) {
throw new UnsupportedOperationException(
"Persists the path-based part file write is not supported");
}
}
/**
* Factory to create {@link HadoopPathBasedPartFileWriter}. This writer does not support
* snapshotting the in-progress files. For pending files, it stores the target path and the
* staging file path into the state.
*/
public static | UnsupportedInProgressFileRecoverableSerializable |
java | spring-projects__spring-boot | module/spring-boot-jackson2/src/main/java/org/springframework/boot/jackson2/JsonMixinModuleEntries.java | {
"start": 5165,
"end": 5781
} | class ____
* @return {@code this}, to facilitate method chaining
*/
public Builder and(String typeClassName, String mixinClassName) {
this.entries.put(typeClassName, mixinClassName);
return this;
}
/**
* Add a mapping for the specified classes.
* @param type the type class
* @param mixinClass the mixin class
* @return {@code this}, to facilitate method chaining
*/
public Builder and(Class<?> type, Class<?> mixinClass) {
this.entries.put(type, mixinClass);
return this;
}
JsonMixinModuleEntries build() {
return new JsonMixinModuleEntries(this);
}
}
static | name |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/filter/ThresholdFilterTest.java | {
"start": 1248,
"end": 2166
} | class ____ {
@Test
void testThresholds() {
final ThresholdFilter filter = ThresholdFilter.createFilter(Level.ERROR, null, null);
filter.start();
assertTrue(filter.isStarted());
assertSame(Filter.Result.DENY, filter.filter(null, Level.DEBUG, null, (Object) null, null));
assertSame(Filter.Result.NEUTRAL, filter.filter(null, Level.ERROR, null, (Object) null, null));
LogEvent event = Log4jLogEvent.newBuilder() //
.setLevel(Level.DEBUG) //
.setMessage(new SimpleMessage("Test")) //
.build();
assertSame(Filter.Result.DENY, filter.filter(event));
event = Log4jLogEvent.newBuilder() //
.setLevel(Level.ERROR) //
.setMessage(new SimpleMessage("Test")) //
.build();
assertSame(Filter.Result.NEUTRAL, filter.filter(event));
}
}
| ThresholdFilterTest |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/io/checkpointing/UnalignedCheckpointsInterruptibleTimersTest.java | {
"start": 7849,
"end": 10897
} | class ____ extends AbstractStreamOperator<String>
implements OneInputStreamOperator<String, String>,
Triggerable<String, String>,
YieldingOperator<String> {
private final Map<Instant, Integer> timersToRegister;
private transient @Nullable MailboxExecutor mailboxExecutor;
private transient @Nullable MailboxWatermarkProcessor watermarkProcessor;
MultipleTimersAtTheSameTimestamp() {
this(Collections.emptyMap());
}
MultipleTimersAtTheSameTimestamp(Map<Instant, Integer> timersToRegister) {
this.timersToRegister = timersToRegister;
}
@Override
public void setMailboxExecutor(MailboxExecutor mailboxExecutor) {
this.mailboxExecutor = mailboxExecutor;
}
@Override
public void open() throws Exception {
super.open();
if (getTimeServiceManager().isPresent()) {
this.watermarkProcessor =
new MailboxWatermarkProcessor(
output, mailboxExecutor, getTimeServiceManager().get());
}
}
@Override
public void processElement(StreamRecord<String> element) {
if (!timersToRegister.isEmpty()) {
final InternalTimerService<String> timers =
getInternalTimerService("timers", StringSerializer.INSTANCE, this);
for (Map.Entry<Instant, Integer> entry : timersToRegister.entrySet()) {
for (int keyIdx = 0; keyIdx < entry.getValue(); keyIdx++) {
final String key = String.format("key-%d", keyIdx);
setCurrentKey(key);
timers.registerEventTimeTimer(
String.format("window-%s", entry.getKey()),
entry.getKey().toEpochMilli());
}
}
}
}
@Override
public void processWatermark(Watermark mark) throws Exception {
if (watermarkProcessor == null) {
super.processWatermark(mark);
} else {
watermarkProcessor.emitWatermarkInsideMailbox(mark);
}
}
@Override
public void onEventTime(InternalTimer<String, String> timer) throws Exception {
mailboxExecutor.execute(
() -> output.collect(asMailRecord(timer.getKey())), "mail-" + timer.getKey());
output.collect(asFiredRecord(timer.getKey()));
}
@Override
public void onProcessingTime(InternalTimer<String, String> timer) throws Exception {}
MultipleTimersAtTheSameTimestamp withTimers(Instant timestamp, int count) {
final Map<Instant, Integer> copy = new HashMap<>(timersToRegister);
copy.put(timestamp, count);
return new MultipleTimersAtTheSameTimestamp(copy);
}
}
}
| MultipleTimersAtTheSameTimestamp |
java | apache__camel | components/camel-huawei/camel-huaweicloud-obs/src/test/java/org/apache/camel/component/huaweicloud/obs/models/OBSRegionTest.java | {
"start": 968,
"end": 2344
} | class ____ {
@Test
public void testRegions() {
assertEquals("obs.af-south-1.myhuaweicloud.com", OBSRegion.valueOf("af-south-1"));
assertEquals("obs.ap-southeast-2.myhuaweicloud.com", OBSRegion.valueOf("ap-southeast-2"));
assertEquals("obs.ap-southeast-3.myhuaweicloud.com", OBSRegion.valueOf("ap-southeast-3"));
assertEquals("obs.cn-east-3.myhuaweicloud.com", OBSRegion.valueOf("cn-east-3"));
assertEquals("obs.cn-east-2.myhuaweicloud.com", OBSRegion.valueOf("cn-east-2"));
assertEquals("obs.cn-north-1.myhuaweicloud.com", OBSRegion.valueOf("cn-north-1"));
assertEquals("obs.cn-south-1.myhuaweicloud.com", OBSRegion.valueOf("cn-south-1"));
assertEquals("obs.ap-southeast-1.myhuaweicloud.com", OBSRegion.valueOf("ap-southeast-1"));
assertEquals("obs.sa-argentina-1.myhuaweicloud.com", OBSRegion.valueOf("sa-argentina-1"));
assertEquals("obs.sa-peru-1.myhuaweicloud.com", OBSRegion.valueOf("sa-peru-1"));
assertEquals("obs.na-mexico-1.myhuaweicloud.com", OBSRegion.valueOf("na-mexico-1"));
assertEquals("obs.la-south-2.myhuaweicloud.com", OBSRegion.valueOf("la-south-2"));
assertEquals("obs.sa-chile-1.myhuaweicloud.com", OBSRegion.valueOf("sa-chile-1"));
assertEquals("obs.sa-brazil-1.myhuaweicloud.com", OBSRegion.valueOf("sa-brazil-1"));
}
}
| OBSRegionTest |
java | apache__camel | components/camel-jsonpath/src/test/java/org/apache/camel/jsonpath/JsonPathWithSimpleCBRTest.java | {
"start": 1057,
"end": 3177
} | class ____ extends CamelTestSupport {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.choice()
.when().jsonpath("$.store.book[?(@.price < ${header.cheap})]")
.to("mock:cheap")
.when().jsonpath("$.store.book[?(@.price < ${header.average})]")
.to("mock:average")
.otherwise()
.to("mock:expensive");
}
};
}
@Test
public void testCheap() throws Exception {
getMockEndpoint("mock:cheap").expectedMessageCount(1);
getMockEndpoint("mock:average").expectedMessageCount(0);
getMockEndpoint("mock:expensive").expectedMessageCount(0);
fluentTemplate.withHeader("cheap", 10).withHeader("average", 30).withBody(new File("src/test/resources/cheap.json"))
.to("direct:start").send();
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testAverage() throws Exception {
getMockEndpoint("mock:cheap").expectedMessageCount(0);
getMockEndpoint("mock:average").expectedMessageCount(1);
getMockEndpoint("mock:expensive").expectedMessageCount(0);
fluentTemplate.withHeader("cheap", 10).withHeader("average", 30).withBody(new File("src/test/resources/average.json"))
.to("direct:start").send();
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testExpensive() throws Exception {
getMockEndpoint("mock:cheap").expectedMessageCount(0);
getMockEndpoint("mock:average").expectedMessageCount(0);
getMockEndpoint("mock:expensive").expectedMessageCount(1);
fluentTemplate.withHeader("cheap", 10).withHeader("average", 30).withBody(new File("src/test/resources/expensive.json"))
.to("direct:start").send();
MockEndpoint.assertIsSatisfied(context);
}
}
| JsonPathWithSimpleCBRTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/aggregate/DB2AggregateSupport.java | {
"start": 2480,
"end": 16436
} | class ____ extends AggregateSupportImpl {
public static final AggregateSupport INSTANCE = new DB2AggregateSupport( false );
public static final AggregateSupport JSON_INSTANCE = new DB2AggregateSupport( true );
private static final String JSON_QUERY_START = "json_query(";
private static final String JSON_QUERY_JSON_END = "')";
private static final String XML_EXTRACT_START = "xmlelement(name \"" + XmlHelper.ROOT_TAG + "\",xmlquery(";
private static final String XML_EXTRACT_SEPARATOR = "/*' passing ";
private static final String XML_EXTRACT_END = " as \"d\"))";
private final boolean jsonSupport;
public DB2AggregateSupport(boolean jsonSupport) {
this.jsonSupport = jsonSupport;
}
@Override
public String aggregateComponentCustomReadExpression(
String template,
String placeholder,
String aggregateParentReadExpression,
String columnExpression,
int aggregateColumnTypeCode,
SqlTypedMapping column,
TypeConfiguration typeConfiguration) {
switch ( aggregateColumnTypeCode ) {
case JSON:
case JSON_ARRAY:
if ( !jsonSupport ) {
break;
}
final String parentPartExpression;
if ( aggregateParentReadExpression.startsWith( JSON_QUERY_START ) && aggregateParentReadExpression.endsWith( JSON_QUERY_JSON_END ) ) {
parentPartExpression = aggregateParentReadExpression.substring( JSON_QUERY_START.length(), aggregateParentReadExpression.length() - JSON_QUERY_JSON_END.length() ) + ".";
}
else {
parentPartExpression = aggregateParentReadExpression + ",'$.";
}
switch ( column.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode() ) {
case BOOLEAN:
if ( SqlTypes.isNumericType( column.getJdbcMapping().getJdbcType().getDdlTypeCode() ) ) {
return template.replace(
placeholder,
"decode(json_value(" + parentPartExpression + columnExpression + "'),'true',1,'false',0)"
);
}
else {
return template.replace(
placeholder,
"decode(json_value(" + parentPartExpression + columnExpression + "'),'true',true,'false',false)"
);
}
case TIMESTAMP_WITH_TIMEZONE:
case TIMESTAMP_UTC:
return template.replace(
placeholder,
"cast(trim(trailing 'Z' from json_value(" + parentPartExpression + columnExpression + "' returning varchar(35))) as " + column.getColumnDefinition() + ")"
);
case BINARY:
case VARBINARY:
case LONG32VARBINARY:
case BLOB:
// We encode binary data as hex, so we have to decode here
return template.replace(
placeholder,
"hextoraw(json_value(" + parentPartExpression + columnExpression + "'))"
);
case UUID:
return template.replace(
placeholder,
"hextoraw(replace(json_value(" + parentPartExpression + columnExpression + "'),'-',''))"
);
case JSON:
case JSON_ARRAY:
return template.replace(
placeholder,
"json_query(" + parentPartExpression + columnExpression + "')"
);
default:
return template.replace(
placeholder,
"json_value(" + parentPartExpression + columnExpression + "' returning " + column.getColumnDefinition() + ")"
);
}
case SQLXML:
case XML_ARRAY:
switch ( column.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode() ) {
case BOOLEAN:
if ( SqlTypes.isNumericType( column.getJdbcMapping().getJdbcType().getDdlTypeCode() ) ) {
return template.replace(
placeholder,
"decode(xmlcast(xmlquery(" + xmlExtractArguments( aggregateParentReadExpression, columnExpression ) + ") as varchar(5)),'true',1,'false',0)"
);
}
else {
return template.replace(
placeholder,
"decode(xmlcast(xmlquery(" + xmlExtractArguments( aggregateParentReadExpression, columnExpression ) + ") as varchar(5)),'true',true,'false',false)"
);
}
case BINARY:
case VARBINARY:
case LONG32VARBINARY:
case BLOB:
// We encode binary data as hex, so we have to decode here
return template.replace(
placeholder,
"hextoraw(xmlcast(xmlquery(" + xmlExtractArguments( aggregateParentReadExpression, columnExpression ) + ") as clob))"
);
case TIMESTAMP_WITH_TIMEZONE:
case TIMESTAMP_UTC:
return template.replace(
placeholder,
"cast(replace(trim(trailing 'Z' from xmlcast(xmlquery(" + xmlExtractArguments( aggregateParentReadExpression, columnExpression ) + ") as varchar(35))),'T',' ') as " + column.getColumnDefinition() + ")"
);
case SQLXML:
return template.replace(
placeholder,
XML_EXTRACT_START + xmlExtractArguments( aggregateParentReadExpression, columnExpression + "/*" ) + "))"
);
case XML_ARRAY:
if ( typeConfiguration.getCurrentBaseSqlTypeIndicators().isXmlFormatMapperLegacyFormatEnabled() ) {
throw new IllegalArgumentException( "XML array '" + columnExpression + "' in '" + aggregateParentReadExpression + "' is not supported with legacy format enabled." );
}
else {
return template.replace(
placeholder,
"xmlelement(name \"Collection\",xmlquery(" + xmlExtractArguments( aggregateParentReadExpression, columnExpression + "/*" ) + "))"
);
}
case UUID:
if ( SqlTypes.isBinaryType( column.getJdbcMapping().getJdbcType().getDdlTypeCode() ) ) {
return template.replace(
placeholder,
"hextoraw(replace(xmlcast(xmlquery(" + xmlExtractArguments( aggregateParentReadExpression, columnExpression ) + ") as varchar(36)),'-',''))"
);
}
// Fall-through intended
default:
return template.replace(
placeholder,
"xmlcast(xmlquery(" + xmlExtractArguments( aggregateParentReadExpression, columnExpression ) + ") as " + column.getColumnDefinition() + ")"
);
}
case STRUCT:
return template.replace( placeholder, aggregateParentReadExpression + ".." + columnExpression );
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnTypeCode );
}
private static String xmlExtractArguments(String aggregateParentReadExpression, String xpathFragment) {
final String extractArguments;
final int separatorIndex;
if ( aggregateParentReadExpression.startsWith( XML_EXTRACT_START )
&& aggregateParentReadExpression.endsWith( XML_EXTRACT_END )
&& (separatorIndex = aggregateParentReadExpression.indexOf( XML_EXTRACT_SEPARATOR )) != -1 ) {
final StringBuilder sb = new StringBuilder( aggregateParentReadExpression.length() - XML_EXTRACT_START.length() + xpathFragment.length() );
sb.append( aggregateParentReadExpression, XML_EXTRACT_START.length(), separatorIndex );
sb.append( '/' );
sb.append( xpathFragment );
sb.append( aggregateParentReadExpression, separatorIndex + 2, aggregateParentReadExpression.length() - 2 );
extractArguments = sb.toString();
}
else {
extractArguments = "'$d/" + XmlHelper.ROOT_TAG + "/" + xpathFragment + "' passing " + aggregateParentReadExpression + " as \"d\"";
}
return extractArguments;
}
private static String jsonCustomWriteExpression(String customWriteExpression, JdbcMapping jdbcMapping) {
final int sqlTypeCode = jdbcMapping.getJdbcType().getDefaultSqlTypeCode();
switch ( sqlTypeCode ) {
case BINARY:
case VARBINARY:
case LONG32VARBINARY:
case BLOB:
// We encode binary data as hex
return "hex(" + customWriteExpression + ")";
case UUID:
return "regexp_replace(lower(hex(" + customWriteExpression + ")),'^(.{8})(.{4})(.{4})(.{4})(.{12})$','$1-$2-$3-$4-$5')";
case ARRAY:
case JSON_ARRAY:
return "(" + customWriteExpression + ") format json";
// case BOOLEAN:
// return "(" + customWriteExpression + ")=true";
case TIME:
return "varchar_format(timestamp('1970-01-01'," + customWriteExpression + "),'HH24:MI:SS')";
case TIMESTAMP:
return "replace(varchar_format(" + customWriteExpression + ",'YYYY-MM-DD HH24:MI:SS.FF9'),' ','T')";
case TIMESTAMP_UTC:
return "replace(varchar_format(" + customWriteExpression + ",'YYYY-MM-DD HH24:MI:SS.FF9'),' ','T')||'Z'";
default:
return customWriteExpression;
}
}
private static String xmlCustomWriteExpression(String customWriteExpression, JdbcMapping jdbcMapping) {
final int sqlTypeCode = jdbcMapping.getJdbcType().getDefaultSqlTypeCode();
switch ( sqlTypeCode ) {
case BINARY:
case VARBINARY:
case LONG32VARBINARY:
case BLOB:
// We encode binary data as hex
return "hex(" + customWriteExpression + ")";
case UUID:
// Old DB2 didn't support regexp_replace yet
return "overlay(overlay(overlay(overlay(lower(hex(" + customWriteExpression + ")),'-',21,0,octets),'-',17,0,octets),'-',13,0,octets),'-',9,0,octets)";
// case ARRAY:
// case XML_ARRAY:
// return "(" + customWriteExpression + ") format json";
case BOOLEAN:
return "decode(" + customWriteExpression + ",true,'true',false,'false')";
case TIME:
return "varchar_format(timestamp('1970-01-01'," + customWriteExpression + "),'HH24:MI:SS')";
case TIMESTAMP:
return "replace(varchar_format(" + customWriteExpression + ",'YYYY-MM-DD HH24:MI:SS.FF9'),' ','T')";
case TIMESTAMP_UTC:
return "replace(varchar_format(" + customWriteExpression + ",'YYYY-MM-DD HH24:MI:SS.FF9'),' ','T')||'Z'";
default:
return customWriteExpression;
}
}
@Override
public String aggregateComponentAssignmentExpression(
String aggregateParentAssignmentExpression,
String columnExpression,
int aggregateColumnTypeCode,
Column column) {
switch ( aggregateColumnTypeCode ) {
case JSON:
case JSON_ARRAY:
if ( jsonSupport ) {
// For JSON we always have to replace the whole object
return aggregateParentAssignmentExpression;
}
break;
case SQLXML:
case XML_ARRAY:
return aggregateParentAssignmentExpression;
case STRUCT:
return aggregateParentAssignmentExpression + ".." + columnExpression;
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnTypeCode );
}
@Override
public String aggregateCustomWriteExpression(
AggregateColumn aggregateColumn,
List<Column> aggregatedColumns) {
// We need to know what array this is STRUCT_ARRAY/JSON_ARRAY/XML_ARRAY,
// which we can easily get from the type code of the aggregate column
final int sqlTypeCode = aggregateColumn.getType().getJdbcType().getDefaultSqlTypeCode();
switch ( sqlTypeCode == ARRAY ? aggregateColumn.getTypeCode() : sqlTypeCode ) {
case JSON:
case JSON_ARRAY:
if ( jsonSupport ) {
return null;
}
break;
case SQLXML:
case XML_ARRAY:
return null;
case STRUCT:
final StringBuilder sb = new StringBuilder();
appendStructCustomWriteExpression( aggregateColumn, aggregatedColumns, sb );
return sb.toString();
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumn.getTypeCode() );
}
private static void appendStructCustomWriteExpression(
ColumnTypeInformation aggregateColumnType,
List<Column> aggregatedColumns,
StringBuilder sb) {
sb.append( aggregateColumnType.getTypeName() ).append( "()" );
for ( Column udtColumn : aggregatedColumns ) {
sb.append( ".." ).append( udtColumn.getName() ).append( '(' );
if ( udtColumn.getSqlTypeCode() == STRUCT ) {
final AggregateColumn aggregateColumn = (AggregateColumn) udtColumn;
appendStructCustomWriteExpression(
aggregateColumn,
aggregateColumn.getComponent().getAggregatedColumns(),
sb
);
}
else {
sb.append( "cast(? as " ).append( udtColumn.getSqlType() ).append( ')' );
}
sb.append( ')' );
}
}
@Override
public int aggregateComponentSqlTypeCode(int aggregateColumnSqlTypeCode, int columnSqlTypeCode) {
return switch (aggregateColumnSqlTypeCode) {
// DB2 doesn't support booleans in structs
case STRUCT -> columnSqlTypeCode == BOOLEAN ? SMALLINT : columnSqlTypeCode;
case JSON -> columnSqlTypeCode == ARRAY ? JSON_ARRAY : columnSqlTypeCode;
case SQLXML -> columnSqlTypeCode == ARRAY ? XML_ARRAY : columnSqlTypeCode;
default -> columnSqlTypeCode;
};
}
@Override
public boolean requiresAggregateCustomWriteExpressionRenderer(int aggregateSqlTypeCode) {
return aggregateSqlTypeCode == STRUCT || aggregateSqlTypeCode == JSON || aggregateSqlTypeCode == SQLXML;
}
@Override
public WriteExpressionRenderer aggregateCustomWriteExpressionRenderer(
SelectableMapping aggregateColumn,
SelectableMapping[] columnsToUpdate,
TypeConfiguration typeConfiguration) {
final int aggregateSqlTypeCode = aggregateColumn.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode();
switch ( aggregateSqlTypeCode ) {
case JSON:
if ( jsonSupport ) {
return new RootJsonWriteExpression( aggregateColumn, columnsToUpdate );
}
break;
case SQLXML:
return new RootXmlWriteExpression( aggregateColumn, columnsToUpdate );
case STRUCT:
return new RootStructWriteExpression( aggregateColumn, columnsToUpdate, typeConfiguration );
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateSqlTypeCode );
}
private static String determineTypeName(SelectableMapping column, TypeConfiguration typeConfiguration) {
final String typeName;
if ( column.getColumnDefinition() == null ) {
final DdlType ddlType = typeConfiguration.getDdlTypeRegistry().getDescriptor(
column.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode()
);
final Size size = new Size();
size.setLength( column.getLength() );
size.setPrecision( column.getPrecision() );
size.setScale( column.getScale() );
return ddlType.getCastTypeName(
size,
(SqlExpressible) column.getJdbcMapping(),
typeConfiguration.getDdlTypeRegistry()
);
}
else{
typeName = column.getColumnDefinition();
}
return typeName;
}
| DB2AggregateSupport |
java | apache__camel | tests/camel-itest/src/test/java/org/apache/camel/itest/jms/JmsHttpPostIssueWithMockTest.java | {
"start": 1614,
"end": 3788
} | class ____ extends CamelTestSupport {
@RegisterExtension
public static JmsServiceExtension jmsServiceExtension = JmsServiceExtension.createExtension();
private int port;
@Test
void testJmsInOnlyHttpPostIssue() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("jms:queue:in", "Hello World");
MockEndpoint.assertIsSatisfied(context);
}
@Test
void testJmsInOutHttpPostIssue() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
String out = template.requestBody("jms:queue:in", "Hello World", String.class);
assertEquals("OK", out);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
port = AvailablePortFinder.getNextAvailable();
return new RouteBuilder() {
public void configure() {
from("jms:queue:in")
.setBody().simple("name=${body}")
.setHeader(CONTENT_TYPE).constant("application/x-www-form-urlencoded")
.setHeader(HTTP_METHOD).constant("POST")
.to("http://localhost:" + port + "/myservice")
.to("mock:result");
from("jetty:http://0.0.0.0:" + port + "/myservice")
.process(exchange -> {
String body = exchange.getIn().getBody(String.class);
assertEquals("name=Hello World", body);
exchange.getMessage().setBody("OK");
exchange.getMessage().setHeader(CONTENT_TYPE, "text/plain");
exchange.getMessage().setHeader(HTTP_RESPONSE_CODE, 200);
});
}
};
}
@Override
protected void bindToRegistry(Registry registry) throws Exception {
// add ActiveMQ with embedded broker
JmsComponent amq = jmsServiceExtension.getComponent();
amq.setCamelContext(context);
registry.bind("jms", amq);
}
}
| JmsHttpPostIssueWithMockTest |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/pagerduty/PagerDutyActionFactory.java | {
"start": 662,
"end": 1498
} | class ____ extends ActionFactory {
private final TextTemplateEngine templateEngine;
private final PagerDutyService pagerDutyService;
public PagerDutyActionFactory(TextTemplateEngine templateEngine, PagerDutyService pagerDutyService) {
super(LogManager.getLogger(ExecutablePagerDutyAction.class));
this.templateEngine = templateEngine;
this.pagerDutyService = pagerDutyService;
}
@Override
public ExecutablePagerDutyAction parseExecutable(String watchId, String actionId, XContentParser parser) throws IOException {
PagerDutyAction action = PagerDutyAction.parse(watchId, actionId, parser);
pagerDutyService.getAccount(action.event.account);
return new ExecutablePagerDutyAction(action, actionLogger, pagerDutyService, templateEngine);
}
}
| PagerDutyActionFactory |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/utils/ArrayUtils.java | {
"start": 733,
"end": 1672
} | class ____ {
private ArrayUtils() {
}
/**
* <p>Checks if an array of Objects is empty or {@code null}.</p>
*
* @param array the array to test
* @return {@code true} if the array is empty or {@code null}
*/
public static boolean isEmpty(final Object[] array) {
return array == null || array.length == 0;
}
/**
* <p>Checks if the object is in the given array.</p>
*
* <p>The method returns {@code false} if a {@code null} array is passed in.</p>
*
* @param array the array to search through
* @param objectToFind the object to find
* @return {@code true} if the array contains the object
*/
public static boolean contains(final Object[] array, final Object objectToFind) {
if (array == null) {
return false;
}
return Arrays.asList(array).contains(objectToFind);
}
}
| ArrayUtils |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/short_/ShortAssert_usingComparator_Test.java | {
"start": 1033,
"end": 1600
} | class ____ extends ShortAssertBaseTest {
private Comparator<Short> comparator = alwaysEqual();
@Override
protected ShortAssert invoke_api_method() {
// in that, we don't care of the comparator, the point to check is that we switch correctly of comparator
return assertions.usingComparator(comparator);
}
@Override
protected void verify_internal_effects() {
assertThat(getObjects(assertions).getComparator()).isSameAs(comparator);
assertThat(getShorts(assertions).getComparator()).isSameAs(comparator);
}
}
| ShortAssert_usingComparator_Test |
java | apache__flink | flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/util/GenericRowRecordSortComparator.java | {
"start": 1243,
"end": 2253
/**
 * Comparator for {@code GenericRowData} records: orders first by
 * {@link RowKind} (via its byte encoding) and then by a single sort-key
 * field extracted from a fixed position. The sort key must implement
 * {@link Comparable}; otherwise comparison throws.
 */
class ____ implements Comparator<GenericRowData>, Serializable {
private static final long serialVersionUID = -4988371592272863772L;
// Extracts the sort-key field (possibly null) from a row.
private final RowData.FieldGetter sortKeyGetter;
// sortKeyIdx: position of the sort-key field; sortKeyType: its logical type.
public GenericRowRecordSortComparator(int sortKeyIdx, LogicalType sortKeyType) {
this.sortKeyGetter = RowData.createFieldGetter(sortKeyType, sortKeyIdx);
}
@Override
public int compare(GenericRowData row1, GenericRowData row2) {
RowKind kind1 = row1.getRowKind();
RowKind kind2 = row2.getRowKind();
if (kind1 != kind2) {
// Different change-log kinds: order by their byte values.
return kind1.toByteValue() - kind2.toByteValue();
} else {
Object key1 = sortKeyGetter.getFieldOrNull(row1);
Object key2 = sortKeyGetter.getFieldOrNull(row2);
if (key1 instanceof Comparable) {
// NOTE(review): a null key1 falls into the exception branch below, and a
// null key2 would NPE inside compareTo -- presumably keys are non-null in
// practice; confirm with callers.
return ((Comparable) key1).compareTo(key2);
} else {
throw new UnsupportedOperationException();
}
}
}
}
| GenericRowRecordSortComparator |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/injection/guice/internal/Errors.java | {
"start": 16826,
"end": 20307
// Converts objects of a given type into a human-readable string for error
// messages; appliesTo() gates which converter handles a given object.
class ____<T> {
final Class<T> type;
Converter(Class<T> type) {
this.type = type;
}
boolean appliesTo(Object o) {
return type.isAssignableFrom(o.getClass());
}
String convert(Object o) {
return toString(type.cast(o));
}
abstract String toString(T t);
}
// Converters consulted by convert(Object) in order; the first whose type
// matches wins.
private static final Collection<Converter<?>> converters = Arrays.asList(new Converter<Class>(Class.class) {
@Override
public String toString(Class c) {
return c.getName();
}
}, new Converter<Member>(Member.class) {
@Override
public String toString(Member member) {
return MoreTypes.toString(member);
}
}, new Converter<Key>(Key.class) {
@Override
public String toString(Key key) {
if (key.getAnnotationType() != null) {
return key.getTypeLiteral()
+ " annotated with "
+ (key.getAnnotation() != null ? key.getAnnotation() : key.getAnnotationType());
} else {
return key.getTypeLiteral().toString();
}
}
});
// Returns a readable representation of o, or o itself when no converter applies.
public static Object convert(Object o) {
for (Converter<?> converter : converters) {
if (converter.appliesTo(o)) {
return converter.convert(o);
}
}
return o;
}
// Appends a one-line description of an error source (dependency, injection
// point, class, member, type literal, or key) to the formatter.
public static void formatSource(Formatter formatter, Object source) {
if (source instanceof Dependency<?> dependency) {
InjectionPoint injectionPoint = dependency.getInjectionPoint();
if (injectionPoint != null) {
formatInjectionPoint(formatter, dependency, injectionPoint);
} else {
formatSource(formatter, dependency.getKey());
}
} else if (source instanceof InjectionPoint) {
formatInjectionPoint(formatter, null, (InjectionPoint) source);
} else if (source instanceof Class) {
formatter.format(" at %s%n", StackTraceElements.forType((Class<?>) source));
} else if (source instanceof Member) {
formatter.format(" at %s%n", StackTraceElements.forMember((Member) source));
} else if (source instanceof TypeLiteral) {
formatter.format(" while locating %s%n", source);
} else if (source instanceof Key<?> key) {
formatter.format(" while locating %s%n", convert(key));
} else {
formatter.format(" at %s%n", source);
}
}
// Describes an injection point, distinguishing field injection (always uses
// the point's single dependency) from parameter injection.
public static void formatInjectionPoint(Formatter formatter, Dependency<?> dependency, InjectionPoint injectionPoint) {
Member member = injectionPoint.getMember();
Class<? extends Member> memberType = MoreTypes.memberType(member);
if (memberType == Field.class) {
dependency = injectionPoint.getDependencies().get(0);
formatter.format(" while locating %s%n", convert(dependency.getKey()));
formatter.format(" for field at %s%n", StackTraceElements.forMember(member));
} else if (dependency != null) {
formatter.format(" while locating %s%n", convert(dependency.getKey()));
formatter.format(" for parameter %s at %s%n", dependency.getParameterIndex(), StackTraceElements.forMember(member));
} else {
formatSource(formatter, injectionPoint.getMember());
}
}
}
}
| Converter |
java | grpc__grpc-java | core/src/main/java/io/grpc/internal/SpiffeUtil.java | {
"start": 12079,
"end": 12973
/**
 * Immutable holder for parsed SPIFFE bundle content: per trust domain, a
 * sequence number and a list of X.509 certificates. Both maps are
 * defensively copied into immutable collections on construction.
 */
class ____ {
// Trust domain -> bundle sequence number.
private final ImmutableMap<String, Long> sequenceNumbers;
// Trust domain -> certificates for that domain.
private final ImmutableMap<String, ImmutableList<X509Certificate>> bundleMap;
private SpiffeBundle(Map<String, Long> sequenceNumbers,
Map<String, List<X509Certificate>> trustDomainMap) {
this.sequenceNumbers = ImmutableMap.copyOf(sequenceNumbers);
// Copy each certificate list so callers cannot mutate the bundle afterwards.
ImmutableMap.Builder<String, ImmutableList<X509Certificate>> builder = ImmutableMap.builder();
for (Map.Entry<String, List<X509Certificate>> entry : trustDomainMap.entrySet()) {
builder.put(entry.getKey(), ImmutableList.copyOf(entry.getValue()));
}
this.bundleMap = builder.build();
}
public ImmutableMap<String, Long> getSequenceNumbers() {
return sequenceNumbers;
}
public ImmutableMap<String, ImmutableList<X509Certificate>> getBundleMap() {
return bundleMap;
}
}
}
| SpiffeBundle |
java | apache__avro | lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java | {
"start": 1026,
"end": 8942
} | class ____ extends ByteArrayOutputStream {
static final int BLOCK_SIZE = 64 * 1024;
private int bitCount; // position in booleans
public OutputBuffer() {
this(BLOCK_SIZE + (BLOCK_SIZE >> 2));
}
public OutputBuffer(int size) {
super(size);
}
public boolean isFull() {
return size() >= BLOCK_SIZE;
}
public ByteBuffer asByteBuffer() {
return ByteBuffer.wrap(buf, 0, count);
}
public void writeValue(Object value, ValueType type) throws IOException {
switch (type) {
case NULL:
break;
case BOOLEAN:
writeBoolean((Boolean) value);
break;
case INT:
writeInt((Integer) value);
break;
case LONG:
writeLong((Long) value);
break;
case FIXED32:
writeFixed32((Integer) value);
break;
case FIXED64:
writeFixed64((Long) value);
break;
case FLOAT:
writeFloat((Float) value);
break;
case DOUBLE:
writeDouble((Double) value);
break;
case STRING:
writeString((String) value);
break;
case BYTES:
if (value instanceof ByteBuffer)
writeBytes((ByteBuffer) value);
else
writeBytes((byte[]) value);
break;
default:
throw new TrevniRuntimeException("Unknown value type: " + type);
}
}
public void writeBoolean(boolean value) {
if (bitCount == 0) { // first bool in byte
ensure(1);
count++;
}
if (value)
buf[count - 1] |= (byte) (1 << bitCount);
bitCount++;
if (bitCount == 8)
bitCount = 0;
}
public void writeLength(int length) throws IOException {
bitCount = 0;
writeInt(length);
}
public void writeString(String string) throws IOException {
byte[] bytes = string.getBytes(StandardCharsets.UTF_8);
writeInt(bytes.length);
write(bytes, 0, bytes.length);
}
public void writeBytes(ByteBuffer bytes) {
int pos = bytes.position();
int start = bytes.arrayOffset() + pos;
int len = bytes.limit() - pos;
writeBytes(bytes.array(), start, len);
}
public void writeBytes(byte[] bytes) {
writeBytes(bytes, 0, bytes.length);
}
public void writeBytes(byte[] bytes, int start, int len) {
writeInt(len);
write(bytes, start, len);
}
public void writeFloat(float f) throws IOException {
writeFixed32(Float.floatToRawIntBits(f));
}
public void writeDouble(double d) throws IOException {
writeFixed64(Double.doubleToRawLongBits(d));
}
public void writeFixed32(int i) throws IOException {
ensure(4);
buf[count] = (byte) ((i) & 0xFF);
buf[count + 1] = (byte) ((i >>> 8) & 0xFF);
buf[count + 2] = (byte) ((i >>> 16) & 0xFF);
buf[count + 3] = (byte) ((i >>> 24) & 0xFF);
count += 4;
}
public void writeFixed64(long l) throws IOException {
ensure(8);
int first = (int) (l & 0xFFFFFFFF);
int second = (int) ((l >>> 32) & 0xFFFFFFFF);
buf[count] = (byte) ((first) & 0xFF);
buf[count + 4] = (byte) ((second) & 0xFF);
buf[count + 5] = (byte) ((second >>> 8) & 0xFF);
buf[count + 1] = (byte) ((first >>> 8) & 0xFF);
buf[count + 2] = (byte) ((first >>> 16) & 0xFF);
buf[count + 6] = (byte) ((second >>> 16) & 0xFF);
buf[count + 7] = (byte) ((second >>> 24) & 0xFF);
buf[count + 3] = (byte) ((first >>> 24) & 0xFF);
count += 8;
}
public void writeInt(int n) {
ensure(5);
n = (n << 1) ^ (n >> 31); // move sign to low-order bit
if ((n & ~0x7F) != 0) {
buf[count++] = (byte) ((n | 0x80) & 0xFF);
n >>>= 7;
if (n > 0x7F) {
buf[count++] = (byte) ((n | 0x80) & 0xFF);
n >>>= 7;
if (n > 0x7F) {
buf[count++] = (byte) ((n | 0x80) & 0xFF);
n >>>= 7;
if (n > 0x7F) {
buf[count++] = (byte) ((n | 0x80) & 0xFF);
n >>>= 7;
}
}
}
}
buf[count++] = (byte) n;
}
public void writeLong(long n) throws IOException {
ensure(10);
n = (n << 1) ^ (n >> 63); // move sign to low-order bit
if ((n & ~0x7FL) != 0) {
buf[count++] = (byte) ((n | 0x80) & 0xFF);
n >>>= 7;
if (n > 0x7F) {
buf[count++] = (byte) ((n | 0x80) & 0xFF);
n >>>= 7;
if (n > 0x7F) {
buf[count++] = (byte) ((n | 0x80) & 0xFF);
n >>>= 7;
if (n > 0x7F) {
buf[count++] = (byte) ((n | 0x80) & 0xFF);
n >>>= 7;
if (n > 0x7F) {
buf[count++] = (byte) ((n | 0x80) & 0xFF);
n >>>= 7;
if (n > 0x7F) {
buf[count++] = (byte) ((n | 0x80) & 0xFF);
n >>>= 7;
if (n > 0x7F) {
buf[count++] = (byte) ((n | 0x80) & 0xFF);
n >>>= 7;
if (n > 0x7F) {
buf[count++] = (byte) ((n | 0x80) & 0xFF);
n >>>= 7;
if (n > 0x7F) {
buf[count++] = (byte) ((n | 0x80) & 0xFF);
n >>>= 7;
}
}
}
}
}
}
}
}
}
buf[count++] = (byte) n;
}
private void ensure(int n) {
if (count + n > buf.length)
buf = Arrays.copyOf(buf, Math.max(buf.length << 1, count + n));
}
public static int size(Object value, ValueType type) {
switch (type) {
case NULL:
return 0;
case INT:
return size((Integer) value);
case LONG:
return size((Long) value);
case FIXED32:
case FLOAT:
return 4;
case FIXED64:
case DOUBLE:
return 8;
case STRING:
return size((String) value);
case BYTES:
if (value instanceof ByteBuffer)
return size((ByteBuffer) value);
return size((byte[]) value);
default:
throw new TrevniRuntimeException("Unknown value type: " + type);
}
}
public static int size(int n) {
n = (n << 1) ^ (n >> 31); // move sign to low-order bit
if (n <= (1 << (7 * 1)) - 1)
return 1;
if (n <= (1 << (7 * 2)) - 1)
return 2;
if (n <= (1 << (7 * 3)) - 1)
return 3;
if (n <= (1 << (7 * 4)) - 1)
return 4;
return 5;
}
public static int size(long n) {
n = (n << 1) ^ (n >> 63); // move sign to low-order bit
if (n <= (1 << (7 * 1)) - 1)
return 1;
if (n <= (1 << (7 * 2)) - 1)
return 2;
if (n <= (1 << (7 * 3)) - 1)
return 3;
if (n <= (1 << (7 * 4)) - 1)
return 4;
if (n <= (1 << (7 * 5)) - 1)
return 5;
if (n <= (1 << (7 * 6)) - 1)
return 6;
if (n <= (1 << (7 * 7)) - 1)
return 7;
if (n <= (1 << (7 * 8)) - 1)
return 8;
if (n <= (1 << (7 * 9)) - 1)
return 9;
return 10;
}
public static int size(ByteBuffer bytes) {
int length = bytes.remaining();
return size(length) + length;
}
public static int size(byte[] bytes) {
int length = bytes.length;
return size(length) + length;
}
public static int size(String string) {
int length = utf8Length(string);
return size(length) + length;
}
private static int utf8Length(String string) {
int stringLength = string.length();
int utf8Length = 0;
for (int i = 0; i < stringLength; i++) {
char c = string.charAt(i);
int p = c; // code point
if (Character.isHighSurrogate(c) // surrogate pair
&& i != stringLength - 1 && Character.isLowSurrogate(string.charAt(i + 1))) {
p = string.codePointAt(i);
i++;
}
if (p <= 0x007F) {
utf8Length += 1;
} else if (p <= 0x07FF) {
utf8Length += 2;
} else if (p <= 0x0FFFF) {
utf8Length += 3;
} else if (p <= 0x01FFFFF) {
utf8Length += 4;
} else if (p <= 0x03FFFFFF) {
utf8Length += 5;
} else {
utf8Length += 6;
}
}
return utf8Length;
}
}
| OutputBuffer |
java | quarkusio__quarkus | integration-tests/devtools-registry-client/src/test/java/io/quarkus/registry/RegistryConfigTest.java | {
"start": 222,
"end": 489
/**
 * REST-assured smoke test: GET /config must answer 200 and report
 * "registry.quarkus.io" as the configured Maven repository id.
 */
class ____ {
@Test
void should_return_config() {
given()
.when().get("/config")
.then()
.statusCode(200)
.body("maven.repository.id", is("registry.quarkus.io"));
}
}
| RegistryConfigTest |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/configuration/ImportTests.java | {
"start": 5010,
"end": 5699
} | class ____ in the case of inclusion via @Import
* or in the case of automatic registration via nesting
*/
@Test
void reproSpr9023() {
// SPR-9023 regression check: the bean names for the B and A configuration
// classes are expected to be the explicit "config-b" / "config-a" names.
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(B.class);
assertThat(ctx.getBeanNamesForType(B.class)[0]).isEqualTo("config-b");
assertThat(ctx.getBeanNamesForType(A.class)[0]).isEqualTo("config-a");
ctx.close();
}
@Test
void processImports() {
// Expect one bean definition per configuration class plus one per @Bean
// method (2 + 2); see assertBeanDefinitionCount for how this is verified.
int configClasses = 2;
int beansInClasses = 2;
assertBeanDefinitionCount((configClasses + beansInClasses), ConfigurationWithImportAnnotation.class);
}
/**
* An imported config must override a scanned one, thus bean definitions
* from the imported | even |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/StreamCachingPerRouteTest.java | {
"start": 1202,
"end": 2900
} | class ____ extends ContextTestSupport {
@Test
public void testStreamCachingPerRoute() throws Exception {
MockEndpoint a = getMockEndpoint("mock:a");
a.expectedMessageCount(1);
MockEndpoint b = getMockEndpoint("mock:b");
b.expectedMessageCount(1);
MockEndpoint c = getMockEndpoint("mock:c");
c.expectedMessageCount(1);
new StreamSource(new StringReader("A"));
template.sendBody("direct:a", new StreamSource(new StringReader("A")));
Object sendB = new StreamSource(new StringReader("B"));
template.sendBody("direct:b", sendB);
template.sendBody("direct:c", new StreamSource(new StringReader("C")));
assertMockEndpointsSatisfied();
Object bodyA = a.getReceivedExchanges().get(0).getIn().getBody();
assertIsInstanceOf(StreamCache.class, bodyA);
Object bodyC = c.getReceivedExchanges().get(0).getIn().getBody();
assertIsInstanceOf(StreamCache.class, bodyC);
// should not be stream cache but the pure body
Object bodyB = b.getReceivedExchanges().get(0).getIn().getBody();
assertIsInstanceOf(StreamSource.class, bodyB);
assertSame(sendB, bodyB, "Should be same body as we send");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
context.setStreamCaching(true);
from("direct:a").to("mock:a");
from("direct:b").noStreamCaching().to("mock:b");
from("direct:c").streamCaching().to("mock:c");
}
};
}
}
| StreamCachingPerRouteTest |
java | apache__kafka | group-coordinator/src/main/java/org/apache/kafka/coordinator/group/streams/topics/RepartitionTopics.java | {
"start": 2321,
"end": 9006
} | class ____ the number of partition for all source topics to be defined.
*/
/**
 * @param logContext source of this class' logger
 * @param subtopologies subtopologies whose repartition topics are resolved
 * @param topicPartitionCountProvider supplies the partition count for a
 *        topic name, when known
 */
public RepartitionTopics(final LogContext logContext,
final Collection<Subtopology> subtopologies,
final Function<String, OptionalInt> topicPartitionCountProvider) {
this.log = logContext.logger(getClass());
this.subtopologies = subtopologies;
this.topicPartitionCountProvider = topicPartitionCountProvider;
}
/**
* Returns the set of the number of partitions for each repartition topic.
*
* @return the map of repartition topics for the requested topology to their required number of partitions.
*
* @throws IllegalStateException if the number of partitions for a source topic is not defined by topicPartitionCountProvider.
* @throws StreamsInvalidTopologyException if the number of partitions for all repartition topics cannot be determined, e.g.
* because of loops, or if a repartition source topic is not a sink topic of any subtopology.
*/
public Map<String, Integer> setup() {
// Collect, across all subtopologies, external source topics whose
// partition count is unknown -- those make the topology unresolvable.
final Set<String> missingSourceTopicsForTopology = new HashSet<>();
for (final Subtopology subtopology : subtopologies) {
final Set<String> missingSourceTopicsForSubtopology = computeMissingExternalSourceTopics(subtopology);
missingSourceTopicsForTopology.addAll(missingSourceTopicsForSubtopology);
}
if (!missingSourceTopicsForTopology.isEmpty()) {
throw new IllegalStateException(String.format("Missing source topics: %s",
String.join(", ", missingSourceTopicsForTopology)));
}
final Map<String, Integer> repartitionTopicPartitionCount = computeRepartitionTopicPartitionCount();
// Every repartition source topic must have ended up with a partition
// count; one without a count is never written by any subtopology.
for (final Subtopology subtopology : subtopologies) {
if (subtopology.repartitionSourceTopics().stream().anyMatch(repartitionTopic -> !repartitionTopicPartitionCount.containsKey(repartitionTopic.name()))) {
throw new StreamsInvalidTopologyException("Failed to compute number of partitions for all repartition topics, because "
+ "a repartition source topic is never used as a sink topic.");
}
}
return repartitionTopicPartitionCount;
}
// Starts from all source topics of the subtopology, removes the internal
// (repartition) ones, then removes those with a known partition count;
// whatever remains is genuinely missing.
private Set<String> computeMissingExternalSourceTopics(final Subtopology subtopology) {
final Set<String> missingExternalSourceTopics = new HashSet<>(subtopology.sourceTopics());
for (final TopicInfo topicInfo : subtopology.repartitionSourceTopics()) {
missingExternalSourceTopics.remove(topicInfo.name());
}
missingExternalSourceTopics.removeIf(x -> topicPartitionCountProvider.apply(x).isPresent());
return missingExternalSourceTopics;
}
/**
* Computes the number of partitions and returns it for each repartition topic.
*/
private Map<String, Integer> computeRepartitionTopicPartitionCount() {
boolean partitionCountNeeded;
Map<String, Integer> repartitionTopicPartitionCounts = new HashMap<>();
// Seed with repartition topics whose partition count is set explicitly.
for (final Subtopology subtopology : subtopologies) {
for (final TopicInfo repartitionSourceTopic : subtopology.repartitionSourceTopics()) {
if (repartitionSourceTopic.partitions() != 0) {
repartitionTopicPartitionCounts.put(repartitionSourceTopic.name(), repartitionSourceTopic.partitions());
}
}
}
// Fixed-point iteration: keep resolving sink-topic counts from already
// known source counts until nothing is missing or no progress is made.
do {
partitionCountNeeded = false;
// avoid infinitely looping without making any progress on unknown repartitions
boolean progressMadeThisIteration = false;
for (final Subtopology subtopology : subtopologies) {
for (final String repartitionSinkTopic : subtopology.repartitionSinkTopics()) {
if (!repartitionTopicPartitionCounts.containsKey(repartitionSinkTopic)) {
final Integer numPartitions = computePartitionCount(
repartitionTopicPartitionCounts,
subtopology
);
if (numPartitions == null) {
partitionCountNeeded = true;
log.trace("Unable to determine number of partitions for {}, another iteration is needed",
repartitionSinkTopic);
} else {
log.trace("Determined number of partitions for {} to be {}",
repartitionSinkTopic,
numPartitions);
repartitionTopicPartitionCounts.put(repartitionSinkTopic, numPartitions);
progressMadeThisIteration = true;
}
}
}
}
if (!progressMadeThisIteration && partitionCountNeeded) {
throw new StreamsInvalidTopologyException("Failed to compute number of partitions for all " +
"repartition topics. There may be loops in the topology that cannot be resolved.");
}
} while (partitionCountNeeded);
return repartitionTopicPartitionCounts;
}
// Returns the maximum known partition count over the subtopology's inputs
// (repartition sources and external sources), or null if none is known yet.
private Integer computePartitionCount(final Map<String, Integer> repartitionTopicPartitionCounts,
final Subtopology subtopology) {
Integer partitionCount = null;
// try set the number of partitions for this repartition topic if it is not set yet
// use the maximum of all its source topic partitions as the number of partitions
// It is possible that there is another internal topic, i.e,
// map().join().join(map())
for (final TopicInfo repartitionSourceTopic : subtopology.repartitionSourceTopics()) {
Integer numPartitionsCandidate = repartitionTopicPartitionCounts.get(repartitionSourceTopic.name());
if (numPartitionsCandidate != null && (partitionCount == null || numPartitionsCandidate > partitionCount)) {
partitionCount = numPartitionsCandidate;
}
}
for (final String externalSourceTopic : subtopology.sourceTopics()) {
final OptionalInt actualPartitionCount = topicPartitionCountProvider.apply(externalSourceTopic);
if (actualPartitionCount.isPresent() && (partitionCount == null || actualPartitionCount.getAsInt() > partitionCount)) {
partitionCount = actualPartitionCount.getAsInt();
}
}
return partitionCount;
}
}
| requires |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/output/StreamMessageListOutput.java | {
"start": 394,
"end": 2116
/**
 * Lettuce command output that collects Redis Stream entries into a
 * {@code List<StreamMessage>}. Bulk strings for each entry arrive as the
 * entry id followed by alternating field keys and values (see {@link #set});
 * a message is emitted whenever the enclosing array element completes.
 */
class ____<K, V> extends CommandOutput<K, V, List<StreamMessage<K, V>>>
implements StreamingOutput<StreamMessage<K, V>> {
// Stream key attributed to every emitted message.
private final K stream;
// Guards one-time creation of the result list in multi().
private boolean initialized;
private Subscriber<StreamMessage<K, V>> subscriber;
// Parser state for the entry currently being decoded; reset in complete().
private K key;
private String id;
private Map<K, V> body;
public StreamMessageListOutput(RedisCodec<K, V> codec, K stream) {
super(codec, Collections.emptyList());
setSubscriber(ListSubscriber.instance());
this.stream = stream;
}
@Override
public void set(ByteBuffer bytes) {
// The first bulk of an entry is its id...
if (id == null) {
id = decodeString(bytes);
return;
}
// ...subsequent bulks alternate between field key and field value.
if (key == null) {
key = codec.decodeKey(bytes);
return;
}
if (body == null) {
body = new LinkedHashMap<>();
}
body.put(key, bytes == null ? null : codec.decodeValue(bytes));
key = null;
}
@Override
public void multi(int count) {
// Only the first (outermost) array sizes the result list.
if (!initialized) {
output = OutputFactory.newList(count);
initialized = true;
}
}
@Override
public void complete(int depth) {
if (depth == 1) {
// Entry finished: publish it and reset the per-entry parser state.
subscriber.onNext(output, new StreamMessage<>(stream, id, body));
key = null;
id = null;
body = null;
}
}
@Override
public void setSubscriber(Subscriber<StreamMessage<K, V>> subscriber) {
LettuceAssert.notNull(subscriber, "Subscriber must not be null");
this.subscriber = subscriber;
}
@Override
public Subscriber<StreamMessage<K, V>> getSubscriber() {
return subscriber;
}
}
| StreamMessageListOutput |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/resource/jdbc/internal/ResourceRegistryLogger.java | {
"start": 1008,
"end": 3648
/**
 * JBoss-Logging message interface for JDBC resource-registry events
 * (registration, release, and close of statements and result sets).
 * Message ids are in the 100025xx range.
 */
interface ____ extends BasicLogger {
String LOGGER_NAME = SubSystemLogging.BASE + ".resource.registry";
/**
* Static access to the logging instance
*/
ResourceRegistryLogger RESOURCE_REGISTRY_LOGGER = Logger.getMessageLogger(
MethodHandles.lookup(),
ResourceRegistryLogger.class,
LOGGER_NAME
);
@LogMessage(level = TRACE)
@Message(value = "Releasing registered JDBC resources", id = 10002501)
void releasingResources();
@LogMessage(level = TRACE)
@Message(value = "Registering statement [%s]", id = 10002502)
void registeringStatement(Statement statement);
@LogMessage(level = TRACE)
@Message(value = "Releasing statement [%s]", id = 10002503)
void releasingStatement(Statement statement);
@LogMessage(level = TRACE)
@Message(value = "Releasing result set [%s]", id = 10002504)
void releasingResultSet(ResultSet resultSet);
@LogMessage(level = TRACE)
@Message(value = "Closing result set [%s]", id = 10002505)
void closingResultSet(ResultSet resultSet);
@LogMessage(level = TRACE)
@Message(value = "Closing prepared statement [%s]", id = 10002506)
void closingPreparedStatement(Statement statement);
@LogMessage(level = TRACE)
@Message(value = "Registering result set [%s]", id = 10002507)
void registeringResultSet(ResultSet resultSet);
@LogMessage(level = DEBUG)
@Message(value = "Unable to release JDBC statement [%s]", id = 10002508)
void unableToReleaseStatement(String message);
@LogMessage(level = DEBUG)
@Message(value = "Unable to release JDBC result set [%s]", id = 10002509)
void unableToReleaseResultSet(String message);
@LogMessage(level = DEBUG)
@Message(value = "Exception clearing maxRows or queryTimeout for JDBC Statement [%s]", id = 10002510)
void exceptionClearingMaxRowsOrQueryTimeout(String message);
@LogMessage(level = DEBUG)
@Message(value = "Unable to free '%s' reference [%s]", id = 10002511)
void unableToFreeLob(String lobType, String message);
// Keep this at DEBUG level, rather than WARN. Many connection pool implementations return
// a proxy/wrapper for the JDBC Statement, causing excessive logging here. See HHH-8210.
@LogMessage(level = DEBUG)
@Message(value = "Statement associated with ResultSet was not registered", id = 10002514)
void unregisteredStatement();
@LogMessage(level = WARN)
@Message(value = "ResultSet had no statement associated with it, but was not yet registered", id = 10002515)
void unregisteredResultSetWithoutStatement();
@LogMessage(level = DEBUG)
@Message(value = "Request to release '%s', but none have ever been registered", id = 10002516)
void noRegisteredLobs(String lobType);
}
| ResourceRegistryLogger |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/JdkObsoleteTest.java | {
"start": 2191,
"end": 2598
} | class ____ {
void f(Matcher m) {
StringBuffer sb = new StringBuffer();
m.appendReplacement(sb, null);
}
}
""")
.doTest();
}
@Test
public void stringBuffer_appendTail() {
testHelper
.addSourceLines(
"Test.java",
"""
import java.util.regex.Matcher;
| Test |
java | alibaba__nacos | ai/src/main/java/com/alibaba/nacos/ai/index/McpCacheIndex.java | {
"start": 834,
"end": 2515
} | interface ____ {
/**
* Get MCP ID by namespace ID and MCP name.
*
* @param namespaceId namespace ID
* @param mcpName MCP name
* @return MCP ID, returns null if not found
*/
String getMcpId(String namespaceId, String mcpName);
/**
* Get MCP server information by namespace ID and MCP name.
*
* @param namespaceId namespace ID
* @param mcpName MCP name
* @return MCP server information, returns null if not found
*/
McpServerIndexData getMcpServerByName(String namespaceId, String mcpName);
/**
* Get MCP server information by MCP ID.
*
* @param mcpId MCP ID
* @return MCP server information, returns null if not found
*/
McpServerIndexData getMcpServerById(String mcpId);
/**
* Update index.
*
* @param namespaceId namespace ID
* @param mcpName MCP name
* @param mcpId MCP ID
*/
void updateIndex(String namespaceId, String mcpName, String mcpId);
/**
* Remove index by name.
*
* @param namespaceId namespace ID
* @param mcpName MCP name
*/
void removeIndex(String namespaceId, String mcpName);
/**
* Remove index by ID.
*
* @param mcpId MCP ID
*/
void removeIndex(String mcpId);
/**
* Clear cache.
*/
void clear();
/**
* Get cache size.
*
* @return number of cache entries
*/
int getSize();
/**
* Get cache statistics.
*
* @return cache statistics
*/
CacheStats getStats();
/**
* Cache statistics.
*/
| McpCacheIndex |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/proxy/jdbc/StatementProxyImpl.java | {
"start": 927,
"end": 17413
/**
 * Druid proxy around a JDBC {@link Statement}: every call is routed through
 * a {@link FilterChainImpl} (see createChain/recycleFilterChain) so that
 * registered filters can observe or decorate it before it reaches the
 * underlying driver statement. The proxy also records bookkeeping about the
 * most recent execution (SQL text, execute type, update count).
 */
class ____ extends WrapperProxyImpl implements StatementProxy {
// Owning connection proxy.
private final ConnectionProxy connection;
// The underlying driver statement being decorated.
protected Statement statement;
// Bookkeeping for the most recent execution. The timing fields are reset
// to -1 before each execute; presumably filters fill them in -- confirm.
protected String lastExecuteSql;
protected long lastExecuteStartNano;
protected long lastExecuteTimeNano;
protected JdbcSqlStat sqlStat;
// True when the last execute produced a ResultSet as its first result.
protected boolean firstResultSet;
// SQL strings accumulated via addBatch(); cleared by clearBatch().
protected ArrayList<String> batchSqlList;
protected StatementExecuteType lastExecuteType;
// Update count of the last execution, or null when unknown/not applicable.
protected Integer updateCount;
// Single cached chain instance, reused across calls to avoid reallocation.
private FilterChainImpl filterChain;
public StatementProxyImpl(ConnectionProxy connection, Statement statement, long id) {
super(statement, id);
this.connection = connection;
this.statement = statement;
}
public ConnectionProxy getConnectionProxy() {
return connection;
}
public Statement getRawObject() {
return this.statement;
}
// Hands out the cached chain when available, otherwise builds a fresh one.
public final FilterChainImpl createChain() {
FilterChainImpl chain = this.filterChain;
if (chain == null) {
chain = new FilterChainImpl(this.getConnectionProxy().getDirectDataSource());
} else {
this.filterChain = null;
}
return chain;
}
// Resets and caches the chain for reuse by the next call.
public final void recycleFilterChain(FilterChainImpl chain) {
chain.reset();
this.filterChain = chain;
}
@Override
public void addBatch(String sql) throws SQLException {
if (batchSqlList == null) {
batchSqlList = new ArrayList<String>();
}
FilterChainImpl chain = createChain();
chain.statement_addBatch(this, sql);
recycleFilterChain(chain);
batchSqlList.add(sql);
}
@Override
public void cancel() throws SQLException {
// No-op once the underlying statement is gone.
if (this.statement == null) {
return;
}
FilterChainImpl chain = createChain();
chain.statement_cancel(this);
recycleFilterChain(chain);
}
@Override
public void clearBatch() throws SQLException {
if (this.statement == null) {
return;
}
if (batchSqlList == null) {
batchSqlList = new ArrayList<String>();
}
FilterChainImpl chain = createChain();
chain.statement_clearBatch(this);
recycleFilterChain(chain);
batchSqlList.clear();
}
@Override
public void clearWarnings() throws SQLException {
if (this.statement == null) {
return;
}
FilterChainImpl chain = createChain();
chain.statement_clearWarnings(this);
recycleFilterChain(chain);
}
@Override
public void close() throws SQLException {
if (this.statement == null) {
return;
}
FilterChainImpl chain = createChain();
chain.statement_close(this);
recycleFilterChain(chain);
}
// execute/executeQuery/executeUpdate variants below all follow the same
// pattern: record what is about to run (SQL, execute type, reset timing
// and result bookkeeping), then delegate through the filter chain.
@Override
public boolean execute(String sql) throws SQLException {
updateCount = null;
lastExecuteSql = sql;
lastExecuteType = StatementExecuteType.Execute;
lastExecuteStartNano = -1L;
lastExecuteTimeNano = -1L;
FilterChainImpl chain = createChain();
firstResultSet = chain.statement_execute(this, sql);
recycleFilterChain(chain);
return firstResultSet;
}
@Override
public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
updateCount = null;
lastExecuteSql = sql;
lastExecuteType = StatementExecuteType.Execute;
lastExecuteStartNano = -1L;
lastExecuteTimeNano = -1L;
FilterChainImpl chain = createChain();
firstResultSet = chain.statement_execute(this, sql, autoGeneratedKeys);
recycleFilterChain(chain);
return firstResultSet;
}
@Override
public boolean execute(String sql, int[] columnIndexes) throws SQLException {
updateCount = null;
lastExecuteSql = sql;
lastExecuteType = StatementExecuteType.Execute;
lastExecuteStartNano = -1L;
lastExecuteTimeNano = -1L;
FilterChainImpl chain = createChain();
firstResultSet = chain.statement_execute(this, sql, columnIndexes);
recycleFilterChain(chain);
return firstResultSet;
}
@Override
public boolean execute(String sql, String[] columnNames) throws SQLException {
updateCount = null;
lastExecuteSql = sql;
lastExecuteType = StatementExecuteType.Execute;
lastExecuteStartNano = -1L;
lastExecuteTimeNano = -1L;
FilterChainImpl chain = createChain();
firstResultSet = chain.statement_execute(this, sql, columnNames);
recycleFilterChain(chain);
return firstResultSet;
}
@Override
public int[] executeBatch() throws SQLException {
firstResultSet = false;
lastExecuteType = StatementExecuteType.ExecuteBatch;
lastExecuteStartNano = -1L;
lastExecuteTimeNano = -1L;
FilterChainImpl chain = createChain();
int[] updateCounts = chain.statement_executeBatch(this);
recycleFilterChain(chain);
// Cache the update count only for single-statement batches.
if (updateCounts != null && updateCounts.length == 1) {
updateCount = updateCounts[0];
}
return updateCounts;
}
@Override
public ResultSet executeQuery(String sql) throws SQLException {
firstResultSet = true;
updateCount = null;
lastExecuteSql = sql;
lastExecuteType = StatementExecuteType.ExecuteQuery;
lastExecuteStartNano = -1L;
lastExecuteTimeNano = -1L;
FilterChainImpl chain = createChain();
ResultSet resultSet = chain.statement_executeQuery(this, sql);
recycleFilterChain(chain);
return resultSet;
}
@Override
public int executeUpdate(String sql) throws SQLException {
firstResultSet = false;
lastExecuteSql = sql;
lastExecuteType = StatementExecuteType.ExecuteUpdate;
lastExecuteStartNano = -1L;
lastExecuteTimeNano = -1L;
FilterChainImpl chain = createChain();
updateCount = chain.statement_executeUpdate(this, sql);
recycleFilterChain(chain);
return updateCount;
}
@Override
public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
firstResultSet = false;
lastExecuteSql = sql;
lastExecuteType = StatementExecuteType.ExecuteUpdate;
lastExecuteStartNano = -1L;
lastExecuteTimeNano = -1L;
FilterChainImpl chain = createChain();
updateCount = chain.statement_executeUpdate(this, sql, autoGeneratedKeys);
recycleFilterChain(chain);
return updateCount;
}
@Override
public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
firstResultSet = false;
lastExecuteSql = sql;
lastExecuteType = StatementExecuteType.ExecuteUpdate;
lastExecuteStartNano = -1L;
lastExecuteTimeNano = -1L;
FilterChainImpl chain = createChain();
updateCount = chain.statement_executeUpdate(this, sql, columnIndexes);
recycleFilterChain(chain);
return updateCount;
}
@Override
public int executeUpdate(String sql, String[] columnNames) throws SQLException {
firstResultSet = false;
lastExecuteSql = sql;
lastExecuteType = StatementExecuteType.ExecuteUpdate;
lastExecuteStartNano = -1L;
lastExecuteTimeNano = -1L;
FilterChainImpl chain = createChain();
updateCount = chain.statement_executeUpdate(this, sql, columnNames);
recycleFilterChain(chain);
return updateCount;
}
// Plain getters below simply delegate through the filter chain.
@Override
public Connection getConnection() throws SQLException {
FilterChainImpl chain = createChain();
Connection conn = chain.statement_getConnection(this);
recycleFilterChain(chain);
return conn;
}
@Override
public int getFetchDirection() throws SQLException {
FilterChainImpl chain = createChain();
int value = chain.statement_getFetchDirection(this);
recycleFilterChain(chain);
return value;
}
@Override
public int getFetchSize() throws SQLException {
FilterChainImpl chain = createChain();
int value = chain.statement_getFetchSize(this);
recycleFilterChain(chain);
return value;
}
@Override
public ResultSet getGeneratedKeys() throws SQLException {
FilterChainImpl chain = createChain();
ResultSet value = chain.statement_getGeneratedKeys(this);
recycleFilterChain(chain);
return value;
}
@Override
public int getMaxFieldSize() throws SQLException {
FilterChainImpl chain = createChain();
int value = chain.statement_getMaxFieldSize(this);
recycleFilterChain(chain);
return value;
}
@Override
public int getMaxRows() throws SQLException {
FilterChainImpl chain = createChain();
int value = chain.statement_getMaxRows(this);
recycleFilterChain(chain);
return value;
}
@Override
public boolean getMoreResults() throws SQLException {
FilterChainImpl chain = createChain();
boolean value = chain.statement_getMoreResults(this);
// Advancing to the next result invalidates the cached update count.
updateCount = null;
recycleFilterChain(chain);
return value;
}
@Override
public boolean getMoreResults(int current) throws SQLException {
updateCount = null;
FilterChainImpl chain = createChain();
boolean value = chain.statement_getMoreResults(this, current);
recycleFilterChain(chain);
return value;
}
@Override
public int getQueryTimeout() throws SQLException {
FilterChainImpl chain = createChain();
int value = chain.statement_getQueryTimeout(this);
recycleFilterChain(chain);
return value;
}
@Override
public ResultSet getResultSet() throws SQLException {
FilterChainImpl chain = createChain();
ResultSet value = chain.statement_getResultSet(this);
recycleFilterChain(chain);
return value;
}
@Override
public int getResultSetConcurrency() throws SQLException {
FilterChainImpl chain = createChain();
int value = chain.statement_getResultSetConcurrency(this);
recycleFilterChain(chain);
return value;
}
@Override
public int getResultSetHoldability() throws SQLException {
FilterChainImpl chain = createChain();
int value = chain.statement_getResultSetHoldability(this);
recycleFilterChain(chain);
return value;
}
@Override
public int getResultSetType() throws SQLException {
FilterChainImpl chain = createChain();
int value = chain.statement_getResultSetType(this);
recycleFilterChain(chain);
return value;
}
// bug fixed for oracle
@Override
public int getUpdateCount() throws SQLException {
if (updateCount == null) {
FilterChainImpl chain = createChain();
updateCount = chain.statement_getUpdateCount(this);
recycleFilterChain(chain);
}
return updateCount;
}
@Override
public SQLWarning getWarnings() throws SQLException {
FilterChainImpl chain = createChain();
SQLWarning value = chain.statement_getWarnings(this);
recycleFilterChain(chain);
return value;
}
@Override
public boolean isClosed() throws SQLException {
FilterChainImpl chain = createChain();
boolean value = chain.statement_isClosed(this);
recycleFilterChain(chain);
return value;
}
@Override
public boolean isPoolable() throws SQLException {
FilterChainImpl chain = createChain();
boolean value = chain.statement_isPoolable(this);
recycleFilterChain(chain);
return value;
}
@Override
public void setCursorName(String name) throws SQLException {
FilterChainImpl chain = createChain();
chain.statement_setCursorName(this, name);
recycleFilterChain(chain);
}
@Override
public void setEscapeProcessing(boolean enable) throws SQLException {
FilterChainImpl chain = createChain();
chain.statement_setEscapeProcessing(this, enable);
recycleFilterChain(chain);
}
@Override
public void setFetchDirection(int direction) throws SQLException {
FilterChainImpl chain = createChain();
chain.statement_setFetchDirection(this, direction);
recycleFilterChain(chain);
}
@Override
public void setFetchSize(int rows) throws SQLException {
FilterChainImpl chain = createChain();
chain.statement_setFetchSize(this, rows);
recycleFilterChain(chain);
}
@Override
public void setMaxFieldSize(int max) throws SQLException {
FilterChainImpl chain = createChain();
chain.statement_setMaxFieldSize(this, max);
recycleFilterChain(chain);
}
@Override
public void setMaxRows(int max) throws SQLException {
FilterChainImpl chain = createChain();
chain.statement_setMaxRows(this, max);
recycleFilterChain(chain);
}
@Override
public void setPoolable(boolean poolable) throws SQLException {
FilterChainImpl chain = createChain();
chain.statement_setPoolable(this, poolable);
recycleFilterChain(chain);
}
@Override
public void setQueryTimeout(int seconds) throws SQLException {
FilterChainImpl chain = createChain();
chain.statement_setQueryTimeout(this, seconds);
recycleFilterChain(chain);
}
@Override
public List<String> getBatchSqlList() {
if (batchSqlList == null) {
batchSqlList = new ArrayList<String>();
}
return batchSqlList;
}
@Override
public String getBatchSql() {
List<String> sqlList = getBatchSqlList();
StringBuilder buf = new StringBuilder();
for (String item : sqlList) {
if (buf.length() > 0) {
buf.append("\n;\n");
}
buf.append(item);
}
return buf.toString();
}
public String getLastExecuteSql() {
return lastExecuteSql;
}
public void closeOnCompletion() throws SQLException {
statement.closeOnCompletion();
}
public boolean isCloseOnCompletion() throws SQLException {
return statement.isCloseOnCompletion();
}
@Override
public Map<Integer, JdbcParameter> getParameters() {
return Collections.emptyMap();
}
public JdbcSqlStat getSqlStat() {
return sqlStat;
}
public void setSqlStat(JdbcSqlStat sqlStat) {
this.sqlStat = sqlStat;
}
public long getLastExecuteTimeNano() {
return lastExecuteTimeNano;
}
public void setLastExecuteTimeNano(long lastExecuteTimeNano) {
this.lastExecuteTimeNano = lastExecuteTimeNano;
}
public void setLastExecuteTimeNano() {
if (this.lastExecuteTimeNano <= 0 && this.lastExecuteStartNano > 0) {
this.lastExecuteTimeNano = System.nanoTime() - this.lastExecuteStartNano;
}
}
public long getLastExecuteStartNano() {
return lastExecuteStartNano;
}
public void setLastExecuteStartNano(long lastExecuteStartNano) {
this.lastExecuteStartNano = lastExecuteStartNano;
this.lastExecuteTimeNano = -1L;
}
public void setLastExecuteStartNano() {
if (lastExecuteStartNano <= 0) {
setLastExecuteStartNano(System.nanoTime());
}
}
public StatementExecuteType getLastExecuteType() {
return lastExecuteType;
}
public boolean isFirstResultSet() {
return firstResultSet;
}
@SuppressWarnings("unchecked")
public <T> T unwrap(Class<T> iface) throws SQLException {
if (iface == StatementProxy.class) {
return (T) this;
}
return super.unwrap(iface);
}
public boolean isWrapperFor(Class<?> iface) throws SQLException {
if (iface == StatementProxy.class) {
return true;
}
return super.isWrapperFor(iface);
}
@Override
public int getParametersSize() {
return 0;
}
@Override
public JdbcParameter getParameter(int i) {
return null;
}
}
| StatementProxyImpl |
java | apache__rocketmq | proxy/src/test/java/org/apache/rocketmq/proxy/service/relay/ProxyChannelTest.java | {
"start": 2429,
"end": 8814
} | class ____ extends ProxyChannel {
protected MockProxyChannel(ProxyRelayService proxyRelayService, Channel parent,
String remoteAddress, String localAddress) {
super(proxyRelayService, parent, remoteAddress, localAddress);
}
@Override
public boolean isOpen() {
return false;
}
@Override
public boolean isActive() {
return false;
}
}
@Test
public void testWriteAndFlush() throws Exception {
when(this.proxyRelayService.processCheckTransactionState(any(), any(), any(), any()))
.thenReturn(new RelayData<>(mock(TransactionData.class), new CompletableFuture<>()));
ArgumentCaptor<ConsumeMessageDirectlyResultRequestHeader> consumeMessageDirectlyArgumentCaptor =
ArgumentCaptor.forClass(ConsumeMessageDirectlyResultRequestHeader.class);
when(this.proxyRelayService.processConsumeMessageDirectly(any(), any(), consumeMessageDirectlyArgumentCaptor.capture()))
.thenReturn(new CompletableFuture<>());
ArgumentCaptor<GetConsumerRunningInfoRequestHeader> getConsumerRunningInfoArgumentCaptor =
ArgumentCaptor.forClass(GetConsumerRunningInfoRequestHeader.class);
when(this.proxyRelayService.processGetConsumerRunningInfo(any(), any(), getConsumerRunningInfoArgumentCaptor.capture()))
.thenReturn(new CompletableFuture<>());
CheckTransactionStateRequestHeader checkTransactionStateRequestHeader = new CheckTransactionStateRequestHeader();
checkTransactionStateRequestHeader.setTransactionId(MessageClientIDSetter.createUniqID());
RemotingCommand checkTransactionRequest = RemotingCommand.createRequestCommand(RequestCode.CHECK_TRANSACTION_STATE, checkTransactionStateRequestHeader);
MessageExt transactionMessageExt = new MessageExt();
transactionMessageExt.setTopic("topic");
transactionMessageExt.setTags("tags");
transactionMessageExt.setBornHost(NetworkUtil.string2SocketAddress("127.0.0.2:8888"));
transactionMessageExt.setStoreHost(NetworkUtil.string2SocketAddress("127.0.0.1:10911"));
transactionMessageExt.setBody(UUID.randomUUID().toString().getBytes(StandardCharsets.UTF_8));
transactionMessageExt.setMsgId(MessageClientIDSetter.createUniqID());
checkTransactionRequest.setBody(MessageDecoder.encode(transactionMessageExt, false));
GetConsumerRunningInfoRequestHeader consumerRunningInfoRequestHeader = new GetConsumerRunningInfoRequestHeader();
consumerRunningInfoRequestHeader.setConsumerGroup("group");
consumerRunningInfoRequestHeader.setClientId("clientId");
RemotingCommand consumerRunningInfoRequest = RemotingCommand.createRequestCommand(RequestCode.GET_CONSUMER_RUNNING_INFO, consumerRunningInfoRequestHeader);
ConsumeMessageDirectlyResultRequestHeader consumeMessageDirectlyResultRequestHeader = new ConsumeMessageDirectlyResultRequestHeader();
consumeMessageDirectlyResultRequestHeader.setConsumerGroup("group");
consumeMessageDirectlyResultRequestHeader.setClientId("clientId");
MessageExt consumeMessageDirectlyMessageExt = new MessageExt();
consumeMessageDirectlyMessageExt.setTopic("topic");
consumeMessageDirectlyMessageExt.setTags("tags");
consumeMessageDirectlyMessageExt.setBornHost(NetworkUtil.string2SocketAddress("127.0.0.2:8888"));
consumeMessageDirectlyMessageExt.setStoreHost(NetworkUtil.string2SocketAddress("127.0.0.1:10911"));
consumeMessageDirectlyMessageExt.setBody(UUID.randomUUID().toString().getBytes(StandardCharsets.UTF_8));
consumeMessageDirectlyMessageExt.setMsgId(MessageClientIDSetter.createUniqID());
RemotingCommand consumeMessageDirectlyResult = RemotingCommand.createRequestCommand(RequestCode.CONSUME_MESSAGE_DIRECTLY, consumeMessageDirectlyResultRequestHeader);
consumeMessageDirectlyResult.setBody(MessageDecoder.encode(consumeMessageDirectlyMessageExt, false));
MockProxyChannel channel = new MockProxyChannel(this.proxyRelayService, null, "127.0.0.2:8888", "127.0.0.1:10911") {
@Override
protected CompletableFuture<Void> processOtherMessage(Object msg) {
return CompletableFuture.completedFuture(null);
}
@Override
protected CompletableFuture<Void> processCheckTransaction(CheckTransactionStateRequestHeader header,
MessageExt messageExt, TransactionData transactionData, CompletableFuture<ProxyRelayResult<Void>> responseFuture) {
assertEquals(checkTransactionStateRequestHeader, header);
assertArrayEquals(transactionMessageExt.getBody(), messageExt.getBody());
return CompletableFuture.completedFuture(null);
}
@Override
protected CompletableFuture<Void> processGetConsumerRunningInfo(RemotingCommand command,
GetConsumerRunningInfoRequestHeader header,
CompletableFuture<ProxyRelayResult<ConsumerRunningInfo>> responseFuture) {
assertEquals(consumerRunningInfoRequestHeader, getConsumerRunningInfoArgumentCaptor.getValue());
assertEquals(consumerRunningInfoRequestHeader, header);
return CompletableFuture.completedFuture(null);
}
@Override
protected CompletableFuture<Void> processConsumeMessageDirectly(RemotingCommand command,
ConsumeMessageDirectlyResultRequestHeader header, MessageExt messageExt,
CompletableFuture<ProxyRelayResult<ConsumeMessageDirectlyResult>> responseFuture) {
assertEquals(consumeMessageDirectlyResultRequestHeader, consumeMessageDirectlyArgumentCaptor.getValue());
assertEquals(consumeMessageDirectlyResultRequestHeader, header);
assertArrayEquals(consumeMessageDirectlyMessageExt.getBody(), messageExt.getBody());
return CompletableFuture.completedFuture(null);
}
};
assertTrue(channel.writeAndFlush(checkTransactionRequest).isSuccess());
assertTrue(channel.writeAndFlush(consumerRunningInfoRequest).isSuccess());
assertTrue(channel.writeAndFlush(consumeMessageDirectlyResult).isSuccess());
}
}
| MockProxyChannel |
java | apache__flink | flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/connector/print/table/PrintTableSinkFactory.java | {
"start": 4181,
"end": 6670
} | class ____ implements DynamicTableSink, SupportsPartitioning {
private final DataType type;
private String printIdentifier;
private final boolean stdErr;
private final @Nullable Integer parallelism;
private final List<String> partitionKeys;
private Map<String, String> staticPartitions = new LinkedHashMap<>();
private PrintSink(
DataType type,
List<String> partitionKeys,
String printIdentifier,
boolean stdErr,
Integer parallelism) {
this.type = type;
this.partitionKeys = partitionKeys;
this.printIdentifier = printIdentifier;
this.stdErr = stdErr;
this.parallelism = parallelism;
}
@Override
public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
return requestedMode;
}
@Override
public SinkRuntimeProvider getSinkRuntimeProvider(DynamicTableSink.Context context) {
DataStructureConverter converter = context.createDataStructureConverter(type);
staticPartitions.forEach(
(key, value) -> {
printIdentifier = null != printIdentifier ? printIdentifier + ":" : "";
printIdentifier += key + "=" + value;
});
return SinkFunctionProvider.of(
new RowDataPrintFunction(converter, printIdentifier, stdErr), parallelism);
}
@Override
public DynamicTableSink copy() {
return new PrintSink(type, partitionKeys, printIdentifier, stdErr, parallelism);
}
@Override
public String asSummaryString() {
return "Print to " + (stdErr ? "System.err" : "System.out");
}
@Override
public void applyStaticPartition(Map<String, String> partition) {
// make it a LinkedHashMap to maintain partition column order
staticPartitions = new LinkedHashMap<>();
for (String partitionCol : partitionKeys) {
if (partition.containsKey(partitionCol)) {
staticPartitions.put(partitionCol, partition.get(partitionCol));
}
}
}
}
/**
* Implementation of the SinkFunction converting {@link RowData} to string and passing to {@link
* PrintSinkFunction}.
*/
private static | PrintSink |
java | elastic__elasticsearch | distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/ProxyMatcher.java | {
"start": 688,
"end": 1923
} | class ____ extends TypeSafeMatcher<Proxy> {
private final Proxy.Type type;
private final String hostname;
private final int port;
public static ProxyMatcher matchesProxy(Proxy.Type type, String hostname, int port) {
return new ProxyMatcher(type, hostname, port);
}
public static ProxyMatcher matchesProxy(Proxy.Type type) {
return new ProxyMatcher(type, null, -1);
}
ProxyMatcher(Proxy.Type type, String hostname, int port) {
this.type = type;
this.hostname = hostname;
this.port = port;
}
@Override
@SuppressForbidden(reason = "Proxy constructor uses InetSocketAddress")
protected boolean matchesSafely(Proxy proxy) {
if (proxy.type() != this.type) {
return false;
}
if (hostname == null) {
return true;
}
InetSocketAddress address = (InetSocketAddress) proxy.address();
return this.hostname.equals(address.getHostName()) && this.port == address.getPort();
}
@Override
public void describeTo(Description description) {
description.appendText("a proxy instance of type [" + type + "] pointing at [" + hostname + ":" + port + "]");
}
}
| ProxyMatcher |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/provider/bug/Config.java | {
"start": 323,
"end": 1145
} | class ____ {
@Singleton
@First
AtomicInteger createCounterFirst() {
return new AtomicInteger();
}
@Singleton
@Second
AtomicInteger createCounterSecond() {
return new AtomicInteger();
}
@Singleton
@Third
AtomicInteger createCounterThird() {
return new AtomicInteger();
}
@Prototype
@First
Injectable createFirst(@First Provider<AtomicInteger> counter) {
return new Injectable(counter.get().addAndGet(1));
}
@Prototype
@Second
Injectable createSecond(@Second Provider<AtomicInteger> counter) {
return new Injectable(counter.get().addAndGet(10));
}
@Prototype
@Third
@Requires(property = "third.enabled", value = "true")
Injectable createThird(@Third Provider<AtomicInteger> counter) {
return new Injectable(counter.get().addAndGet(100));
}
}
| Config |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceStateModel.java | {
"start": 1064,
"end": 5021
} | class ____ {
/**
* Map of all valid state transitions
* [current] [proposed1, proposed2, ...]
*/
private static final boolean[][] statemap =
{
// uninited inited started stopped
/* uninited */ {false, true, false, true},
/* inited */ {false, true, true, true},
/* started */ {false, false, true, true},
/* stopped */ {false, false, false, true},
};
/**
* The state of the service
*/
private volatile Service.STATE state;
/**
* The name of the service: used in exceptions
*/
private String name;
/**
* Create the service state model in the {@link Service.STATE#NOTINITED}
* state.
*
* @param name input name.
*/
public ServiceStateModel(String name) {
this(name, Service.STATE.NOTINITED);
}
/**
* Create a service state model instance in the chosen state
* @param state the starting state
* @param name input name.
*/
public ServiceStateModel(String name, Service.STATE state) {
this.state = state;
this.name = name;
}
/**
* Query the service state. This is a non-blocking operation.
* @return the state
*/
public Service.STATE getState() {
return state;
}
/**
* Query that the state is in a specific state
* @param proposed proposed new state
* @return the state
*/
public boolean isInState(Service.STATE proposed) {
return state.equals(proposed);
}
/**
* Verify that that a service is in a given state.
* @param expectedState the desired state
* @throws ServiceStateException if the service state is different from
* the desired state
*/
public void ensureCurrentState(Service.STATE expectedState) {
if (state != expectedState) {
throw new ServiceStateException(name+ ": for this operation, the " +
"current service state must be "
+ expectedState
+ " instead of " + state);
}
}
/**
* Enter a state -thread safe.
*
* @param proposed proposed new state
* @return the original state
* @throws ServiceStateException if the transition is not permitted
*/
public synchronized Service.STATE enterState(Service.STATE proposed) {
checkStateTransition(name, state, proposed);
Service.STATE oldState = state;
//atomic write of the new state
state = proposed;
return oldState;
}
/**
* Check that a state tansition is valid and
* throw an exception if not
* @param name name of the service (can be null)
* @param state current state
* @param proposed proposed new state
*/
public static void checkStateTransition(String name,
Service.STATE state,
Service.STATE proposed) {
if (!isValidStateTransition(state, proposed)) {
throw new ServiceStateException(name + " cannot enter state "
+ proposed + " from state " + state);
}
}
/**
* Is a state transition valid?
* There are no checks for current==proposed
* as that is considered a non-transition.
*
* using an array kills off all branch misprediction costs, at the expense
* of cache line misses.
*
* @param current current state
* @param proposed proposed new state
* @return true if the transition to a new state is valid
*/
public static boolean isValidStateTransition(Service.STATE current,
Service.STATE proposed) {
boolean[] row = statemap[current.getValue()];
return row[proposed.getValue()];
}
/**
* return the state text as the toString() value
* @return the current state's description
*/
@Override
public String toString() {
return (name.isEmpty() ? "" : ((name) + ": "))
+ state.toString();
}
}
| ServiceStateModel |
java | apache__dubbo | dubbo-test/dubbo-test-spring/src/main/java/org/apache/dubbo/test/spring/SpringAnnotationBeanTest.java | {
"start": 2341,
"end": 2491
} | class ____ {
@Bean
public TestService testService() {
return new TestService();
}
}
static | TestConfiguration |
java | playframework__playframework | core/play/src/main/java/play/http/websocket/Message.java | {
"start": 299,
"end": 426
} | class ____ {
// private constructor to seal it
private Message() {}
/** A text WebSocket message */
public static | Message |
java | apache__kafka | metadata/src/test/java/org/apache/kafka/image/publisher/BrokerRegistrationTrackerTest.java | {
"start": 1641,
"end": 1983
} | class ____ {
static final Uuid INCARNATION_ID = Uuid.fromString("jyjLbk31Tpa53pFrU9Y-Ng");
static final Uuid A = Uuid.fromString("Ahw3vXfnThqeZbb7HD1w6Q");
static final Uuid B = Uuid.fromString("BjOacT0OTNqIvUWIlKhahg");
static final Uuid C = Uuid.fromString("CVHi_iv2Rvy5_1rtPdasfg");
static | BrokerRegistrationTrackerTest |
java | apache__dubbo | dubbo-spring-boot-project/dubbo-spring-boot-actuator/src/main/java/org/apache/dubbo/spring/boot/actuate/endpoint/condition/CompatibleOnEnabledEndpointCondition.java | {
"start": 3285,
"end": 3363
} | class ____",
"",
String.format("No condition | found |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/generated/PartiallyGeneratedComponentTest.java | {
"start": 857,
"end": 1988
} | class ____ {
@Test
public void testPartialComponentGeneration(SessionFactoryScope scope) {
ComponentOwner owner = new ComponentOwner( "initial" );
scope.inTransaction(
s -> s.persist( owner )
);
assertNotNull( owner.getComponent(), "expecting insert value generation" );
int previousValue = owner.getComponent().getGenerated();
assertFalse( 0 == previousValue, "expecting insert value generation" );
ComponentOwner owner2 = scope.fromTransaction(
s -> {
ComponentOwner _owner = s.find( ComponentOwner.class, owner.getId() );
assertEquals( previousValue, _owner.getComponent().getGenerated(), "expecting insert value generation" );
_owner.setName( "subsequent" );
return _owner;
}
);
assertNotNull( owner2.getComponent() );
int previousValue2 = owner2.getComponent().getGenerated();
scope.inTransaction(
s -> {
ComponentOwner _owner = s.find( ComponentOwner.class, owner.getId() );
assertEquals( previousValue2, _owner.getComponent().getGenerated(), "expecting update value generation" );
s.remove( _owner );
}
);
}
}
| PartiallyGeneratedComponentTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/aggregate/LongValueSum.java | {
"start": 989,
"end": 1133
} | class ____ a value aggregator that sums up
* a sequence of long values.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public | implements |
java | google__guava | android/guava-tests/test/com/google/common/collect/ImmutableListMultimapTest.java | {
"start": 2868,
"end": 6298
} | class ____
extends TestStringListMultimapGenerator {
@Override
protected ListMultimap<String, String> create(Entry<String, String>[] entries) {
return ImmutableListMultimap.copyOf(Arrays.asList(entries));
}
}
@J2ktIncompatible
@GwtIncompatible // suite
@AndroidIncompatible // test-suite builders
public static Test suite() {
TestSuite suite = new TestSuite();
suite.addTest(
ListMultimapTestSuiteBuilder.using(new ImmutableListMultimapGenerator())
.named("ImmutableListMultimap")
.withFeatures(ALLOWS_ANY_NULL_QUERIES, SERIALIZABLE, KNOWN_ORDER, CollectionSize.ANY)
.createTestSuite());
suite.addTest(
ListMultimapTestSuiteBuilder.using(new ImmutableListMultimapCopyOfEntriesGenerator())
.named("ImmutableListMultimap.copyOf[Iterable<Entry>]")
.withFeatures(ALLOWS_ANY_NULL_QUERIES, SERIALIZABLE, KNOWN_ORDER, CollectionSize.ANY)
.createTestSuite());
suite.addTestSuite(ImmutableListMultimapTest.class);
return suite;
}
public void testBuilderWithExpectedKeysNegative() {
assertThrows(
IllegalArgumentException.class, () -> ImmutableListMultimap.builderWithExpectedKeys(-1));
}
public void testBuilderWithExpectedKeysZero() {
ImmutableListMultimap.Builder<String, String> builder =
ImmutableListMultimap.builderWithExpectedKeys(0);
builder.put("key", "value");
assertThat(builder.build().entries()).containsExactly(Maps.immutableEntry("key", "value"));
}
public void testBuilderWithExpectedKeysPositive() {
ImmutableListMultimap.Builder<String, String> builder =
ImmutableListMultimap.builderWithExpectedKeys(1);
builder.put("key", "value");
assertThat(builder.build().entries()).containsExactly(Maps.immutableEntry("key", "value"));
}
public void testBuilderWithExpectedValuesPerKeyNegative() {
ImmutableListMultimap.Builder<String, String> builder = ImmutableListMultimap.builder();
assertThrows(IllegalArgumentException.class, () -> builder.expectedValuesPerKey(-1));
}
public void testBuilderWithExpectedValuesPerKeyZero() {
ImmutableListMultimap.Builder<String, String> builder =
ImmutableListMultimap.<String, String>builder().expectedValuesPerKey(0);
builder.put("key", "value");
assertThat(builder.build().entries()).containsExactly(Maps.immutableEntry("key", "value"));
}
public void testBuilderWithExpectedValuesPerKeyPositive() {
ImmutableListMultimap.Builder<String, String> builder =
ImmutableListMultimap.<String, String>builder().expectedValuesPerKey(1);
builder.put("key", "value");
assertThat(builder.build().entries()).containsExactly(Maps.immutableEntry("key", "value"));
}
public void testBuilder_withImmutableEntry() {
ImmutableListMultimap<String, Integer> multimap =
new Builder<String, Integer>().put(Maps.immutableEntry("one", 1)).build();
assertEquals(Arrays.asList(1), multimap.get("one"));
}
public void testBuilder_withImmutableEntryAndNullContents() {
Builder<String, Integer> builder = new Builder<>();
assertThrows(
NullPointerException.class, () -> builder.put(Maps.immutableEntry("one", (Integer) null)));
assertThrows(
NullPointerException.class, () -> builder.put(Maps.immutableEntry((String) null, 1)));
}
private static | ImmutableListMultimapCopyOfEntriesGenerator |
java | quarkusio__quarkus | extensions/container-image/container-image-podman/deployment/src/main/java/io/quarkus/container/image/podman/deployment/PodmanProcessor.java | {
"start": 1586,
"end": 8618
} | class ____ extends CommonProcessor<PodmanConfig> {
private static final Logger LOG = Logger.getLogger(PodmanProcessor.class);
private static final String PODMAN = "podman";
static final String PODMAN_CONTAINER_IMAGE_NAME = "podman";
@Override
protected String getProcessorImplementation() {
return PODMAN;
}
@BuildStep
public AvailableContainerImageExtensionBuildItem availability() {
return new AvailableContainerImageExtensionBuildItem(PODMAN);
}
@BuildStep(onlyIf = { IsNormalNotRemoteDev.class, PodmanBuild.class }, onlyIfNot = NativeBuild.class)
public void podmanBuildFromJar(PodmanConfig podmanConfig,
PodmanStatusBuildItem podmanStatusBuildItem,
ContainerImageConfig containerImageConfig,
OutputTargetBuildItem out,
ContainerImageInfoBuildItem containerImageInfo,
@SuppressWarnings("unused") CompiledJavaVersionBuildItem compiledJavaVersion,
Optional<ContainerImageBuildRequestBuildItem> buildRequest,
Optional<ContainerImagePushRequestBuildItem> pushRequest,
@SuppressWarnings("unused") Optional<JvmStartupOptimizerArchiveResultBuildItem> jvmStartupOptimizerArchiveResult, // ensure podman build will be performed after AppCDS creation
BuildProducer<ArtifactResultBuildItem> artifactResultProducer,
BuildProducer<ContainerImageBuilderBuildItem> containerImageBuilder,
PackageConfig packageConfig,
@SuppressWarnings("unused") JarBuildItem jar) {
buildFromJar(podmanConfig, podmanStatusBuildItem, containerImageConfig, out, containerImageInfo, buildRequest,
pushRequest, artifactResultProducer, containerImageBuilder, packageConfig, ContainerRuntime.PODMAN);
}
@BuildStep(onlyIf = { IsNormalNotRemoteDev.class, NativeBuild.class, PodmanBuild.class })
public void podmanBuildFromNativeImage(PodmanConfig podmanConfig,
PodmanStatusBuildItem podmanStatusBuildItem,
ContainerImageConfig containerImageConfig,
ContainerImageInfoBuildItem containerImage,
Optional<ContainerImageBuildRequestBuildItem> buildRequest,
Optional<ContainerImagePushRequestBuildItem> pushRequest,
OutputTargetBuildItem out,
@SuppressWarnings("unused") Optional<UpxCompressedBuildItem> upxCompressed, // used to ensure that we work with the compressed native binary if compression was enabled
BuildProducer<ArtifactResultBuildItem> artifactResultProducer,
BuildProducer<ContainerImageBuilderBuildItem> containerImageBuilder,
PackageConfig packageConfig,
// used to ensure that the native binary has been built
NativeImageBuildItem nativeImage) {
buildFromNativeImage(podmanConfig, podmanStatusBuildItem, containerImageConfig, containerImage,
buildRequest, pushRequest, out, artifactResultProducer, containerImageBuilder, packageConfig, nativeImage,
ContainerRuntime.PODMAN);
}
@Override
protected String createContainerImage(ContainerImageConfig containerImageConfig,
PodmanConfig podmanConfig,
ContainerImageInfoBuildItem containerImageInfo,
OutputTargetBuildItem out,
DockerfilePaths dockerfilePaths,
boolean buildContainerImage,
boolean pushContainerImage,
PackageConfig packageConfig,
String executableName) {
// Following https://developers.redhat.com/articles/2023/11/03/how-build-multi-architecture-container-images#testing_multi_architecture_containers
// If we are building more than 1 platform, then the build needs to happen in 2 separate steps
// 1) podman manifest create <image_name>
// 2) podman build --platform <platforms> --manifest <image_name>
// Then when pushing you push the manifest, not the image:
// podman manifest push <image_name>
var isMultiPlatformBuild = isMultiPlatformBuild(podmanConfig);
var image = containerImageInfo.getImage();
if (isMultiPlatformBuild) {
createManifest(image, executableName);
}
if (buildContainerImage) {
var podmanBuildArgs = getPodmanBuildArgs(image, dockerfilePaths, containerImageConfig, podmanConfig,
isMultiPlatformBuild);
buildImage(containerImageInfo, out, executableName, podmanBuildArgs, true);
}
if (pushContainerImage) {
loginToRegistryIfNeeded(containerImageConfig, containerImageInfo, executableName);
if (isMultiPlatformBuild) {
pushManifests(containerImageInfo, executableName);
} else {
pushImages(containerImageInfo, executableName, podmanConfig);
}
}
return image;
}
@Override
protected String[] createPushArgs(String image, PodmanConfig config) {
return new String[] { "push", image, String.format("--tls-verify=%b", config.tlsVerify()) };
}
private String[] getPodmanBuildArgs(String image,
DockerfilePaths dockerfilePaths,
ContainerImageConfig containerImageConfig,
PodmanConfig podmanConfig,
boolean isMultiPlatformBuild) {
var podmanBuildArgs = getContainerCommonBuildArgs(image, dockerfilePaths, containerImageConfig, podmanConfig,
!isMultiPlatformBuild);
podmanConfig.platform()
.filter(platform -> !platform.isEmpty())
.ifPresent(platform -> {
podmanBuildArgs.addAll(List.of("--platform", String.join(",", platform)));
if (isMultiPlatformBuild) {
podmanBuildArgs.addAll(List.of("--manifest", image));
}
});
podmanBuildArgs.add(dockerfilePaths.dockerExecutionPath().toAbsolutePath().toString());
return podmanBuildArgs.toArray(String[]::new);
}
private void pushManifests(ContainerImageInfoBuildItem containerImageInfo, String executableName) {
Stream.concat(containerImageInfo.getAdditionalImageTags().stream(), Stream.of(containerImageInfo.getImage()))
.forEach(manifestToPush -> pushManifest(manifestToPush, executableName));
}
private void pushManifest(String image, String executableName) {
ProcessBuilder.exec(executableName, "manifest", "push", image);
LOG.infof("Successfully pushed podman manifest %s", image);
}
private void createManifest(String image, String executableName) {
LOG.infof("Running '%s manifest create %s'", executableName, image);
ProcessBuilder.exec(executableName, "manifest", "create", image);
}
private boolean isMultiPlatformBuild(PodmanConfig podmanConfig) {
return podmanConfig.platform()
.map(List::size)
.orElse(0) >= 2;
}
}
| PodmanProcessor |
java | apache__camel | components/camel-google/camel-google-sheets/src/test/java/org/apache/camel/component/google/sheets/SheetsSpreadsheetsIT.java | {
"start": 2408,
"end": 3785
} | class ____ extends AbstractGoogleSheetsTestSupport {
private String title = "camel-sheets-" + new SecureRandom().nextInt(Integer.MAX_VALUE);
@Test
public void test() {
Spreadsheet sheetToCreate = new Spreadsheet();
SpreadsheetProperties sheetProperties = new SpreadsheetProperties();
sheetProperties.setTitle(title);
sheetToCreate.setProperties(sheetProperties);
final Spreadsheet result = requestBody("direct://CREATE", sheetToCreate);
assertNotNull(result, "create result is null");
assertEquals(title, result.getProperties().getTitle());
LOG.debug("create: {}", result);
}
@Override
protected GoogleSheetsClientFactory getClientFactory() throws Exception {
return new MockGoogleSheetsClientFactory(
new MockLowLevelHttpResponse().setContent("{\"properties\":{\"title\":\"" + title + "\"}}"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct://CREATE")
.to("google-sheets://" + PATH_PREFIX + "/create?inBody=content");
}
};
}
}
@Nested
| CreateIT |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/TypeNameShadowingTest.java | {
"start": 7262,
"end": 7415
} | class ____ {}
<T> void f(T t) {}
}
""")
.addOutputLines(
"Foo.java",
"""
| T |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/InterQJournalProtocolServerSideTranslatorPB.java | {
"start": 1751,
"end": 3036
} | class ____ implements
InterQJournalProtocolPB{
/* Server side implementation to delegate the requests to. */
private final InterQJournalProtocol impl;
public InterQJournalProtocolServerSideTranslatorPB(InterQJournalProtocol
impl) {
this.impl = impl;
}
@Override
public GetEditLogManifestResponseProto getEditLogManifestFromJournal(
RpcController controller, GetEditLogManifestRequestProto request)
throws ServiceException {
try {
return impl.getEditLogManifestFromJournal(
request.getJid().getIdentifier(),
request.hasNameServiceId() ? request.getNameServiceId() : null,
request.getSinceTxId(),
request.getInProgressOk());
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public StorageInfoProto getStorageInfo(
RpcController controller, GetStorageInfoRequestProto request)
throws ServiceException {
try {
return impl.getStorageInfo(
request.getJid().getIdentifier(),
request.hasNameServiceId() ? request.getNameServiceId() : null
);
} catch (IOException e) {
throw new ServiceException(e);
}
}
}
| InterQJournalProtocolServerSideTranslatorPB |
java | apache__camel | components/camel-leveldb/src/test/java/org/apache/camel/component/leveldb/LevelDBCustomSerializationTest.java | {
"start": 5225,
"end": 6855
} | class ____ implements Serializable {
private String a;
private byte[] b;
public ObjectWithBinaryField() {
}
public ObjectWithBinaryField(String a, byte[] b) {
this.a = a;
this.b = b;
}
public ObjectWithBinaryField withA(String a) {
this.a = a;
return this;
}
public ObjectWithBinaryField aggregateWith(ObjectWithBinaryField newObject) throws IOException {
try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) {
outputStream.write(b);
outputStream.write(newObject.b);
return new ObjectWithBinaryField(a + newObject.a, outputStream.toByteArray());
}
}
@Override
public String toString() {
return "ObjectWithBinaryField{" +
"a='" + a + '\'' +
", b=" + Arrays.toString(b) +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ObjectWithBinaryField that = (ObjectWithBinaryField) o;
return Objects.equals(a, that.a) &&
Arrays.equals(b, that.b);
}
@Override
public int hashCode() {
int result = Objects.hash(a);
result = 31 * result + Arrays.hashCode(b);
return result;
}
}
public static | ObjectWithBinaryField |
java | spring-projects__spring-boot | documentation/spring-boot-docs/src/test/java/org/springframework/boot/docs/howto/actuator/maphealthindicatorstometrics/MetricsHealthMicrometerExportTests.java | {
"start": 2488,
"end": 2805
} | class ____ {
@Bean
MetricsHealthMicrometerExport example() {
return new MetricsHealthMicrometerExport();
}
@Bean
SimpleMeterRegistry simpleMeterRegistry() {
return new SimpleMeterRegistry();
}
@Bean
HealthIndicator outOfService() {
return () -> Health.outOfService().build();
}
}
}
| Config |
java | apache__camel | components/camel-git/src/generated/java/org/apache/camel/component/git/GitEndpointConfigurer.java | {
"start": 730,
"end": 11874
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
GitEndpoint target = (GitEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "allowempty":
case "allowEmpty": target.setAllowEmpty(property(camelContext, boolean.class, value)); return true;
case "backofferrorthreshold":
case "backoffErrorThreshold": target.setBackoffErrorThreshold(property(camelContext, int.class, value)); return true;
case "backoffidlethreshold":
case "backoffIdleThreshold": target.setBackoffIdleThreshold(property(camelContext, int.class, value)); return true;
case "backoffmultiplier":
case "backoffMultiplier": target.setBackoffMultiplier(property(camelContext, int.class, value)); return true;
case "branchname":
case "branchName": target.setBranchName(property(camelContext, java.lang.String.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "delay": target.setDelay(property(camelContext, long.class, value)); return true;
case "exceptionhandler":
case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
case "exchangepattern":
case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
case "gitconfigfile":
case "gitConfigFile": target.setGitConfigFile(property(camelContext, java.lang.String.class, value)); return true;
case "greedy": target.setGreedy(property(camelContext, boolean.class, value)); return true;
case "initialdelay":
case "initialDelay": target.setInitialDelay(property(camelContext, long.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "operation": target.setOperation(property(camelContext, java.lang.String.class, value)); return true;
case "password": target.setPassword(property(camelContext, java.lang.String.class, value)); return true;
case "pollstrategy":
case "pollStrategy": target.setPollStrategy(property(camelContext, org.apache.camel.spi.PollingConsumerPollStrategy.class, value)); return true;
case "remotename":
case "remoteName": target.setRemoteName(property(camelContext, java.lang.String.class, value)); return true;
case "remotepath":
case "remotePath": target.setRemotePath(property(camelContext, java.lang.String.class, value)); return true;
case "repeatcount":
case "repeatCount": target.setRepeatCount(property(camelContext, long.class, value)); return true;
case "runlogginglevel":
case "runLoggingLevel": target.setRunLoggingLevel(property(camelContext, org.apache.camel.LoggingLevel.class, value)); return true;
case "scheduledexecutorservice":
case "scheduledExecutorService": target.setScheduledExecutorService(property(camelContext, java.util.concurrent.ScheduledExecutorService.class, value)); return true;
case "scheduler": target.setScheduler(property(camelContext, java.lang.Object.class, value)); return true;
case "schedulerproperties":
case "schedulerProperties": target.setSchedulerProperties(property(camelContext, java.util.Map.class, value)); return true;
case "sendemptymessagewhenidle":
case "sendEmptyMessageWhenIdle": target.setSendEmptyMessageWhenIdle(property(camelContext, boolean.class, value)); return true;
case "startscheduler":
case "startScheduler": target.setStartScheduler(property(camelContext, boolean.class, value)); return true;
case "tagname":
case "tagName": target.setTagName(property(camelContext, java.lang.String.class, value)); return true;
case "targetbranchname":
case "targetBranchName": target.setTargetBranchName(property(camelContext, java.lang.String.class, value)); return true;
case "timeunit":
case "timeUnit": target.setTimeUnit(property(camelContext, java.util.concurrent.TimeUnit.class, value)); return true;
case "type": target.setType(property(camelContext, org.apache.camel.component.git.consumer.GitType.class, value)); return true;
case "usefixeddelay":
case "useFixedDelay": target.setUseFixedDelay(property(camelContext, boolean.class, value)); return true;
case "username": target.setUsername(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "allowempty":
case "allowEmpty": return boolean.class;
case "backofferrorthreshold":
case "backoffErrorThreshold": return int.class;
case "backoffidlethreshold":
case "backoffIdleThreshold": return int.class;
case "backoffmultiplier":
case "backoffMultiplier": return int.class;
case "branchname":
case "branchName": return java.lang.String.class;
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "delay": return long.class;
case "exceptionhandler":
case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class;
case "exchangepattern":
case "exchangePattern": return org.apache.camel.ExchangePattern.class;
case "gitconfigfile":
case "gitConfigFile": return java.lang.String.class;
case "greedy": return boolean.class;
case "initialdelay":
case "initialDelay": return long.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "operation": return java.lang.String.class;
case "password": return java.lang.String.class;
case "pollstrategy":
case "pollStrategy": return org.apache.camel.spi.PollingConsumerPollStrategy.class;
case "remotename":
case "remoteName": return java.lang.String.class;
case "remotepath":
case "remotePath": return java.lang.String.class;
case "repeatcount":
case "repeatCount": return long.class;
case "runlogginglevel":
case "runLoggingLevel": return org.apache.camel.LoggingLevel.class;
case "scheduledexecutorservice":
case "scheduledExecutorService": return java.util.concurrent.ScheduledExecutorService.class;
case "scheduler": return java.lang.Object.class;
case "schedulerproperties":
case "schedulerProperties": return java.util.Map.class;
case "sendemptymessagewhenidle":
case "sendEmptyMessageWhenIdle": return boolean.class;
case "startscheduler":
case "startScheduler": return boolean.class;
case "tagname":
case "tagName": return java.lang.String.class;
case "targetbranchname":
case "targetBranchName": return java.lang.String.class;
case "timeunit":
case "timeUnit": return java.util.concurrent.TimeUnit.class;
case "type": return org.apache.camel.component.git.consumer.GitType.class;
case "usefixeddelay":
case "useFixedDelay": return boolean.class;
case "username": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
GitEndpoint target = (GitEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "allowempty":
case "allowEmpty": return target.isAllowEmpty();
case "backofferrorthreshold":
case "backoffErrorThreshold": return target.getBackoffErrorThreshold();
case "backoffidlethreshold":
case "backoffIdleThreshold": return target.getBackoffIdleThreshold();
case "backoffmultiplier":
case "backoffMultiplier": return target.getBackoffMultiplier();
case "branchname":
case "branchName": return target.getBranchName();
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "delay": return target.getDelay();
case "exceptionhandler":
case "exceptionHandler": return target.getExceptionHandler();
case "exchangepattern":
case "exchangePattern": return target.getExchangePattern();
case "gitconfigfile":
case "gitConfigFile": return target.getGitConfigFile();
case "greedy": return target.isGreedy();
case "initialdelay":
case "initialDelay": return target.getInitialDelay();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "operation": return target.getOperation();
case "password": return target.getPassword();
case "pollstrategy":
case "pollStrategy": return target.getPollStrategy();
case "remotename":
case "remoteName": return target.getRemoteName();
case "remotepath":
case "remotePath": return target.getRemotePath();
case "repeatcount":
case "repeatCount": return target.getRepeatCount();
case "runlogginglevel":
case "runLoggingLevel": return target.getRunLoggingLevel();
case "scheduledexecutorservice":
case "scheduledExecutorService": return target.getScheduledExecutorService();
case "scheduler": return target.getScheduler();
case "schedulerproperties":
case "schedulerProperties": return target.getSchedulerProperties();
case "sendemptymessagewhenidle":
case "sendEmptyMessageWhenIdle": return target.isSendEmptyMessageWhenIdle();
case "startscheduler":
case "startScheduler": return target.isStartScheduler();
case "tagname":
case "tagName": return target.getTagName();
case "targetbranchname":
case "targetBranchName": return target.getTargetBranchName();
case "timeunit":
case "timeUnit": return target.getTimeUnit();
case "type": return target.getType();
case "usefixeddelay":
case "useFixedDelay": return target.isUseFixedDelay();
case "username": return target.getUsername();
default: return null;
}
}
@Override
public Object getCollectionValueType(Object target, String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "schedulerproperties":
case "schedulerProperties": return java.lang.Object.class;
default: return null;
}
}
}
| GitEndpointConfigurer |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceKey.java | {
"start": 588,
"end": 2280
} | class ____ implements Accountable, Comparable<SequenceKey> {
private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(SequenceKey.class);
public static final SequenceKey NONE = new SequenceKey();
private final Object[] keys;
private final int hashCode;
public SequenceKey(Object... keys) {
this.keys = keys;
this.hashCode = Objects.hash(keys);
}
public List<Object> asList() {
return keys == null ? emptyList() : Arrays.asList(keys);
}
@Override
public long ramBytesUsed() {
return SHALLOW_SIZE + RamUsageEstimator.sizeOfObject(keys);
}
@Override
public int hashCode() {
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
SequenceKey other = (SequenceKey) obj;
return Arrays.equals(keys, other.keys);
}
@Override
public String toString() {
return CollectionUtils.isEmpty(keys) ? "NONE" : Arrays.toString(keys);
}
@Override
@SuppressWarnings({ "unchecked", "rawtypes" })
public int compareTo(SequenceKey other) {
int length = keys.length;
int otherLength = other.keys.length;
for (int i = 0; i < length && i < otherLength; i++) {
if (keys[i] instanceof Comparable key) {
int result = key.compareTo(other.keys[i]);
if (result != 0) {
return result;
}
}
}
return length - otherLength;
}
}
| SequenceKey |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/bean/override/example/IntegerExampleGenericService.java | {
"start": 829,
"end": 969
} | class ____ implements ExampleGenericService<Integer> {
@Override
public Integer greeting() {
return 123;
}
}
| IntegerExampleGenericService |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/AutoValueBoxedValues.java | {
"start": 3909,
"end": 4005
} | class ____ with the fixes
* to be applied.
*
* @param classTree The {@link AutoValue} | along |
java | apache__kafka | clients/src/test/java/org/apache/kafka/test/NoRetryException.java | {
"start": 1135,
"end": 1386
} | class ____ extends RuntimeException {
private final Throwable cause;
public NoRetryException(Throwable cause) {
this.cause = cause;
}
@Override
public Throwable getCause() {
return this.cause;
}
}
| NoRetryException |
java | google__guava | android/guava/src/com/google/common/collect/CartesianList.java | {
"start": 1141,
"end": 4807
} | class ____<E> extends AbstractList<List<E>> implements RandomAccess {
private final transient ImmutableList<List<E>> axes;
private final transient int[] axesSizeProduct;
static <E> List<List<E>> create(List<? extends List<? extends E>> lists) {
ImmutableList.Builder<List<E>> axesBuilder = new ImmutableList.Builder<>(lists.size());
for (List<? extends E> list : lists) {
List<E> copy = ImmutableList.copyOf(list);
if (copy.isEmpty()) {
return ImmutableList.of();
}
axesBuilder.add(copy);
}
return new CartesianList<>(axesBuilder.build());
}
CartesianList(ImmutableList<List<E>> axes) {
this.axes = axes;
int[] axesSizeProduct = new int[axes.size() + 1];
axesSizeProduct[axes.size()] = 1;
try {
for (int i = axes.size() - 1; i >= 0; i--) {
axesSizeProduct[i] = Math.multiplyExact(axesSizeProduct[i + 1], axes.get(i).size());
}
} catch (ArithmeticException e) {
throw new IllegalArgumentException(
"Cartesian product too large; must have size at most Integer.MAX_VALUE");
}
this.axesSizeProduct = axesSizeProduct;
}
private int getAxisIndexForProductIndex(int index, int axis) {
return (index / axesSizeProduct[axis + 1]) % axes.get(axis).size();
}
@Override
public int indexOf(@Nullable Object o) {
if (!(o instanceof List)) {
return -1;
}
List<?> list = (List<?>) o;
if (list.size() != axes.size()) {
return -1;
}
ListIterator<?> itr = list.listIterator();
int computedIndex = 0;
while (itr.hasNext()) {
int axisIndex = itr.nextIndex();
int elemIndex = axes.get(axisIndex).indexOf(itr.next());
if (elemIndex == -1) {
return -1;
}
computedIndex += elemIndex * axesSizeProduct[axisIndex + 1];
}
return computedIndex;
}
@Override
public int lastIndexOf(@Nullable Object o) {
if (!(o instanceof List)) {
return -1;
}
List<?> list = (List<?>) o;
if (list.size() != axes.size()) {
return -1;
}
ListIterator<?> itr = list.listIterator();
int computedIndex = 0;
while (itr.hasNext()) {
int axisIndex = itr.nextIndex();
int elemIndex = axes.get(axisIndex).lastIndexOf(itr.next());
if (elemIndex == -1) {
return -1;
}
computedIndex += elemIndex * axesSizeProduct[axisIndex + 1];
}
return computedIndex;
}
@Override
public ImmutableList<E> get(int index) {
checkElementIndex(index, size());
return new ImmutableList<E>() {
@Override
public int size() {
return axes.size();
}
@Override
public E get(int axis) {
checkElementIndex(axis, size());
int axisIndex = getAxisIndexForProductIndex(index, axis);
return axes.get(axis).get(axisIndex);
}
@Override
boolean isPartialView() {
return true;
}
// redeclare to help optimizers with b/310253115
@SuppressWarnings("RedundantOverride")
@J2ktIncompatible // serialization
@Override
@GwtIncompatible // serialization
Object writeReplace() {
return super.writeReplace();
}
};
}
@Override
public int size() {
return axesSizeProduct[0];
}
@Override
public boolean contains(@Nullable Object object) {
if (!(object instanceof List)) {
return false;
}
List<?> list = (List<?>) object;
if (list.size() != axes.size()) {
return false;
}
int i = 0;
for (Object o : list) {
if (!axes.get(i).contains(o)) {
return false;
}
i++;
}
return true;
}
}
| CartesianList |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/tableperclass/Component.java | {
"start": 644,
"end": 1278
} | class ____ {
private String manufacturerPartNumber;
private Long manufacturerId;
private Long id;
public void setId(Long id) {
this.id = id;
}
@Id
public Long getId() {
return id;
}
@Column(nullable = false)
public String getManufacturerPartNumber() {
return manufacturerPartNumber;
}
@Column(nullable = false)
public Long getManufacturerId() {
return manufacturerId;
}
public void setManufacturerId(Long manufacturerId) {
this.manufacturerId = manufacturerId;
}
public void setManufacturerPartNumber(String manufacturerPartNumber) {
this.manufacturerPartNumber = manufacturerPartNumber;
}
}
| Component |
java | quarkusio__quarkus | extensions/panache/mongodb-rest-data-panache/deployment/src/main/java/io/quarkus/mongodb/rest/data/panache/deployment/EntityClassHelper.java | {
"start": 411,
"end": 2260
} | class ____ {
private static final DotName OBJECT_ID = DotName.createSimple(ObjectId.class.getName());
private static final DotName BSON_ID_ANNOTATION = DotName.createSimple(BsonId.class.getName());
private final IndexView index;
public EntityClassHelper(IndexView index) {
this.index = index;
}
public FieldInfo getIdField(String className) {
return getIdField(index.getClassByName(DotName.createSimple(className)));
}
private FieldInfo getIdField(ClassInfo classInfo) {
ClassInfo tmpClassInfo = classInfo;
while (tmpClassInfo != null) {
for (FieldInfo field : tmpClassInfo.fields()) {
if (field.type().name().equals(OBJECT_ID) || field.hasAnnotation(BSON_ID_ANNOTATION)) {
return field;
}
}
if (tmpClassInfo.superName() != null) {
tmpClassInfo = index.getClassByName(tmpClassInfo.superName());
} else {
tmpClassInfo = null;
}
}
throw new IllegalArgumentException("Couldn't find id field of " + classInfo);
}
public MethodDescriptor getSetter(String className, FieldInfo field) {
return getSetter(index.getClassByName(DotName.createSimple(className)), field);
}
private MethodDescriptor getSetter(ClassInfo entityClass, FieldInfo field) {
if (entityClass == null) {
return null;
}
MethodInfo methodInfo = entityClass.method(JavaBeanUtil.getSetterName(field.name()), field.type());
if (methodInfo != null) {
return MethodDescriptor.of(methodInfo);
} else if (entityClass.superName() != null) {
return getSetter(index.getClassByName(entityClass.superName()), field);
}
return null;
}
}
| EntityClassHelper |
java | dropwizard__dropwizard | dropwizard-jersey/src/test/java/io/dropwizard/jersey/jsr310/OffsetDateTimeParamTest.java | {
"start": 196,
"end": 516
} | class ____ {
@Test
void parsesDateTimes() throws Exception {
final OffsetDateTimeParam param = new OffsetDateTimeParam("2012-11-19T13:37+01:00");
assertThat(param.get())
.isEqualTo(OffsetDateTime.of(2012, 11, 19, 13, 37, 0, 0, ZoneOffset.ofHours(1)));
}
}
| OffsetDateTimeParamTest |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/GenericTypeResolverTests.java | {
"start": 14419,
"end": 14522
} | class ____ {
public void nestedGenerics(List<Map<String, Integer>> input) {
}
}
| WithMethodParameter |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/executiongraph/failover/partitionrelease/PartitionGroupReleaseStrategyFactoryLoaderTest.java | {
"start": 1160,
"end": 2184
} | class ____ {
@Test
public void featureEnabledByDefault() {
final Configuration emptyConfiguration = new Configuration();
final PartitionGroupReleaseStrategy.Factory factory =
PartitionGroupReleaseStrategyFactoryLoader.loadPartitionGroupReleaseStrategyFactory(
emptyConfiguration);
assertThat(factory).isInstanceOf(RegionPartitionGroupReleaseStrategy.Factory.class);
}
@Test
public void featureCanBeDisabled() {
final Configuration emptyConfiguration = new Configuration();
emptyConfiguration.set(JobManagerOptions.PARTITION_RELEASE_DURING_JOB_EXECUTION, false);
final PartitionGroupReleaseStrategy.Factory factory =
PartitionGroupReleaseStrategyFactoryLoader.loadPartitionGroupReleaseStrategyFactory(
emptyConfiguration);
assertThat(factory).isInstanceOf(NotReleasingPartitionGroupReleaseStrategy.Factory.class);
}
}
| PartitionGroupReleaseStrategyFactoryLoaderTest |
java | apache__dubbo | dubbo-registry/dubbo-registry-api/src/test/java/org/apache/dubbo/registry/RegistryServiceListener1.java | {
"start": 978,
"end": 1593
} | class ____ implements RegistryServiceListener {
static RegistryServiceListener delegate;
@Override
public void onRegister(URL url, Registry registry) {
delegate.onRegister(url, registry);
}
@Override
public void onUnregister(URL url, Registry registry) {
delegate.onUnregister(url, registry);
}
@Override
public void onSubscribe(URL url, Registry registry) {
delegate.onSubscribe(url, registry);
}
@Override
public void onUnsubscribe(URL url, Registry registry) {
delegate.onUnsubscribe(url, registry);
}
}
| RegistryServiceListener1 |
java | redisson__redisson | redisson/src/main/java/org/redisson/RedissonSemaphore.java | {
"start": 1588,
"end": 20253
} | class ____ extends RedissonExpirable implements RSemaphore {
private static final Logger LOGGER = LoggerFactory.getLogger(RedissonSemaphore.class);
private final SemaphorePubSub semaphorePubSub;
public RedissonSemaphore(CommandAsyncExecutor commandExecutor, String name) {
super(commandExecutor, name);
this.semaphorePubSub = getSubscribeService().getSemaphorePubSub();
}
String getChannelName() {
return getChannelName(getRawName());
}
public static String getChannelName(String name) {
return prefixName("redisson_sc", name);
}
@Override
public void acquire() throws InterruptedException {
acquire(1);
}
@Override
public void acquire(int permits) throws InterruptedException {
if (tryAcquire(permits)) {
return;
}
CompletableFuture<RedissonLockEntry> future = subscribe();
semaphorePubSub.timeout(future);
RedissonLockEntry entry = commandExecutor.getInterrupted(future);
try {
while (true) {
if (tryAcquire(permits)) {
return;
}
entry.getLatch().acquire();
}
} finally {
unsubscribe(entry);
}
// get(acquireAsync(permits));
}
@Override
public RFuture<Void> acquireAsync() {
return acquireAsync(1);
}
@Override
public RFuture<Void> acquireAsync(int permits) {
CompletableFuture<Void> result = new CompletableFuture<>();
RFuture<Boolean> tryAcquireFuture = tryAcquireAsync(permits);
tryAcquireFuture.whenComplete((res, e) -> {
if (e != null) {
result.completeExceptionally(e);
return;
}
if (res) {
if (!result.complete(null)) {
releaseAsync(permits);
}
return;
}
CompletableFuture<RedissonLockEntry> subscribeFuture = subscribe();
semaphorePubSub.timeout(subscribeFuture);
subscribeFuture.whenComplete((r, e1) -> {
if (e1 != null) {
result.completeExceptionally(e1);
return;
}
acquireAsync(permits, r, result);
});
});
return new CompletableFutureWrapper<>(result);
}
private void tryAcquireAsync(AtomicLong time, int permits, RedissonLockEntry entry, CompletableFuture<Boolean> result) {
if (result.isDone()) {
unsubscribe(entry);
return;
}
if (time.get() <= 0) {
unsubscribe(entry);
result.complete(false);
return;
}
long curr = System.currentTimeMillis();
RFuture<Boolean> tryAcquireFuture = tryAcquireAsync(permits);
tryAcquireFuture.whenComplete((res, e) -> {
if (e != null) {
unsubscribe(entry);
result.completeExceptionally(e);
return;
}
if (res) {
unsubscribe(entry);
if (!result.complete(true)) {
releaseAsync(permits);
}
return;
}
long el = System.currentTimeMillis() - curr;
time.addAndGet(-el);
if (time.get() <= 0) {
unsubscribe(entry);
result.complete(false);
return;
}
// waiting for message
long current = System.currentTimeMillis();
if (entry.getLatch().tryAcquire()) {
tryAcquireAsync(time, permits, entry, result);
} else {
AtomicBoolean executed = new AtomicBoolean();
AtomicReference<Timeout> futureRef = new AtomicReference<>();
Runnable listener = () -> {
executed.set(true);
if (futureRef.get() != null && !futureRef.get().cancel()) {
entry.getLatch().release();
return;
}
long elapsed = System.currentTimeMillis() - current;
time.addAndGet(-elapsed);
tryAcquireAsync(time, permits, entry, result);
};
entry.addListener(listener);
long t = time.get();
if (!executed.get()) {
Timeout scheduledFuture = commandExecutor.getServiceManager().newTimeout(new TimerTask() {
@Override
public void run(Timeout timeout) throws Exception {
if (entry.removeListener(listener)) {
long elapsed = System.currentTimeMillis() - current;
time.addAndGet(-elapsed);
tryAcquireAsync(time, permits, entry, result);
}
}
}, t, TimeUnit.MILLISECONDS);
futureRef.set(scheduledFuture);
}
}
});
}
private void acquireAsync(int permits, RedissonLockEntry entry, CompletableFuture<Void> result) {
if (result.isDone()) {
unsubscribe(entry);
return;
}
RFuture<Boolean> tryAcquireFuture = tryAcquireAsync(permits);
tryAcquireFuture.whenComplete((res, e) -> {
if (e != null) {
unsubscribe(entry);
result.completeExceptionally(e);
return;
}
if (res) {
unsubscribe(entry);
if (!result.complete(null)) {
releaseAsync(permits);
}
return;
}
if (entry.getLatch().tryAcquire()) {
acquireAsync(permits, entry, result);
} else {
entry.addListener(() -> {
acquireAsync(permits, entry, result);
});
}
});
}
@Override
public boolean tryAcquire() {
return tryAcquire(1);
}
@Override
public boolean tryAcquire(int permits) {
return get(tryAcquireAsync(permits));
}
@Override
public RFuture<Boolean> tryAcquireAsync() {
return tryAcquireAsync(1);
}
@Override
public RFuture<Boolean> tryAcquireAsync(int permits) {
if (permits < 0) {
throw new IllegalArgumentException("Permits amount can't be negative");
}
if (permits == 0) {
return new CompletableFutureWrapper<>(true);
}
return commandExecutor.getServiceManager().execute(() -> {
RFuture<Boolean> future = tryAcquireAsync0(permits);
return commandExecutor.handleNoSync(future, e -> releaseAsync(permits));
});
}
private RFuture<Boolean> tryAcquireAsync0(int permits) {
return commandExecutor.syncedEvalNoRetry(getRawName(), LongCodec.INSTANCE, RedisCommands.EVAL_BOOLEAN,
"local value = redis.call('get', KEYS[1]); " +
"if (value ~= false and tonumber(value) >= tonumber(ARGV[1])) then " +
"local val = redis.call('decrby', KEYS[1], ARGV[1]); " +
"return 1; " +
"end; " +
"return 0;",
Collections.<Object>singletonList(getRawName()), permits);
}
@Override
public RFuture<Boolean> tryAcquireAsync(long waitTime, TimeUnit unit) {
return tryAcquireAsync(1, waitTime, unit);
}
@Override
public boolean tryAcquire(Duration waitTime) throws InterruptedException {
return tryAcquire(1, waitTime);
}
@Override
public boolean tryAcquire(int permits, Duration waitTime) throws InterruptedException {
LOGGER.debug("trying to acquire, permits: {}, waitTime: {}, name: {}", permits, waitTime, getName());
long time = waitTime.toMillis();
long current = System.currentTimeMillis();
if (tryAcquire(permits)) {
LOGGER.debug("acquired, permits: {}, waitTime: {}, name: {}", permits, waitTime, getName());
return true;
}
time -= System.currentTimeMillis() - current;
if (time <= 0) {
LOGGER.debug("unable to acquire, permits: {}, name: {}", permits, getName());
return false;
}
current = System.currentTimeMillis();
CompletableFuture<RedissonLockEntry> future = subscribe();
RedissonLockEntry entry;
try {
entry = future.get(time, TimeUnit.MILLISECONDS);
} catch (ExecutionException e) {
LOGGER.error(e.getMessage(), e);
return false;
} catch (TimeoutException | CancellationException e) {
LOGGER.debug("unable to subscribe for permits acquisition, permits: {}, name: {}", permits, getName());
return false;
}
try {
time -= System.currentTimeMillis() - current;
if (time <= 0) {
LOGGER.debug("unable to acquire, permits: {}, name: {}", permits, getName());
return false;
}
while (true) {
current = System.currentTimeMillis();
if (tryAcquire(permits)) {
LOGGER.debug("acquired, permits: {}, wait-time: {}, name: {}", permits, waitTime, getName());
return true;
}
time -= System.currentTimeMillis() - current;
if (time <= 0) {
LOGGER.debug("unable to acquire, permits: {}, name: {}", permits, getName());
return false;
}
// waiting for message
current = System.currentTimeMillis();
LOGGER.debug("wait for acquisition, permits: {}, wait-time(ms): {}, name: {}", permits, time, getName());
entry.getLatch().tryAcquire(time, TimeUnit.MILLISECONDS);
time -= System.currentTimeMillis() - current;
if (time <= 0) {
LOGGER.debug("unable to acquire, permits: {}, name: {}", permits, getName());
return false;
}
}
} finally {
unsubscribe(entry);
}
// return get(tryAcquireAsync(permits, waitTime));
}
@Override
public RFuture<Boolean> tryAcquireAsync(Duration waitTime) {
return tryAcquireAsync(1, waitTime);
}
@Override
public RFuture<Boolean> tryAcquireAsync(int permits, Duration waitTime) {
CompletableFuture<Boolean> result = new CompletableFuture<>();
AtomicLong time = new AtomicLong(waitTime.toMillis());
long curr = System.currentTimeMillis();
RFuture<Boolean> tryAcquireFuture = tryAcquireAsync(permits);
tryAcquireFuture.whenComplete((res, e) -> {
if (e != null) {
result.completeExceptionally(e);
return;
}
if (res) {
if (!result.complete(true)) {
releaseAsync(permits);
}
return;
}
long elap = System.currentTimeMillis() - curr;
time.addAndGet(-elap);
if (time.get() <= 0) {
result.complete(false);
return;
}
long current = System.currentTimeMillis();
CompletableFuture<RedissonLockEntry> subscribeFuture = subscribe();
semaphorePubSub.timeout(subscribeFuture, time.get());
subscribeFuture.whenComplete((r, ex) -> {
if (ex != null) {
result.completeExceptionally(ex);
return;
}
long elapsed = System.currentTimeMillis() - current;
time.addAndGet(-elapsed);
if (time.get() < 0) {
unsubscribe(r);
result.complete(false);
return;
}
tryAcquireAsync(time, permits, r, result);
});
});
return new CompletableFutureWrapper<>(result);
}
@Override
public boolean tryAcquire(int permits, long waitTime, TimeUnit unit) throws InterruptedException {
return tryAcquire(permits, Duration.ofMillis(unit.toMillis(waitTime)));
}
@Override
public RFuture<Boolean> tryAcquireAsync(int permits, long waitTime, TimeUnit unit) {
return tryAcquireAsync(permits, Duration.ofMillis(unit.toMillis(waitTime)));
}
private CompletableFuture<RedissonLockEntry> subscribe() {
return semaphorePubSub.subscribe(getRawName(), getChannelName());
}
private void unsubscribe(RedissonLockEntry entry) {
semaphorePubSub.unsubscribe(entry, getRawName(), getChannelName());
}
@Override
public boolean tryAcquire(long time, TimeUnit unit) throws InterruptedException {
return tryAcquire(1, time, unit);
}
@Override
public void release() {
release(1);
}
@Override
public void release(int permits) {
get(releaseAsync(permits));
}
@Override
public RFuture<Void> releaseAsync() {
return releaseAsync(1);
}
@Override
public RFuture<Void> releaseAsync(int permits) {
if (permits < 0) {
throw new IllegalArgumentException("Permits amount can't be negative");
}
if (permits == 0) {
return new CompletableFutureWrapper<>((Void) null);
}
RFuture<Void> future = commandExecutor.syncedEvalNoRetry(getRawName(), StringCodec.INSTANCE, RedisCommands.EVAL_VOID,
"local value = redis.call('incrby', KEYS[1], ARGV[1]); " +
"redis.call(ARGV[2], KEYS[2], value); ",
Arrays.asList(getRawName(), getChannelName()), permits, getSubscribeService().getPublishCommand());
if (LOGGER.isDebugEnabled()) {
future.thenAccept(o -> {
LOGGER.debug("released, permits: {}, name: {}", permits, getName());
});
}
return future;
}
@Override
public int drainPermits() {
return get(drainPermitsAsync());
}
@Override
public RFuture<Integer> drainPermitsAsync() {
return commandExecutor.syncedEvalWithRetry(getRawName(), LongCodec.INSTANCE, RedisCommands.EVAL_INTEGER,
"local value = redis.call('get', KEYS[1]); " +
"if (value == false) then " +
"return 0; " +
"end; " +
"redis.call('set', KEYS[1], 0); " +
"return value;",
Collections.singletonList(getRawName()));
}
@Override
public int availablePermits() {
return get(availablePermitsAsync());
}
@Override
public RFuture<Integer> availablePermitsAsync() {
return commandExecutor.writeAsync(getRawName(), LongCodec.INSTANCE, RedisCommands.GET_INTEGER, getRawName());
}
@Override
public boolean trySetPermits(int permits) {
return get(trySetPermitsAsync(permits));
}
@Override
public RFuture<Boolean> trySetPermitsAsync(int permits) {
RFuture<Boolean> future = commandExecutor.syncedEvalWithRetry(getRawName(), LongCodec.INSTANCE, RedisCommands.EVAL_BOOLEAN,
"local value = redis.call('get', KEYS[1]); " +
"if (value == false) then "
+ "redis.call('set', KEYS[1], ARGV[1]); "
+ "redis.call(ARGV[2], KEYS[2], ARGV[1]); "
+ "return 1;"
+ "end;"
+ "return 0;",
Arrays.asList(getRawName(), getChannelName()),
permits, getSubscribeService().getPublishCommand());
if (LOGGER.isDebugEnabled()) {
future.thenAccept(r -> {
if (r) {
LOGGER.debug("permits set, permits: {}, name: {}", permits, getName());
} else {
LOGGER.debug("unable to set permits, permits: {}, name: {}", permits, getName());
}
});
}
return future;
}
@Override
public boolean trySetPermits(int permits, Duration timeToLive) {
return get(trySetPermitsAsync(permits, timeToLive));
}
@Override
public RFuture<Boolean> trySetPermitsAsync(int permits, Duration timeToLive) {
RFuture<Boolean> future = commandExecutor.syncedEvalWithRetry(getRawName(), LongCodec.INSTANCE, RedisCommands.EVAL_BOOLEAN,
"local value = redis.call('get', KEYS[1]); " +
"if (value == false) then "
+ "redis.call('set', KEYS[1], ARGV[1], 'px', ARGV[3]); "
+ "redis.call(ARGV[2], KEYS[2], ARGV[1]); "
+ "return 1;"
+ "end;"
+ "return 0;",
Arrays.asList(getRawName(), getChannelName()),
permits, getSubscribeService().getPublishCommand(), timeToLive.toMillis());
if (LOGGER.isDebugEnabled()) {
future.thenAccept(r -> {
if (r) {
LOGGER.debug("permits set, permits: {}, name: {}", permits, getName());
} else {
LOGGER.debug("unable to set permits, permits: {}, name: {}", permits, getName());
}
});
}
return future;
}
@Override
public void addPermits(int permits) {
get(addPermitsAsync(permits));
}
@Override
public RFuture<Void> addPermitsAsync(int permits) {
return commandExecutor.syncedEvalWithRetry(getRawName(), LongCodec.INSTANCE, RedisCommands.EVAL_VOID,
"local value = redis.call('get', KEYS[1]); " +
"if (value == false) then "
+ "value = 0;"
+ "end;"
+ "redis.call('set', KEYS[1], value + ARGV[1]); "
+ "redis.call(ARGV[2], KEYS[2], value + ARGV[1]); ",
Arrays.asList(getRawName(), getChannelName()),
permits, getSubscribeService().getPublishCommand());
}
}
| RedissonSemaphore |
java | apache__camel | components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/fixed/headerfooter/OrderFooter.java | {
"start": 1027,
"end": 1633
} | class ____ {
@DataField(pos = 1, length = 1)
private int recordType = 9;
@DataField(pos = 2, length = 9, align = "R", paddingChar = '0')
private int numberOfRecordsInTheFile;
public int getRecordType() {
return recordType;
}
public void setRecordType(int recordType) {
this.recordType = recordType;
}
public int getNumberOfRecordsInTheFile() {
return numberOfRecordsInTheFile;
}
public void setNumberOfRecordsInTheFile(int numberOfRecordsInTheFile) {
this.numberOfRecordsInTheFile = numberOfRecordsInTheFile;
}
}
| OrderFooter |
java | dropwizard__dropwizard | dropwizard-unix-socket/src/main/java/io/dropwizard/unixsocket/UnixSocketConnectorFactory.java | {
"start": 1271,
"end": 3744
} | class ____ extends HttpConnectorFactory {
private static final Logger LOGGER = LoggerFactory.getLogger(UnixSocketConnectorFactory.class);
private String path = "/tmp/dropwizard.sock";
private boolean deleteSocketFileOnStartup;
@JsonProperty
public String getPath() {
return path;
}
@JsonProperty
public void setPath(String path) {
this.path = path;
}
@JsonProperty
public boolean isDeleteSocketFileOnStartup() {
return deleteSocketFileOnStartup;
}
@JsonProperty
public void setDeleteSocketFileOnStartup(boolean deleteSocketFileOnStartup) {
this.deleteSocketFileOnStartup = deleteSocketFileOnStartup;
}
@Override
public Connector build(Server server,
MetricRegistry metrics,
String name,
@Nullable ThreadPool threadPool) {
var scheduler = new ScheduledExecutorScheduler();
var bufferPool = buildBufferPool();
var httpConfig = buildHttpConfiguration();
var httpConnectionFactory = buildHttpConnectionFactory(httpConfig);
var instrumentedConnectionFactory = new InstrumentedConnectionFactory(httpConnectionFactory,
metrics.timer(httpConnections()));
final UnixDomainServerConnector connector = new UnixDomainServerConnector(server,
threadPool,
scheduler,
bufferPool,
getAcceptorThreads().orElse(-1),
getSelectorThreads().orElse(-1),
instrumentedConnectionFactory);
if (getAcceptQueueSize() != null) {
connector.setAcceptQueueSize(getAcceptQueueSize());
}
var unixDomainPath = Paths.get(path);
connector.setUnixDomainPath(unixDomainPath);
connector.setIdleTimeout(getIdleTimeout().toMilliseconds());
connector.setName(name);
if (deleteSocketFileOnStartup) {
// in case there is a leftover domain socket due to ungraceful stop, try to delete it first.
try {
Files.deleteIfExists(unixDomainPath);
} catch (IOException e) {
LOGGER.warn("Failed to delete existing unix domain socket file at {}.", path);
}
}
return connector;
}
@Override
protected String httpConnections() {
return name(UnixSocketConnectorFactory.class, path, "connections");
}
}
| UnixSocketConnectorFactory |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/issues/PopulateInitialHeadersFailedIssueTest.java | {
"start": 1206,
"end": 2447
} | class ____ extends ContextTestSupport {
@Test
public void testPopulateInitialHeadersFailed() throws Exception {
Exchange exchange = DefaultExchange.newFromEndpoint(context.getEndpoint("seda:start"));
exchange.setPattern(ExchangePattern.InOut);
MyFaultMessage msg = new MyFaultMessage(exchange);
exchange.setMessage(msg);
msg.setBody("Hello World");
getMockEndpoint("mock:result").expectedMessageCount(0);
template.send("seda:start", exchange);
assertMockEndpointsSatisfied();
IllegalArgumentException iae = assertIsInstanceOf(IllegalArgumentException.class, exchange.getException());
Assertions.assertEquals("Forced headers error", iae.getMessage());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// enable redelivery which forces copy defensive headers
errorHandler(defaultErrorHandler().maximumRedeliveries(3).redeliveryDelay(0));
from("seda:start")
.to("mock:result");
}
};
}
private static | PopulateInitialHeadersFailedIssueTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java | {
"start": 2483,
"end": 11367
} | class ____ {
private static TemporarySocketDirectory sockDir;
@BeforeAll
public static void init() {
sockDir = new TemporarySocketDirectory();
DomainSocket.disableBindPathValidation();
}
@AfterAll
public static void shutdown() throws IOException {
sockDir.close();
}
@BeforeEach
public void before() {
assumeTrue(DomainSocket.getLoadingFailureReason() == null);
}
/**
* Test that we can create a socket and close it, even if it hasn't been
* opened.
*
* @throws IOException
*/
@Test
@Timeout(value = 180)
public void testSocketCreateAndClose() throws IOException {
DomainSocket serv = DomainSocket.bindAndListen(
new File(sockDir.getDir(), "test_sock_create_and_close").
getAbsolutePath());
serv.close();
}
/**
* Test DomainSocket path setting and getting.
*
* @throws IOException
*/
@Test
@Timeout(value = 180)
public void testSocketPathSetGet() throws IOException {
assertEquals("/var/run/hdfs/sock.100",
DomainSocket.getEffectivePath("/var/run/hdfs/sock._PORT", 100));
}
/**
* Test that we get a read result of -1 on EOF.
*
* @throws IOException
*/
@Test
@Timeout(value = 180)
public void testSocketReadEof() throws Exception {
final String TEST_PATH = new File(sockDir.getDir(),
"testSocketReadEof").getAbsolutePath();
final DomainSocket serv = DomainSocket.bindAndListen(TEST_PATH);
ExecutorService exeServ = Executors.newSingleThreadExecutor();
Callable<Void> callable = new Callable<Void>() {
public Void call(){
DomainSocket conn;
try {
conn = serv.accept();
} catch (IOException e) {
throw new RuntimeException("unexpected IOException", e);
}
byte buf[] = new byte[100];
for (int i = 0; i < buf.length; i++) {
buf[i] = 0;
}
try {
assertEquals(-1, conn.getInputStream().read());
} catch (IOException e) {
throw new RuntimeException("unexpected IOException", e);
}
return null;
}
};
Future<Void> future = exeServ.submit(callable);
DomainSocket conn = DomainSocket.connect(serv.getPath());
Thread.sleep(50);
conn.close();
serv.close(true);
future.get(2, TimeUnit.MINUTES);
}
/**
* Test that if one thread is blocking in a read or write operation, another
* thread can close the socket and stop the accept.
*
* @throws IOException
*/
@Test
@Timeout(value = 180)
public void testSocketAcceptAndClose() throws Exception {
final String TEST_PATH =
new File(sockDir.getDir(), "test_sock_accept_and_close").getAbsolutePath();
final DomainSocket serv = DomainSocket.bindAndListen(TEST_PATH);
ExecutorService exeServ = Executors.newSingleThreadExecutor();
Callable<Void> callable = new Callable<Void>() {
public Void call(){
try {
serv.accept();
throw new RuntimeException("expected the accept() to be " +
"interrupted and fail");
} catch (AsynchronousCloseException e) {
return null;
} catch (IOException e) {
throw new RuntimeException("unexpected IOException", e);
}
}
};
Future<Void> future = exeServ.submit(callable);
Thread.sleep(500);
serv.close(true);
future.get(2, TimeUnit.MINUTES);
}
/**
* Test that we get an AsynchronousCloseException when the DomainSocket
* we're using is closed during a read or write operation.
*
* @throws IOException
*/
private void testAsyncCloseDuringIO(final boolean closeDuringWrite)
throws Exception {
final String TEST_PATH = new File(sockDir.getDir(),
"testAsyncCloseDuringIO(" + closeDuringWrite + ")").getAbsolutePath();
final DomainSocket serv = DomainSocket.bindAndListen(TEST_PATH);
ExecutorService exeServ = Executors.newFixedThreadPool(2);
Callable<Void> serverCallable = new Callable<Void>() {
public Void call() {
DomainSocket serverConn = null;
try {
serverConn = serv.accept();
byte buf[] = new byte[100];
for (int i = 0; i < buf.length; i++) {
buf[i] = 0;
}
// The server just continues either writing or reading until someone
// asynchronously closes the client's socket. At that point, all our
// reads return EOF, and writes get a socket error.
if (closeDuringWrite) {
try {
while (true) {
serverConn.getOutputStream().write(buf);
}
} catch (IOException e) {
}
} else {
do { ; } while
(serverConn.getInputStream().read(buf, 0, buf.length) != -1);
}
} catch (IOException e) {
throw new RuntimeException("unexpected IOException", e);
} finally {
IOUtils.cleanupWithLogger(DomainSocket.LOG, serverConn);
}
return null;
}
};
Future<Void> serverFuture = exeServ.submit(serverCallable);
final DomainSocket clientConn = DomainSocket.connect(serv.getPath());
Callable<Void> clientCallable = new Callable<Void>() {
public Void call(){
// The client writes or reads until another thread
// asynchronously closes the socket. At that point, we should
// get ClosedChannelException, or possibly its subclass
// AsynchronousCloseException.
byte buf[] = new byte[100];
for (int i = 0; i < buf.length; i++) {
buf[i] = 0;
}
try {
if (closeDuringWrite) {
while (true) {
clientConn.getOutputStream().write(buf);
}
} else {
while (true) {
clientConn.getInputStream().read(buf, 0, buf.length);
}
}
} catch (ClosedChannelException e) {
return null;
} catch (IOException e) {
throw new RuntimeException("unexpected IOException", e);
}
}
};
Future<Void> clientFuture = exeServ.submit(clientCallable);
Thread.sleep(500);
clientConn.close();
serv.close(true);
clientFuture.get(2, TimeUnit.MINUTES);
serverFuture.get(2, TimeUnit.MINUTES);
}
@Test
@Timeout(value = 180)
public void testAsyncCloseDuringWrite() throws Exception {
testAsyncCloseDuringIO(true);
}
@Test
@Timeout(value = 180)
public void testAsyncCloseDuringRead() throws Exception {
testAsyncCloseDuringIO(false);
}
/**
* Test that attempting to connect to an invalid path doesn't work.
*
* @throws IOException
*/
@Test
@Timeout(value = 180)
public void testInvalidOperations() throws IOException {
try {
DomainSocket.connect(
new File(sockDir.getDir(), "test_sock_invalid_operation").
getAbsolutePath());
} catch (IOException e) {
GenericTestUtils.assertExceptionContains("connect(2) error: ", e);
}
}
/**
* Test setting some server options.
*
* @throws IOException
*/
@Test
@Timeout(value = 180)
public void testServerOptions() throws Exception {
final String TEST_PATH = new File(sockDir.getDir(),
"test_sock_server_options").getAbsolutePath();
DomainSocket serv = DomainSocket.bindAndListen(TEST_PATH);
// Let's set a new receive buffer size
int bufSize = serv.getAttribute(DomainSocket.RECEIVE_BUFFER_SIZE);
int newBufSize = bufSize / 2;
serv.setAttribute(DomainSocket.RECEIVE_BUFFER_SIZE, newBufSize);
int nextBufSize = serv.getAttribute(DomainSocket.RECEIVE_BUFFER_SIZE);
assertEquals(newBufSize, nextBufSize);
// Let's set a server timeout
int newTimeout = 1000;
serv.setAttribute(DomainSocket.RECEIVE_TIMEOUT, newTimeout);
int nextTimeout = serv.getAttribute(DomainSocket.RECEIVE_TIMEOUT);
assertEquals(newTimeout, nextTimeout);
ExecutorService exeServ = Executors.newSingleThreadExecutor();
Callable<Void> callable = new Callable<Void>() {
public Void call() {
try {
serv.accept();
fail("expected the accept() to time out and fail");
} catch (SocketTimeoutException e) {
GenericTestUtils.assertExceptionContains("accept(2) error: ", e);
} catch (AsynchronousCloseException e) {
return null;
} catch (IOException e) {
throw new RuntimeException("unexpected IOException", e);
}
return null;
}
};
Future<Void> future = exeServ.submit(callable);
Thread.sleep(500);
serv.close(true);
future.get();
assertFalse(serv.isOpen());
}
/**
* A Throwable representing success.
*
* We can't use null to represent this, because you cannot insert null into
* ArrayBlockingQueue.
*/
static | TestDomainSocket |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/runtime/operators/windowing/WindowedValue.java | {
"start": 1108,
"end": 1540
} | class ____<T, W extends Window> {
private final T value;
private final W window;
public WindowedValue(T value, W window) {
this.value = value;
this.window = window;
}
public T value() {
return value;
}
public W window() {
return window;
}
@Override
public String toString() {
return "WindowedValue(" + value + ", " + window + ")";
}
}
| WindowedValue |
java | apache__maven | compat/maven-artifact/src/main/java/org/apache/maven/artifact/versioning/DefaultArtifactVersion.java | {
"start": 923,
"end": 6242
} | class ____ implements ArtifactVersion {
private Integer majorVersion;
private Integer minorVersion;
private Integer incrementalVersion;
private Integer buildNumber;
private String qualifier;
private ComparableVersion comparable;
public DefaultArtifactVersion(String version) {
parseVersion(version);
}
@Override
public int hashCode() {
return 11 + comparable.hashCode();
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other instanceof ArtifactVersion artifactVersion) {
return compareTo(artifactVersion) == 0;
}
return false;
}
@Override
public int compareTo(ArtifactVersion otherVersion) {
if (otherVersion instanceof DefaultArtifactVersion defaultArtifactVersion) {
return this.comparable.compareTo(defaultArtifactVersion.comparable);
} else {
return compareTo(new DefaultArtifactVersion(otherVersion.toString()));
}
}
@Override
public int getMajorVersion() {
return majorVersion != null ? majorVersion : 0;
}
@Override
public int getMinorVersion() {
return minorVersion != null ? minorVersion : 0;
}
@Override
public int getIncrementalVersion() {
return incrementalVersion != null ? incrementalVersion : 0;
}
@Override
public int getBuildNumber() {
return buildNumber != null ? buildNumber : 0;
}
@Override
public String getQualifier() {
return qualifier;
}
@Override
public final void parseVersion(String version) {
comparable = new ComparableVersion(version);
int index = version.indexOf('-');
String part1;
String part2 = null;
if (index < 0) {
part1 = version;
} else {
part1 = version.substring(0, index);
part2 = version.substring(index + 1);
}
if (part2 != null) {
if (part2.length() == 1 || !part2.startsWith("0")) {
buildNumber = tryParseInt(part2);
if (buildNumber == null) {
qualifier = part2;
}
} else {
qualifier = part2;
}
}
if ((!part1.contains(".")) && !part1.startsWith("0")) {
majorVersion = tryParseInt(part1);
if (majorVersion == null) {
// qualifier is the whole version, including "-"
qualifier = version;
buildNumber = null;
}
} else {
boolean fallback = false;
String[] tok = part1.split("\\.");
int idx = 0;
if (idx < tok.length) {
majorVersion = getNextIntegerToken(tok[idx++]);
if (majorVersion == null) {
fallback = true;
}
} else {
fallback = true;
}
if (idx < tok.length) {
minorVersion = getNextIntegerToken(tok[idx++]);
if (minorVersion == null) {
fallback = true;
}
}
if (idx < tok.length) {
incrementalVersion = getNextIntegerToken(tok[idx++]);
if (incrementalVersion == null) {
fallback = true;
}
}
if (idx < tok.length) {
qualifier = tok[idx];
fallback = isDigits(qualifier);
}
// string tokenizer won't detect these and ignores them
if (part1.contains("..") || part1.startsWith(".") || part1.endsWith(".")) {
fallback = true;
}
if (fallback) {
// qualifier is the whole version, including "-"
qualifier = version;
majorVersion = null;
minorVersion = null;
incrementalVersion = null;
buildNumber = null;
}
}
}
private static boolean isDigits(String cs) {
if (cs == null || cs.isEmpty()) {
return false;
}
final int sz = cs.length();
for (int i = 0; i < sz; i++) {
if (!Character.isDigit(cs.charAt(i))) {
return false;
}
}
return true;
}
private static Integer getNextIntegerToken(String s) {
if ((s.length() > 1) && s.startsWith("0")) {
return null;
}
return tryParseInt(s);
}
private static Integer tryParseInt(String s) {
// for performance, check digits instead of relying later on catching NumberFormatException
if (!isDigits(s)) {
return null;
}
try {
long longValue = Long.parseLong(s);
if (longValue > Integer.MAX_VALUE) {
return null;
}
return (int) longValue;
} catch (NumberFormatException e) {
// should never happen since checked isDigits(s) before
return null;
}
}
@Override
public String toString() {
return comparable.toString();
}
}
| DefaultArtifactVersion |
java | quarkusio__quarkus | integration-tests/funqy-amazon-lambda/src/test/java/io/quarkus/funqy/test/NoArgFunTest.java | {
"start": 323,
"end": 585
} | class ____ {
@Test
public void testNoArgFun() throws Exception {
given()
.when()
.post()
.then()
.statusCode(200)
.body(containsString("noArgFun"));
}
}
| NoArgFunTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.