language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/internal/CannotForceNonNullableException.java
|
{
"start": 467,
"end": 618
}
|
class ____ extends AnnotationException {
public CannotForceNonNullableException(String message) {
super( message );
}
}
|
CannotForceNonNullableException
|
java
|
spring-projects__spring-security
|
kerberos/kerberos-core/src/main/java/org/springframework/security/kerberos/authentication/sun/GlobalSunJaasKerberosConfig.java
|
{
"start": 952,
"end": 2349
}
|
class ____ implements BeanPostProcessor, InitializingBean {
private boolean debug = false;
private String krbConfLocation;
@Override
public void afterPropertiesSet() throws Exception {
if (this.debug) {
System.setProperty("sun.security.krb5.debug", "true");
}
if (this.krbConfLocation != null) {
System.setProperty("java.security.krb5.conf", this.krbConfLocation);
}
}
/**
* Enable debug logs from the Sun Kerberos Implementation. Default is false.
* @param debug true if debug should be enabled
*/
public void setDebug(boolean debug) {
this.debug = debug;
}
/**
* Kerberos config file location can be specified here.
* @param krbConfLocation the path to krb config file
*/
public void setKrbConfLocation(String krbConfLocation) {
this.krbConfLocation = krbConfLocation;
}
// The following methods are not used here. This Bean implements only
// BeanPostProcessor to ensure that it
// is created before any other bean is created, because the system properties needed
// to be set very early
// in the startup-phase, but after the BeanFactoryPostProcessing.
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
return bean;
}
@Override
public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {
return bean;
}
}
|
GlobalSunJaasKerberosConfig
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/web/servlet/assertj/MockMvcTesterIntegrationTests.java
|
{
"start": 14183,
"end": 15474
}
|
class ____ {
private final PrintStream standardOut = System.out;
private final ByteArrayOutputStream capturedOut = new ByteArrayOutputStream();
@BeforeEach
public void setUp() {
System.setOut(new PrintStream(capturedOut));
}
@AfterEach
public void tearDown() {
System.setOut(standardOut);
}
@Test
void debugUsesSystemOutByDefault() {
assertThat(mvc.get().uri("/greet")).debug().hasStatusOk();
assertThat(capturedOut()).contains("MockHttpServletRequest:", "MockHttpServletResponse:");
}
@Test
void debugCanPrintToCustomOutputStream() {
ByteArrayOutputStream out = new ByteArrayOutputStream();
assertThat(mvc.get().uri("/greet")).debug(out).hasStatusOk();
assertThat(out.toString(StandardCharsets.UTF_8))
.contains("MockHttpServletRequest:", "MockHttpServletResponse:");
assertThat(capturedOut()).isEmpty();
}
@Test
void debugCanPrintToCustomWriter() {
StringWriter out = new StringWriter();
assertThat(mvc.get().uri("/greet")).debug(out).hasStatusOk();
assertThat(out.toString())
.contains("MockHttpServletRequest:", "MockHttpServletResponse:");
assertThat(capturedOut()).isEmpty();
}
private String capturedOut() {
return this.capturedOut.toString(StandardCharsets.UTF_8);
}
}
@Nested
|
DebugTests
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/handler/predicate/AfterRoutePredicateFactoryTests.java
|
{
"start": 1991,
"end": 4195
}
|
class ____ {
@Test
public void beforeStringWorks() {
String dateString = minusHours(1);
final boolean result = runPredicate(dateString);
assertThat(result).isTrue();
}
@Test
public void afterStringWorks() {
String dateString = plusHours(1);
final boolean result = runPredicate(dateString);
assertThat(result).isFalse();
}
@Test
public void beforeEpochWorks() {
String dateString = minusHoursMillis(1);
final boolean result = runPredicate(dateString);
assertThat(result).isTrue();
}
@Test
public void afterEpochWorks() {
String dateString = plusHoursMillis(1);
final boolean result = runPredicate(dateString);
assertThat(result).isFalse();
}
@Test
public void testPredicates() {
boolean result = new AfterRoutePredicateFactory().apply(c -> c.setDatetime(ZonedDateTime.now().minusHours(2)))
.test(getExchange());
assertThat(result).isTrue();
}
private boolean runPredicate(String dateString) {
HashMap<String, Object> map = new HashMap<>();
map.put(DATETIME_KEY, dateString);
AfterRoutePredicateFactory factory = new AfterRoutePredicateFactory();
Config config = bindConfig(map, factory);
return factory.apply(config).test(getExchange());
}
@Test
public void toStringFormat() {
Config config = new Config();
config.setDatetime(ZonedDateTime.now());
Predicate predicate = new AfterRoutePredicateFactory().apply(config);
assertThat(predicate.toString()).contains("After: " + config.getDatetime());
}
@Test
public void testConfig() {
try (ValidatorFactory factory = Validation.buildDefaultValidatorFactory()) {
Validator validator = factory.getValidator();
Config config = new Config();
config.setDatetime(ZonedDateTime.now());
assertThat(validator.validate(config).isEmpty()).isTrue();
}
}
@Test
public void testConfigNullField() {
try (ValidatorFactory factory = Validation.buildDefaultValidatorFactory()) {
Validator validator = factory.getValidator();
Config config = new Config();
Set<ConstraintViolation<Config>> validate = validator.validate(config);
assertThat(validate.isEmpty()).isFalse();
assertThat(validate.size()).isEqualTo(1);
}
}
}
|
AfterRoutePredicateFactoryTests
|
java
|
apache__camel
|
core/camel-core-languages/src/main/java/org/apache/camel/language/csimple/CSimpleLanguage.java
|
{
"start": 7072,
"end": 8256
}
|
class ____ {
private Map<String, CSimpleExpression> compiledPredicates = new LinkedHashMap<>();
private Map<String, CSimpleExpression> compiledExpressions = new LinkedHashMap<>();
public CSimpleLanguage build() {
final Map<String, CSimpleExpression> predicates = compiledPredicates.isEmpty()
? Collections.emptyMap()
: new ConcurrentHashMap<>(compiledPredicates);
this.compiledPredicates = null; // invalidate the builder to prevent leaking the mutable collection
final Map<String, CSimpleExpression> expressions = compiledExpressions.isEmpty()
? Collections.emptyMap()
: new ConcurrentHashMap<>(compiledExpressions);
this.compiledExpressions = null; // invalidate the builder to prevent leaking the mutable collection
return new CSimpleLanguage(predicates, expressions);
}
public Builder expression(CSimpleExpression expression) {
(expression.isPredicate() ? compiledPredicates : compiledExpressions).put(expression.getText(), expression);
return this;
}
}
|
Builder
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/jdk/MapSerializationTest.java
|
{
"start": 1411,
"end": 1828
}
|
class ____ {
@JsonPropertyOrder(alphabetic=true)
public LinkedHashMap<String,Integer> map;
public MapOrderingBean(String... keys) {
map = new LinkedHashMap<String,Integer>();
int ix = 1;
for (String key : keys) {
map.put(key, ix++);
}
}
}
// [databind#565]: Support ser/deser of Map.Entry
static
|
MapOrderingBean
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/AttributeConverterTest.java
|
{
"start": 18047,
"end": 18218
}
|
class ____ {
@Id
private Long id;
@Convert(disableConversion = true)
private String name;
}
@Entity(name = "T4")
@SuppressWarnings("unused")
public static
|
Tester3
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterWebHdfsMethods.java
|
{
"start": 2043,
"end": 6058
}
|
class ____ {
static final Logger LOG =
LoggerFactory.getLogger(TestRouterWebHdfsMethods.class);
protected static StateStoreDFSCluster cluster;
protected static RouterContext router;
protected static String httpUri;
@BeforeAll
public static void globalSetUp() throws Exception {
cluster = new StateStoreDFSCluster(false, 2);
Configuration conf = new RouterConfigBuilder()
.stateStore()
.rpc()
.http()
.admin()
.build();
cluster.addRouterOverrides(conf);
cluster.setIndependentDNs();
cluster.startCluster();
cluster.startRouters();
cluster.waitClusterUp();
router = cluster.getRandomRouter();
httpUri = "http://"+router.getHttpAddress();
}
@AfterAll
public static void tearDown() {
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
@Test
public void testWebHdfsCreate() throws Exception {
// the file is created at default ns (ns0)
String path = "/tmp/file";
URL url = new URL(getUri(path));
LOG.info("URL: {}", url);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("PUT");
assertEquals(HttpURLConnection.HTTP_CREATED, conn.getResponseCode());
verifyFile("ns0", path, true);
verifyFile("ns1", path, false);
conn.disconnect();
}
@Test
public void testWebHdfsCreateWithMounts() throws Exception {
// the file is created at mounted ns (ns1)
String mountPoint = "/tmp-ns1";
String path = "/tmp-ns1/file";
createMountTableEntry(
router.getRouter(), mountPoint,
DestinationOrder.RANDOM, Collections.singletonList("ns1"));
URL url = new URL(getUri(path));
LOG.info("URL: {}", url);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("PUT");
assertEquals(HttpURLConnection.HTTP_CREATED, conn.getResponseCode());
verifyFile("ns1", path, true);
verifyFile("ns0", path, false);
conn.disconnect();
}
private String getUri(String path) {
final String user = System.getProperty("user.name");
final StringBuilder uri = new StringBuilder(httpUri);
uri.append("/webhdfs/v1").
append(path).
append("?op=CREATE").
append("&user.name=" + user);
return uri.toString();
}
private void verifyFile(String ns, String path, boolean shouldExist)
throws Exception {
FileSystem fs = cluster.getNamenode(ns, null).getFileSystem();
try {
fs.getFileStatus(new Path(path));
if (!shouldExist) {
fail(path + " should not exist in ns " + ns);
}
} catch (FileNotFoundException e) {
if (shouldExist) {
fail(path + " should exist in ns " + ns);
}
}
}
@Test
public void testGetNsFromDataNodeNetworkLocation() {
assertEquals("ns0", RouterWebHdfsMethods
.getNsFromDataNodeNetworkLocation("/ns0/rack-info1"));
assertEquals("ns0", RouterWebHdfsMethods
.getNsFromDataNodeNetworkLocation("/ns0/row1/rack-info1"));
assertEquals("", RouterWebHdfsMethods
.getNsFromDataNodeNetworkLocation("/row0"));
assertEquals("", RouterWebHdfsMethods
.getNsFromDataNodeNetworkLocation("whatever-rack-info1"));
}
@Test
public void testWebHdfsCreateWithInvalidPath() throws Exception {
// A path name include duplicated slashes.
String path = "//tmp//file";
assertResponse(path);
}
private void assertResponse(String path) throws IOException {
URL url = new URL(getUri(path));
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("PUT");
// Assert response code.
assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, conn.getResponseCode());
// Assert exception.
Map<?, ?> response = WebHdfsFileSystem.jsonParse(conn, true);
assertEquals("InvalidPathException",
((LinkedHashMap) response.get("RemoteException")).get("exception"));
conn.disconnect();
}
}
|
TestRouterWebHdfsMethods
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/introspect/JacksonAnnotationIntrospectorTest.java
|
{
"start": 2701,
"end": 3182
}
|
class ____ extends StdDeserializer<QName>
{
public QNameDeserializer() { super(QName.class); }
@Override
public QName deserialize(JsonParser p, DeserializationContext ctxt)
{
if (!p.hasToken(JsonToken.VALUE_STRING)) {
throw new IllegalArgumentException("Unexpected token "+p.currentToken());
}
return QName.valueOf(p.getString());
}
}
@JsonIgnoreType
static
|
QNameDeserializer
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/web/client/samples/matchers/XpathRequestMatchersIntegrationTests.java
|
{
"start": 7556,
"end": 8114
}
|
class ____ {
@XmlElementWrapper(name="composers")
@XmlElement(name="composer")
private List<Person> composers;
@XmlElementWrapper(name="performers")
@XmlElement(name="performer")
private List<Person> performers;
public PeopleWrapper() {
}
public PeopleWrapper(List<Person> composers, List<Person> performers) {
this.composers = composers;
this.performers = performers;
}
public List<Person> getComposers() {
return this.composers;
}
public List<Person> getPerformers() {
return this.performers;
}
}
}
|
PeopleWrapper
|
java
|
grpc__grpc-java
|
core/src/main/java/io/grpc/internal/ManagedChannelImpl.java
|
{
"start": 75848,
"end": 77194
}
|
class ____ implements ManagedClientTransport.Listener {
@Override
public void transportShutdown(Status s) {
checkState(shutdown.get(), "Channel must have been shut down");
}
@Override
public void transportReady() {
// Don't care
}
@Override
public Attributes filterTransport(Attributes attributes) {
return attributes;
}
@Override
public void transportInUse(final boolean inUse) {
inUseStateAggregator.updateObjectInUse(delayedTransport, inUse);
if (inUse) {
// It's possible to be in idle mode while inUseStateAggregator is in-use, if one of the
// subchannels is in use. But we should never be in idle mode when delayed transport is in
// use.
exitIdleMode();
}
}
@Override
public void transportTerminated() {
checkState(shutdown.get(), "Channel must have been shut down");
terminating = true;
shutdownNameResolverAndLoadBalancer(false);
// No need to call channelStateManager since we are already in SHUTDOWN state.
// Until LoadBalancer is shutdown, it may still create new subchannels. We catch them
// here.
maybeShutdownNowSubchannels();
maybeTerminateChannel();
}
}
/**
* Must be accessed from syncContext.
*/
private final
|
DelayedTransportListener
|
java
|
jhy__jsoup
|
src/main/java/org/jsoup/select/NodeEvaluator.java
|
{
"start": 714,
"end": 1332
}
|
class ____ extends NodeEvaluator {
final java.lang.Class<? extends Node> type;
final String selector;
InstanceType(java.lang.Class<? extends Node> type, String selector) {
super();
this.type = type;
this.selector = "::" + selector;
}
@Override
boolean evaluateMatch(Node node) {
return type.isInstance(node);
}
@Override
protected int cost() {
return 1;
}
@Override
public String toString() {
return selector;
}
}
static
|
InstanceType
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/jsontype/jdk/TypeRefinementForMapTest.java
|
{
"start": 1266,
"end": 1720
}
|
class ____<K, V extends HasUniqueId<K>>
extends LinkedHashMap<K, V>
{
@JsonCreator(mode=JsonCreator.Mode.DELEGATING)
public MyHashMap(V[] values) {
for (int i = 0; i < values.length; i++) {
V v = values[i];
put(v.getId(), v);
}
}
}
// for [databind#1384]
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
public static final
|
MyHashMap
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/android/FragmentInjectionTest.java
|
{
"start": 2299,
"end": 2762
}
|
class ____ extends PreferenceActivity {
protected boolean isValidFragment(String fragment, String unused) {
return true;
}
}
""")
.doTest();
}
@Test
public void isValidFragmentTriviallyImplemented() {
compilationHelper
.addSourceLines(
"MyPrefActivity.java",
"""
import android.preference.PreferenceActivity;
|
MyPrefActivity
|
java
|
apache__rocketmq
|
proxy/src/test/java/org/apache/rocketmq/proxy/service/admin/DefaultAdminServiceTest.java
|
{
"start": 1914,
"end": 4422
}
|
class ____ {
@Mock
private MQClientAPIFactory mqClientAPIFactory;
@Mock
private MQClientAPIExt mqClientAPIExt;
private DefaultAdminService defaultAdminService;
@Before
public void before() {
when(mqClientAPIFactory.getClient()).thenReturn(mqClientAPIExt);
defaultAdminService = new DefaultAdminService(mqClientAPIFactory);
}
@Test
public void testCreateTopic() throws Exception {
when(mqClientAPIExt.getTopicRouteInfoFromNameServer(eq("createTopic"), anyLong()))
.thenThrow(new MQClientException(ResponseCode.TOPIC_NOT_EXIST, ""))
.thenReturn(createTopicRouteData(1));
when(mqClientAPIExt.getTopicRouteInfoFromNameServer(eq("sampleTopic"), anyLong()))
.thenReturn(createTopicRouteData(2));
ArgumentCaptor<String> addrArgumentCaptor = ArgumentCaptor.forClass(String.class);
ArgumentCaptor<TopicConfig> topicConfigArgumentCaptor = ArgumentCaptor.forClass(TopicConfig.class);
doNothing().when(mqClientAPIExt).createTopic(addrArgumentCaptor.capture(), anyString(), topicConfigArgumentCaptor.capture(), anyLong());
assertTrue(defaultAdminService.createTopicOnTopicBrokerIfNotExist(
"createTopic",
"sampleTopic",
7,
8,
true,
1
));
assertEquals(2, addrArgumentCaptor.getAllValues().size());
Set<String> createAddr = new HashSet<>(addrArgumentCaptor.getAllValues());
assertTrue(createAddr.contains("127.0.0.1:10911"));
assertTrue(createAddr.contains("127.0.0.2:10911"));
assertEquals("createTopic", topicConfigArgumentCaptor.getValue().getTopicName());
assertEquals(7, topicConfigArgumentCaptor.getValue().getWriteQueueNums());
assertEquals(8, topicConfigArgumentCaptor.getValue().getReadQueueNums());
}
private TopicRouteData createTopicRouteData(int brokerNum) {
TopicRouteData topicRouteData = new TopicRouteData();
for (int i = 0; i < brokerNum; i++) {
BrokerData brokerData = new BrokerData();
HashMap<Long, String> addrMap = new HashMap<>();
addrMap.put(0L, "127.0.0." + (i + 1) + ":10911");
brokerData.setBrokerAddrs(addrMap);
brokerData.setBrokerName("broker-" + i);
brokerData.setCluster("cluster");
topicRouteData.getBrokerDatas().add(brokerData);
}
return topicRouteData;
}
}
|
DefaultAdminServiceTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/access/ElementCollectionInOneToManyTest.java
|
{
"start": 2756,
"end": 2927
}
|
class ____ {
@Id
@GeneratedValue
long id;
String name;
@ElementCollection
Collection<Chapter> chapters = new ArrayList<>();
}
@Embeddable
public static
|
Book
|
java
|
apache__maven
|
compat/maven-model/src/test/java/org/apache/maven/model/PluginConfigurationTest.java
|
{
"start": 1108,
"end": 1728
}
|
class ____ {
@Test
void testHashCodeNullSafe() {
new PluginConfiguration().hashCode();
}
@Test
void testEqualsNullSafe() {
assertFalse(new PluginConfiguration().equals(null));
new PluginConfiguration().equals(new PluginConfiguration());
}
@Test
void testEqualsIdentity() {
PluginConfiguration thing = new PluginConfiguration();
assertTrue(thing.equals(thing), "Expected " + thing + " to equal " + thing);
}
@Test
void testToStringNullSafe() {
assertNotNull(new PluginConfiguration().toString());
}
}
|
PluginConfigurationTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java
|
{
"start": 23295,
"end": 23663
}
|
class ____ extends BinaryTermValuesSource {
StringTermValuesSource(ValuesSourceConfig source) {
super(source);
}
@Override
public InternalMultiTerms.KeyConverter keyConverter() {
return InternalMultiTerms.KeyConverter.STRING;
}
}
/**
* IP doc values
*/
static
|
StringTermValuesSource
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/scripting/groovy/GroovyScriptFactory.java
|
{
"start": 10765,
"end": 12721
}
|
class ____ run it if necessary.
* @param scriptSource the source for the underlying script
* @param scriptClass the Groovy script class
* @return the result object (either an instance of the script class
* or the result of running the script instance)
* @throws ScriptCompilationException in case of instantiation failure
*/
protected @Nullable Object executeScript(ScriptSource scriptSource, Class<?> scriptClass) throws ScriptCompilationException {
try {
GroovyObject groovyObj = (GroovyObject) ReflectionUtils.accessibleConstructor(scriptClass).newInstance();
if (this.groovyObjectCustomizer != null) {
// Allow metaclass and other customization.
this.groovyObjectCustomizer.customize(groovyObj);
}
if (groovyObj instanceof Script script) {
// A Groovy script, probably creating an instance: let's execute it.
return script.run();
}
else {
// An instance of the scripted class: let's return it as-is.
return groovyObj;
}
}
catch (NoSuchMethodException ex) {
throw new ScriptCompilationException(
"No default constructor on Groovy script class: " + scriptClass.getName(), ex);
}
catch (InstantiationException ex) {
throw new ScriptCompilationException(
scriptSource, "Unable to instantiate Groovy script class: " + scriptClass.getName(), ex);
}
catch (IllegalAccessException | InaccessibleObjectException ex) {
throw new ScriptCompilationException(
scriptSource, "Could not access Groovy script constructor: " + scriptClass.getName(), ex);
}
catch (InvocationTargetException ex) {
throw new ScriptCompilationException(
"Failed to invoke Groovy script constructor: " + scriptClass.getName(), ex.getTargetException());
}
}
@Override
public String toString() {
return "GroovyScriptFactory: script source locator [" + this.scriptSourceLocator + "]";
}
/**
* Wrapper that holds a temporarily cached result object.
*/
private static
|
and
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/any/Poodle.java
|
{
"start": 242,
"end": 444
}
|
class ____ implements Dog<Poodle> {
@Override
public String getRace() {
return "poodle";
}
@Override
public Class<Poodle> getType() {
return Poodle.class;
}
}
|
Poodle
|
java
|
quarkusio__quarkus
|
extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/interceptors/ServerInterceptorProducerTest.java
|
{
"start": 3275,
"end": 3994
}
|
class ____ implements ServerInterceptor {
static volatile long callTime = 0;
@Override
public <ReqT, RespT> Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> call, Metadata headers,
ServerCallHandler<ReqT, RespT> next) {
return next
.startCall(new ForwardingServerCall.SimpleForwardingServerCall<ReqT, RespT>(call) {
@Override
public void close(Status status, Metadata trailers) {
callTime = System.currentTimeMillis();
super.close(status, trailers);
}
}, headers);
}
}
}
|
MyInterceptor
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/query/resultmapping/NamedRepoTests.java
|
{
"start": 1137,
"end": 2185
}
|
class ____ {
@Test
public void testMappingResolution(SessionFactoryScope sessionFactoryScope) {
final QueryEngine queryEngine = sessionFactoryScope.getSessionFactory().getQueryEngine();
final NamedObjectRepository namedObjectRepository = queryEngine.getNamedObjectRepository();
final NamedResultSetMappingMemento mappingMemento = namedObjectRepository.getResultSetMappingMemento( "name" );
final ResultSetMapping mapping = new ResultSetMappingImpl( "test" );
final ResultSetMappingResolutionContext resolutionContext = new ResultSetMappingResolutionContext() {
@Override
public SessionFactoryImplementor getSessionFactory() {
return sessionFactoryScope.getSessionFactory();
}
};
mappingMemento.resolve( mapping, querySpace -> {
}, resolutionContext );
assertThat( mapping.getNumberOfResultBuilders(), is( 1 ) );
mapping.visitResultBuilders(
(position, builder) -> {
assertThat( position, is( 0 ) );
assertThat( builder, instanceOf( ResultBuilderBasicValued.class ) );
}
);
}
}
|
NamedRepoTests
|
java
|
quarkusio__quarkus
|
integration-tests/devtools/src/test/java/io/quarkus/devtools/codestarts/quarkus/FunqyKnativeEventsCodestartTest.java
|
{
"start": 376,
"end": 1220
}
|
class ____ {
@RegisterExtension
public static QuarkusCodestartTest codestartTest = QuarkusCodestartTest.builder()
.codestarts("funqy-knative-events")
.languages(JAVA)
.build();
@Test
void testContent() throws Throwable {
codestartTest.checkGeneratedSource("org.acme.funqy.cloudevent.CloudEventGreeting");
codestartTest.checkGeneratedSource("org.acme.funqy.cloudevent.Person");
codestartTest.checkGeneratedTestSource("org.acme.funqy.cloudevent.FunqyTest");
codestartTest.checkGeneratedTestSource("org.acme.funqy.cloudevent.FunqyIT");
}
@Test
@EnabledIfSystemProperty(named = "build-projects", matches = "true")
void buildAllProjectsForLocalUse() throws Throwable {
codestartTest.buildAllProjects();
}
}
|
FunqyKnativeEventsCodestartTest
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/chararray/CharArrayAssert_containsSubsequence_Test.java
|
{
"start": 977,
"end": 1351
}
|
class ____ extends CharArrayAssertBaseTest {
@Override
protected CharArrayAssert invoke_api_method() {
return assertions.containsSubsequence('a', 'b');
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertContainsSubsequence(getInfo(assertions), getActual(assertions), arrayOf('a', 'b'));
}
}
|
CharArrayAssert_containsSubsequence_Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/query/CachedQueryShallowSharedCollectionTest.java
|
{
"start": 1619,
"end": 4130
}
|
class ____ {
private static final String ACCOUNT_BY_NAME = "from Account where name = :name";
@Test
public void testQueryInSameTransaction(EntityManagerFactoryScope scope) {
final Statistics stats = getStatistics( scope );
stats.clear();
scope.inTransaction( entityManager -> {
// ensure the account is in 2LC and that the query cache is populated
executeQueryByName( entityManager, "test_account" );
} );
assertThat( stats.getQueryCacheHitCount() ).isEqualTo( 0 );
assertThat( stats.getQueryCacheMissCount() ).isEqualTo( 1 );
assertThat( stats.getQueryCachePutCount() ).isEqualTo( 1 );
stats.clear();
scope.inTransaction( entityManager -> {
// execute the query multiple times, ensure the returned account is always the same
Account old = null;
for ( int i = 1; i <= 2; i++ ) {
final Account account = executeQueryByName( entityManager, "test_account" );
assertThat( account.getDomainAccounts() ).hasSize( 2 );
assertThat( stats.getQueryCacheHitCount() ).isEqualTo( i );
assertThat( stats.getQueryCacheMissCount() ).isEqualTo( 0 );
assertThat( stats.getQueryCachePutCount() ).isEqualTo( 0 );
if ( old != null ) {
assertThat( account ).isSameAs( old );
}
old = account;
}
} );
}
private static Account executeQueryByName(
EntityManager entityManager,
@SuppressWarnings( "SameParameterValue" ) String name) {
return entityManager.createQuery( ACCOUNT_BY_NAME, Account.class )
.setParameter( "name", name )
.setHint( HINT_CACHEABLE, true )
.getSingleResult();
}
private static Statistics getStatistics(EntityManagerFactoryScope scope) {
return ( (SessionFactoryImplementor) scope.getEntityManagerFactory() ).getStatistics();
}
@BeforeAll
public void setUp(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
final Account account = new Account( 1L, "test_account" );
entityManager.persist( account );
entityManager.persist( new DomainAccount( 1L, account ) );
entityManager.persist( new DomainAccount( 2L, account ) );
} );
}
@AfterAll
public void tearDown(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
entityManager.createQuery( "delete from DomainAccount" ).executeUpdate();
entityManager.createQuery( "delete from Account" ).executeUpdate();
} );
}
@Entity( name = "Account" )
@Table( name = "account_table" )
@Cache( usage = CacheConcurrencyStrategy.READ_WRITE )
static
|
CachedQueryShallowSharedCollectionTest
|
java
|
quarkusio__quarkus
|
extensions/proxy-registry/deployment/src/test/java/io/quarkus/proxy/test/NamedProxyTest.java
|
{
"start": 596,
"end": 2202
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withConfigurationResource("named-proxy.properties");
@Inject
ProxyConfigurationRegistry registry;
@Test
public void testPresent() {
Optional<ProxyConfiguration> proxy = registry.get(Optional.of("my-proxy"));
assertTrue(proxy.isPresent());
ProxyConfiguration cfg = proxy.get();
assertEquals("localhost", cfg.host());
assertEquals(3128, cfg.port());
assertEquals(Optional.of("user"), cfg.username());
assertEquals(Optional.of("pwd"), cfg.password());
assertEquals(Optional.of(List.of("localhost", "example.com")), cfg.nonProxyHosts());
assertEquals(Optional.of(Duration.ofSeconds(1)), cfg.proxyConnectTimeout());
assertEquals(ProxyType.HTTP, cfg.type());
}
@Test
public void testMissing() {
assertThrows(IllegalStateException.class, () -> registry.get(Optional.of("missing")));
}
@Test
public void testNone() {
Optional<ProxyConfiguration> proxy = registry.get(Optional.of("none"));
assertTrue(proxy.isPresent());
ProxyConfiguration cfg = proxy.get();
assertEquals("none", cfg.host());
assertEquals(0, cfg.port());
assertEquals(Optional.empty(), cfg.username());
assertEquals(Optional.empty(), cfg.password());
assertEquals(Optional.empty(), cfg.nonProxyHosts());
assertEquals(Optional.empty(), cfg.proxyConnectTimeout());
assertEquals(ProxyType.HTTP, cfg.type());
}
}
|
NamedProxyTest
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/annotation/AutowiredAnnotationBeanPostProcessor.java
|
{
"start": 37112,
"end": 43772
}
|
class ____ implements BeanRegistrationAotContribution {
private static final String REGISTERED_BEAN_PARAMETER = "registeredBean";
private static final String INSTANCE_PARAMETER = "instance";
private final Class<?> target;
private final Collection<AutowiredElement> autowiredElements;
private final @Nullable AutowireCandidateResolver candidateResolver;
AotContribution(Class<?> target, Collection<AutowiredElement> autowiredElements,
@Nullable AutowireCandidateResolver candidateResolver) {
this.target = target;
this.autowiredElements = autowiredElements;
this.candidateResolver = candidateResolver;
}
@Override
public void applyTo(GenerationContext generationContext, BeanRegistrationCode beanRegistrationCode) {
GeneratedClass generatedClass = generationContext.getGeneratedClasses()
.addForFeatureComponent("Autowiring", this.target, type -> {
type.addJavadoc("Autowiring for {@link $T}.", this.target);
type.addModifiers(javax.lang.model.element.Modifier.PUBLIC);
});
GeneratedMethod generateMethod = generatedClass.getMethods().add("apply", method -> {
method.addJavadoc("Apply the autowiring.");
method.addModifiers(javax.lang.model.element.Modifier.PUBLIC,
javax.lang.model.element.Modifier.STATIC);
method.addParameter(RegisteredBean.class, REGISTERED_BEAN_PARAMETER);
method.addParameter(this.target, INSTANCE_PARAMETER);
method.returns(this.target);
CodeWarnings codeWarnings = new CodeWarnings();
codeWarnings.detectDeprecation(this.target);
method.addCode(generateMethodCode(codeWarnings,
generatedClass.getName(), generationContext.getRuntimeHints()));
codeWarnings.suppress(method);
});
beanRegistrationCode.addInstancePostProcessor(generateMethod.toMethodReference());
if (this.candidateResolver != null) {
registerHints(generationContext.getRuntimeHints());
}
}
private CodeBlock generateMethodCode(CodeWarnings codeWarnings,
ClassName targetClassName, RuntimeHints hints) {
CodeBlock.Builder code = CodeBlock.builder();
for (AutowiredElement autowiredElement : this.autowiredElements) {
code.addStatement(generateMethodStatementForElement(
codeWarnings, targetClassName, autowiredElement, hints));
}
code.addStatement("return $L", INSTANCE_PARAMETER);
return code.build();
}
private CodeBlock generateMethodStatementForElement(CodeWarnings codeWarnings,
ClassName targetClassName, AutowiredElement autowiredElement, RuntimeHints hints) {
Member member = autowiredElement.getMember();
boolean required = autowiredElement.required;
if (member instanceof Field field) {
return generateMethodStatementForField(
codeWarnings, targetClassName, field, required, hints);
}
if (member instanceof Method method) {
return generateMethodStatementForMethod(
codeWarnings, targetClassName, method, required, hints);
}
throw new IllegalStateException(
"Unsupported member type " + member.getClass().getName());
}
private CodeBlock generateMethodStatementForField(CodeWarnings codeWarnings,
ClassName targetClassName, Field field, boolean required, RuntimeHints hints) {
hints.reflection().registerField(field);
CodeBlock resolver = CodeBlock.of("$T.$L($S)",
AutowiredFieldValueResolver.class,
(!required ? "forField" : "forRequiredField"), field.getName());
AccessControl accessControl = AccessControl.forMember(field);
if (!accessControl.isAccessibleFrom(targetClassName)) {
return CodeBlock.of("$L.resolveAndSet($L, $L)", resolver,
REGISTERED_BEAN_PARAMETER, INSTANCE_PARAMETER);
}
else {
codeWarnings.detectDeprecation(field);
return CodeBlock.of("$L.$L = $L.resolve($L)", INSTANCE_PARAMETER,
field.getName(), resolver, REGISTERED_BEAN_PARAMETER);
}
}
private CodeBlock generateMethodStatementForMethod(CodeWarnings codeWarnings,
ClassName targetClassName, Method method, boolean required, RuntimeHints hints) {
CodeBlock.Builder code = CodeBlock.builder();
code.add("$T.$L", AutowiredMethodArgumentsResolver.class,
(!required ? "forMethod" : "forRequiredMethod"));
code.add("($S", method.getName());
if (method.getParameterCount() > 0) {
codeWarnings.detectDeprecation(method.getParameterTypes());
code.add(", $L", generateParameterTypesCode(method.getParameterTypes()));
}
code.add(")");
AccessControl accessControl = AccessControl.forMember(method);
if (!accessControl.isAccessibleFrom(targetClassName)) {
hints.reflection().registerMethod(method, ExecutableMode.INVOKE);
code.add(".resolveAndInvoke($L, $L)", REGISTERED_BEAN_PARAMETER, INSTANCE_PARAMETER);
}
else {
codeWarnings.detectDeprecation(method);
hints.reflection().registerType(method.getDeclaringClass());
CodeBlock arguments = new AutowiredArgumentsCodeGenerator(this.target,
method).generateCode(method.getParameterTypes());
CodeBlock injectionCode = CodeBlock.of("args -> $L.$L($L)",
INSTANCE_PARAMETER, method.getName(), arguments);
code.add(".resolve($L, $L)", REGISTERED_BEAN_PARAMETER, injectionCode);
}
return code.build();
}
private CodeBlock generateParameterTypesCode(Class<?>[] parameterTypes) {
return CodeBlock.join(Arrays.stream(parameterTypes)
.map(parameterType -> CodeBlock.of("$T.class", parameterType))
.toList(), ", ");
}
private void registerHints(RuntimeHints runtimeHints) {
this.autowiredElements.forEach(autowiredElement -> {
boolean required = autowiredElement.required;
Member member = autowiredElement.getMember();
if (member instanceof Field field) {
DependencyDescriptor dependencyDescriptor = new DependencyDescriptor(field, required);
registerProxyIfNecessary(runtimeHints, dependencyDescriptor);
}
if (member instanceof Method method) {
Class<?>[] parameterTypes = method.getParameterTypes();
for (int i = 0; i < parameterTypes.length; i++) {
MethodParameter methodParam = new MethodParameter(method, i);
DependencyDescriptor dependencyDescriptor = new DependencyDescriptor(methodParam, required);
registerProxyIfNecessary(runtimeHints, dependencyDescriptor);
}
}
});
}
private void registerProxyIfNecessary(RuntimeHints runtimeHints, DependencyDescriptor dependencyDescriptor) {
if (this.candidateResolver != null) {
Class<?> proxyClass =
this.candidateResolver.getLazyResolutionProxyClass(dependencyDescriptor, null);
if (proxyClass != null) {
ClassHintUtils.registerProxyIfNecessary(proxyClass, runtimeHints);
}
}
}
}
}
|
AotContribution
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/imports/referenced/GenericMapper.java
|
{
"start": 321,
"end": 859
}
|
class ____ {
@SuppressWarnings("unchecked")
public <T> T fromFoo(Foo fromFoo, @TargetType Class<T> toFooClass) {
if ( org.mapstruct.ap.test.imports.to.Foo.class == toFooClass ) {
org.mapstruct.ap.test.imports.to.Foo result = new org.mapstruct.ap.test.imports.to.Foo();
result.setName( fromFoo.getName() );
return (T) result;
}
return null;
}
public NotImportedDatatype identity(NotImportedDatatype notImported) {
return notImported;
}
}
|
GenericMapper
|
java
|
alibaba__nacos
|
common/src/test/java/com/alibaba/nacos/common/executor/NameThreadFactoryTest.java
|
{
"start": 759,
"end": 1155
}
|
class ____ {
@Test
void test() {
NameThreadFactory threadFactory = new NameThreadFactory("test");
Thread t1 = threadFactory.newThread(() -> {
});
Thread t2 = threadFactory.newThread(() -> {
});
assertEquals("test.0", t1.getName());
assertEquals("test.1", t2.getName());
}
}
|
NameThreadFactoryTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/basic/EagerAndLazyBasicUpdateTest.java
|
{
"start": 1415,
"end": 12419
}
|
class ____ {
private Long entityId;
SQLStatementInspector statementInspector(SessionFactoryScope scope) {
return (SQLStatementInspector) scope.getSessionFactory().getSessionFactoryOptions().getStatementInspector();
}
private void initNull(SessionFactoryScope scope) {
scope.inTransaction( s -> {
LazyEntity entity = new LazyEntity();
s.persist( entity );
entityId = entity.getId();
} );
}
private void initNonNull(SessionFactoryScope scope) {
scope.inTransaction( s -> {
LazyEntity entity = new LazyEntity();
entity.setEagerProperty( "eager_initial" );
entity.setLazyProperty1( "lazy1_initial" );
entity.setLazyProperty2( "lazy2_initial" );
s.persist( entity );
entityId = entity.getId();
} );
}
@BeforeEach
public void clearStatementInspector(SessionFactoryScope scope) {
statementInspector( scope ).clear();
}
@Test
public void updateOneLazyProperty_nullToNull(SessionFactoryScope scope) {
initNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setLazyProperty1( null );
} );
// When a lazy property is modified Hibernate does not perform any select
// but during flush an update is performed
statementInspector( scope ).assertUpdate();
}
@Test
public void updateOneLazyProperty_nullToNonNull(SessionFactoryScope scope) {
initNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setLazyProperty1( "lazy1_update" );
} );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
assertEquals( "lazy1_update", entity.getLazyProperty1() );
assertNull( entity.getEagerProperty() );
assertNull( entity.getLazyProperty2() );
} );
}
@Test
public void updateOneLazyProperty_nonNullToNonNull_differentValues(SessionFactoryScope scope) {
initNonNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setLazyProperty1( "lazy1_update" );
} );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
assertEquals( "lazy1_update", entity.getLazyProperty1() );
assertEquals( "eager_initial", entity.getEagerProperty() );
assertEquals( "lazy2_initial", entity.getLazyProperty2() );
} );
}
@Test
public void updateOneLazyProperty_nonNullToNonNull_sameValues(SessionFactoryScope scope) {
initNonNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setLazyProperty1( "lazy1_update" );
} );
// When a lazy property is modified Hibernate does not perform any select
// but during flush an update is performed
statementInspector( scope ).assertUpdate();
}
@Test
public void updateOneLazyProperty_nonNullToNull(SessionFactoryScope scope) {
initNonNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setLazyProperty1( null );
} );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
assertNull( entity.getLazyProperty1() );
assertEquals( "eager_initial", entity.getEagerProperty() );
assertEquals( "lazy2_initial", entity.getLazyProperty2() );
} );
}
@Test
public void updateOneEagerProperty_nullToNull(SessionFactoryScope scope) {
initNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setEagerProperty( null );
} );
// We should not update entities when property values did not change
statementInspector( scope ).assertNoUpdate();
}
@Test
public void updateOneEagerProperty_nullToNonNull(SessionFactoryScope scope) {
initNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setEagerProperty( "eager_update" );
} );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
assertEquals( "eager_update", entity.getEagerProperty() );
assertNull( entity.getLazyProperty1() );
assertNull( entity.getLazyProperty2() );
} );
}
@Test
public void updateOneEagerProperty_nonNullToNonNull_differentValues(SessionFactoryScope scope) {
initNonNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setEagerProperty( "eager_update" );
} );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
assertEquals( "eager_update", entity.getEagerProperty() );
assertEquals( "lazy1_initial", entity.getLazyProperty1() );
assertEquals( "lazy2_initial", entity.getLazyProperty2() );
} );
}
@Test
public void updateOneEagerProperty_nonNullToNonNull_sameValues(SessionFactoryScope scope) {
initNonNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setEagerProperty( "eager_initial" );
} );
// We should not update entities when property values did not change
statementInspector( scope ).assertNoUpdate();
}
@Test
public void updateOneEagerProperty_nonNullToNull(SessionFactoryScope scope) {
initNonNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setEagerProperty( null );
} );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
assertNull( entity.getEagerProperty() );
assertEquals( "lazy1_initial", entity.getLazyProperty1() );
assertEquals( "lazy2_initial", entity.getLazyProperty2() );
} );
}
@Test
public void updateOneEagerPropertyAndOneLazyProperty_nullToNull(SessionFactoryScope scope) {
initNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setEagerProperty( null );
entity.setLazyProperty1( null );
} );
// When a lazy property is modified Hibernate does not perform any select
// but during flush an update is performed
statementInspector( scope ).assertUpdate();
}
@Test
public void updateOneEagerPropertyAndOneLazyProperty_nullToNonNull(SessionFactoryScope scope) {
initNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setEagerProperty( "eager_update" );
entity.setLazyProperty1( "lazy1_update" );
} );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
assertEquals( "eager_update", entity.getEagerProperty() );
assertEquals( "lazy1_update", entity.getLazyProperty1() );
assertNull( entity.getLazyProperty2() );
} );
}
@Test
public void updateOneEagerPropertyAndOneLazyProperty_nonNullToNonNull_differentValues(SessionFactoryScope scope) {
initNonNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setEagerProperty( "eager_update" );
entity.setLazyProperty1( "lazy1_update" );
} );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
assertEquals( "eager_update", entity.getEagerProperty() );
assertEquals( "lazy1_update", entity.getLazyProperty1() );
assertEquals( "lazy2_initial", entity.getLazyProperty2() );
} );
}
@Test
public void updateOneEagerPropertyAndOneLazyProperty_nonNullToNonNull_sameValues(SessionFactoryScope scope) {
initNonNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setEagerProperty( entity.getEagerProperty() );
entity.setLazyProperty1( entity.getLazyProperty1() );
} );
// We should not update entities when property values did not change
statementInspector( scope ).assertNoUpdate();
}
@Test
public void updateOneEagerPropertyAndOneLazyProperty_nonNullToNull(SessionFactoryScope scope) {
initNonNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setEagerProperty( null );
entity.setLazyProperty1( null );
} );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
assertNull( entity.getEagerProperty() );
assertNull( entity.getLazyProperty1() );
assertEquals( "lazy2_initial", entity.getLazyProperty2() );
} );
}
@Test
public void updateAllLazyProperties_nullToNull(SessionFactoryScope scope) {
initNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setLazyProperty1( null );
entity.setLazyProperty2( null );
} );
// When a lazy property is modified Hibernate does not perform any select
// but during flush an update is performed
statementInspector( scope ).assertUpdate();
}
@Test
public void updateAllLazyProperties_nullToNonNull(SessionFactoryScope scope) {
initNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setLazyProperty1( "lazy1_update" );
entity.setLazyProperty2( "lazy2_update" );
} );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
assertEquals( "lazy1_update", entity.getLazyProperty1() );
assertEquals( "lazy2_update", entity.getLazyProperty2() );
assertNull( entity.getEagerProperty() );
} );
}
@Test
public void updateAllLazyProperties_nonNullToNonNull_differentValues(SessionFactoryScope scope) {
initNonNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setLazyProperty1( "lazy1_update" );
entity.setLazyProperty2( "lazy2_update" );
} );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
assertEquals( "lazy1_update", entity.getLazyProperty1() );
assertEquals( "lazy2_update", entity.getLazyProperty2() );
assertEquals( "eager_initial", entity.getEagerProperty() );
} );
}
@Test
public void updateAllLazyProperties_nonNullToNonNull_sameValues(SessionFactoryScope scope) {
initNonNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setLazyProperty1( entity.getLazyProperty1() );
entity.setLazyProperty2( entity.getLazyProperty2() );
} );
// We should not update entities when property values did not change
statementInspector( scope ).assertNoUpdate();
}
@Test
public void updateAllLazyProperties_nonNullToNull(SessionFactoryScope scope) {
initNonNull( scope );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
entity.setLazyProperty1( null );
entity.setLazyProperty2( null );
} );
scope.inTransaction( s -> {
LazyEntity entity = s.get( LazyEntity.class, entityId );
assertNull( entity.getLazyProperty1() );
assertNull( entity.getLazyProperty2() );
assertEquals( "eager_initial", entity.getEagerProperty() );
} );
}
@Entity
@Table(name = "LAZY_ENTITY")
static
|
EagerAndLazyBasicUpdateTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java
|
{
"start": 923,
"end": 4354
}
|
class ____ extends AbstractScalarFunctionTestCase {
public ToDegreesTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
this.testCase = testCaseSupplier.get();
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
// TODO multivalue fields
List<TestCaseSupplier> suppliers = new ArrayList<>();
TestCaseSupplier.forUnaryInt(
suppliers,
evaluatorName("ToDoubleFromIntEvaluator", "i"),
DataType.DOUBLE,
Math::toDegrees,
Integer.MIN_VALUE,
Integer.MAX_VALUE,
List.of()
);
TestCaseSupplier.forUnaryLong(
suppliers,
evaluatorName("ToDoubleFromLongEvaluator", "l"),
DataType.DOUBLE,
Math::toDegrees,
Long.MIN_VALUE,
Long.MAX_VALUE,
List.of()
);
TestCaseSupplier.forUnaryUnsignedLong(
suppliers,
evaluatorName("ToDoubleFromUnsignedLongEvaluator", "l"),
DataType.DOUBLE,
ul -> Math.toDegrees(ul.doubleValue()),
BigInteger.ZERO,
UNSIGNED_LONG_MAX,
List.of()
);
TestCaseSupplier.forUnaryDouble(suppliers, "ToDegreesEvaluator[deg=Attribute[channel=0]]", DataType.DOUBLE, d -> {
double deg = Math.toDegrees(d);
return Double.isNaN(deg) || Double.isInfinite(deg) ? null : deg;
}, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, d -> {
double deg = Math.toDegrees(d);
ArrayList<String> warnings = new ArrayList<>(2);
if (Double.isNaN(deg) || Double.isInfinite(deg)) {
warnings.add("Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.");
warnings.add("Line 1:1: java.lang.ArithmeticException: not a finite double number: " + deg);
}
return warnings;
});
TestCaseSupplier.unary(
suppliers,
"ToDegreesEvaluator[deg=Attribute[channel=0]]",
List.of(
new TestCaseSupplier.TypedDataSupplier("Double.MAX_VALUE", () -> Double.MAX_VALUE, DataType.DOUBLE),
new TestCaseSupplier.TypedDataSupplier("-Double.MAX_VALUE", () -> -Double.MAX_VALUE, DataType.DOUBLE),
new TestCaseSupplier.TypedDataSupplier("Double.POSITIVE_INFINITY", () -> Double.POSITIVE_INFINITY, DataType.DOUBLE),
new TestCaseSupplier.TypedDataSupplier("Double.NEGATIVE_INFINITY", () -> Double.NEGATIVE_INFINITY, DataType.DOUBLE)
),
DataType.DOUBLE,
d -> null,
d -> List.of(
"Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.",
"Line 1:1: java.lang.ArithmeticException: not a finite double number: " + ((double) d > 0 ? "Infinity" : "-Infinity")
)
);
return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers);
}
private static String evaluatorName(String inner, String next) {
return "ToDegreesEvaluator[deg=" + inner + "[" + next + "=Attribute[channel=0]]]";
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new ToDegrees(source, args.get(0));
}
}
|
ToDegreesTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/contextualai/ContextualAiService.java
|
{
"start": 9318,
"end": 10785
}
|
class ____ {
public static InferenceServiceConfiguration get() {
return configuration.getOrCompute();
}
private static final LazyInitializable<InferenceServiceConfiguration, RuntimeException> configuration = new LazyInitializable<>(
() -> {
var configurationMap = new HashMap<String, SettingsConfiguration>();
configurationMap.put(
"model_id",
new SettingsConfiguration.Builder(SUPPORTED_TASK_TYPES).setDescription(
"The model ID to use for Contextual AI requests."
)
.setLabel("Model ID")
.setRequired(true)
.setSensitive(false)
.setUpdatable(false)
.setType(SettingsConfigurationFieldType.STRING)
.build()
);
configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(SUPPORTED_TASK_TYPES));
configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(SUPPORTED_TASK_TYPES));
return new InferenceServiceConfiguration.Builder().setService(NAME)
.setName(SERVICE_NAME)
.setTaskTypes(SUPPORTED_TASK_TYPES)
.setConfigurations(configurationMap)
.build();
}
);
}
}
|
Configuration
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/config/InvalidConfigDataPropertyExceptionTests.java
|
{
"start": 1610,
"end": 7169
}
|
class ____ {
private final ConfigDataResource resource = new TestConfigDataResource();
private final ConfigurationPropertyName replacement = ConfigurationPropertyName.of("replacement");
private final ConfigurationPropertyName invalid = ConfigurationPropertyName.of("invalid");
private final ConfigurationProperty property = new ConfigurationProperty(this.invalid, "bad",
MockOrigin.of("origin"));
private final ConversionService conversionService = DefaultConversionService.getSharedInstance();
@Test
void createHasCorrectMessage() {
assertThat(new InvalidConfigDataPropertyException(this.property, false, this.replacement, this.resource))
.hasMessage(
"Property 'invalid' imported from location 'test' is invalid and should be replaced with 'replacement' [origin: origin]");
}
@Test
void createWhenNoLocationHasCorrectMessage() {
assertThat(new InvalidConfigDataPropertyException(this.property, false, this.replacement, null))
.hasMessage("Property 'invalid' is invalid and should be replaced with 'replacement' [origin: origin]");
}
@Test
void createWhenNoReplacementHasCorrectMessage() {
assertThat(new InvalidConfigDataPropertyException(this.property, false, null, this.resource))
.hasMessage("Property 'invalid' imported from location 'test' is invalid [origin: origin]");
}
@Test
void createWhenNoOriginHasCorrectMessage() {
ConfigurationProperty property = new ConfigurationProperty(this.invalid, "bad", null);
assertThat(new InvalidConfigDataPropertyException(property, false, this.replacement, this.resource)).hasMessage(
"Property 'invalid' imported from location 'test' is invalid and should be replaced with 'replacement'");
}
@Test
void createWhenProfileSpecificHasCorrectMessage() {
ConfigurationProperty property = new ConfigurationProperty(this.invalid, "bad", null);
assertThat(new InvalidConfigDataPropertyException(property, true, null, this.resource))
.hasMessage("Property 'invalid' imported from location 'test' is invalid in a profile specific resource");
}
@Test
void getPropertyReturnsProperty() {
InvalidConfigDataPropertyException exception = new InvalidConfigDataPropertyException(this.property, false,
this.replacement, this.resource);
assertThat(exception.getProperty()).isEqualTo(this.property);
}
@Test
void getLocationReturnsLocation() {
InvalidConfigDataPropertyException exception = new InvalidConfigDataPropertyException(this.property, false,
this.replacement, this.resource);
assertThat(exception.getLocation()).isEqualTo(this.resource);
}
@Test
void getReplacementReturnsReplacement() {
InvalidConfigDataPropertyException exception = new InvalidConfigDataPropertyException(this.property, false,
this.replacement, this.resource);
assertThat(exception.getReplacement()).isEqualTo(this.replacement);
}
@Test
void throwOrWarnWhenHasInvalidPropertyThrowsException() {
MockPropertySource propertySource = new MockPropertySource();
propertySource.setProperty("spring.profiles", "a");
ConfigDataEnvironmentContributor contributor = ConfigDataEnvironmentContributor.ofExisting(propertySource,
this.conversionService);
assertThatExceptionOfType(InvalidConfigDataPropertyException.class)
.isThrownBy(() -> InvalidConfigDataPropertyException.throwIfPropertyFound(contributor))
.withMessageStartingWith("Property 'spring.profiles' is invalid and should be replaced with "
+ "'spring.config.activate.on-profile'");
}
@Test
void throwOrWarnWhenWhenHasInvalidProfileSpecificPropertyThrowsException() {
throwOrWarnWhenWhenHasInvalidProfileSpecificPropertyThrowsException("spring.profiles.include");
throwOrWarnWhenWhenHasInvalidProfileSpecificPropertyThrowsException("spring.profiles.active");
throwOrWarnWhenWhenHasInvalidProfileSpecificPropertyThrowsException("spring.profiles.default");
}
@Test
void throwOrWarnWhenWhenHasInvalidProfileSpecificPropertyOnIgnoringProfilesContributorDoesNotThrowException() {
ConfigDataEnvironmentContributor contributor = createInvalidProfileSpecificPropertyContributor(
"spring.profiles.active", ConfigData.Option.IGNORE_PROFILES);
assertThatNoException().isThrownBy(() -> InvalidConfigDataPropertyException.throwIfPropertyFound(contributor));
}
private void throwOrWarnWhenWhenHasInvalidProfileSpecificPropertyThrowsException(String name) {
ConfigDataEnvironmentContributor contributor = createInvalidProfileSpecificPropertyContributor(name);
assertThatExceptionOfType(InvalidConfigDataPropertyException.class)
.isThrownBy(() -> InvalidConfigDataPropertyException.throwIfPropertyFound(contributor))
.withMessageStartingWith("Property '" + name + "' is invalid in a profile specific resource");
}
private ConfigDataEnvironmentContributor createInvalidProfileSpecificPropertyContributor(String name,
ConfigData.Option... configDataOptions) {
MockPropertySource propertySource = new MockPropertySource();
propertySource.setProperty(name, "a");
return new ConfigDataEnvironmentContributor(Kind.BOUND_IMPORT, null, null, true, propertySource,
ConfigurationPropertySource.from(propertySource), null, ConfigData.Options.of(configDataOptions), null,
this.conversionService);
}
@Test
void throwOrWarnWhenHasNoInvalidPropertyDoesNothing() {
ConfigDataEnvironmentContributor contributor = ConfigDataEnvironmentContributor
.ofExisting(new MockPropertySource(), this.conversionService);
InvalidConfigDataPropertyException.throwIfPropertyFound(contributor);
}
private static final
|
InvalidConfigDataPropertyExceptionTests
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/test/benchmark/basic/ShortBenchmark_arrayMappinng_obj.java
|
{
"start": 205,
"end": 1602
}
|
class ____ {
static String json = "[-5972,5582,-2398,-9859,25417]";
static String json2 = "[\"-5972\",\"5582\",\"-2398\",\"-9859\",\"25417\"]";
public static void main(String[] args) throws Exception {
Model model = new Model();
model.v1 = (short) new Random().nextInt();
model.v2 = (short) new Random().nextInt();
model.v3 = (short) new Random().nextInt();
model.v4 = (short) new Random().nextInt();
model.v5 = (short) new Random().nextInt();
System.out.println(JSON.toJSONString(model));
for (int i = 0; i < 10; ++i) {
perf(); // 1087
// perf2(); // 1120
}
}
public static void perf() {
long start = System.currentTimeMillis();
for (int i = 0; i < 1000 * 1000 * 10; ++i) {
JSON.parseObject(json, Model.class, Feature.SupportArrayToBean);
}
long millis = System.currentTimeMillis() - start;
System.out.println("millis : " + millis);
}
public static void perf2() {
long start = System.currentTimeMillis();
for (int i = 0; i < 1000 * 1000 * 10; ++i) {
JSON.parseObject(json2, Model.class, Feature.SupportArrayToBean);
}
long millis = System.currentTimeMillis() - start;
System.out.println("millis : " + millis);
}
public static
|
ShortBenchmark_arrayMappinng_obj
|
java
|
spring-projects__spring-security
|
saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/provider/service/authentication/logout/Saml2LogoutValidatorResult.java
|
{
"start": 2448,
"end": 3007
}
|
class ____ {
private final Collection<Saml2Error> errors;
private Builder(Saml2Error... errors) {
this(Arrays.asList(errors));
}
private Builder(Collection<Saml2Error> errors) {
Assert.noNullElements(errors, "errors cannot have null elements");
this.errors = new ArrayList<>(errors);
}
public Builder errors(Consumer<Collection<Saml2Error>> errorsConsumer) {
errorsConsumer.accept(this.errors);
return this;
}
public Saml2LogoutValidatorResult build() {
return new Saml2LogoutValidatorResult(this.errors);
}
}
}
|
Builder
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/extractor/ByNameSingleExtractorTest.java
|
{
"start": 1443,
"end": 7609
}
|
class ____ {
private static final Employee YODA = new Employee(1L, new Name("Yoda"), 800);
@Test
void should_extract_field_values_even_if_property_does_not_exist() {
// GIVEN
ByNameSingleExtractor underTest = new ByNameSingleExtractor("id");
// WHEN
Object result = underTest.apply(YODA);
// THEN
then(result).isEqualTo(1L);
}
@Test
void should_extract_property_values_when_no_public_field_match_given_name() {
// GIVEN
ByNameSingleExtractor underTest = new ByNameSingleExtractor("age");
// WHEN
Object result = underTest.apply(YODA);
// THEN
then(result).isEqualTo(800);
}
@Test
void should_extract_pure_property_values() {
// GIVEN
ByNameSingleExtractor underTest = new ByNameSingleExtractor("adult");
// WHEN
Object result = underTest.apply(YODA);
// THEN
then(result).isEqualTo(true);
}
@Test
void should_throw_error_when_no_property_nor_public_field_match_given_name() {
// GIVEN
ByNameSingleExtractor underTest = new ByNameSingleExtractor("unknown");
// WHEN
Throwable thrown = catchThrowable(() -> underTest.apply(YODA));
// THEN
then(thrown).isInstanceOf(IntrospectionError.class);
}
@Test
void should_throw_exception_when_given_name_is_null() {
// GIVEN
ByNameSingleExtractor underTest = new ByNameSingleExtractor(null);
// WHEN
Throwable thrown = catchThrowable(() -> underTest.apply(YODA));
// THEN
then(thrown).isInstanceOf(IllegalArgumentException.class)
.hasMessage("The name of the property/field to read should not be null");
}
@Test
void should_throw_exception_when_given_name_is_empty() {
// GIVEN
ByNameSingleExtractor underTest = new ByNameSingleExtractor("");
// WHEN
Throwable thrown = catchThrowable(() -> underTest.apply(YODA));
// THEN
then(thrown).isInstanceOf(IllegalArgumentException.class)
.hasMessage("The name of the property/field to read should not be empty");
}
@Test
void should_fallback_to_field_if_exception_has_been_thrown_on_property_access() {
// GIVEN
Employee employee = new Employee(1L, new Name("Name"), 0) {
@Override
public Name getName() {
throw new RuntimeException();
}
};
ByNameSingleExtractor underTest = new ByNameSingleExtractor("name");
// WHEN
Object result = underTest.apply(employee);
// THEN
then(result).isEqualTo(new Name("Name"));
}
@Test
void should_prefer_properties_over_fields() {
// GIVEN
Employee employee = new Employee(1L, new Name("Name"), 0) {
@Override
public Name getName() {
return new Name("Overridden Name");
}
};
ByNameSingleExtractor underTest = new ByNameSingleExtractor("name");
// WHEN
Object result = underTest.apply(employee);
// THEN
then(result).isEqualTo(new Name("Overridden Name"));
}
@Test
void should_throw_exception_if_property_cannot_be_extracted_due_to_runtime_exception_during_property_access() {
// GIVEN
Employee employee = new Employee() {
@Override
public boolean isAdult() {
throw new RuntimeException();
}
};
ByNameSingleExtractor underTest = new ByNameSingleExtractor("adult");
// WHEN
Throwable thrown = catchThrowable(() -> underTest.apply(employee));
// THEN
then(thrown).isInstanceOf(IntrospectionError.class);
}
@Test
void should_throw_exception_if_no_object_is_given() {
// GIVEN
ByNameSingleExtractor underTest = new ByNameSingleExtractor("id");
// WHEN
Throwable thrown = catchThrowable(() -> underTest.apply(null));
// THEN
then(thrown).isInstanceOf(IllegalArgumentException.class);
}
@Test
void should_extract_single_value_from_map_by_key() {
// GIVEN
Map<String, Employee> map = mapOf(entry("key", YODA));
ByNameSingleExtractor underTest = new ByNameSingleExtractor("key");
// WHEN
Object result = underTest.apply(map);
// THEN
then(result).isEqualTo(YODA);
}
@Test
void should_throw_error_from_map_by_non_existing_key() {
// GIVEN
Map<String, Employee> map = mapOf(entry("key", YODA));
ByNameSingleExtractor underTest = new ByNameSingleExtractor("non-existing");
// WHEN
Throwable thrown = catchThrowable(() -> underTest.apply(map));
// THEN
then(thrown).isInstanceOf(IntrospectionError.class);
}
@Test
void should_extract_null_from_map_by_key_with_null_value() {
// GIVEN
Map<String, Employee> map = mapOf(entry("key", null));
ByNameSingleExtractor underTest = new ByNameSingleExtractor("key");
// WHEN
Object result = underTest.apply(map);
// THEN
then(result).isNull();
}
@Test
void should_extract_property_field_combinations() {
// GIVEN
Employee darth = new Employee(1L, new Name("Darth", "Vader"), 100);
Employee luke = new Employee(2L, new Name("Luke", "Skywalker"), 26);
darth.field = luke;
luke.field = darth;
luke.surname = new Name("Young", "Padawan");
ByNameSingleExtractor underTest = new ByNameSingleExtractor("me.field.me.field.me.field.surname.name");
// WHEN
Object result = underTest.apply(darth);
// THEN
then(result).isEqualTo("Young Padawan");
}
@Test
void should_extract_property_with_bare_name_method() {
// GIVEN
setExtractBareNamePropertyMethods(true);
BareOptionalIntHolder holder = new BareOptionalIntHolder(42);
ByNameSingleExtractor underTest = new ByNameSingleExtractor("value");
// WHEN
Object result = underTest.apply(holder);
// THEN
then(result).isEqualTo(OptionalInt.of(42));
}
    // With bare-name extraction disabled, the bare value() accessor is ignored
    // and "value" falls back to the field, yielding the raw int.
    @Test
    void should_ignore_property_with_bare_name_method_when_disabled() {
        // GIVEN
        setExtractBareNamePropertyMethods(false);
        BareOptionalIntHolder holder = new BareOptionalIntHolder(42);
        ByNameSingleExtractor underTest = new ByNameSingleExtractor("value");
        // WHEN
        Object result = underTest.apply(holder);
        // THEN
        then(result).isEqualTo(42);
    }
/** This style of Optional handling is emitted by Immutables code gen library. */
static
|
ByNameSingleExtractorTest
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/migrationsupport/rules/ExpectedExceptionSupportTests.java
|
{
"start": 4023,
"end": 4200
}
|
// Fixture with one passing and one failing test; used to verify that the
// extension leaves outcomes untouched when no ExpectedException rule is present.
class ExpectedExceptionSupportWithoutExpectedExceptionRuleTestCase {

    @Test
    void success() {
        /* no-op */
    }

    @Test
    void failure() {
        // must be reported as a plain failure, not swallowed by the support
        fail("must fail");
    }
}
}
|
ExpectedExceptionSupportWithoutExpectedExceptionRuleTestCase
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/AbstractInferenceServiceTests.java
|
{
"start": 1695,
"end": 14925
}
|
// Shared contract tests for inference services. Subclasses supply a
// TestConfiguration describing how to build the service, its settings maps,
// and which task types it supports; the tests here exercise request parsing,
// validation of unknown settings keys, inference with an invalid model, and
// embedding-detail updates.
class AbstractInferenceServiceTests extends AbstractInferenceServiceBaseTests {

    public AbstractInferenceServiceTests(TestConfiguration testConfiguration) {
        super(testConfiguration);
    }

    // Parsing a text-embedding request config without chunking settings yields
    // a model carrying the default chunking settings.
    public void testParseRequestConfig_CreatesAnEmbeddingsModel() throws Exception {
        Assume.assumeTrue(testConfiguration.commonConfig().supportedTaskTypes().contains(TaskType.TEXT_EMBEDDING));
        var parseRequestConfigTestConfig = testConfiguration.commonConfig();
        try (var service = parseRequestConfigTestConfig.createService(threadPool, clientManager)) {
            var config = getRequestConfigMap(
                parseRequestConfigTestConfig.createServiceSettingsMap(TaskType.TEXT_EMBEDDING, ConfigurationParseContext.REQUEST),
                parseRequestConfigTestConfig.createTaskSettingsMap(),
                parseRequestConfigTestConfig.createSecretSettingsMap()
            );
            var listener = new PlainActionFuture<Model>();
            service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, listener);
            var model = listener.actionGet(TIMEOUT);
            var expectedChunkingSettings = ChunkingSettingsBuilder.fromMap(Map.of());
            assertThat(model.getConfigurations().getChunkingSettings(), is(expectedChunkingSettings));
            parseRequestConfigTestConfig.assertModel(model, TaskType.TEXT_EMBEDDING);
        }
    }

    // Explicit chunking settings in the request must be carried through to the model.
    public void testParseRequestConfig_CreatesAnEmbeddingsModelWhenChunkingSettingsProvided() throws Exception {
        Assume.assumeTrue(testConfiguration.commonConfig().supportedTaskTypes().contains(TaskType.TEXT_EMBEDDING));
        var parseRequestConfigTestConfig = testConfiguration.commonConfig();
        try (var service = parseRequestConfigTestConfig.createService(threadPool, clientManager)) {
            var chunkingSettingsMap = createRandomChunkingSettingsMap();
            var config = getRequestConfigMap(
                parseRequestConfigTestConfig.createServiceSettingsMap(TaskType.TEXT_EMBEDDING, ConfigurationParseContext.REQUEST),
                parseRequestConfigTestConfig.createTaskSettingsMap(),
                chunkingSettingsMap,
                parseRequestConfigTestConfig.createSecretSettingsMap()
            );
            var listener = new PlainActionFuture<Model>();
            service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, listener);
            var model = listener.actionGet(TIMEOUT);
            var expectedChunkingSettings = ChunkingSettingsBuilder.fromMap(chunkingSettingsMap);
            assertThat(model.getConfigurations().getChunkingSettings(), is(expectedChunkingSettings));
            parseRequestConfigTestConfig.assertModel(model, TaskType.TEXT_EMBEDDING);
        }
    }

    public void testParseRequestConfig_CreatesACompletionModel() throws Exception {
        var parseRequestConfigTestConfig = testConfiguration.commonConfig();
        try (var service = parseRequestConfigTestConfig.createService(threadPool, clientManager)) {
            var config = getRequestConfigMap(
                parseRequestConfigTestConfig.createServiceSettingsMap(TaskType.COMPLETION, ConfigurationParseContext.REQUEST),
                parseRequestConfigTestConfig.createTaskSettingsMap(),
                parseRequestConfigTestConfig.createSecretSettingsMap()
            );
            var listener = new PlainActionFuture<Model>();
            service.parseRequestConfig("id", TaskType.COMPLETION, config, listener);
            parseRequestConfigTestConfig.assertModel(listener.actionGet(TIMEOUT), TaskType.COMPLETION);
        }
    }

    // Requesting a task type the service does not support must fail with a
    // descriptive ElasticsearchStatusException.
    public void testParseRequestConfig_ThrowsUnsupportedModelType() throws Exception {
        var parseRequestConfigTestConfig = testConfiguration.commonConfig();
        try (var service = parseRequestConfigTestConfig.createService(threadPool, clientManager)) {
            var config = getRequestConfigMap(
                parseRequestConfigTestConfig.createServiceSettingsMap(
                    parseRequestConfigTestConfig.targetTaskType(),
                    ConfigurationParseContext.REQUEST
                ),
                parseRequestConfigTestConfig.createTaskSettingsMap(),
                parseRequestConfigTestConfig.createSecretSettingsMap()
            );
            var listener = new PlainActionFuture<Model>();
            service.parseRequestConfig("id", parseRequestConfigTestConfig.unsupportedTaskType(), config, listener);
            var exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT));
            assertThat(
                exception.getMessage(),
                containsString(
                    Strings.format("service does not support task type [%s]", parseRequestConfigTestConfig.unsupportedTaskType())
                )
            );
        }
    }

    // The next four tests verify that an unknown key is rejected no matter
    // which settings map it appears in: top-level, service, task, or secret.
    public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws IOException {
        var parseRequestConfigTestConfig = testConfiguration.commonConfig();
        try (var service = parseRequestConfigTestConfig.createService(threadPool, clientManager)) {
            var config = getRequestConfigMap(
                parseRequestConfigTestConfig.createServiceSettingsMap(
                    parseRequestConfigTestConfig.targetTaskType(),
                    ConfigurationParseContext.REQUEST
                ),
                parseRequestConfigTestConfig.createTaskSettingsMap(),
                parseRequestConfigTestConfig.createSecretSettingsMap()
            );
            config.put("extra_key", "value");
            var listener = new PlainActionFuture<Model>();
            service.parseRequestConfig("id", parseRequestConfigTestConfig.targetTaskType(), config, listener);
            var exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT));
            assertThat(exception.getMessage(), containsString("Configuration contains settings [{extra_key=value}]"));
        }
    }

    public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInServiceSettingsMap() throws IOException {
        var parseRequestConfigTestConfig = testConfiguration.commonConfig();
        try (var service = parseRequestConfigTestConfig.createService(threadPool, clientManager)) {
            var serviceSettings = parseRequestConfigTestConfig.createServiceSettingsMap(
                parseRequestConfigTestConfig.targetTaskType(),
                ConfigurationParseContext.REQUEST
            );
            serviceSettings.put("extra_key", "value");
            var config = getRequestConfigMap(
                serviceSettings,
                parseRequestConfigTestConfig.createTaskSettingsMap(),
                parseRequestConfigTestConfig.createSecretSettingsMap()
            );
            var listener = new PlainActionFuture<Model>();
            service.parseRequestConfig("id", parseRequestConfigTestConfig.targetTaskType(), config, listener);
            var exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT));
            assertThat(exception.getMessage(), containsString("Configuration contains settings [{extra_key=value}]"));
        }
    }

    public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInTaskSettingsMap() throws IOException {
        var parseRequestConfigTestConfig = testConfiguration.commonConfig();
        try (var service = parseRequestConfigTestConfig.createService(threadPool, clientManager)) {
            var taskSettings = parseRequestConfigTestConfig.createTaskSettingsMap();
            taskSettings.put("extra_key", "value");
            var config = getRequestConfigMap(
                parseRequestConfigTestConfig.createServiceSettingsMap(
                    parseRequestConfigTestConfig.targetTaskType(),
                    ConfigurationParseContext.REQUEST
                ),
                taskSettings,
                parseRequestConfigTestConfig.createSecretSettingsMap()
            );
            var listener = new PlainActionFuture<Model>();
            service.parseRequestConfig("id", parseRequestConfigTestConfig.targetTaskType(), config, listener);
            var exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT));
            assertThat(exception.getMessage(), containsString("Configuration contains settings [{extra_key=value}]"));
        }
    }

    public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInSecretSettingsMap() throws IOException {
        var parseRequestConfigTestConfig = testConfiguration.commonConfig();
        try (var service = parseRequestConfigTestConfig.createService(threadPool, clientManager)) {
            var secretSettingsMap = parseRequestConfigTestConfig.createSecretSettingsMap();
            secretSettingsMap.put("extra_key", "value");
            var config = getRequestConfigMap(
                parseRequestConfigTestConfig.createServiceSettingsMap(
                    parseRequestConfigTestConfig.targetTaskType(),
                    ConfigurationParseContext.REQUEST
                ),
                parseRequestConfigTestConfig.createTaskSettingsMap(),
                secretSettingsMap
            );
            var listener = new PlainActionFuture<Model>();
            service.parseRequestConfig("id", parseRequestConfigTestConfig.targetTaskType(), config, listener);
            var exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT));
            assertThat(exception.getMessage(), containsString("Configuration contains settings [{extra_key=value}]"));
        }
    }

    public void testInfer_ThrowsErrorWhenModelIsNotValid() throws IOException {
        try (var service = testConfiguration.commonConfig().createService(threadPool, clientManager)) {
            var listener = new PlainActionFuture<InferenceServiceResults>();
            service.infer(
                getInvalidModel("id", "service"),
                null,
                null,
                null,
                List.of(""),
                false,
                new HashMap<>(),
                InputType.INTERNAL_SEARCH,
                InferenceAction.Request.DEFAULT_TIMEOUT,
                listener
            );
            var exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT));
            assertThat(
                exception.getMessage(),
                is("The internal model was invalid, please delete the service [service] with id [id] and add it again.")
            );
        }
    }

    public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IOException {
        Assume.assumeTrue(testConfiguration.updateModelConfiguration().isEnabled());
        try (var service = testConfiguration.commonConfig().createService(threadPool, clientManager)) {
            var exception = expectThrows(
                ElasticsearchStatusException.class,
                () -> service.updateModelWithEmbeddingDetails(getInvalidModel("id", "service"), randomNonNegativeInt())
            );
            assertThat(exception.getMessage(), containsString("Can't update embedding details for model"));
        }
    }

    // A null similarity on the original model should default to DOT_PRODUCT.
    public void testUpdateModelWithEmbeddingDetails_NullSimilarityInOriginalModel() throws IOException {
        Assume.assumeTrue(testConfiguration.updateModelConfiguration().isEnabled());
        try (var service = testConfiguration.commonConfig().createService(threadPool, clientManager)) {
            var embeddingSize = randomNonNegativeInt();
            var model = testConfiguration.updateModelConfiguration().createEmbeddingModel(null);
            Model updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize);
            assertEquals(SimilarityMeasure.DOT_PRODUCT, updatedModel.getServiceSettings().similarity());
            assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue());
        }
    }

    // An explicit similarity on the original model must be preserved.
    public void testUpdateModelWithEmbeddingDetails_NonNullSimilarityInOriginalModel() throws IOException {
        Assume.assumeTrue(testConfiguration.updateModelConfiguration().isEnabled());
        try (var service = testConfiguration.commonConfig().createService(threadPool, clientManager)) {
            var embeddingSize = randomNonNegativeInt();
            var model = testConfiguration.updateModelConfiguration().createEmbeddingModel(SimilarityMeasure.COSINE);
            Model updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize);
            assertEquals(SimilarityMeasure.COSINE, updatedModel.getServiceSettings().similarity());
            assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue());
        }
    }

    // streaming tests
    public void testSupportedStreamingTasks() throws Exception {
        try (var service = testConfiguration.commonConfig().createService(threadPool, clientManager)) {
            assertThat(service.supportedStreamingTasks(), is(testConfiguration.commonConfig().supportedStreamingTasks()));
            assertFalse(service.canStream(TaskType.ANY));
        }
    }
}
|
AbstractInferenceServiceTests
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslOutputStream.java
|
{
"start": 1689,
"end": 6719
}
|
/**
 * An {@link OutputStream} that optionally SASL-wraps outgoing data.
 * <p>
 * When the negotiated quality-of-protection requires integrity or privacy
 * (any QOP other than {@code "auth"}), each {@code write} is wrapped via the
 * configured {@link SaslClient} or {@link SaslServer} and emitted as a
 * 4-byte big-endian length prefix followed by the wrapped token. Otherwise
 * writes pass straight through to the underlying stream.
 * <p>
 * Not thread-safe: callers must externally serialize writes.
 */
public class SaslOutputStream extends OutputStream {

    private final OutputStream outStream;
    private final SaslClient saslClient;
    private final SaslServer saslServer;
    // buffer holding one byte of incoming data for the single-byte write path
    private final byte[] ibuffer = new byte[1];
    // true when the negotiated QOP requires wrapping each buffer
    private final boolean useWrap;

    /**
     * Constructs a SASLOutputStream from an OutputStream and a SaslServer <br>
     * Note: if the specified OutputStream or SaslServer is null, a
     * NullPointerException may be thrown later when they are used.
     *
     * @param outStream the OutputStream to be processed
     * @param saslServer an initialized SaslServer object
     */
    public SaslOutputStream(OutputStream outStream, SaslServer saslServer) {
        this.saslServer = saslServer;
        this.saslClient = null;
        String qop = (String) saslServer.getNegotiatedProperty(Sasl.QOP);
        this.useWrap = qop != null && !"auth".equalsIgnoreCase(qop);
        if (useWrap) {
            // buffer because each wrapped record is emitted as several writes
            this.outStream = new BufferedOutputStream(outStream, 64 * 1024);
        } else {
            this.outStream = outStream;
        }
    }

    /**
     * Constructs a SASLOutputStream from an OutputStream and a SaslClient <br>
     * Note: if the specified OutputStream or SaslClient is null, a
     * NullPointerException may be thrown later when they are used.
     *
     * @param outStream the OutputStream to be processed
     * @param saslClient an initialized SaslClient object
     */
    public SaslOutputStream(OutputStream outStream, SaslClient saslClient) {
        this.saslServer = null;
        this.saslClient = saslClient;
        String qop = (String) saslClient.getNegotiatedProperty(Sasl.QOP);
        this.useWrap = qop != null && !"auth".equalsIgnoreCase(qop);
        if (useWrap) {
            this.outStream = new BufferedOutputStream(outStream, 64 * 1024);
        } else {
            this.outStream = outStream;
        }
    }

    /**
     * Disposes of any system resources or security-sensitive information Sasl
     * might be using.
     *
     * @exception SaslException if a SASL error occurs.
     */
    private void disposeSasl() throws SaslException {
        if (saslClient != null) {
            saslClient.dispose();
        }
        if (saslServer != null) {
            saslServer.dispose();
        }
    }

    /**
     * Writes the specified byte to this output stream.
     *
     * @param b the <code>byte</code>.
     * @exception IOException if an I/O error occurs.
     */
    @Override
    public void write(int b) throws IOException {
        if (!useWrap) {
            outStream.write(b);
            return;
        }
        ibuffer[0] = (byte) b;
        write(ibuffer, 0, 1);
    }

    /**
     * Writes <code>b.length</code> bytes from the specified byte array to this
     * output stream, equivalent to <code>write(b, 0, b.length)</code>.
     *
     * @param b the data.
     * @exception NullPointerException if <code>b</code> is null.
     * @exception IOException if an I/O error occurs.
     */
    @Override
    public void write(byte[] b) throws IOException {
        write(b, 0, b.length);
    }

    /**
     * Writes <code>len</code> bytes from the specified byte array starting at
     * offset <code>off</code> to this output stream. When wrapping is active,
     * the bytes are SASL-wrapped and framed with a 4-byte big-endian length.
     *
     * @param inBuf the data.
     * @param off the start offset in the data.
     * @param len the number of bytes to write.
     * @exception IOException if an I/O error occurs.
     */
    @Override
    public void write(byte[] inBuf, int off, int len) throws IOException {
        if (!useWrap) {
            outStream.write(inBuf, off, len);
            return;
        }
        final byte[] saslToken;
        try {
            if (saslServer != null) { // using saslServer
                saslToken = saslServer.wrap(inBuf, off, len);
            } else { // using saslClient
                saslToken = saslClient.wrap(inBuf, off, len);
            }
        } catch (SaslException se) {
            try {
                disposeSasl();
            } catch (SaslException ignored) {
                // best-effort cleanup; propagate the original wrap failure
            }
            throw se;
        }
        if (saslToken != null) {
            // 4-byte big-endian length prefix (same layout DataOutputStream.writeInt
            // produces), written directly instead of allocating a
            // ByteArrayOutputStream/DataOutputStream pair on every call
            outStream.write((saslToken.length >>> 24) & 0xFF);
            outStream.write((saslToken.length >>> 16) & 0xFF);
            outStream.write((saslToken.length >>> 8) & 0xFF);
            outStream.write(saslToken.length & 0xFF);
            outStream.write(saslToken, 0, saslToken.length);
        }
    }

    /**
     * Flushes this output stream.
     *
     * @exception IOException if an I/O error occurs.
     */
    @Override
    public void flush() throws IOException {
        outStream.flush();
    }

    /**
     * Closes this output stream, disposing SASL state and releasing any system
     * resources associated with this stream.
     *
     * @exception IOException if an I/O error occurs.
     */
    @Override
    public void close() throws IOException {
        disposeSasl();
        outStream.close();
    }
}
|
SaslOutputStream
|
java
|
apache__camel
|
core/camel-util/src/main/java/org/apache/camel/util/concurrent/RejectableScheduledThreadPoolExecutor.java
|
{
"start": 1593,
"end": 1924
}
|
class ____ implements Runnable, Rejectable {
* @Override
* public void run() {
* // execute task
* }
* @Override
* public void reject() {
* // do something useful on rejection
* }
* }
* </pre></code>
* <p/>
* If the task does not implement {@link Rejectable}
|
RejectableTask
|
java
|
spring-projects__spring-framework
|
spring-tx/src/main/java/org/springframework/transaction/annotation/TransactionAnnotationParser.java
|
{
"start": 2123,
"end": 2626
}
|
class ____ method level; {@code true} otherwise. The default
* implementation returns {@code true}, leading to regular introspection.
* @since 5.2
*/
default boolean isCandidateClass(Class<?> targetClass) {
return true;
}
/**
* Parse the transaction attribute for the given method or class,
* based on an annotation type understood by this parser.
* <p>This essentially parses a known transaction annotation into Spring's metadata
* attribute class. Returns {@code null} if the method/
|
or
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRClientClusterFactory.java
|
{
"start": 1481,
"end": 3156
}
|
// Factory for mini MapReduce clusters used in tests: stages the MR app-master
// jar and the caller's jar into a test directory on the configured FileSystem,
// then boots a MiniMRYarnCluster wrapped in the MiniMRClientCluster interface.
class MiniMRClientClusterFactory {

    public static MiniMRClientCluster create(Class<?> caller, int noOfNMs,
        Configuration conf) throws IOException {
        // identifier defaults to the caller's simple class name
        return create(caller, caller.getSimpleName(), noOfNMs, conf);
    }

    public static MiniMRClientCluster create(Class<?> caller, String identifier,
        int noOfNMs, Configuration conf) throws IOException {

        if (conf == null) {
            conf = new Configuration();
        }

        FileSystem fs = FileSystem.get(conf);

        // per-identifier scratch dir under "target" so parallel tests don't collide
        Path testRootDir = fs.makeQualified(
            new Path("target", identifier + "-tmpDir"));
        Path appJar = new Path(testRootDir, "MRAppJar.jar");

        // Copy MRAppJar and make it private.
        Path appMasterJar =
            new Path(MiniMRYarnCluster.copyAppJarIntoTestDir(identifier));
        fs.copyFromLocalFile(appMasterJar, appJar);
        fs.setPermission(appJar, new FsPermission("744"));

        Job job = Job.getInstance(conf);

        job.addFileToClassPath(appJar);

        // stage the caller's own jar alongside, so map/reduce tasks can load it
        Path callerJar = new Path(JarFinder.getJar(caller, identifier));
        Path remoteCallerJar = new Path(testRootDir, callerJar.getName());
        fs.copyFromLocalFile(callerJar, remoteCallerJar);
        fs.setPermission(remoteCallerJar, new FsPermission("744"));
        job.addFileToClassPath(remoteCallerJar);

        MiniMRYarnCluster miniMRYarnCluster = new MiniMRYarnCluster(identifier,
            noOfNMs);
        // record factory inputs in the config so the cluster can be restarted
        job.getConfiguration().set("minimrclientcluster.caller.name",
            identifier);
        job.getConfiguration().setInt("minimrclientcluster.nodemanagers.number",
            noOfNMs);
        miniMRYarnCluster.init(job.getConfiguration());
        miniMRYarnCluster.start();

        return new MiniMRYarnClusterAdapter(miniMRYarnCluster);
    }
}
|
MiniMRClientClusterFactory
|
java
|
elastic__elasticsearch
|
test/framework/src/test/java/org/elasticsearch/ingest/IngestDocumentMatcherTests.java
|
{
"start": 710,
"end": 3929
}
|
// Tests for assertIngestDocument: it must flag any structural difference
// between two IngestDocuments (maps, lists, nesting, type conflicts) and
// treat primitive arrays by content rather than by reference.
class IngestDocumentMatcherTests extends ESTestCase {

    public void testDifferentMapData() {
        Map<String, Object> sourceAndMetadata1 = new HashMap<>();
        sourceAndMetadata1.put("foo", "bar");
        IngestDocument document1 = TestIngestDocument.withDefaultVersion(sourceAndMetadata1);
        IngestDocument document2 = TestIngestDocument.emptyIngestDocument();
        assertThrowsOnComparision(document1, document2);
    }

    public void testDifferentLengthListData() {
        String rootKey = "foo";
        IngestDocument document1 = TestIngestDocument.withDefaultVersion(Map.of(rootKey, List.of("bar", "baz")));
        IngestDocument document2 = TestIngestDocument.withDefaultVersion(Map.of(rootKey, List.of()));
        assertThrowsOnComparision(document1, document2);
    }

    public void testDifferentNestedListFieldData() {
        String rootKey = "foo";
        IngestDocument document1 = TestIngestDocument.withDefaultVersion(Map.of(rootKey, List.of("bar", "baz")));
        IngestDocument document2 = TestIngestDocument.withDefaultVersion(Map.of(rootKey, List.of("bar", "blub")));
        assertThrowsOnComparision(document1, document2);
    }

    public void testDifferentNestedMapFieldData() {
        String rootKey = "foo";
        IngestDocument document1 = TestIngestDocument.withDefaultVersion(Map.of(rootKey, Map.of("bar", "baz")));
        IngestDocument document2 = TestIngestDocument.withDefaultVersion(Map.of(rootKey, Map.of("bar", "blub")));
        assertThrowsOnComparision(document1, document2);
    }

    // Same key holding a List in one document and a Map in the other.
    public void testOnTypeConflict() {
        String rootKey = "foo";
        IngestDocument document1 = TestIngestDocument.withDefaultVersion(Map.of(rootKey, List.of("baz")));
        IngestDocument document2 = TestIngestDocument.withDefaultVersion(Map.of(rootKey, Map.of("blub", "blab")));
        assertThrowsOnComparision(document1, document2);
    }

    public void testNestedMapArrayEquivalence() {
        IngestDocument ingestDocument = TestIngestDocument.emptyIngestDocument();
        // Test that equality still works when the ingest document uses primitive arrays,
        // since normal .equals() methods would not work for Maps containing these arrays.
        byte[] numbers = new byte[] { 0, 1, 2 };
        ingestDocument.setFieldValue("some.nested.array", numbers);
        IngestDocument copy = new IngestDocument(ingestDocument);
        byte[] copiedNumbers = copy.getFieldValue("some.nested.array", byte[].class);
        assertArrayEquals(numbers, copiedNumbers);
        // the copy holds a distinct array instance with equal contents
        assertNotEquals(numbers, copiedNumbers);
        assertIngestDocument(ingestDocument, copy);
    }

    public void testNullsAreEqual() {
        assertIngestDocument(null, null);
    }

    // Expects failure in every direction: doc vs null, null vs doc, and both orders.
    private static void assertThrowsOnComparision(IngestDocument document1, IngestDocument document2) {
        expectThrows(AssertionError.class, () -> assertIngestDocument(document1, null));
        expectThrows(AssertionError.class, () -> assertIngestDocument(null, document2));
        expectThrows(AssertionError.class, () -> assertIngestDocument(document1, document2));
        expectThrows(AssertionError.class, () -> assertIngestDocument(document2, document1));
    }
}
|
IngestDocumentMatcherTests
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/JpaResultConverters.java
|
{
"start": 1250,
"end": 1479
}
|
class ____ {
/**
* {@code private} to prevent instantiation.
*/
private JpaResultConverters() {}
/**
* Converts the given {@link Blob} into a {@code byte[]}.
*
* @author Thomas Darimont
*/
public
|
JpaResultConverters
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/util/IntArrayList.java
|
{
"start": 983,
"end": 2477
}
|
/**
 * A minimal growable list of primitive {@code int} values, avoiding the boxing
 * overhead of {@code ArrayList<Integer>}. Not thread-safe.
 */
public class IntArrayList {

    // number of live elements; the backing array may be longer
    private int size;
    private int[] array;

    public IntArrayList(final int capacity) {
        this.array = new int[capacity];
        this.size = 0;
    }

    /** Returns the number of elements currently stored. */
    public int size() {
        return size;
    }

    /** Appends {@code number} at the end; always succeeds and returns {@code true}. */
    public boolean add(final int number) {
        ensureCapacity(size + 1);
        array[size] = number;
        size++;
        return true;
    }

    /**
     * Removes and returns the last element.
     *
     * @throws NoSuchElementException if the list is empty
     */
    public int removeLast() {
        if (isEmpty()) {
            throw new NoSuchElementException();
        }
        size--;
        return array[size];
    }

    /** Logically empties the list; the backing array is retained. */
    public void clear() {
        this.size = 0;
    }

    public boolean isEmpty() {
        return size == 0;
    }

    // Grows the backing array (roughly doubling, capped near Integer.MAX_VALUE)
    // so it can hold at least `required` elements.
    private void ensureCapacity(final int required) {
        if (required <= array.length) {
            return;
        }
        final long doubled = Math.min(2L * array.length, Integer.MAX_VALUE - 8);
        final int newLength = (int) Math.max(doubled, required);
        array = Arrays.copyOf(array, newLength);
    }

    /** Returns a copy of the contents, trimmed to {@link #size()}. */
    public int[] toArray() {
        return Arrays.copyOf(array, size);
    }

    /** Shared empty instance; any mutating call throws. */
    public static final IntArrayList EMPTY =
            new IntArrayList(0) {
                @Override
                public boolean add(int number) {
                    throw new UnsupportedOperationException();
                }

                @Override
                public int removeLast() {
                    throw new UnsupportedOperationException();
                }
            };
}
|
IntArrayList
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirSatisfyStoragePolicyOp.java
|
{
"start": 1652,
"end": 5784
}
|
class ____ {
/**
* Private constructor for preventing FSDirSatisfyStoragePolicyOp object
* creation. Static-only class.
*/
private FSDirSatisfyStoragePolicyOp() {
}
/**
* Satisfy storage policy function which will add the entry to SPS call queue
* and will perform satisfaction async way.
*
* @param fsd
* fs directory
* @param bm
* block manager
* @param src
* source path
* @param logRetryCache
* whether to record RPC ids in editlog for retry cache rebuilding
* @return file status info
* @throws IOException
*/
static FileStatus satisfyStoragePolicy(FSDirectory fsd, BlockManager bm,
String src, boolean logRetryCache) throws IOException {
assert fsd.getFSNamesystem().hasWriteLock(RwLockMode.FS);
FSPermissionChecker pc = fsd.getPermissionChecker();
INodesInPath iip;
fsd.writeLock();
try {
// check operation permission.
iip = fsd.resolvePath(pc, src, DirOp.WRITE);
if (fsd.isPermissionEnabled()) {
fsd.checkPathAccess(pc, iip, FsAction.WRITE);
}
INode inode = FSDirectory.resolveLastINode(iip);
if (inode.isFile() && inode.asFile().numBlocks() == 0) {
if (NameNode.LOG.isInfoEnabled()) {
NameNode.LOG.info(
"Skipping satisfy storage policy on path:{} as "
+ "this file doesn't have any blocks!",
inode.getFullPathName());
}
} else if (inodeHasSatisfyXAttr(inode)) {
NameNode.LOG
.warn("Cannot request to call satisfy storage policy on path: "
+ inode.getFullPathName()
+ ", as this file/dir was already called for satisfying "
+ "storage policy.");
} else {
XAttr satisfyXAttr = XAttrHelper
.buildXAttr(XATTR_SATISFY_STORAGE_POLICY);
List<XAttr> xAttrs = Arrays.asList(satisfyXAttr);
List<XAttr> existingXAttrs = XAttrStorage.readINodeXAttrs(inode);
List<XAttr> newXAttrs = FSDirXAttrOp.setINodeXAttrs(fsd, existingXAttrs,
xAttrs, EnumSet.of(XAttrSetFlag.CREATE));
XAttrStorage.updateINodeXAttrs(inode, newXAttrs,
iip.getLatestSnapshotId());
fsd.getEditLog().logSetXAttrs(src, xAttrs, logRetryCache);
// Adding directory in the pending queue, so FileInodeIdCollector
// process directory child in batch and recursively
StoragePolicySatisfyManager spsManager =
fsd.getBlockManager().getSPSManager();
if (spsManager != null) {
spsManager.addPathId(inode.getId());
}
}
} finally {
fsd.writeUnlock();
}
return fsd.getAuditFileInfo(iip);
}
static boolean unprotectedSatisfyStoragePolicy(INode inode, FSDirectory fsd) {
if (inode.isFile() && inode.asFile().numBlocks() == 0) {
return false;
} else {
// Adding directory in the pending queue, so FileInodeIdCollector process
// directory child in batch and recursively
StoragePolicySatisfyManager spsManager =
fsd.getBlockManager().getSPSManager();
if (spsManager != null) {
spsManager.addPathId(inode.getId());
}
return true;
}
}
private static boolean inodeHasSatisfyXAttr(INode inode) {
final XAttrFeature f = inode.getXAttrFeature();
if (inode.isFile() && f != null
&& f.getXAttr(XATTR_SATISFY_STORAGE_POLICY) != null) {
return true;
}
return false;
}
static void removeSPSXattr(FSDirectory fsd, INode inode, XAttr spsXAttr)
throws IOException {
try {
fsd.writeLock();
List<XAttr> existingXAttrs = XAttrStorage.readINodeXAttrs(inode);
existingXAttrs.remove(spsXAttr);
XAttrStorage.updateINodeXAttrs(inode, existingXAttrs, INodesInPath
.fromINode(inode).getLatestSnapshotId());
List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1);
xAttrs.add(spsXAttr);
fsd.getEditLog().logRemoveXAttrs(inode.getFullPathName(), xAttrs, false);
} finally {
fsd.writeUnlock();
}
}
}
|
FSDirSatisfyStoragePolicyOp
|
java
|
apache__camel
|
tooling/camel-tooling-model/src/main/java/org/apache/camel/tooling/model/Strings.java
|
{
"start": 895,
"end": 4885
}
|
class ____ {
    private Strings() {
        // Helper class — static utilities only; not instantiable.
    }
/**
* Returns true if the given text is null or empty string or has <var>null</var> as the value
*/
public static boolean isNullOrEmpty(String text) {
return text == null || text.isEmpty() || "null".equals(text);
}
public static String cutLastZeroDigit(String version) {
String answer = version;
// cut last digit so its not 2.18.0 but 2.18
String[] parts = version.split("\\.");
if (parts.length == 3 && parts[2].equals("0")) {
answer = parts[0] + "." + parts[1];
}
return answer;
}
/**
* To wrap long camel cased texts by words.
*
* @param option the option which is camel cased.
* @param watermark a watermark to denote the size to cut after
* @param lineSep the new line to use when breaking into a new line
*/
public static String wrapCamelCaseWords(String option, int watermark, String lineSep) {
String text = option.replaceAll("(?=[A-Z][a-z])", " ");
text = wrapWords(text, "", lineSep, watermark, false);
text = Character.toUpperCase(text.charAt(0)) + text.substring(1);
if (text.startsWith(lineSep)) {
text = text.substring(lineSep.length());
}
if (text.endsWith(lineSep)) {
text = text.substring(0, text.length() - lineSep.length());
}
return text;
}
    /**
     * To wrap a big line by words.
     *
     * @param line the big line
     * @param wordSep the word separator
     * @param lineSep the new line to use when breaking into a new line
     * @param watermark a watermark to denote the size to cut after
     * @param wrapLongWords whether to wrap long words
     * @return the wrapped text, or {@code null} when {@code line} is {@code null}
     */
    public static String wrapWords(String line, String wordSep, String lineSep, int watermark, boolean wrapLongWords) {
        if (line == null) {
            return null;
        } else {
            // fall back to sensible defaults when separators are not supplied
            if (lineSep == null) {
                lineSep = System.lineSeparator();
            }
            if (wordSep == null) {
                wordSep = "";
            }
            if (watermark < 1) {
                watermark = 1;
            }
            int inputLineLength = line.length();
            int offset = 0;
            StringBuilder sb = new StringBuilder(inputLineLength + 32);
            int currentLength = 0;
            while (offset < inputLineLength) {
                // skip inter-word spaces
                if (line.charAt(offset) == ' ') {
                    offset++;
                    continue;
                }
                int next = line.indexOf(' ', offset);
                if (next < 0) {
                    // last word of the input line
                    next = inputLineLength;
                    if (wrapLongWords && inputLineLength - offset > watermark) {
                        // hard-break an overlong trailing word at the watermark
                        if (currentLength > 0) {
                            sb.append(wordSep);
                            currentLength += wordSep.length();
                        }
                        // NOTE(review): the append end index is 'watermark - currentLength'
                        // (absolute, not offset-relative) — verify behavior when offset > 0
                        sb.append(line, offset, watermark - currentLength);
                        sb.append(lineSep);
                        offset += watermark - currentLength;
                    }
                }
                if (currentLength + (currentLength > 0 ? wordSep.length() : 0) + next - offset <= watermark) {
                    // the word still fits on the current output line
                    if (currentLength > 0) {
                        sb.append(wordSep);
                        currentLength += wordSep.length();
                    }
                    sb.append(line, offset, next);
                    currentLength += next - offset;
                    offset = next + 1;
                } else {
                    // the word would overflow: start a new output line first
                    sb.append(lineSep);
                    sb.append(line, offset, next);
                    currentLength = next - offset;
                    offset = next + 1;
                }
            }
            return sb.toString();
        }
    }
/**
* Returns the base
|
Strings
|
java
|
quarkusio__quarkus
|
extensions/reactive-mssql-client/deployment/src/test/java/io/quarkus/reactive/mssql/client/SamePoolInstanceTest.java
|
{
"start": 291,
"end": 826
}
|
// Verifies that the injected Mutiny Pool and the injected Vert.x core Pool are
// views over the same underlying pool instance (after unwrapping CDI proxies).
class SamePoolInstanceTest {

    @RegisterExtension
    static final QuarkusUnitTest config = new QuarkusUnitTest()
            .withConfigurationResource("application-default-datasource.properties")
            .overrideConfigKey("quarkus.devservices.enabled", "false");

    @Inject
    io.vertx.mutiny.sqlclient.Pool mutinyPool;

    @Inject
    io.vertx.sqlclient.Pool pool;

    @Test
    public void test() {
        // unwrap both client proxies before comparing identity
        Assertions.assertThat(ClientProxy.unwrap(pool)).isSameAs(ClientProxy.unwrap(mutinyPool.getDelegate()));
    }
}
|
SamePoolInstanceTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
|
{
"start": 1718,
"end": 6404
}
|
class ____ {
{
GenericTestUtils.setLogLevel(BlockPlacementPolicy.LOG, Level.TRACE);
}
protected NetworkTopology cluster;
protected DatanodeDescriptor dataNodes[];
protected static final int BLOCK_SIZE = 1024;
protected NameNode namenode;
protected DatanodeManager dnManager;
protected BlockPlacementPolicy replicator;
private BlockPlacementPolicy striptedPolicy;
protected final String filename = "/dummyfile.txt";
protected DatanodeStorageInfo[] storages;
protected String blockPlacementPolicy;
protected NamenodeProtocols nameNodeRpc = null;
    // Pushes synthetic utilization figures into the datanode's first storage and
    // delivers a heartbeat so the namenode's view reflects them.
    void updateHeartbeatWithUsage(DatanodeDescriptor dn,
        long capacity, long dfsUsed, long remaining, long blockPoolUsed,
        long dnCacheCapacity, long dnCacheUsed, int xceiverCount,
        int volFailures) {
        dn.getStorageInfos()[0].setUtilizationForTesting(
            capacity, dfsUsed, remaining, blockPoolUsed);
        dnManager.getHeartbeatManager().updateHeartbeat(dn,
            BlockManagerTestUtil.getStorageReportsForDatanode(dn),
            dnCacheCapacity, dnCacheUsed, xceiverCount, volFailures, null);
    }
abstract DatanodeDescriptor[] getDatanodeDescriptors(Configuration conf);
    // Boots a single NameNode with the subclass-selected block placement policy,
    // registers the subclass-provided datanodes in the network topology, and
    // seeds their heartbeats with default usage.
    @BeforeEach
    public void setupCluster() throws Exception {
        Configuration conf = new HdfsConfiguration();
        dataNodes = getDatanodeDescriptors(conf);

        FileSystem.setDefaultUri(conf, "hdfs://localhost:0");
        conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "0.0.0.0:0");
        File baseDir = PathUtils.getTestDir(TestReplicationPolicy.class);
        conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
            new File(baseDir, "name").getPath());
        // the policy under test is chosen by the subclass via this field
        conf.set(DFSConfigKeys.DFS_BLOCK_REPLICATOR_CLASSNAME_KEY,
            blockPlacementPolicy);
        conf.setBoolean(
            DFSConfigKeys.DFS_NAMENODE_AVOID_STALE_DATANODE_FOR_READ_KEY, true);
        conf.setBoolean(
            DFSConfigKeys.DFS_NAMENODE_AVOID_STALE_DATANODE_FOR_WRITE_KEY, true);
        DFSTestUtil.formatNameNode(conf);
        namenode = new NameNode(conf);
        nameNodeRpc = namenode.getRpcServer();
        final BlockManager bm = namenode.getNamesystem().getBlockManager();
        replicator = bm.getBlockPlacementPolicy();
        striptedPolicy = bm.getStriptedBlockPlacementPolicy();
        cluster = bm.getDatanodeManager().getNetworkTopology();
        dnManager = bm.getDatanodeManager();
        // construct network topology
        for (int i = 0; i < dataNodes.length; i++) {
            cluster.add(dataNodes[i]);
            bm.getDatanodeManager().getHeartbeatManager().addDatanode(
                dataNodes[i]);
            bm.getDatanodeManager().getHeartbeatManager().updateDnStat(
                dataNodes[i]);
        }
        updateHeartbeatWithUsage();
    }
void updateHeartbeatWithUsage() {
for (int i=0; i < dataNodes.length; i++) {
updateHeartbeatWithUsage(dataNodes[i],
2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L,
2* HdfsServerConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L, 0L, 0L, 0, 0);
}
}
public BlockPlacementPolicy getStriptedPolicy() {
return striptedPolicy;
}
@AfterEach
public void tearDown() throws Exception {
namenode.stop();
}
boolean isOnSameRack(DatanodeStorageInfo left, DatanodeStorageInfo right) {
return isOnSameRack(left, right.getDatanodeDescriptor());
}
boolean isOnSameRack(DatanodeStorageInfo left, DatanodeDescriptor right) {
return cluster.isOnSameRack(left.getDatanodeDescriptor(), right);
}
DatanodeStorageInfo[] chooseTarget(int numOfReplicas) {
return chooseTarget(numOfReplicas, dataNodes[0]);
}
DatanodeStorageInfo[] chooseTarget(int numOfReplicas,
DatanodeDescriptor writer) {
return chooseTarget(numOfReplicas, writer,
new ArrayList<DatanodeStorageInfo>());
}
DatanodeStorageInfo[] chooseTarget(int numOfReplicas,
List<DatanodeStorageInfo> chosenNodes) {
return chooseTarget(numOfReplicas, dataNodes[0], chosenNodes);
}
DatanodeStorageInfo[] chooseTarget(int numOfReplicas,
DatanodeDescriptor writer, List<DatanodeStorageInfo> chosenNodes) {
return chooseTarget(numOfReplicas, writer, chosenNodes, null);
}
DatanodeStorageInfo[] chooseTarget(int numOfReplicas,
List<DatanodeStorageInfo> chosenNodes, Set<Node> excludedNodes) {
return chooseTarget(numOfReplicas, dataNodes[0], chosenNodes,
excludedNodes);
}
DatanodeStorageInfo[] chooseTarget(int numOfReplicas,
DatanodeDescriptor writer, List<DatanodeStorageInfo> chosenNodes,
Set<Node> excludedNodes) {
return replicator.chooseTarget(filename, numOfReplicas, writer,
chosenNodes, false, excludedNodes, BLOCK_SIZE,
TestBlockStoragePolicy.DEFAULT_STORAGE_POLICY, null);
}
}
|
BaseReplicationPolicyTest
|
java
|
apache__avro
|
lang/java/perf/src/main/java/org/apache/avro/perf/test/reflect/ReflectBigRecordTest.java
|
{
"start": 3257,
"end": 4369
}
|
class ____ extends BasicState {
private final Schema schema;
private byte[] testData;
private Decoder decoder;
public TestStateDecode() {
super();
final String jsonText = ReflectData.get().getSchema(BigRecord.class).toString();
this.schema = new Schema.Parser().parse(jsonText);
}
/**
* Generate test data.
*
* @throws IOException Could not setup test data
*/
@Setup(Level.Trial)
public void doSetupTrial() throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
Encoder encoder = super.newEncoder(true, baos);
ReflectDatumWriter<BigRecord> writer = new ReflectDatumWriter<>(schema);
for (int i = 0; i < getBatchSize(); i++) {
final BigRecord r = new BigRecord(super.getRandom());
writer.write(r, encoder);
}
this.testData = baos.toByteArray();
}
@Setup(Level.Invocation)
public void doSetupInvocation() throws Exception {
this.decoder = DecoderFactory.get().validatingDecoder(schema, super.newDecoder(this.testData));
}
}
}
|
TestStateDecode
|
java
|
spring-projects__spring-security
|
web/src/main/java/org/springframework/security/web/jackson/CookieDeserializer.java
|
{
"start": 1469,
"end": 2675
}
|
class ____ extends ValueDeserializer<Cookie> {
@Override
public Cookie deserialize(JsonParser jp, DeserializationContext ctxt) throws JacksonException {
JsonNode jsonNode = ctxt.readTree(jp);
Cookie cookie = new Cookie(readJsonNode(jsonNode, "name").stringValue(),
readJsonNode(jsonNode, "value").stringValue());
JsonNode domainNode = readJsonNode(jsonNode, "domain");
cookie.setDomain((domainNode.isMissingNode()) ? null : domainNode.stringValue());
cookie.setMaxAge(readJsonNode(jsonNode, "maxAge").asInt(-1));
cookie.setSecure(readJsonNode(jsonNode, "secure").asBoolean());
JsonNode pathNode = readJsonNode(jsonNode, "path");
cookie.setPath((pathNode.isMissingNode()) ? null : pathNode.stringValue());
JsonNode attributes = readJsonNode(jsonNode, "attributes");
cookie.setHttpOnly(readJsonNode(attributes, "HttpOnly") != null);
return cookie;
}
private JsonNode readJsonNode(JsonNode jsonNode, String field) {
return hasNonNullField(jsonNode, field) ? jsonNode.get(field) : MissingNode.getInstance();
}
private boolean hasNonNullField(JsonNode jsonNode, String field) {
return jsonNode.has(field) && !(jsonNode.get(field) instanceof NullNode);
}
}
|
CookieDeserializer
|
java
|
google__error-prone
|
docgen/src/main/java/com/google/errorprone/DocGenTool.java
|
{
"start": 1707,
"end": 1768
}
|
class ____ {
@Parameters(separators = "=")
static
|
DocGenTool
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/doris/parser/DorisStatementParser.java
|
{
"start": 380,
"end": 1145
}
|
class ____
extends StarRocksStatementParser {
public DorisStatementParser(SQLExprParser parser) {
super(parser);
dbType = DbType.doris;
}
public DorisStatementParser(String sql, SQLParserFeature... features) {
this(new DorisExprParser(sql, features));
}
@Override
public DorisSelectParser createSQLSelectParser() {
return new DorisSelectParser(this.exprParser, selectListCache);
}
public DorisCreateTableParser getSQLCreateTableParser() {
return new DorisCreateTableParser(this.exprParser);
}
@Override
protected void parseInsertOverwrite(SQLInsertInto insertStatement) {
insertStatement.setOverwrite(true);
accept(Token.TABLE);
}
}
|
DorisStatementParser
|
java
|
spring-projects__spring-boot
|
module/spring-boot-flyway/src/test/java/org/springframework/boot/flyway/autoconfigure/FlywayAutoConfigurationTests.java
|
{
"start": 42582,
"end": 43375
}
|
class ____ {
@Bean
FlywayMigrationInitializer customFlywayMigrationInitializer(Flyway flyway) {
return new FlywayMigrationInitializer(flyway);
}
@Bean
LocalContainerEntityManagerFactoryBean entityManagerFactoryBean(DataSource dataSource) {
LocalContainerEntityManagerFactoryBean localContainerEntityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean();
localContainerEntityManagerFactoryBean.setDataSource(dataSource);
localContainerEntityManagerFactoryBean.setJpaVendorAdapter(new HibernateJpaVendorAdapter());
localContainerEntityManagerFactoryBean.setJpaPropertyMap(configureJpaProperties());
return localContainerEntityManagerFactoryBean;
}
}
@Configuration(proxyBeanMethods = false)
static
|
CustomFlywayMigrationInitializerWithJpaConfiguration
|
java
|
quarkusio__quarkus
|
extensions/smallrye-fault-tolerance/deployment/src/test/java/io/quarkus/smallrye/faulttolerance/test/config/TestConfigExceptionB1.java
|
{
"start": 64,
"end": 125
}
|
class ____ extends TestConfigExceptionB {
}
|
TestConfigExceptionB1
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/core/publisher/SinkManyReplayProcessorTest.java
|
{
"start": 1568,
"end": 18603
}
|
class ____ {
@BeforeEach
public void virtualTime() {
VirtualTimeScheduler.getOrSet();
}
@AfterEach
public void teardownVirtualTime() {
VirtualTimeScheduler.reset();
}
@Test
public void currentSubscriberCount() {
Sinks.Many<Integer> sink = SinkManyReplayProcessor.create();
assertThat(sink.currentSubscriberCount()).isZero();
sink.asFlux().subscribe();
assertThat(sink.currentSubscriberCount()).isOne();
sink.asFlux().subscribe();
assertThat(sink.currentSubscriberCount()).isEqualTo(2);
}
@Test
public void unbounded() {
SinkManyReplayProcessor<Integer> rp = SinkManyReplayProcessor.create(16, true);
AssertSubscriber<Integer> ts = AssertSubscriber.create(0L);
rp.subscribe(ts);
rp.onNext(1);
rp.onNext(2);
rp.onNext(3);
rp.onComplete();
assertThat(rp.currentSubscriberCount()).as("has subscriber").isZero();
ts.assertNoValues();
ts.request(1);
ts.assertValues(1);
ts.request(2);
ts.assertValues(1, 2, 3)
.assertNoError()
.assertComplete();
}
@Test
public void bounded() {
SinkManyReplayProcessor<Integer> rp = SinkManyReplayProcessor.create(16, false);
AssertSubscriber<Integer> ts = AssertSubscriber.create(0L);
rp.subscribe(ts);
rp.onNext(1);
rp.onNext(2);
rp.onNext(3);
rp.onComplete();
assertThat(rp.currentSubscriberCount()).as("has subscriber").isZero();
ts.assertNoValues();
ts.request(1);
ts.assertValues(1);
ts.request(2);
ts.assertValues(1, 2, 3)
.assertNoError()
.assertComplete();
}
@Test
public void cancel() {
SinkManyReplayProcessor<Integer> rp = SinkManyReplayProcessor.create(16, false);
AssertSubscriber<Integer> ts = AssertSubscriber.create();
rp.subscribe(ts);
ts.cancel();
assertThat(rp.currentSubscriberCount()).as("has subscriber").isZero();
}
@Test
public void unboundedAfter() {
SinkManyReplayProcessor<Integer> rp = SinkManyReplayProcessor.create(16, true);
AssertSubscriber<Integer> ts = AssertSubscriber.create(0L);
rp.onNext(1);
rp.onNext(2);
rp.onNext(3);
rp.onComplete();
rp.subscribe(ts);
assertThat(rp.currentSubscriberCount()).as("has subscriber").isZero();
ts.assertNoValues();
ts.request(1);
ts.assertValues(1);
ts.request(2);
ts.assertValues(1, 2, 3)
.assertNoError()
.assertComplete();
}
@Test
public void boundedAfter() {
SinkManyReplayProcessor<Integer> rp = SinkManyReplayProcessor.create(16, false);
AssertSubscriber<Integer> ts = AssertSubscriber.create(0L);
rp.onNext(1);
rp.onNext(2);
rp.onNext(3);
rp.onComplete();
rp.subscribe(ts);
assertThat(rp.currentSubscriberCount()).as("has subscriber").isZero();
ts.assertNoValues();
ts.request(1);
ts.assertValues(1);
ts.request(2);
ts.assertValues(1, 2, 3)
.assertNoError()
.assertComplete();
}
@Test
public void unboundedLong() {
SinkManyReplayProcessor<Integer> rp = SinkManyReplayProcessor.create(16, true);
AssertSubscriber<Integer> ts = AssertSubscriber.create(0L);
for (int i = 0; i < 256; i++) {
rp.onNext(i);
}
rp.onComplete();
rp.subscribe(ts);
assertThat(rp.currentSubscriberCount()).as("has subscriber").isZero();
ts.assertNoValues();
ts.request(Long.MAX_VALUE);
ts.assertValueCount(256)
.assertNoError()
.assertComplete();
}
@Test
public void boundedLong() {
SinkManyReplayProcessor<Integer> rp = SinkManyReplayProcessor.create(16, false);
for (int i = 0; i < 256; i++) {
rp.onNext(i);
}
rp.onComplete();
StepVerifier.create(rp.hide())
.expectNextCount(16)
.verifyComplete();
}
@Test
public void boundedLongError() {
SinkManyReplayProcessor<Integer> rp = SinkManyReplayProcessor.create(16, false);
for (int i = 0; i < 256; i++) {
rp.onNext(i);
}
rp.onError(new Exception("test"));
StepVerifier.create(rp.hide())
.expectNextCount(16)
.verifyErrorMessage("test");
}
@Test
public void unboundedFused() {
SinkManyReplayProcessor<Integer> rp = SinkManyReplayProcessor.create(16, true);
for (int i = 0; i < 256; i++) {
rp.onNext(i);
}
rp.onComplete();
StepVerifier.create(rp)
.expectFusion(Fuseable.ASYNC)
.expectNextCount(256)
.verifyComplete();
}
@Test
public void unboundedFusedError() {
SinkManyReplayProcessor<Integer> rp = SinkManyReplayProcessor.create(16, true);
for (int i = 0; i < 256; i++) {
rp.onNext(i);
}
rp.onError(new Exception("test"));
StepVerifier.create(rp)
.expectFusion(Fuseable.ASYNC)
.expectNextCount(256)
.verifyErrorMessage("test");
}
@Test
public void boundedFused() {
SinkManyReplayProcessor<Integer> rp = SinkManyReplayProcessor.create(16, false);
for (int i = 0; i < 256; i++) {
rp.onNext(i);
}
rp.onComplete();
StepVerifier.create(rp)
.expectFusion(Fuseable.ASYNC)
.expectNextCount(256)
.verifyComplete();
}
@Test
public void boundedFusedError() {
SinkManyReplayProcessor<Integer> rp = SinkManyReplayProcessor.create(16, false);
for (int i = 0; i < 256; i++) {
rp.onNext(i);
}
rp.onError(new Exception("test"));
StepVerifier.create(rp)
.expectFusion(Fuseable.ASYNC)
.expectNextCount(16)
.verifyErrorMessage("test");
}
@Test
public void boundedFusedAfter() {
SinkManyReplayProcessor<Integer> rp = SinkManyReplayProcessor.create(16, false);
StepVerifier.create(rp)
.expectFusion(Fuseable.ASYNC)
.then(() -> {
for (int i = 0; i < 256; i++) {
rp.onNext(i);
}
rp.onComplete();
})
.expectNextCount(256)
.verifyComplete();
}
@Test
public void timed() throws Exception {
VirtualTimeScheduler.getOrSet();
SinkManyReplayProcessor<Integer> rp =
SinkManyReplayProcessor.createTimeout(Duration.ofSeconds(1));
for (int i = 0; i < 5; i++) {
rp.onNext(i);
}
VirtualTimeScheduler.get().advanceTimeBy(Duration.ofSeconds(2));
for (int i = 5; i < 10; i++) {
rp.onNext(i);
}
rp.onComplete();
StepVerifier.create(rp.hide())
.expectFusion(Fuseable.NONE)
.expectNext(5,6,7,8,9)
.verifyComplete();
}
@Test
public void timedError() throws Exception {
VirtualTimeScheduler.getOrSet();
SinkManyReplayProcessor<Integer> rp =
SinkManyReplayProcessor.createTimeout(Duration.ofSeconds(1));
for (int i = 0; i < 5; i++) {
rp.onNext(i);
}
VirtualTimeScheduler.get().advanceTimeBy(Duration.ofSeconds(2));
for (int i = 5; i < 10; i++) {
rp.onNext(i);
}
rp.onError(new Exception("test"));
StepVerifier.create(rp.hide())
.expectNext(5,6,7,8,9)
.verifyErrorMessage("test");
}
@Test
public void timedAfter() throws Exception {
SinkManyReplayProcessor<Integer> rp =
SinkManyReplayProcessor.createTimeout(Duration.ofSeconds(1));
StepVerifier.create(rp.hide())
.expectFusion(Fuseable.NONE)
.then(() -> {
for (int i = 0; i < 5; i++) {
rp.onNext(i);
}
VirtualTimeScheduler.get().advanceTimeBy(Duration.ofSeconds(2));
for (int i = 5; i < 10; i++) {
rp.onNext(i);
}
rp.onComplete();
})
.expectNext(0,1,2,3,4,5,6,7,8,9)
.verifyComplete();
}
@Test
public void timedFused() throws Exception {
VirtualTimeScheduler.getOrSet();
SinkManyReplayProcessor<Integer> rp =
SinkManyReplayProcessor.createTimeout(Duration.ofSeconds(1));
for (int i = 0; i < 5; i++) {
rp.onNext(i);
}
VirtualTimeScheduler.get().advanceTimeBy(Duration.ofSeconds(2));
for (int i = 5; i < 10; i++) {
rp.onNext(i);
}
rp.onComplete();
StepVerifier.create(rp)
.expectFusion(Fuseable.NONE)
.expectNext(5,6,7,8,9)
.verifyComplete();
}
@Test
public void timedFusedError() throws Exception {
VirtualTimeScheduler.getOrSet();
SinkManyReplayProcessor<Integer> rp =
SinkManyReplayProcessor.createTimeout(Duration.ofSeconds(1));
for (int i = 0; i < 5; i++) {
rp.onNext(i);
}
VirtualTimeScheduler.get().advanceTimeBy(Duration.ofSeconds(2));
for (int i = 5; i < 10; i++) {
rp.onNext(i);
}
rp.onError(new Exception("test"));
StepVerifier.create(rp)
.expectFusion(Fuseable.NONE)
.expectNext(5,6,7,8,9)
.verifyErrorMessage("test");
}
@Test
public void timedFusedAfter() throws Exception {
SinkManyReplayProcessor<Integer> rp =
SinkManyReplayProcessor.createTimeout(Duration.ofSeconds(1));
StepVerifier.create(rp)
.expectFusion(Fuseable.NONE)
.then(() -> {
for (int i = 0; i < 5; i++) {
rp.onNext(i);
}
VirtualTimeScheduler.get().advanceTimeBy(Duration.ofSeconds(2));
for (int i = 5; i < 10; i++) {
rp.onNext(i);
}
rp.onComplete();
})
.expectNext(0,1,2,3,4,5,6,7,8,9)
.verifyComplete();
}
@Test
public void timedAndBound() throws Exception {
SinkManyReplayProcessor<Integer> rp =
SinkManyReplayProcessor.createSizeAndTimeout(5, Duration.ofSeconds(1));
for (int i = 0; i < 10; i++) {
rp.onNext(i);
}
VirtualTimeScheduler.get().advanceTimeBy(Duration.ofSeconds(2));
for (int i = 10; i < 20; i++) {
rp.onNext(i);
}
rp.onComplete();
StepVerifier.create(rp.hide())
.expectFusion(Fuseable.NONE)
.expectNext(15,16,17,18,19)
.verifyComplete();
assertThat(rp.currentSubscriberCount()).as("has subscriber").isZero();
}
@Test
public void timedAndBoundError() throws Exception {
SinkManyReplayProcessor<Integer> rp =
SinkManyReplayProcessor.createSizeAndTimeout(5, Duration.ofSeconds(1));
for (int i = 0; i < 10; i++) {
rp.onNext(i);
}
VirtualTimeScheduler.get().advanceTimeBy(Duration.ofSeconds(2));
for (int i = 10; i < 20; i++) {
rp.onNext(i);
}
rp.onError(new Exception("test"));
StepVerifier.create(rp.hide())
.expectFusion(Fuseable.NONE)
.expectNext(15,16,17,18,19)
.verifyErrorMessage("test");
assertThat(rp.currentSubscriberCount()).as("has subscriber").isZero();
}
@Test
public void timedAndBoundAfter() throws Exception {
SinkManyReplayProcessor<Integer> rp =
SinkManyReplayProcessor.createSizeAndTimeout(5, Duration.ofSeconds(1));
StepVerifier.create(rp.hide())
.expectFusion(Fuseable.NONE)
.then(() -> {
for (int i = 0; i < 10; i++) {
rp.onNext(i);
}
VirtualTimeScheduler.get().advanceTimeBy(Duration.ofSeconds(2));
for (int i = 10; i < 20; i++) {
rp.onNext(i);
}
rp.onComplete();
})
.expectNextCount(20)
.verifyComplete();
assertThat(rp.currentSubscriberCount()).as("has subscriber").isZero();
}
@Test
public void timedAndBoundFused() throws Exception {
SinkManyReplayProcessor<Integer> rp =
SinkManyReplayProcessor.createSizeAndTimeout(5, Duration.ofSeconds(1));
for (int i = 0; i < 10; i++) {
rp.onNext(i);
}
VirtualTimeScheduler.get().advanceTimeBy(Duration.ofSeconds(2));
for (int i = 10; i < 20; i++) {
rp.onNext(i);
}
rp.onComplete();
StepVerifier.create(rp)
.expectFusion(Fuseable.ASYNC)
.expectNext(15,16,17,18,19)
.verifyComplete();
assertThat(rp.currentSubscriberCount()).as("has subscriber").isZero();
}
@Test
public void timedAndBoundFusedError() throws Exception {
SinkManyReplayProcessor<Integer> rp =
SinkManyReplayProcessor.createSizeAndTimeout(5, Duration.ofSeconds(1));
for (int i = 0; i < 10; i++) {
rp.onNext(i);
}
VirtualTimeScheduler.get().advanceTimeBy(Duration.ofSeconds(2));
for (int i = 10; i < 20; i++) {
rp.onNext(i);
}
rp.onError(new Exception("test"));
StepVerifier.create(rp)
.expectFusion(Fuseable.ASYNC)
.expectNext(15,16,17,18,19)
.verifyErrorMessage("test");
assertThat(rp.currentSubscriberCount()).as("has subscriber").isZero();
}
@Test
public void timedAndBoundedOnSubscribeAndState() {
testReplayProcessorState(SinkManyReplayProcessor.createSizeAndTimeout(1, Duration.ofSeconds(1)));
}
@Test
public void timedOnSubscribeAndState() {
testReplayProcessorState(SinkManyReplayProcessor.createTimeout(Duration.ofSeconds(1)));
}
@Test
public void unboundedOnSubscribeAndState() {
testReplayProcessorState(SinkManyReplayProcessor.create(1, true));
}
@Test
public void boundedOnSubscribeAndState() {
testReplayProcessorState(SinkManyReplayProcessor.cacheLast());
}
@SuppressWarnings("unchecked")
void testReplayProcessorState(SinkManyReplayProcessor<String> rp) {
TestLogger testLogger = new TestLogger();
LoggerUtils.enableCaptureWith(testLogger);
try {
Disposable d1 = rp.subscribe();
rp.subscribe();
SinkManyReplayProcessor.ReplayInner<String> s = ((SinkManyReplayProcessor.ReplayInner<String>) rp.inners()
.findFirst()
.get());
assertThat(d1).isEqualTo(s.actual());
assertThat(s.isEmpty()).isTrue();
assertThat(s.isCancelled()).isFalse();
assertThat(s.isCancelled()).isFalse();
assertThat(rp.getPrefetch()).isEqualTo(Integer.MAX_VALUE);
rp.tryEmitNext("test").orThrow();
rp.onComplete();
rp.onComplete();
Exception e = new RuntimeException("test");
rp.onError(e);
Assertions.assertThat(testLogger.getErrContent())
.contains("Operator called default onErrorDropped")
.contains(e.getMessage());
}
finally {
LoggerUtils.disableCapture();
}
}
@Test
public void failNegativeBufferSizeBounded() {
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> {
SinkManyReplayProcessor.create(-1);
});
}
@Test
public void failNegativeBufferBoundedAndTimed() {
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> {
SinkManyReplayProcessor.createSizeAndTimeout(-1, Duration.ofSeconds(1));
});
}
@Test
public void scanProcessor() {
SinkManyReplayProcessor<String> test = SinkManyReplayProcessor.create(16, false);
Subscription subscription = Operators.emptySubscription();
test.onSubscribe(subscription);
assertThat(test.scan(Scannable.Attr.PARENT)).isEqualTo(subscription);
assertThat(test.scan(Scannable.Attr.CAPACITY)).isEqualTo(16);
assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
assertThat(test.scan(Scannable.Attr.ERROR)).isNull();
test.onError(new IllegalStateException("boom"));
assertThat(test.scan(Scannable.Attr.ERROR)).hasMessage("boom");
assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
}
@Test
public void scanProcessorUnboundedCapacity() {
SinkManyReplayProcessor<String> test = SinkManyReplayProcessor.create(16, true);
assertThat(test.scan(Scannable.Attr.CAPACITY)).isEqualTo(Integer.MAX_VALUE);
}
@Test
public void inners() {
Sinks.Many<Integer> sink = SinkManyReplayProcessor.create(1);
CoreSubscriber<Integer> notScannable = new BaseSubscriber<Integer>() {};
InnerConsumer<Integer> scannable = new LambdaSubscriber<>(null, null, null, null);
assertThat(sink.inners()).as("before subscriptions").isEmpty();
sink.asFlux().subscribe(notScannable);
sink.asFlux().subscribe(scannable);
assertThat(sink.inners())
.asInstanceOf(InstanceOfAssertFactories.LIST)
.as("after subscriptions")
.hasSize(2)
.extracting(l -> (Object) ((SinkManyReplayProcessor.ReplayInner<?>) l).actual)
.containsExactly(notScannable, scannable);
}
}
|
SinkManyReplayProcessorTest
|
java
|
quarkusio__quarkus
|
extensions/smallrye-graphql/deployment/src/main/java/io/quarkus/smallrye/graphql/deployment/OverridableIndex.java
|
{
"start": 6254,
"end": 6657
}
|
class
____ t.name().toString().compareTo(t1.name().toString());
}
return -1;
}
};
private Comparator<MethodInfo> methodInfoComparator = new Comparator<MethodInfo>() {
@Override
public int compare(MethodInfo t, MethodInfo t1) {
if (classInfoComparator.compare(t.declaringClass(), t1.declaringClass()) == 0) { // Same
|
return
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java
|
{
"start": 4925,
"end": 5530
}
|
class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory wkb;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory wkb) {
this.source = source;
this.wkb = wkb;
}
@Override
public ToStringFromGeoShapeEvaluator get(DriverContext context) {
return new ToStringFromGeoShapeEvaluator(source, wkb.get(context), context);
}
@Override
public String toString() {
return "ToStringFromGeoShapeEvaluator[" + "wkb=" + wkb + "]";
}
}
}
|
Factory
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/cache/QueryCacheIncompleteTest.java
|
{
"start": 2529,
"end": 2836
}
|
class ____ {
@Id
@GeneratedValue
private Long id;
@Column(nullable = false)
private int age;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
}
}
|
Admin
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
|
{
"start": 7173,
"end": 7525
}
|
class ____ extends LongParam {
/**
* Parameter name.
*/
public static final String NAME = HttpFSFileSystem.ACCESS_TIME_PARAM;
/**
* Constructor.
*/
public AccessTimeParam() {
super(NAME, -1l);
}
}
/**
* Class for block-size parameter.
*/
@InterfaceAudience.Private
public static
|
AccessTimeParam
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/orphan/one2one/OneToOneLazyNonOptionalOrphanRemovalTest.java
|
{
"start": 2960,
"end": 3956
}
|
class ____ {
@Id
private Integer id;
private String model;
// represents a bidirectional one-to-one
@OneToOne(orphanRemoval = true, fetch = FetchType.LAZY, optional = false)
private PaintColor paintColor;
// represents a unidirectional one-to-one
@OneToOne(orphanRemoval = true, fetch = FetchType.LAZY, optional = false)
private Engine engine;
Car() {
// Required by JPA
}
Car(Integer id, Engine engine, PaintColor paintColor) {
this.id = id;
this.engine = engine;
this.paintColor = paintColor;
paintColor.setCar( this );
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public PaintColor getPaintColor() {
return paintColor;
}
public void setPaintColor(PaintColor paintColor) {
this.paintColor = paintColor;
}
public Engine getEngine() {
return engine;
}
public void setEngine(Engine engine) {
this.engine = engine;
}
}
@Entity(name = "Engine")
public static
|
Car
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/builder/xml/XMLMapperEntityResolver.java
|
{
"start": 1032,
"end": 3129
}
|
class ____ implements EntityResolver {
private static final String IBATIS_CONFIG_SYSTEM = "ibatis-3-config.dtd";
private static final String IBATIS_MAPPER_SYSTEM = "ibatis-3-mapper.dtd";
private static final String MYBATIS_CONFIG_SYSTEM = "mybatis-3-config.dtd";
private static final String MYBATIS_MAPPER_SYSTEM = "mybatis-3-mapper.dtd";
private static final String MYBATIS_CONFIG_DTD = "org/apache/ibatis/builder/xml/mybatis-3-config.dtd";
private static final String MYBATIS_MAPPER_DTD = "org/apache/ibatis/builder/xml/mybatis-3-mapper.dtd";
/**
* Converts a public DTD into a local one.
*
* @param publicId
* The public id that is what comes after "PUBLIC"
* @param systemId
* The system id that is what comes after the public id.
*
* @return The InputSource for the DTD
*
* @throws org.xml.sax.SAXException
* If anything goes wrong
*/
@Override
public InputSource resolveEntity(String publicId, String systemId) throws SAXException {
try {
if (systemId != null) {
String lowerCaseSystemId = systemId.toLowerCase(Locale.ENGLISH);
if (lowerCaseSystemId.contains(MYBATIS_CONFIG_SYSTEM) || lowerCaseSystemId.contains(IBATIS_CONFIG_SYSTEM)) {
return getInputSource(MYBATIS_CONFIG_DTD, publicId, systemId);
}
if (lowerCaseSystemId.contains(MYBATIS_MAPPER_SYSTEM) || lowerCaseSystemId.contains(IBATIS_MAPPER_SYSTEM)) {
return getInputSource(MYBATIS_MAPPER_DTD, publicId, systemId);
}
}
return null;
} catch (Exception e) {
throw new SAXException(e.toString());
}
}
private InputSource getInputSource(String path, String publicId, String systemId) {
InputSource source = null;
if (path != null) {
try {
InputStream in = Resources.getResourceAsStream(path);
source = new InputSource(in);
source.setPublicId(publicId);
source.setSystemId(systemId);
} catch (IOException e) {
// ignore, null is ok
}
}
return source;
}
}
|
XMLMapperEntityResolver
|
java
|
apache__avro
|
lang/java/trevni/core/src/main/java/org/apache/trevni/Crc32Checksum.java
|
{
"start": 956,
"end": 1384
}
|
class ____ extends Checksum {
private CRC32 crc32 = new CRC32();
@Override
public int size() {
return 4;
}
@Override
public ByteBuffer compute(ByteBuffer data) {
crc32.reset();
crc32.update(data.array(), data.position(), data.remaining());
ByteBuffer result = ByteBuffer.allocate(size());
result.putInt((int) crc32.getValue());
((Buffer) result).flip();
return result;
}
}
|
Crc32Checksum
|
java
|
alibaba__nacos
|
console/src/main/java/com/alibaba/nacos/console/handler/HealthHandler.java
|
{
"start": 857,
"end": 1159
}
|
interface ____ {
/**
* Perform readiness check to determine if Nacos is ready to handle requests.
*
* @return readiness result
* @throws NacosException if an error occurs during readiness check
*/
Result<String> checkReadiness() throws NacosException;
}
|
HealthHandler
|
java
|
elastic__elasticsearch
|
modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerOverrideTokenFilterFactory.java
|
{
"start": 1038,
"end": 2825
}
|
class ____ extends AbstractTokenFilterFactory {
private final StemmerOverrideMap overrideMap;
StemmerOverrideTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) throws IOException {
super(name);
List<String> rules = Analysis.getWordList(env, settings, "rules");
if (rules == null) {
throw new IllegalArgumentException("stemmer override filter requires either `rules` or `rules_path` to be configured");
}
StemmerOverrideFilter.Builder builder = new StemmerOverrideFilter.Builder(false);
parseRules(rules, builder, "=>");
overrideMap = builder.build();
}
@Override
public TokenStream create(TokenStream tokenStream) {
return new StemmerOverrideFilter(tokenStream, overrideMap);
}
static void parseRules(List<String> rules, StemmerOverrideFilter.Builder builder, String mappingSep) {
for (String rule : rules) {
String[] sides = rule.split(mappingSep, -1);
if (sides.length != 2) {
throw new RuntimeException("Invalid Keyword override Rule:" + rule);
}
String[] keys = sides[0].split(",", -1);
String override = sides[1].trim();
if (override.isEmpty() || override.indexOf(',') != -1) {
throw new RuntimeException("Invalid Keyword override Rule:" + rule);
}
for (String key : keys) {
String trimmedKey = key.trim();
if (trimmedKey.isEmpty()) {
throw new RuntimeException("Invalid Keyword override Rule:" + rule);
}
builder.add(trimmedKey, override);
}
}
}
}
|
StemmerOverrideTokenFilterFactory
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/types/parser/ByteParser.java
|
{
"start": 922,
"end": 5307
}
|
class ____ extends FieldParser<Byte> {
private byte result;
@Override
public int parseField(byte[] bytes, int startPos, int limit, byte[] delimiter, Byte reusable) {
if (startPos == limit) {
setErrorState(ParseErrorState.EMPTY_COLUMN);
return -1;
}
int val = 0;
boolean neg = false;
final int delimLimit = limit - delimiter.length + 1;
if (bytes[startPos] == '-') {
neg = true;
startPos++;
// check for empty field with only the sign
if (startPos == limit
|| (startPos < delimLimit && delimiterNext(bytes, startPos, delimiter))) {
setErrorState(ParseErrorState.NUMERIC_VALUE_ORPHAN_SIGN);
return -1;
}
}
for (int i = startPos; i < limit; i++) {
if (i < delimLimit && delimiterNext(bytes, i, delimiter)) {
if (i == startPos) {
setErrorState(ParseErrorState.EMPTY_COLUMN);
return -1;
}
this.result = (byte) (neg ? -val : val);
return i + delimiter.length;
}
if (bytes[i] < 48 || bytes[i] > 57) {
setErrorState(ParseErrorState.NUMERIC_VALUE_ILLEGAL_CHARACTER);
return -1;
}
val *= 10;
val += bytes[i] - 48;
if (val > Byte.MAX_VALUE && (!neg || val > -Byte.MIN_VALUE)) {
setErrorState(ParseErrorState.NUMERIC_VALUE_OVERFLOW_UNDERFLOW);
return -1;
}
}
this.result = (byte) (neg ? -val : val);
return limit;
}
@Override
public Byte createValue() {
return Byte.MIN_VALUE;
}
@Override
public Byte getLastResult() {
return Byte.valueOf(this.result);
}
/**
* Static utility to parse a field of type byte from a byte sequence that represents text
* characters (such as when read from a file stream).
*
* @param bytes The bytes containing the text data that should be parsed.
* @param startPos The offset to start the parsing.
* @param length The length of the byte sequence (counting from the offset).
* @return The parsed value.
* @throws NumberFormatException Thrown when the value cannot be parsed because the text
* represents not a correct number.
*/
public static final byte parseField(byte[] bytes, int startPos, int length) {
return parseField(bytes, startPos, length, (char) 0xffff);
}
/**
* Static utility to parse a field of type byte from a byte sequence that represents text
* characters (such as when read from a file stream).
*
* @param bytes The bytes containing the text data that should be parsed.
* @param startPos The offset to start the parsing.
* @param length The length of the byte sequence (counting from the offset).
* @param delimiter The delimiter that terminates the field.
* @return The parsed value.
* @throws NumberFormatException Thrown when the value cannot be parsed because the text
* represents not a correct number.
*/
public static final byte parseField(byte[] bytes, int startPos, int length, char delimiter) {
long val = 0;
boolean neg = false;
if (bytes[startPos] == delimiter) {
throw new NumberFormatException("Empty field.");
}
if (bytes[startPos] == '-') {
neg = true;
startPos++;
length--;
if (length == 0 || bytes[startPos] == delimiter) {
throw new NumberFormatException("Orphaned minus sign.");
}
}
for (; length > 0; startPos++, length--) {
if (bytes[startPos] == delimiter) {
return (byte) (neg ? -val : val);
}
if (bytes[startPos] < 48 || bytes[startPos] > 57) {
throw new NumberFormatException("Invalid character.");
}
val *= 10;
val += bytes[startPos] - 48;
if (val > Byte.MAX_VALUE && (!neg || val > -Byte.MIN_VALUE)) {
throw new NumberFormatException("Value overflow/underflow");
}
}
return (byte) (neg ? -val : val);
}
}
|
ByteParser
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java
|
{
"start": 872,
"end": 2819
}
|
class ____ extends AbstractWireSerializingTestCase<GetAutoFollowPatternAction.Response> {
@Override
protected Writeable.Reader<GetAutoFollowPatternAction.Response> instanceReader() {
return GetAutoFollowPatternAction.Response::new;
}
@Override
protected GetAutoFollowPatternAction.Response createTestInstance() {
int numPatterns = randomIntBetween(1, 8);
Map<String, AutoFollowPattern> patterns = Maps.newMapWithExpectedSize(numPatterns);
for (int i = 0; i < numPatterns; i++) {
AutoFollowPattern autoFollowPattern = new AutoFollowPattern(
"remote",
Collections.singletonList(randomAlphaOfLength(4)),
Collections.singletonList(randomAlphaOfLength(4)),
randomAlphaOfLength(4),
Settings.builder().put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), randomIntBetween(0, 4)).build(),
true,
randomIntBetween(0, Integer.MAX_VALUE),
randomIntBetween(0, Integer.MAX_VALUE),
randomIntBetween(0, Integer.MAX_VALUE),
randomIntBetween(0, Integer.MAX_VALUE),
ByteSizeValue.ofBytes(randomNonNegativeLong()),
ByteSizeValue.ofBytes(randomNonNegativeLong()),
randomIntBetween(0, Integer.MAX_VALUE),
ByteSizeValue.ofBytes(randomNonNegativeLong()),
TimeValue.timeValueMillis(500),
TimeValue.timeValueMillis(500)
);
patterns.put(randomAlphaOfLength(4), autoFollowPattern);
}
return new GetAutoFollowPatternAction.Response(patterns);
}
@Override
protected GetAutoFollowPatternAction.Response mutateInstance(GetAutoFollowPatternAction.Response instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
}
|
GetAutoFollowPatternResponseTests
|
java
|
apache__camel
|
components/camel-bindy/src/main/java/org/apache/camel/dataformat/bindy/BindyFixedLengthFactory.java
|
{
"start": 1769,
"end": 2033
}
|
class ____ allows to : Generate a model associated to a fixed length record, bind
* data from a record to the POJOs, export data of POJOs to a fixed length record and format data into String, Date,
* Double, ... according to the format/pattern defined
*/
public
|
who
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ABlockOutputStreamInterruption.java
|
{
"start": 16652,
"end": 18217
}
|
class ____
extends CountingProgressListener {
/** Event to trigger action. */
private final ProgressListenerEvent trigger;
/** Flag set when triggered. */
private final AtomicBoolean triggered = new AtomicBoolean(false);
/**
* Action to take on trigger.
*/
private final InvocationRaisingIOE action;
/**
* Create.
* @param thread thread to interrupt
* @param trigger event to trigger on
*/
private InterruptingProgressListener(
final Thread thread,
final ProgressListenerEvent trigger) {
this(trigger, thread::interrupt);
}
/**
* Create for any arbitrary action.
* @param trigger event to trigger on
* @param action action to take
*/
private InterruptingProgressListener(
final ProgressListenerEvent trigger,
final InvocationRaisingIOE action) {
this.trigger = trigger;
this.action = action;
}
@Override
public void progressChanged(final ProgressListenerEvent eventType,
final long transferredBytes) {
super.progressChanged(eventType, transferredBytes);
if (trigger == eventType && !triggered.getAndSet(true)) {
LOG.info("triggering action");
try {
action.apply();
} catch (IOException e) {
LOG.warn("action failed", e);
}
}
}
/**
* Assert that the trigger took place.
*/
private void assertTriggered() {
assertTrue(triggered.get(), "Not triggered");
}
}
}
|
InterruptingProgressListener
|
java
|
netty__netty
|
transport-classes-epoll/src/main/java/io/netty/channel/epoll/AbstractEpollStreamChannel.java
|
{
"start": 40932,
"end": 42136
}
|
class ____ {
private final AbstractEpollStreamChannel ch;
private final boolean autoRead;
private int len;
SpliceOutTask(AbstractEpollStreamChannel ch, int len, boolean autoRead) {
this.ch = ch;
this.len = len;
this.autoRead = autoRead;
}
public boolean spliceOut() throws Exception {
assert ch.eventLoop().inEventLoop();
try {
int splicedOut = Native.splice(ch.pipeIn.intValue(), -1, ch.socket.intValue(), -1, len);
len -= splicedOut;
if (len == 0) {
if (autoRead) {
// AutoRead was used and we spliced everything so start reading again
config().setAutoRead(true);
}
return true;
}
return false;
} catch (IOException e) {
if (autoRead) {
// AutoRead was used and we spliced everything so start reading again
config().setAutoRead(true);
}
throw e;
}
}
}
private final
|
SpliceOutTask
|
java
|
quarkusio__quarkus
|
independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/InjectionPointImpl.java
|
{
"start": 7806,
"end": 8663
}
|
class ____<X> extends AnnotatedBase implements AnnotatedMethod<X> {
private final Method method;
AnnotatedMethodImpl(Method method) {
super(method.getGenericReturnType(), null);
this.method = method;
}
@Override
public List<AnnotatedParameter<X>> getParameters() {
throw new UnsupportedOperationException();
}
@Override
public boolean isStatic() {
return Modifier.isStatic(method.getModifiers());
}
@SuppressWarnings("unchecked")
@Override
public AnnotatedType<X> getDeclaringType() {
return new AnnotatedTypeImpl<>((Class<X>) method.getDeclaringClass());
}
@Override
public Method getJavaMember() {
return method;
}
}
static
|
AnnotatedMethodImpl
|
java
|
quarkusio__quarkus
|
independent-projects/qute/core/src/main/java/io/quarkus/qute/EvaluatedParams.java
|
{
"start": 326,
"end": 8994
}
|
class ____ {
static final EvaluatedParams EMPTY = new EvaluatedParams(CompletedStage.ofVoid(), new Supplier<?>[0]);
/**
*
* @param context
* @return the evaluated params
*/
public static EvaluatedParams evaluate(EvalContext context) {
List<Expression> params = context.getParams();
if (params.isEmpty()) {
return EMPTY;
} else if (params.size() == 1) {
return new EvaluatedParams(context.evaluate(params.get(0)));
}
Supplier<?>[] allResults = new Supplier[params.size()];
List<CompletableFuture<?>> asyncResults = null;
int i = 0;
CompletedStage<?> failure = null;
Iterator<Expression> it = params.iterator();
while (it.hasNext()) {
Expression expression = it.next();
CompletionStage<?> result = context.evaluate(expression);
if (result instanceof CompletedStage) {
CompletedStage<?> completed = (CompletedStage<?>) result;
allResults[i++] = completed;
if (completed.isFailure()) {
failure = completed;
}
// No async computation needed
continue;
} else {
CompletableFuture<?> fu = result.toCompletableFuture();
if (asyncResults == null) {
asyncResults = new ArrayList<>();
}
asyncResults.add(fu);
allResults[i++] = Futures.toSupplier(fu);
}
}
CompletionStage<?> cs;
if (asyncResults == null) {
cs = failure != null ? failure : CompletedStage.ofVoid();
} else if (asyncResults.size() == 1) {
cs = asyncResults.get(0);
} else {
cs = CompletableFuture.allOf(asyncResults.toArray(new CompletableFuture[0]));
}
return new EvaluatedParams(cs, allResults);
}
public static EvaluatedParams evaluateMessageKey(EvalContext context) {
List<Expression> params = context.getParams();
if (params.isEmpty()) {
throw new IllegalArgumentException("No params to evaluate");
}
return new EvaluatedParams(context.evaluate(params.get(0)));
}
public static EvaluatedParams evaluateMessageParams(EvalContext context) {
List<Expression> params = context.getParams();
if (params.size() < 2) {
return EMPTY;
}
Supplier<?>[] allResults = new Supplier[params.size()];
List<CompletableFuture<?>> asyncResults = null;
int i = 0;
CompletedStage<?> failure = null;
Iterator<Expression> it = params.subList(1, params.size()).iterator();
while (it.hasNext()) {
CompletionStage<?> result = context.evaluate(it.next());
if (result instanceof CompletedStage) {
CompletedStage<?> completed = (CompletedStage<?>) result;
allResults[i++] = completed;
if (completed.isFailure()) {
failure = completed;
}
// No async computation needed
continue;
} else {
CompletableFuture<?> fu = result.toCompletableFuture();
if (asyncResults == null) {
asyncResults = new ArrayList<>();
}
asyncResults.add(fu);
allResults[i++] = Futures.toSupplier(fu);
}
}
CompletionStage<?> cs;
if (asyncResults == null) {
cs = failure != null ? failure : CompletedStage.ofVoid();
} else if (asyncResults.size() == 1) {
cs = asyncResults.get(0);
} else {
cs = CompletableFuture.allOf(asyncResults.toArray(new CompletableFuture[0]));
}
return new EvaluatedParams(cs, allResults);
}
public final CompletionStage<?> stage;
private final Supplier<?>[] results;
EvaluatedParams(CompletionStage<?> stage) {
this.stage = stage;
if (stage instanceof CompletedStage) {
this.results = new Supplier[] { (CompletedStage<?>) stage };
} else {
this.results = new Supplier[] { Futures.toSupplier(stage.toCompletableFuture()) };
}
}
EvaluatedParams(CompletionStage<?> stage, Supplier<?>[] results) {
this.stage = stage;
this.results = results;
}
public Object getResult(int index) throws InterruptedException, ExecutionException {
return results[index].get();
}
/**
*
* @param varargs
* @param types
* @return {@code true} if the parameter types match the type of the evaluated params
* @throws InterruptedException
* @throws ExecutionException
*/
public boolean parameterTypesMatch(boolean varargs, Class<?>[] types) throws InterruptedException, ExecutionException {
// Check the number of parameters and replace the last param type with component type if needed
Class<?> componentType = null;
if (types.length == results.length) {
if (varargs) {
componentType = boxType(types[types.length - 1].getComponentType());
}
} else {
if (varargs) {
int diff = types.length - results.length;
if (diff > 1) {
return false;
} else if (diff < 1) {
componentType = boxType(types[types.length - 1].getComponentType());
}
// if diff == 1 then vargs may be empty and we need to compare the result types
} else {
return false;
}
}
int i = 0;
Class<?> paramType = boxType(types[i]);
while (i < results.length) {
Object result = getResult(i);
if (result != null) {
Class<?> resultClass = boxType(result.getClass());
if (!paramType.isAssignableFrom(resultClass)
// For varargs we also try to match the component type
&& (componentType == null
|| i < (types.length - 1)
|| !componentType.isAssignableFrom(resultClass))) {
return false;
}
}
if (types.length > ++i) {
paramType = boxType(types[i]);
}
}
return true;
}
public Object getVarargsResults(int numberOfParameters, Class<?> componentType)
throws InterruptedException, ExecutionException {
// For varargs we want to skip all previous args
int skip = numberOfParameters - 1;
if (skip < 0 || skip >= results.length) {
return Array.newInstance(componentType, 0);
}
Object result = null;
int capacity = results.length - skip;
if (numberOfParameters == results.length) {
// If there is exactly one non-skipped argument
// test if it's not a matching array
result = getResult(skip);
Class<?> resultClass = result.getClass();
if (resultClass.isArray() && resultClass.getComponentType().equals(componentType)) {
return result;
}
skip++;
}
Object array = Array.newInstance(componentType, capacity);
int idx = 0;
if (result != null) {
Array.set(array, idx++, result);
}
for (int i = skip; i < results.length; i++) {
result = getResult(i);
Array.set(array, idx++, result);
}
return array;
}
private static Class<?> boxType(Class<?> type) {
if (!type.isPrimitive()) {
return type;
} else if (type.equals(Boolean.TYPE)) {
return Boolean.class;
} else if (type.equals(Character.TYPE)) {
return Character.class;
} else if (type.equals(Byte.TYPE)) {
return Byte.class;
} else if (type.equals(Short.TYPE)) {
return Short.class;
} else if (type.equals(Integer.TYPE)) {
return Integer.class;
} else if (type.equals(Long.TYPE)) {
return Long.class;
} else if (type.equals(Float.TYPE)) {
return Float.class;
} else if (type.equals(Double.TYPE)) {
return Double.class;
} else if (type.equals(Void.TYPE)) {
return Void.class;
} else {
throw new IllegalArgumentException();
}
}
}
|
EvaluatedParams
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
|
{
"start": 157107,
"end": 162187
}
|
class ____ implements SnapshotTask {
private final Snapshot snapshot;
private final ShardId shardId;
private final RepositoryShardId repoShardId;
private final ShardSnapshotStatus updatedState;
private final ActionListener<ShardSnapshotUpdateResult> listener;
ShardSnapshotUpdate(
Snapshot snapshot,
ShardId shardId,
RepositoryShardId repoShardId,
ShardSnapshotStatus updatedState,
ActionListener<ShardSnapshotUpdateResult> listener
) {
assert shardId != null ^ repoShardId != null;
this.snapshot = snapshot;
this.shardId = shardId;
this.repoShardId = repoShardId;
this.updatedState = updatedState;
this.listener = listener;
}
public boolean isClone() {
return repoShardId != null;
}
@Override
public void onFailure(Exception e) {
listener.onFailure(e);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if ((other instanceof ShardSnapshotUpdate) == false) {
return false;
}
final ShardSnapshotUpdate that = (ShardSnapshotUpdate) other;
return this.snapshot.equals(that.snapshot)
&& Objects.equals(this.shardId, that.shardId)
&& Objects.equals(this.repoShardId, that.repoShardId)
&& this.updatedState == that.updatedState;
}
@Override
public int hashCode() {
return Objects.hash(snapshot, shardId, updatedState, repoShardId);
}
@Override
public String toString() {
return "ShardSnapshotUpdate{"
+ "snapshot="
+ snapshot
+ ", shardId="
+ shardId
+ ", repoShardId="
+ repoShardId
+ ", updatedState="
+ updatedState
+ '}';
}
}
public void createAndSubmitRequestToUpdateSnapshotState(
Snapshot snapshot,
ShardId shardId,
RepositoryShardId repoShardId,
ShardSnapshotStatus updatedState,
ActionListener<Void> listener
) {
var update = new ShardSnapshotUpdate(
snapshot,
shardId,
repoShardId,
updatedState,
listener.delegateFailure((delegate, result) -> delegate.onResponse(null))
);
logger.trace("received updated snapshot restore state [{}]", update);
masterServiceTaskQueue.submitTask("update snapshot state", update, null);
}
/**
* Maybe kick off new shard clone operations for all repositories from all projects
*/
private void startExecutableClones(SnapshotsInProgress snapshotsInProgress) {
for (List<SnapshotsInProgress.Entry> entries : snapshotsInProgress.entriesByRepo()) {
startExecutableClones(entries);
}
}
/**
* Maybe kick off new shard clone operations for all repositories of the specified project
*/
private void startExecutableClones(SnapshotsInProgress snapshotsInProgress, ProjectId projectId) {
for (List<SnapshotsInProgress.Entry> entries : snapshotsInProgress.entriesByRepo(projectId)) {
startExecutableClones(entries);
}
}
/**
* Maybe kick off new shard clone operations for the single specified project repository
*/
private void startExecutableClones(SnapshotsInProgress snapshotsInProgress, ProjectRepo projectRepo) {
startExecutableClones(snapshotsInProgress.forRepo(Objects.requireNonNull(projectRepo)));
}
private void startExecutableClones(List<SnapshotsInProgress.Entry> entries) {
for (SnapshotsInProgress.Entry entry : entries) {
if (entry.isClone() && entry.state() == SnapshotsInProgress.State.STARTED) {
// this is a clone, see if new work is ready
for (Map.Entry<RepositoryShardId, ShardSnapshotStatus> clone : entry.shardSnapshotStatusByRepoShardId().entrySet()) {
if (clone.getValue().state() == ShardState.INIT) {
runReadyClone(
entry.snapshot(),
entry.source(),
clone.getValue(),
clone.getKey(),
repositoriesService.repository(entry.projectId(), entry.repository())
);
}
}
}
}
}
/**
* Cluster state update task that removes all {@link SnapshotsInProgress.Entry} and {@link SnapshotDeletionsInProgress.Entry} for a
* given repository from the cluster state and afterwards fails all relevant listeners in {@link #snapshotCompletionListeners} and
* {@link #snapshotDeletionListeners}.
*/
private final
|
ShardSnapshotUpdate
|
java
|
apache__flink
|
flink-queryable-state/flink-queryable-state-runtime/src/test/java/org/apache/flink/queryablestate/itcases/AbstractQueryableStateTestBase.java
|
{
"start": 52498,
"end": 53352
}
|
class ____
implements AggregateFunction<Tuple2<Integer, Long>, String, String> {
private static final long serialVersionUID = -6249227626701264599L;
@Override
public String createAccumulator() {
return "0";
}
@Override
public String add(Tuple2<Integer, Long> value, String accumulator) {
long acc = Long.valueOf(accumulator);
acc += value.f1;
return Long.toString(acc);
}
@Override
public String getResult(String accumulator) {
return accumulator;
}
@Override
public String merge(String a, String b) {
return Long.toString(Long.valueOf(a) + Long.valueOf(b));
}
}
/** Test {@link ReduceFunction} summing up its two arguments. */
protected static
|
SumAggr
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java
|
{
"start": 6681,
"end": 7952
}
|
class ____ extends BaseTasksResponse implements Writeable, ToXContentObject {
private final boolean closed;
public Response(boolean closed) {
super(null, null);
this.closed = closed;
}
public Response(StreamInput in) throws IOException {
super(in);
closed = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(closed);
}
public boolean isClosed() {
return closed;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("closed", closed);
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Response response = (Response) o;
return closed == response.closed;
}
@Override
public int hashCode() {
return Objects.hash(closed);
}
}
}
|
Response
|
java
|
apache__rocketmq
|
broker/src/main/java/org/apache/rocketmq/broker/processor/ConsumerManageProcessor.java
|
{
"start": 2837,
"end": 18441
}
|
class ____ implements NettyRequestProcessor {
private static final Logger LOGGER = LoggerFactory.getLogger(LoggerName.BROKER_LOGGER_NAME);
private final BrokerController brokerController;
public ConsumerManageProcessor(final BrokerController brokerController) {
this.brokerController = brokerController;
}
@Override
public RemotingCommand processRequest(ChannelHandlerContext ctx, RemotingCommand request)
throws RemotingCommandException {
switch (request.getCode()) {
case RequestCode.GET_CONSUMER_LIST_BY_GROUP:
return this.getConsumerListByGroup(ctx, request);
case RequestCode.UPDATE_CONSUMER_OFFSET:
return this.updateConsumerOffset(ctx, request);
case RequestCode.QUERY_CONSUMER_OFFSET:
return this.queryConsumerOffset(ctx, request);
default:
break;
}
return null;
}
@Override
public boolean rejectRequest() {
return false;
}
public RemotingCommand getConsumerListByGroup(ChannelHandlerContext ctx, RemotingCommand request)
throws RemotingCommandException {
final RemotingCommand response =
RemotingCommand.createResponseCommand(GetConsumerListByGroupResponseHeader.class);
final GetConsumerListByGroupRequestHeader requestHeader =
(GetConsumerListByGroupRequestHeader) request
.decodeCommandCustomHeader(GetConsumerListByGroupRequestHeader.class);
ConsumerGroupInfo consumerGroupInfo =
this.brokerController.getConsumerManager().getConsumerGroupInfo(
requestHeader.getConsumerGroup());
if (consumerGroupInfo != null) {
List<String> clientIds = consumerGroupInfo.getAllClientId();
if (!clientIds.isEmpty()) {
GetConsumerListByGroupResponseBody body = new GetConsumerListByGroupResponseBody();
body.setConsumerIdList(clientIds);
response.setBody(body.encode());
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
} else {
LOGGER.warn("getAllClientId failed, {} {}", requestHeader.getConsumerGroup(),
RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
}
} else {
LOGGER.warn("getConsumerGroupInfo failed, {} {}", requestHeader.getConsumerGroup(),
RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
}
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("no consumer for this group, " + requestHeader.getConsumerGroup());
return response;
}
public RemotingCommand rewriteRequestForStaticTopic(final UpdateConsumerOffsetRequestHeader requestHeader,
final TopicQueueMappingContext mappingContext) {
try {
if (mappingContext.getMappingDetail() == null) {
return null;
}
TopicQueueMappingDetail mappingDetail = mappingContext.getMappingDetail();
if (!mappingContext.isLeader()) {
return buildErrorResponse(ResponseCode.NOT_LEADER_FOR_QUEUE, String.format("%s-%d does not exit in request process of current broker %s", requestHeader.getTopic(), requestHeader.getQueueId(), mappingDetail.getBname()));
}
Long globalOffset = requestHeader.getCommitOffset();
LogicQueueMappingItem mappingItem = TopicQueueMappingUtils.findLogicQueueMappingItem(mappingContext.getMappingItemList(), globalOffset, true);
requestHeader.setQueueId(mappingItem.getQueueId());
requestHeader.setLo(false);
requestHeader.setBrokerName(mappingItem.getBname());
requestHeader.setCommitOffset(mappingItem.computePhysicalQueueOffset(globalOffset));
//leader, let it go, do not need to rewrite the response
if (mappingDetail.getBname().equals(mappingItem.getBname())) {
return null;
}
RpcRequest rpcRequest = new RpcRequest(RequestCode.UPDATE_CONSUMER_OFFSET, requestHeader, null);
RpcResponse rpcResponse = this.brokerController.getBrokerOuterAPI().getRpcClient().invoke(rpcRequest, this.brokerController.getBrokerConfig().getForwardTimeout()).get();
if (rpcResponse.getException() != null) {
throw rpcResponse.getException();
}
return RpcClientUtils.createCommandForRpcResponse(rpcResponse);
} catch (Throwable t) {
return buildErrorResponse(ResponseCode.SYSTEM_ERROR, t.getMessage());
}
}
private RemotingCommand updateConsumerOffset(ChannelHandlerContext ctx, RemotingCommand request)
throws RemotingCommandException {
final RemotingCommand response =
RemotingCommand.createResponseCommand(UpdateConsumerOffsetResponseHeader.class);
final UpdateConsumerOffsetRequestHeader requestHeader =
(UpdateConsumerOffsetRequestHeader)
request.decodeCommandCustomHeader(UpdateConsumerOffsetRequestHeader.class);
TopicQueueMappingContext mappingContext =
this.brokerController.getTopicQueueMappingManager().buildTopicQueueMappingContext(requestHeader);
RemotingCommand rewriteResult = rewriteRequestForStaticTopic(requestHeader, mappingContext);
if (rewriteResult != null) {
return rewriteResult;
}
String topic = requestHeader.getTopic();
String group = requestHeader.getConsumerGroup();
Integer queueId = requestHeader.getQueueId();
Long offset = requestHeader.getCommitOffset();
if (!this.brokerController.getSubscriptionGroupManager().containsSubscriptionGroup(group)) {
response.setCode(ResponseCode.SUBSCRIPTION_GROUP_NOT_EXIST);
response.setRemark("Group " + group + " not exist!");
return response;
}
if (!this.brokerController.getTopicConfigManager().containsTopic(requestHeader.getTopic())) {
response.setCode(ResponseCode.TOPIC_NOT_EXIST);
response.setRemark("Topic " + topic + " not exist!");
return response;
}
if (queueId == null) {
response.setCode(ResponseCode.INVALID_PARAMETER);
response.setRemark("QueueId is null, topic is " + topic);
return response;
}
if (offset == null) {
response.setCode(ResponseCode.INVALID_PARAMETER);
response.setRemark("Offset is null, topic is " + topic);
return response;
}
ConsumerOffsetManager consumerOffsetManager = brokerController.getConsumerOffsetManager();
if (this.brokerController.getBrokerConfig().isUseServerSideResetOffset()) {
// Note, ignoring this update offset request
if (consumerOffsetManager.hasOffsetReset(topic, group, queueId)) {
response.setCode(ResponseCode.SUCCESS);
response.setRemark("Offset has been previously reset");
LOGGER.info("Update consumer offset is rejected because of previous offset-reset. Group={}, " +
"Topic={}, QueueId={}, Offset={}", group, topic, queueId, offset);
return response;
}
}
this.brokerController.getConsumerOffsetManager().commitOffset(
RemotingHelper.parseChannelRemoteAddr(ctx.channel()), group, topic, queueId, offset);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
public RemotingCommand rewriteRequestForStaticTopic(QueryConsumerOffsetRequestHeader requestHeader,
TopicQueueMappingContext mappingContext) {
try {
if (mappingContext.getMappingDetail() == null) {
return null;
}
TopicQueueMappingDetail mappingDetail = mappingContext.getMappingDetail();
if (!mappingContext.isLeader()) {
return buildErrorResponse(ResponseCode.NOT_LEADER_FOR_QUEUE, String.format("%s-%d does not exit in request process of current broker %s", requestHeader.getTopic(), requestHeader.getQueueId(), mappingDetail.getBname()));
}
List<LogicQueueMappingItem> mappingItemList = mappingContext.getMappingItemList();
if (mappingItemList.size() == 1
&& mappingItemList.get(0).getLogicOffset() == 0) {
//as physical, just let it go
mappingContext.setCurrentItem(mappingItemList.get(0));
requestHeader.setQueueId(mappingContext.getLeaderItem().getQueueId());
return null;
}
//double read check
List<LogicQueueMappingItem> itemList = mappingContext.getMappingItemList();
//by default, it is -1
long offset = -1;
//double read, first from leader, then from second leader
for (int i = itemList.size() - 1; i >= 0; i--) {
LogicQueueMappingItem mappingItem = itemList.get(i);
mappingContext.setCurrentItem(mappingItem);
if (mappingItem.getBname().equals(mappingDetail.getBname())) {
offset = this.brokerController.getConsumerOffsetManager().queryOffset(requestHeader.getConsumerGroup(), requestHeader.getTopic(), mappingItem.getQueueId());
if (offset >= 0) {
break;
} else {
//not found
continue;
}
} else {
//maybe we need to reconstruct an object
requestHeader.setBrokerName(mappingItem.getBname());
requestHeader.setQueueId(mappingItem.getQueueId());
requestHeader.setLo(false);
requestHeader.setSetZeroIfNotFound(false);
RpcRequest rpcRequest = new RpcRequest(RequestCode.QUERY_CONSUMER_OFFSET, requestHeader, null);
RpcResponse rpcResponse = this.brokerController.getBrokerOuterAPI().getRpcClient().invoke(rpcRequest, this.brokerController.getBrokerConfig().getForwardTimeout()).get();
if (rpcResponse.getException() != null) {
throw rpcResponse.getException();
}
if (rpcResponse.getCode() == ResponseCode.SUCCESS) {
offset = ((QueryConsumerOffsetResponseHeader) rpcResponse.getHeader()).getOffset();
break;
} else if (rpcResponse.getCode() == ResponseCode.QUERY_NOT_FOUND) {
continue;
} else {
//this should not happen
throw new RuntimeException("Unknown response code " + rpcResponse.getCode());
}
}
}
final RemotingCommand response = RemotingCommand.createResponseCommand(QueryConsumerOffsetResponseHeader.class);
final QueryConsumerOffsetResponseHeader responseHeader = (QueryConsumerOffsetResponseHeader) response.readCustomHeader();
if (offset >= 0) {
responseHeader.setOffset(offset);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
} else {
response.setCode(ResponseCode.QUERY_NOT_FOUND);
response.setRemark("Not found, maybe this group consumer boot first");
}
RemotingCommand rewriteResponseResult = rewriteResponseForStaticTopic(requestHeader, responseHeader, mappingContext, response.getCode());
if (rewriteResponseResult != null) {
return rewriteResponseResult;
}
return response;
} catch (Throwable t) {
return buildErrorResponse(ResponseCode.SYSTEM_ERROR, t.getMessage());
}
}
public RemotingCommand rewriteResponseForStaticTopic(final QueryConsumerOffsetRequestHeader requestHeader,
final QueryConsumerOffsetResponseHeader responseHeader,
final TopicQueueMappingContext mappingContext, final int code) {
try {
if (mappingContext.getMappingDetail() == null) {
return null;
}
if (code != ResponseCode.SUCCESS) {
return null;
}
LogicQueueMappingItem item = mappingContext.getCurrentItem();
responseHeader.setOffset(item.computeStaticQueueOffsetStrictly(responseHeader.getOffset()));
//no need to construct new object
return null;
} catch (Throwable t) {
return buildErrorResponse(ResponseCode.SYSTEM_ERROR, t.getMessage());
}
}
private RemotingCommand queryConsumerOffset(ChannelHandlerContext ctx, RemotingCommand request)
throws RemotingCommandException {
final RemotingCommand response =
RemotingCommand.createResponseCommand(QueryConsumerOffsetResponseHeader.class);
final QueryConsumerOffsetResponseHeader responseHeader =
(QueryConsumerOffsetResponseHeader) response.readCustomHeader();
final QueryConsumerOffsetRequestHeader requestHeader =
(QueryConsumerOffsetRequestHeader) request
.decodeCommandCustomHeader(QueryConsumerOffsetRequestHeader.class);
TopicQueueMappingContext mappingContext = this.brokerController.getTopicQueueMappingManager().buildTopicQueueMappingContext(requestHeader);
RemotingCommand rewriteResult = rewriteRequestForStaticTopic(requestHeader, mappingContext);
if (rewriteResult != null) {
return rewriteResult;
}
long offset =
this.brokerController.getConsumerOffsetManager().queryOffset(
requestHeader.getConsumerGroup(), requestHeader.getTopic(), requestHeader.getQueueId());
if (offset >= 0) {
responseHeader.setOffset(offset);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
} else {
long minOffset =
this.brokerController.getMessageStore().getMinOffsetInQueue(requestHeader.getTopic(),
requestHeader.getQueueId());
if (requestHeader.getSetZeroIfNotFound() != null && Boolean.FALSE.equals(requestHeader.getSetZeroIfNotFound())) {
response.setCode(ResponseCode.QUERY_NOT_FOUND);
response.setRemark("Not found, do not set to zero, maybe this group boot first");
} else if (minOffset <= 0
&& this.brokerController.getMessageStore().checkInMemByConsumeOffset(
requestHeader.getTopic(), requestHeader.getQueueId(), 0, 1)) {
responseHeader.setOffset(0L);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
} else {
response.setCode(ResponseCode.QUERY_NOT_FOUND);
response.setRemark("Not found, V3_0_6_SNAPSHOT maybe this group consumer boot first");
}
}
RemotingCommand rewriteResponseResult = rewriteResponseForStaticTopic(requestHeader, responseHeader, mappingContext, response.getCode());
if (rewriteResponseResult != null) {
return rewriteResponseResult;
}
return response;
}
}
|
ConsumerManageProcessor
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/creators/DelegatingCreatorsTest.java
|
{
"start": 1320,
"end": 1600
}
|
class ____
{
protected String name;
protected int age;
@JsonCreator(mode = JsonCreator.Mode.DELEGATING)
public CtorBean711(@JacksonInject String n, int a)
{
name = n;
age = a;
}
}
static
|
CtorBean711
|
java
|
apache__maven
|
impl/maven-cli/src/test/java/org/apache/maven/cling/invoker/mvnup/goals/InferenceStrategyTest.java
|
{
"start": 14815,
"end": 24528
}
|
class ____ {
@Test
@DisplayName("should remove parent groupId when child doesn't have explicit groupId")
void shouldRemoveParentGroupIdWhenChildDoesntHaveExplicitGroupId() throws Exception {
String parentPomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.1.0">
<modelVersion>4.1.0</modelVersion>
<groupId>com.example</groupId>
<artifactId>parent-project</artifactId>
<version>1.0.0</version>
</project>
""";
String childPomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.1.0">
<modelVersion>4.1.0</modelVersion>
<parent>
<groupId>com.example</groupId>
<artifactId>parent-project</artifactId>
<version>1.0.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>child-project</artifactId>
<!-- No explicit groupId - will inherit from parent -->
<!-- No explicit version - will inherit from parent -->
</project>
""";
Document parentDoc = Document.of(parentPomXml);
Document childDoc = Document.of(childPomXml);
Map<Path, Document> pomMap = new HashMap<>();
pomMap.put(Paths.get("project", "pom.xml"), parentDoc);
pomMap.put(Paths.get("project", "child", "pom.xml"), childDoc);
Editor editor = new Editor(childDoc);
Element childRoot = editor.root();
Element parentElement = DomUtils.findChildElement(childRoot, "parent");
// Verify parent elements exist before inference
assertNotNull(DomUtils.findChildElement(parentElement, "groupId"));
assertNotNull(DomUtils.findChildElement(parentElement, "artifactId"));
assertNotNull(DomUtils.findChildElement(parentElement, "version"));
// Apply inference
UpgradeContext context = createMockContext();
strategy.apply(context, pomMap);
// Verify parent groupId and version were removed (since child doesn't have explicit ones)
assertNull(parentElement.child("groupId").orElse(null));
assertNull(parentElement.child("version").orElse(null));
// artifactId should also be removed since parent POM is in pomMap
assertNull(parentElement.child("artifactId").orElse(null));
}
@Test
@DisplayName("should keep parent groupId when child has explicit groupId")
void shouldKeepParentGroupIdWhenChildHasExplicitGroupId() throws Exception {
String parentPomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.1.0">
<modelVersion>4.1.0</modelVersion>
<groupId>com.example</groupId>
<artifactId>parent-project</artifactId>
<version>1.0.0</version>
</project>
""";
String childPomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.1.0">
<modelVersion>4.1.0</modelVersion>
<parent>
<groupId>com.example</groupId>
<artifactId>parent-project</artifactId>
<version>1.0.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
<groupId>com.example.child</groupId>
<artifactId>child-project</artifactId>
<version>2.0.0</version>
</project>
""";
Document parentDoc = Document.of(parentPomXml);
Document childDoc = Document.of(childPomXml);
Map<Path, Document> pomMap = new HashMap<>();
pomMap.put(Paths.get("project", "pom.xml"), parentDoc);
pomMap.put(Paths.get("project", "child", "pom.xml"), childDoc);
Editor editor = new Editor(childDoc);
Element childRoot = editor.root();
Element parentElement = DomUtils.findChildElement(childRoot, "parent");
// Apply inference
UpgradeContext context = createMockContext();
strategy.apply(context, pomMap);
// Verify parent elements are kept (since child has explicit values)
assertNotNull(parentElement.child("groupId").orElse(null));
assertNotNull(parentElement.child("version").orElse(null));
// artifactId should still be removed since parent POM is in pomMap
assertNull(parentElement.child("artifactId").orElse(null));
}
@Test
@DisplayName("should not trim parent elements when parent is external")
void shouldNotTrimParentElementsWhenParentIsExternal() throws Exception {
String childPomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.1.0">
<modelVersion>4.1.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>3.0.0</version>
<relativePath/>
</parent>
<artifactId>my-spring-app</artifactId>
<!-- No explicit groupId or version - would inherit from parent -->
</project>
""";
Document childDoc = Document.of(childPomXml);
Map<Path, Document> pomMap = Map.of(Paths.get("project", "pom.xml"), childDoc);
Editor editor = new Editor(childDoc);
Element childRoot = editor.root();
Element parentElement = DomUtils.findChildElement(childRoot, "parent");
// Apply inference
UpgradeContext context = createMockContext();
strategy.apply(context, pomMap);
// Verify correct behavior for external parent:
// - groupId should NOT be removed (external parents need groupId to be located)
// - artifactId should NOT be removed (external parents need artifactId to be located)
// - version should NOT be removed (external parents need version to be located)
// This prevents the "parent.groupId is missing" error reported in issue #7934
assertNotNull(parentElement.child("groupId").orElse(null));
assertNotNull(parentElement.child("artifactId").orElse(null));
assertNotNull(parentElement.child("version").orElse(null));
}
@Test
@DisplayName("should trim parent elements when parent is in reactor")
void shouldTrimParentElementsWhenParentIsInReactor() throws Exception {
// Create parent POM
String parentPomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.1.0">
<modelVersion>4.1.0</modelVersion>
<groupId>com.example</groupId>
<artifactId>parent-project</artifactId>
<version>1.0.0</version>
<packaging>pom</packaging>
</project>
""";
// Create child POM that references the parent
String childPomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.1.0">
<modelVersion>4.1.0</modelVersion>
<parent>
<groupId>com.example</groupId>
<artifactId>parent-project</artifactId>
<version>1.0.0</version>
</parent>
<artifactId>child-project</artifactId>
<!-- No explicit groupId or version - would inherit from parent -->
</project>
""";
Document parentDoc = Document.of(parentPomXml);
Document childDoc = Document.of(childPomXml);
// Both POMs are in the reactor
Map<Path, Document> pomMap = Map.of(
Paths.get("pom.xml"), parentDoc,
Paths.get("child", "pom.xml"), childDoc);
Element childRoot = childDoc.root();
Element parentElement = childRoot.child("parent").orElse(null);
// Apply inference
UpgradeContext context = createMockContext();
strategy.apply(context, pomMap);
// Verify correct behavior for reactor parent:
// - groupId should be removed (child has no explicit groupId, parent is in reactor)
// - artifactId should be removed (can be inferred from relativePath)
// - version should be removed (child has no explicit version, parent is in reactor)
assertNull(parentElement.child("groupId").orElse(null));
assertNull(parentElement.child("artifactId").orElse(null));
assertNull(parentElement.child("version").orElse(null));
}
}
@Nested
@DisplayName("Maven 4.0.0 Limited Inference")
|
ParentInferenceTests
|
java
|
apache__camel
|
components/camel-wordpress/src/test/java/org/apache/camel/component/wordpress/api/model/PublishableStatusTest.java
|
{
"start": 1062,
"end": 1819
}
|
class ____ {
@Test
public void testFromString() {
final String input1 = "PRIVATE";
final String input2 = "private";
assertThat(PublishableStatus.fromString(input1), is(PublishableStatus.private_));
assertThat(PublishableStatus.fromString(input2), is(PublishableStatus.private_));
}
@Test
public void testFromStringEmpty() {
final String input3 = "";
assertThrows(IllegalArgumentException.class,
() -> PublishableStatus.fromString(input3));
}
@Test
public void testFromStringNull() {
final String input4 = null;
assertThrows(NullPointerException.class,
() -> PublishableStatus.fromString(input4));
}
}
|
PublishableStatusTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-micrometer-metrics/src/main/java/org/springframework/boot/micrometer/metrics/autoconfigure/NoOpMeterRegistryConfiguration.java
|
{
"start": 1413,
"end": 1567
}
|
class ____ {
@Bean
CompositeMeterRegistry noOpMeterRegistry(Clock clock) {
return new CompositeMeterRegistry(clock);
}
}
|
NoOpMeterRegistryConfiguration
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/aot/nativex/BasicJsonWriterTests.java
|
{
"start": 1102,
"end": 5386
}
|
class ____ {
private final StringWriter out = new StringWriter();
private final BasicJsonWriter json = new BasicJsonWriter(out, "\t");
@Test
void writeObject() {
Map<String, Object> attributes = orderedMap("test", "value");
attributes.put("another", true);
this.json.writeObject(attributes);
assertThat(out.toString()).isEqualToNormalizingNewlines("""
{
"test": "value",
"another": true
}
""");
}
@Test
void writeObjectWithNestedObject() {
Map<String, Object> attributes = orderedMap("test", "value");
attributes.put("nested", orderedMap("enabled", false));
this.json.writeObject(attributes);
assertThat(out.toString()).isEqualToNormalizingNewlines("""
{
"test": "value",
"nested": {
"enabled": false
}
}
""");
}
@Test
void writeObjectWithNestedArrayOfString() {
Map<String, Object> attributes = orderedMap("test", "value");
attributes.put("nested", List.of("test", "value", "another"));
this.json.writeObject(attributes);
assertThat(out.toString()).isEqualToNormalizingNewlines("""
{
"test": "value",
"nested": [
"test",
"value",
"another"
]
}
""");
}
@Test
void writeObjectWithNestedArrayOfObject() {
Map<String, Object> attributes = orderedMap("test", "value");
LinkedHashMap<String, Object> secondNested = orderedMap("name", "second");
secondNested.put("enabled", false);
attributes.put("nested", List.of(orderedMap("name", "first"), secondNested, orderedMap("name", "third")));
this.json.writeObject(attributes);
assertThat(out.toString()).isEqualToNormalizingNewlines("""
{
"test": "value",
"nested": [
{
"name": "first"
},
{
"name": "second",
"enabled": false
},
{
"name": "third"
}
]
}
""");
}
@Test
void writeObjectWithNestedEmptyArray() {
Map<String, Object> attributes = orderedMap("test", "value");
attributes.put("nested", Collections.emptyList());
this.json.writeObject(attributes);
assertThat(out.toString()).isEqualToNormalizingNewlines("""
{
"test": "value",
"nested": [ ]
}
""");
}
@Test
void writeObjectWithNestedEmptyObject() {
Map<String, Object> attributes = orderedMap("test", "value");
attributes.put("nested", Collections.emptyMap());
this.json.writeObject(attributes);
assertThat(out.toString()).isEqualToNormalizingNewlines("""
{
"test": "value",
"nested": { }
}
""");
}
@Test
void writeWithEscapeDoubleQuote() {
assertStringAttribute("foo\"bar", "foo\\\"bar");
}
@Test
void writeWithEscapeBackslash() {
assertStringAttribute("foo\"bar", "foo\\\"bar");
}
@Test
void writeWithEscapeBackspace() {
assertStringAttribute("foo\bbar", "foo\\bbar");
}
@Test
void writeWithEscapeFormFeed() {
assertStringAttribute("foo\fbar", "foo\\fbar");
}
@Test
void writeWithEscapeNewline() {
assertStringAttribute("foo\nbar", "foo\\nbar");
}
@Test
void writeWithEscapeCarriageReturn() {
assertStringAttribute("foo\rbar", "foo\\rbar");
}
@Test
void writeWithEscapeTab() {
assertStringAttribute("foo\tbar", "foo\\tbar");
}
@Test
void writeWithEscapeUnicode() {
assertStringAttribute("foo\u001Fbar", "foo\\u001fbar");
}
@Test
void writeWithTypeReferenceForSimpleClass() {
assertStringAttribute(TypeReference.of(String.class), "java.lang.String");
}
@Test
void writeWithTypeReferenceForInnerClass() {
assertStringAttribute(TypeReference.of(Nested.class),
"org.springframework.aot.nativex.BasicJsonWriterTests$Nested");
}
@Test
void writeWithTypeReferenceForDoubleInnerClass() {
assertStringAttribute(TypeReference.of(Inner.class),
"org.springframework.aot.nativex.BasicJsonWriterTests$Nested$Inner");
}
void assertStringAttribute(Object value, String expectedValue) {
Map<String, Object> attributes = new LinkedHashMap<>();
attributes.put("test", value);
this.json.writeObject(attributes);
assertThat(out.toString()).contains("\"test\": \"" + expectedValue + "\"");
}
private static LinkedHashMap<String, Object> orderedMap(String key, Object value) {
LinkedHashMap<String, Object> map = new LinkedHashMap<>();
map.put(key, value);
return map;
}
static
|
BasicJsonWriterTests
|
java
|
apache__camel
|
components/camel-netty/src/main/java/org/apache/camel/component/netty/codec/DatagramPacketByteArrayDecoder.java
|
{
"start": 1268,
"end": 2271
}
|
class ____ extends MessageToMessageDecoder<AddressedEnvelope<Object, InetSocketAddress>> {
private static final Logger LOG = LoggerFactory.getLogger(DatagramPacketByteArrayDecoder.class);
private DelegateByteArrayDecoder delegateDecoder = new DelegateByteArrayDecoder();
@Override
protected void decode(ChannelHandlerContext ctx, AddressedEnvelope<Object, InetSocketAddress> msg, List<Object> out)
throws Exception {
if (msg.content() instanceof ByteBuf) {
delegateDecoder.decode(ctx, (ByteBuf) msg.content(), out);
byte[] content = (byte[]) out.remove(out.size() - 1);
AddressedEnvelope<Object, InetSocketAddress> addressedEnvelop
= new DefaultAddressedEnvelope<>(content, msg.recipient(), msg.sender());
out.add(addressedEnvelop);
} else {
LOG.debug("Ignoring message content as it is not an io.netty.buffer.ByteBuf instance.");
}
}
}
|
DatagramPacketByteArrayDecoder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/idgen/biginteger/increment/Entity.java
|
{
"start": 245,
"end": 593
}
|
class ____ {
private BigInteger id;
private String name;
public Entity() {
}
public Entity(String name) {
this.name = name;
}
public BigInteger getId() {
return id;
}
public void setId(BigInteger id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
|
Entity
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/RowComparatorTest.java
|
{
"start": 1679,
"end": 5523
}
|
class ____ extends ComparatorTestBase<Row> {
private static final RowTypeInfo typeInfo =
new RowTypeInfo(
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.DOUBLE_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO,
new TupleTypeInfo<Tuple3<Integer, Boolean, Short>>(
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.BOOLEAN_TYPE_INFO,
BasicTypeInfo.SHORT_TYPE_INFO),
TypeExtractor.createTypeInfo(MyPojo.class));
private static final MyPojo testPojo1 = new MyPojo();
private static final MyPojo testPojo2 = new MyPojo();
private static final MyPojo testPojo3 = new MyPojo();
private static final Row[] data =
new Row[] {
createRow(RowKind.INSERT, null, null, null, null, null),
createRow(RowKind.INSERT, 0, null, null, null, null),
createRow(RowKind.INSERT, 0, 0.0, null, null, null),
createRow(RowKind.INSERT, 0, 0.0, "a", null, null),
createRow(RowKind.INSERT, 1, 0.0, "a", null, null),
createRow(RowKind.INSERT, 1, 1.0, "a", null, null),
createRow(RowKind.INSERT, 1, 1.0, "b", null, null),
createRow(
RowKind.UPDATE_AFTER, 1, 1.0, "b", new Tuple3<>(1, false, (short) 2), null),
createRow(
RowKind.UPDATE_AFTER, 1, 1.0, "b", new Tuple3<>(2, false, (short) 2), null),
createRow(
RowKind.UPDATE_AFTER, 1, 1.0, "b", new Tuple3<>(2, true, (short) 2), null),
createRow(
RowKind.UPDATE_AFTER, 1, 1.0, "b", new Tuple3<>(2, true, (short) 3), null),
createRow(RowKind.DELETE, 1, 1.0, "b", new Tuple3<>(2, true, (short) 3), testPojo1),
createRow(RowKind.DELETE, 1, 1.0, "b", new Tuple3<>(2, true, (short) 3), testPojo2),
createRow(RowKind.DELETE, 1, 1.0, "b", new Tuple3<>(2, true, (short) 3), testPojo3)
};
@BeforeAll
static void init() {
// TODO we cannot test null here as PojoComparator has no support for null keys
testPojo1.name = "";
testPojo2.name = "Test1";
testPojo3.name = "Test2";
}
@Override
protected void deepEquals(String message, Row should, Row is) {
int arity = should.getArity();
assertThat(is.getArity()).as(message).isEqualTo(arity);
for (int i = 0; i < arity; i++) {
Object copiedValue = should.getField(i);
Object element = is.getField(i);
assertThat(element).as(message).isEqualTo(copiedValue);
}
}
@Override
protected TypeComparator<Row> createComparator(boolean ascending) {
return typeInfo.createComparator(
new int[] {0, 1, 2, 3, 4, 5, 6},
new boolean[] {
ascending, ascending, ascending, ascending, ascending, ascending, ascending
},
0,
new ExecutionConfig());
}
@Override
protected TypeSerializer<Row> createSerializer() {
return typeInfo.createSerializer(new SerializerConfigImpl());
}
@Override
protected Row[] getSortedTestData() {
return data;
}
@Override
protected boolean supportsNullKeys() {
return true;
}
private static Row createRow(
RowKind kind, Object f0, Object f1, Object f2, Object f3, Object f4) {
Row row = new Row(kind, 5);
row.setField(0, f0);
row.setField(1, f1);
row.setField(2, f2);
row.setField(3, f3);
row.setField(4, f4);
return row;
}
public static
|
RowComparatorTest
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/aot/hint/annotation/ReflectiveRuntimeHintsRegistrarTests.java
|
{
"start": 7521,
"end": 7667
}
|
class ____ {
@Reflective
private static final String MESSAGE = "Hello";
}
@SuppressWarnings("unused")
static
|
SampleTypeAndFieldAnnotatedBean
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/tck/BaseTck.java
|
{
"start": 3431,
"end": 3620
}
|
class ____ implements Iterable<Long> {
@Override
public Iterator<Long> iterator() {
return new InfiniteRangeIterator();
}
static final
|
InfiniteRange
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/io/AvailabilityProvider.java
|
{
"start": 3779,
"end": 6098
}
|
class ____ implements AvailabilityProvider {
private CompletableFuture<?> availableFuture = new CompletableFuture<>();
public CompletableFuture<?> and(CompletableFuture<?> other) {
return AvailabilityProvider.and(availableFuture, other);
}
public CompletableFuture<?> and(AvailabilityProvider other) {
return and(other.getAvailableFuture());
}
public CompletableFuture<?> or(CompletableFuture<?> other) {
return AvailabilityProvider.or(availableFuture, other);
}
public CompletableFuture<?> or(AvailabilityProvider other) {
return or(other.getAvailableFuture());
}
/** Judges to reset the current available state as unavailable. */
public void resetUnavailable() {
if (isAvailable()) {
availableFuture = new CompletableFuture<>();
}
}
/** Resets the constant completed {@link #AVAILABLE} as the current state. */
public void resetAvailable() {
availableFuture = AVAILABLE;
}
/**
* Returns the previously not completed future and resets the constant completed {@link
* #AVAILABLE} as the current state.
*/
public CompletableFuture<?> getUnavailableToResetAvailable() {
CompletableFuture<?> toNotify = availableFuture;
availableFuture = AVAILABLE;
return toNotify;
}
/**
* Creates a new uncompleted future as the current state and returns the previous
* uncompleted one.
*/
public CompletableFuture<?> getUnavailableToResetUnavailable() {
CompletableFuture<?> toNotify = availableFuture;
availableFuture = new CompletableFuture<>();
return toNotify;
}
/**
* @return a future that is completed if the respective provider is available.
*/
@Override
public CompletableFuture<?> getAvailableFuture() {
return availableFuture;
}
@Override
public String toString() {
if (availableFuture == AVAILABLE) {
return "AVAILABLE";
}
return availableFuture.toString();
}
}
}
|
AvailabilityHelper
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
|
{
"start": 40036,
"end": 40765
}
|
class ____<R> extends BaseTypeBuilder<UnionAccumulator<R>> {
private UnionBuilder(Completion<R> context, NameContext names) {
this(context, names, Collections.emptyList());
}
private static <R> UnionBuilder<R> create(Completion<R> context, NameContext names) {
return new UnionBuilder<>(context, names);
}
private UnionBuilder(Completion<R> context, NameContext names, List<Schema> schemas) {
super(new UnionCompletion<>(context, names, schemas), names);
}
}
/**
* A special Builder for Record fields. The API is very similar to
* {@link BaseTypeBuilder}. However, fields have their own names, properties,
* and default values.
* <p/>
* The methods on this
|
UnionBuilder
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/index/WaitUntilRefreshIT.java
|
{
"start": 8095,
"end": 8550
}
|
class ____ extends MockScriptPlugin {
@Override
public Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
return Collections.singletonMap("delete_plz", params -> {
@SuppressWarnings("unchecked")
Map<String, Object> ctx = (Map<String, Object>) params.get("ctx");
ctx.put("op", "delete");
return null;
});
}
}
}
|
DeletePlzPlugin
|
java
|
junit-team__junit5
|
junit-jupiter-params/src/main/java/org/junit/jupiter/params/ParameterizedInvocationParameterResolver.java
|
{
"start": 704,
"end": 2218
}
|
class ____ implements ParameterResolver {
private final ResolverFacade resolverFacade;
private final EvaluatedArgumentSet arguments;
private final int invocationIndex;
private final ResolutionCache resolutionCache;
ParameterizedInvocationParameterResolver(ResolverFacade resolverFacade, EvaluatedArgumentSet arguments,
int invocationIndex, ResolutionCache resolutionCache) {
this.resolverFacade = resolverFacade;
this.arguments = arguments;
this.invocationIndex = invocationIndex;
this.resolutionCache = resolutionCache;
}
@Override
public final ExtensionContextScope getTestInstantiationExtensionContextScope(ExtensionContext rootContext) {
return ExtensionContextScope.TEST_METHOD;
}
@Override
public final boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) {
return isSupportedOnConstructorOrMethod(parameterContext.getDeclaringExecutable(), extensionContext) //
&& this.resolverFacade.isSupportedParameter(parameterContext, this.arguments);
}
@Override
public final @Nullable Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
throws ParameterResolutionException {
return this.resolverFacade.resolve(parameterContext, extensionContext, this.arguments, this.invocationIndex,
this.resolutionCache);
}
protected abstract boolean isSupportedOnConstructorOrMethod(Executable declaringExecutable,
ExtensionContext extensionContext);
}
|
ParameterizedInvocationParameterResolver
|
java
|
google__gson
|
gson/src/test/java/com/google/gson/functional/JsonAdapterAnnotationOnFieldsTest.java
|
{
"start": 24100,
"end": 24956
}
|
class ____ implements JsonSerializer<List<Integer>> {
@Override
public JsonElement serialize(
List<Integer> src, Type typeOfSrc, JsonSerializationContext context) {
return new JsonPrimitive(true);
}
}
}
/** Tests usage of {@link JsonDeserializer} as {@link JsonAdapter} value on a field */
@Test
public void testJsonDeserializer() {
Gson gson = new Gson();
WithJsonDeserializer deserialized = gson.fromJson("{\"f\":[5]}", WithJsonDeserializer.class);
// Uses custom deserializer which always returns `[3, 2, 1]`
assertThat(deserialized.f).isEqualTo(Arrays.asList(3, 2, 1));
// Verify that delegate serializer for List is used
String json = gson.toJson(new WithJsonDeserializer(Arrays.asList(4, 5, 6)));
assertThat(json).isEqualTo("{\"f\":[4,5,6]}");
}
private static
|
Serializer
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java
|
{
"start": 100543,
"end": 100644
}
|
interface ____ {
String apply(String b);
}
|
ImmutableFunction
|
java
|
elastic__elasticsearch
|
libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/AccessibleJdkMethods.java
|
{
"start": 2487,
"end": 3149
}
|
class ____ to module names (including later excluded modules) for lookup in 2nd step
final Map<String, String> moduleNameByClass = Utils.loadClassToModuleMapping();
final Map<String, Set<String>> exportsByModule = Utils.loadExportsByModule();
final AccessibleMethodsVisitor visitor = new AccessibleMethodsVisitor(modulePredicate, moduleNameByClass, exportsByModule);
// 2nd: calculate accessible implementations of classes in included modules
Utils.walkJdkModules(modulePredicate, exportsByModule, (moduleName, moduleClasses, moduleExports) -> {
for (var classFile : moduleClasses) {
// visit
|
names
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/configuration/BeanMethodQualificationTests.java
|
{
"start": 2269,
"end": 8861
}
|
class ____ {
@Test
void standard() {
AnnotationConfigApplicationContext ctx = context(StandardConfig.class, StandardPojo.class);
assertThat(ctx.getBeanFactory().containsSingleton("testBean1")).isFalse();
StandardPojo pojo = ctx.getBean(StandardPojo.class);
assertThat(pojo.testBean.getName()).isEqualTo("interesting");
assertThat(pojo.testBean2.getName()).isEqualTo("boring");
ctx.close();
}
@Test
void scoped() {
AnnotationConfigApplicationContext ctx = context(ScopedConfig.class, StandardPojo.class);
assertThat(ctx.getBeanFactory().containsSingleton("testBean1")).isFalse();
StandardPojo pojo = ctx.getBean(StandardPojo.class);
assertThat(pojo.testBean.getName()).isEqualTo("interesting");
assertThat(pojo.testBean2.getName()).isEqualTo("boring");
ctx.close();
}
@Test
void scopedProxy() {
AnnotationConfigApplicationContext ctx = context(ScopedProxyConfig.class, StandardPojo.class);
assertThat(ctx.getBeanFactory().containsSingleton("testBean1")).isTrue(); // a shared scoped proxy
StandardPojo pojo = ctx.getBean(StandardPojo.class);
assertThat(pojo.testBean.getName()).isEqualTo("interesting");
assertThat(pojo.testBean2.getName()).isEqualTo("boring");
ctx.close();
}
@SuppressWarnings("unchecked")
@ParameterizedTest
@ValueSource(classes = {PrimaryConfig.class, FallbackConfig.class})
void primaryVersusFallback(Class<?> configClass) {
AnnotationConfigApplicationContext ctx = context(configClass, StandardPojo.class, ConstructorPojo.class);
StandardPojo pojo = ctx.getBean(StandardPojo.class);
assertThat(pojo.testBean.getName()).isEqualTo("interesting");
assertThat(pojo.testBean2.getName()).isEqualTo("boring");
assertThat(pojo.testBean2.getSpouse().getName()).isEqualTo("interesting");
assertThat((List<Object>) pojo.testBean2.getPets()).contains(
ctx.getBean("testBean1x"), ctx.getBean("testBean2x")); // array injection
assertThat((List<Object>) pojo.testBean2.getSomeList()).contains(
ctx.getBean("testBean1x"), ctx.getBean("testBean2x")); // list injection
assertThat((Map<String, TestBean>) pojo.testBean2.getSomeMap()).containsKeys(
"testBean1x", "testBean2x"); // map injection
ConstructorPojo pojo2 = ctx.getBean(ConstructorPojo.class);
assertThat(pojo2.testBean).isSameAs(pojo.testBean);
assertThat(pojo2.testBean2).isSameAs(pojo.testBean2);
ctx.close();
}
/**
* One regular bean along with fallback beans is considered effective primary
*/
@Test
void effectivePrimary() {
AnnotationConfigApplicationContext ctx = context(EffectivePrimaryConfig.class);
TestBean testBean = ctx.getBean(TestBean.class);
assertThat(testBean.getName()).isEqualTo("effective-primary");
ctx.close();
}
@Test
void customWithLazyResolution() {
AnnotationConfigApplicationContext ctx = context(CustomConfig.class, CustomPojo.class);
assertThat(ctx.getBeanFactory().containsSingleton("testBean1")).isFalse();
assertThat(ctx.getBeanFactory().containsSingleton("testBean2")).isFalse();
assertThat(BeanFactoryAnnotationUtils.isQualifierMatch(value -> value.equals("boring"),
"testBean2", ctx.getDefaultListableBeanFactory())).isTrue();
CustomPojo pojo = ctx.getBean(CustomPojo.class);
assertThat(pojo.plainBean).isNull();
assertThat(pojo.testBean.getName()).isEqualTo("interesting");
TestBean testBean2 = BeanFactoryAnnotationUtils.qualifiedBeanOfType(
ctx.getDefaultListableBeanFactory(), TestBean.class, "boring");
assertThat(testBean2.getName()).isEqualTo("boring");
ctx.close();
}
@Test
void customWithEarlyResolution() {
AnnotationConfigApplicationContext ctx = context(CustomConfig.class, CustomPojo.class);
assertThat(ctx.getBeanFactory().containsSingleton("testBean1")).isFalse();
assertThat(ctx.getBeanFactory().containsSingleton("testBean2")).isFalse();
ctx.getBean("testBean2");
assertThat(BeanFactoryAnnotationUtils.isQualifierMatch(value -> value.equals("boring"),
"testBean2", ctx.getDefaultListableBeanFactory())).isTrue();
CustomPojo pojo = ctx.getBean(CustomPojo.class);
assertThat(pojo.testBean.getName()).isEqualTo("interesting");
ctx.close();
}
@Test
void customWithAsm() {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
ctx.registerBeanDefinition("customConfig", new RootBeanDefinition(CustomConfig.class.getName()));
RootBeanDefinition customPojo = new RootBeanDefinition(CustomPojo.class.getName());
customPojo.setLazyInit(true);
ctx.registerBeanDefinition("customPojo", customPojo);
ctx.refresh();
assertThat(ctx.getBeanFactory().containsSingleton("testBean1")).isFalse();
assertThat(ctx.getBeanFactory().containsSingleton("testBean2")).isFalse();
CustomPojo pojo = ctx.getBean(CustomPojo.class);
assertThat(pojo.testBean.getName()).isEqualTo("interesting");
ctx.close();
}
@Test
void customWithAttributeOverride() {
AnnotationConfigApplicationContext ctx = context(CustomConfigWithAttributeOverride.class, CustomPojo.class);
assertThat(ctx.getBeanFactory().containsSingleton("testBeanX")).isFalse();
CustomPojo pojo = ctx.getBean(CustomPojo.class);
assertThat(pojo.plainBean).isNull();
assertThat(pojo.testBean.getName()).isEqualTo("interesting");
assertThat(pojo.nestedTestBean).isNull();
ctx.close();
}
@Test
void customWithConstructor() {
AnnotationConfigApplicationContext ctx = context(CustomConfig.class, CustomPojoWithConstructor.class);
CustomPojoWithConstructor pojo = ctx.getBean(CustomPojoWithConstructor.class);
assertThat(pojo.plainBean).isNull();
assertThat(pojo.testBean.getName()).isEqualTo("interesting");
ctx.close();
}
@Test
void customWithMethod() {
AnnotationConfigApplicationContext ctx = context(CustomConfig.class, CustomPojoWithMethod.class);
CustomPojoWithMethod pojo = ctx.getBean(CustomPojoWithMethod.class);
assertThat(pojo.plainBean).isNull();
assertThat(pojo.testBean.getName()).isEqualTo("interesting");
ctx.close();
}
@Test
void beanNamesForAnnotation() {
AnnotationConfigApplicationContext ctx = context(StandardConfig.class);
assertThat(ctx.getBeanNamesForAnnotation(Configuration.class))
.containsExactly("beanMethodQualificationTests.StandardConfig");
assertThat(ctx.getBeanNamesForAnnotation(Scope.class)).isEmpty();
assertThat(ctx.getBeanNamesForAnnotation(Lazy.class)).containsExactly("testBean1");
assertThat(ctx.getBeanNamesForAnnotation(Boring.class))
.containsExactly("beanMethodQualificationTests.StandardConfig", "testBean2");
ctx.close();
}
@Configuration
@Boring
static
|
BeanMethodQualificationTests
|
java
|
grpc__grpc-java
|
binder/src/main/java/io/grpc/binder/internal/MultiMessageClientStream.java
|
{
"start": 1225,
"end": 1656
}
|
interface ____ implement), and sending them out
* on the transport, as well as receiving messages from the transport, and passing the resultant
* data back to the gRPC ClientCall instance (via calls on the ClientStreamListener instance we're
* given).
*
* <p>These two communication directions are largely independent of each other, with the {@link
* Outbound} handling the gRPC to transport direction, and the {@link Inbound}
|
we
|
java
|
grpc__grpc-java
|
alts/src/test/java/io/grpc/alts/internal/FakeChannelCrypter.java
|
{
"start": 884,
"end": 2275
}
|
class ____ implements ChannelCrypterNetty {
private static final int TAG_BYTES = 16;
private static final byte TAG_BYTE = (byte) 0xa1;
private boolean destroyCalled = false;
public static int getTagBytes() {
return TAG_BYTES;
}
@Override
public void encrypt(ByteBuf out, List<ByteBuf> plain) throws GeneralSecurityException {
checkState(!destroyCalled);
for (ByteBuf buf : plain) {
out.writeBytes(buf);
for (int i = 0; i < TAG_BYTES; ++i) {
out.writeByte(TAG_BYTE);
}
}
}
@Override
public void decrypt(ByteBuf out, ByteBuf tag, List<ByteBuf> ciphertext)
throws GeneralSecurityException {
checkState(!destroyCalled);
for (ByteBuf buf : ciphertext) {
out.writeBytes(buf);
}
while (tag.isReadable()) {
if (tag.readByte() != TAG_BYTE) {
throw new AEADBadTagException("Tag mismatch!");
}
}
}
@Override
public void decrypt(ByteBuf out, ByteBuf ciphertextAndTag) throws GeneralSecurityException {
checkState(!destroyCalled);
ByteBuf ciphertext = ciphertextAndTag.readSlice(ciphertextAndTag.readableBytes() - TAG_BYTES);
decrypt(out, /*tag=*/ ciphertextAndTag, Collections.singletonList(ciphertext));
}
@Override
public int getSuffixLength() {
return TAG_BYTES;
}
@Override
public void destroy() {
destroyCalled = true;
}
}
|
FakeChannelCrypter
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/HandlerMapping.java
|
{
"start": 2155,
"end": 2667
}
|
interface ____ be able to specify a sorting order and thus a priority for getting
* applied by DispatcherServlet. Non-Ordered instances get treated as the lowest priority.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @see org.springframework.core.Ordered
* @see org.springframework.web.servlet.handler.AbstractHandlerMapping
* @see org.springframework.web.servlet.handler.BeanNameUrlHandlerMapping
* @see org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerMapping
*/
public
|
to
|
java
|
spring-projects__spring-boot
|
module/spring-boot-web-server/src/test/java/org/springframework/boot/web/server/servlet/context/ServletComponentScanRegistrarTests.java
|
{
"start": 8927,
"end": 9095
}
|
class ____ {
}
@Configuration(proxyBeanMethods = false)
@ServletComponentScan(basePackageClasses = ServletComponentScanRegistrarTests.class)
static
|
AdditionalPackages
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
{
"start": 12386,
"end": 12701
}
|
class ____ not be instantiated. The exception
* contains a detailed message about the reason why the instantiation failed.
*/
public static <T> T instantiate(Class<T> clazz) {
if (clazz == null) {
throw new NullPointerException();
}
// try to instantiate the
|
could
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestUpdateDeleteTableFactory.java
|
{
"start": 35273,
"end": 36836
}
|
class ____ extends SupportsRowLevelModificationSink
implements SupportsDeletePushDown {
public SupportsDeleteSink(
ObjectIdentifier tableIdentifier,
ResolvedCatalogTable resolvedCatalogTable,
SupportsRowLevelDelete.RowLevelDeleteMode deleteMode,
SupportsRowLevelUpdate.RowLevelUpdateMode updateMode,
String dataId,
List<String> requireColumnsForDelete,
List<String> requireColumnsForUpdate,
boolean onlyRequireUpdatedColumns) {
super(
tableIdentifier,
resolvedCatalogTable,
deleteMode,
updateMode,
dataId,
requireColumnsForDelete,
requireColumnsForUpdate,
onlyRequireUpdatedColumns);
}
@Override
public boolean applyDeleteFilters(List<ResolvedExpression> filters) {
// only accept when the filters are empty
return filters.isEmpty();
}
@Override
public Optional<Long> executeDeletion() {
Collection<RowData> oldRows = registeredRowData.get(dataId);
if (oldRows != null) {
registeredRowData.put(dataId, new ArrayList<>());
return Optional.of((long) oldRows.size());
}
return Optional.empty();
}
}
/** The sink for update existing data. */
private static
|
SupportsDeleteSink
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.