language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/common/beans/support/InstantiationStrategy.java
|
{
"start": 1444,
"end": 5112
}
|
class ____ {

    // Optional accessor used to resolve scope-model constructor arguments; may be null.
    private final ScopeModelAccessor scopeModelAccessor;

    public InstantiationStrategy() {
        this(null);
    }

    public InstantiationStrategy(ScopeModelAccessor scopeModelAccessor) {
        this.scopeModelAccessor = scopeModelAccessor;
    }

    /**
     * Creates an instance of {@code type}. A public constructor whose parameters are all
     * scope-model types takes precedence over the no-arg constructor; having more than one
     * such constructor, or none at all when no default exists, is an error.
     */
    @SuppressWarnings("unchecked")
    public <T> T instantiate(Class<T> type) throws ReflectiveOperationException {
        // Look up the no-arg constructor first; it is only used as a fallback.
        Constructor<T> noArgConstructor = null;
        try {
            noArgConstructor = type.getConstructor();
        } catch (NoSuchMethodException ignored) {
            // absence of a default constructor is acceptable here
        }

        // Collect every public constructor whose parameters can all be supplied.
        List<Constructor<?>> candidates = new ArrayList<>();
        for (Constructor<?> candidate : type.getConstructors()) {
            if (isMatched(candidate)) {
                candidates.add(candidate);
            }
        }
        // The no-arg constructor always "matches"; drop it so only parameterized ones remain.
        if (noArgConstructor != null) {
            candidates.remove(noArgConstructor);
        }

        // Selection order: the single parameterized match wins, otherwise fall back to no-arg.
        Constructor<?> chosen;
        switch (candidates.size()) {
            case 0:
                if (noArgConstructor == null) {
                    throw new IllegalArgumentException("None matched constructor was found for type: " + type.getName());
                }
                chosen = noArgConstructor;
                break;
            case 1:
                chosen = candidates.get(0);
                break;
            default:
                throw new IllegalArgumentException("Expect only one but found " + candidates.size()
                    + " matched constructors for type: " + type.getName() + ", matched constructors: "
                    + candidates);
        }

        // Resolve an argument for every parameter, then invoke.
        Class<?>[] parameterTypes = chosen.getParameterTypes();
        Object[] arguments = new Object[parameterTypes.length];
        for (int i = 0; i < arguments.length; i++) {
            arguments[i] = getArgumentValueForType(parameterTypes[i]);
        }
        return (T) chosen.newInstance(arguments);
    }

    // True when every parameter of the constructor is a supported (scope-model) type.
    private boolean isMatched(Constructor<?> constructor) {
        for (Class<?> candidateType : constructor.getParameterTypes()) {
            if (!isSupportedConstructorParameterType(candidateType)) {
                return false;
            }
        }
        return true;
    }

    private boolean isSupportedConstructorParameterType(Class<?> parameterType) {
        return ScopeModel.class.isAssignableFrom(parameterType);
    }

    // Maps a scope-model parameter type to its current value, or null when unresolvable.
    private Object getArgumentValueForType(Class<?> parameterType) {
        if (scopeModelAccessor != null) {
            if (parameterType == ScopeModel.class) {
                return scopeModelAccessor.getScopeModel();
            } else if (parameterType == FrameworkModel.class) {
                return scopeModelAccessor.getFrameworkModel();
            } else if (parameterType == ApplicationModel.class) {
                return scopeModelAccessor.getApplicationModel();
            } else if (parameterType == ModuleModel.class) {
                return scopeModelAccessor.getModuleModel();
            }
        }
        return null;
    }
}
|
InstantiationStrategy
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/http/converter/json/JsonbHttpMessageConverterTests.java
|
{
"start": 1661,
"end": 11969
}
|
class ____ {
private final JsonbHttpMessageConverter converter = new JsonbHttpMessageConverter();
@Test
void canRead() {
// application/json must be readable both into a concrete bean and into an untyped Map.
assertThat(this.converter.canRead(MyBean.class, new MediaType("application", "json"))).isTrue();
assertThat(this.converter.canRead(Map.class, new MediaType("application", "json"))).isTrue();
}
@Test
void canWrite() {
// application/json must be writable both from a concrete bean and from an untyped Map.
assertThat(this.converter.canWrite(MyBean.class, new MediaType("application", "json"))).isTrue();
assertThat(this.converter.canWrite(Map.class, new MediaType("application", "json"))).isTrue();
}
@Test
void canReadAndWriteMicroformats() {
// The converter also matches vendor "+json" suffixed media types.
assertThat(this.converter.canRead(MyBean.class, new MediaType("application", "vnd.test-micro-type+json"))).isTrue();
assertThat(this.converter.canWrite(MyBean.class, new MediaType("application", "vnd.test-micro-type+json"))).isTrue();
}
@Test
void readTyped() throws IOException {
// Deserializes a JSON document into MyBean and checks every field round-trips.
String body = "{\"bytes\":[1,2],\"array\":[\"Foo\",\"Bar\"]," +
"\"number\":42,\"string\":\"Foo\",\"bool\":true,\"fraction\":42.0}";
MockHttpInputMessage inputMessage = new MockHttpInputMessage(body.getBytes(StandardCharsets.UTF_8));
inputMessage.getHeaders().setContentType(new MediaType("application", "json"));
MyBean result = (MyBean) this.converter.read(MyBean.class, inputMessage);
assertThat(result.getString()).isEqualTo("Foo");
assertThat(result.getNumber()).isEqualTo(42);
assertThat(result.getFraction()).isCloseTo(42F, within(0F));
assertThat(result.getArray()).isEqualTo(new String[] {"Foo", "Bar"});
assertThat(result.isBool()).isTrue();
assertThat(result.getBytes()).isEqualTo(new byte[] {0x1, 0x2});
}
@Test
@SuppressWarnings("unchecked")
public void readUntyped() throws IOException {
// Deserializes the same JSON into a plain HashMap; values arrive as generic JSON types.
String body = "{\"bytes\":[1,2],\"array\":[\"Foo\",\"Bar\"]," +
"\"number\":42,\"string\":\"Foo\",\"bool\":true,\"fraction\":42.0}";
MockHttpInputMessage inputMessage = new MockHttpInputMessage(body.getBytes(StandardCharsets.UTF_8));
inputMessage.getHeaders().setContentType(new MediaType("application", "json"));
HashMap<String, Object> result = (HashMap<String, Object>) this.converter.read(HashMap.class, inputMessage);
assertThat(result.get("string")).isEqualTo("Foo");
// Numeric values come back as some Number subtype; compare via widened primitives.
Number n = (Number) result.get("number");
assertThat(n.longValue()).isEqualTo(42);
n = (Number) result.get("fraction");
assertThat(n.doubleValue()).isCloseTo(42D, within(0D));
List<String> array = new ArrayList<>();
array.add("Foo");
array.add("Bar");
assertThat(result.get("array")).isEqualTo(array);
assertThat(result.get("bool")).isEqualTo(Boolean.TRUE);
// The JSON byte array arrives as a list of Numbers; rebuild the byte[] by hand.
byte[] bytes = new byte[2];
List<Number> resultBytes = (ArrayList<Number>)result.get("bytes");
for (int i = 0; i < 2; i++) {
bytes[i] = resultBytes.get(i).byteValue();
}
assertThat(bytes).isEqualTo(new byte[] {0x1, 0x2});
}
@Test
void write() throws IOException {
    // Serialization without an explicit declared type.
    MockHttpOutputMessage outputMessage = new MockHttpOutputMessage();
    this.converter.write(createSampleBean(), null, outputMessage);
    assertSampleBeanJson(outputMessage);
}
@Test
void writeWithBaseType() throws IOException {
    // Serialization when the declared type is a supertype of the actual bean;
    // the output must be identical to writing with no declared type.
    MockHttpOutputMessage outputMessage = new MockHttpOutputMessage();
    this.converter.write(createSampleBean(), MyBase.class, null, outputMessage);
    assertSampleBeanJson(outputMessage);
}
// Builds the fully-populated bean shared by the write tests.
private MyBean createSampleBean() {
    MyBean body = new MyBean();
    body.setString("Foo");
    body.setNumber(42);
    body.setFraction(42F);
    body.setArray(new String[] {"Foo", "Bar"});
    body.setBool(true);
    body.setBytes(new byte[] {0x1, 0x2});
    return body;
}
// Asserts every serialized field of the sample bean plus the response content type.
private void assertSampleBeanJson(MockHttpOutputMessage outputMessage) {
    Charset utf8 = StandardCharsets.UTF_8;
    String result = outputMessage.getBodyAsString(utf8);
    assertThat(result).contains("\"string\":\"Foo\"");
    assertThat(result).contains("\"number\":42");
    assertThat(result).contains("fraction\":42.0");
    assertThat(result).contains("\"array\":[\"Foo\",\"Bar\"]");
    assertThat(result).contains("\"bool\":true");
    assertThat(result).contains("\"bytes\":[1,2]");
    assertThat(outputMessage.getHeaders().getContentType())
            .as("Invalid content-type").isEqualTo(new MediaType("application", "json", utf8));
}
@Test
void writeUTF16() throws IOException {
// The charset requested in the content type must be honored when encoding the body.
MediaType contentType = new MediaType("application", "json", StandardCharsets.UTF_16BE);
MockHttpOutputMessage outputMessage = new MockHttpOutputMessage();
String body = "H\u00e9llo W\u00f6rld";
this.converter.write(body, contentType, outputMessage);
// A bare string serializes as a quoted JSON string.
assertThat(outputMessage.getBodyAsString(StandardCharsets.UTF_16BE)).as("Invalid result").isEqualTo("\"" + body + "\"");
assertThat(outputMessage.getHeaders().getContentType()).as("Invalid content-type").isEqualTo(contentType);
}
@Test
void readInvalidJson() {
// Malformed JSON must surface as HttpMessageNotReadableException, not a parser-specific error.
String body = "FooBar";
MockHttpInputMessage inputMessage = new MockHttpInputMessage(body.getBytes(StandardCharsets.UTF_8));
inputMessage.getHeaders().setContentType(new MediaType("application", "json"));
assertThatExceptionOfType(HttpMessageNotReadableException.class).isThrownBy(() ->
this.converter.read(MyBean.class, inputMessage));
}
@Test
@SuppressWarnings("unchecked")
public void readAndWriteGenerics() throws Exception {
// Round-trips a List<MyBean> using a generic Type taken from a reflective field.
Field beansList = ListHolder.class.getField("listField");
String body = "[{\"bytes\":[1,2],\"array\":[\"Foo\",\"Bar\"]," +
"\"number\":42,\"string\":\"Foo\",\"bool\":true,\"fraction\":42.0}]";
MockHttpInputMessage inputMessage = new MockHttpInputMessage(body.getBytes(StandardCharsets.UTF_8));
inputMessage.getHeaders().setContentType(new MediaType("application", "json"));
Type genericType = beansList.getGenericType();
List<MyBean> results = (List<MyBean>) converter.read(genericType, MyBeanListHolder.class, inputMessage);
assertThat(results).hasSize(1);
MyBean result = results.get(0);
assertThat(result.getString()).isEqualTo("Foo");
assertThat(result.getNumber()).isEqualTo(42);
assertThat(result.getFraction()).isCloseTo(42F, within(0F));
assertThat(result.getArray()).isEqualTo(new String[] {"Foo", "Bar"});
assertThat(result.isBool()).isTrue();
assertThat(result.getBytes()).isEqualTo(new byte[] {0x1, 0x2});
// Write the parsed list back out and compare structurally with the original JSON.
MockHttpOutputMessage outputMessage = new MockHttpOutputMessage();
converter.write(results, genericType, new MediaType("application", "json"), outputMessage);
JSONAssert.assertEquals(body, outputMessage.getBodyAsString(StandardCharsets.UTF_8), true);
}
@Test
@SuppressWarnings("unchecked")
public void readAndWriteParameterizedType() throws Exception {
// Same round-trip as readAndWriteGenerics, but with the type captured via ParameterizedTypeReference.
ParameterizedTypeReference<List<MyBean>> beansList = new ParameterizedTypeReference<>() {};
String body = "[{\"bytes\":[1,2],\"array\":[\"Foo\",\"Bar\"]," +
"\"number\":42,\"string\":\"Foo\",\"bool\":true,\"fraction\":42.0}]";
MockHttpInputMessage inputMessage = new MockHttpInputMessage(body.getBytes(StandardCharsets.UTF_8));
inputMessage.getHeaders().setContentType(new MediaType("application", "json"));
List<MyBean> results = (List<MyBean>) converter.read(beansList.getType(), null, inputMessage);
assertThat(results).hasSize(1);
MyBean result = results.get(0);
assertThat(result.getString()).isEqualTo("Foo");
assertThat(result.getNumber()).isEqualTo(42);
assertThat(result.getFraction()).isCloseTo(42F, within(0F));
assertThat(result.getArray()).isEqualTo(new String[] {"Foo", "Bar"});
assertThat(result.isBool()).isTrue();
assertThat(result.getBytes()).isEqualTo(new byte[] {0x1, 0x2});
MockHttpOutputMessage outputMessage = new MockHttpOutputMessage();
converter.write(results, beansList.getType(), new MediaType("application", "json"), outputMessage);
JSONAssert.assertEquals(body, outputMessage.getBodyAsString(StandardCharsets.UTF_8), true);
}
@Test
@SuppressWarnings("unchecked")
public void writeParameterizedBaseType() throws Exception {
// Writing through a List<MyBase> declared type must still serialize the full MyBean contents.
ParameterizedTypeReference<List<MyBean>> beansList = new ParameterizedTypeReference<>() {};
ParameterizedTypeReference<List<MyBase>> baseList = new ParameterizedTypeReference<>() {};
String body = "[{\"bytes\":[1,2],\"array\":[\"Foo\",\"Bar\"]," +
"\"number\":42,\"string\":\"Foo\",\"bool\":true,\"fraction\":42.0}]";
MockHttpInputMessage inputMessage = new MockHttpInputMessage(body.getBytes(StandardCharsets.UTF_8));
inputMessage.getHeaders().setContentType(new MediaType("application", "json"));
List<MyBean> results = (List<MyBean>) converter.read(beansList.getType(), null, inputMessage);
assertThat(results).hasSize(1);
MyBean result = results.get(0);
assertThat(result.getString()).isEqualTo("Foo");
assertThat(result.getNumber()).isEqualTo(42);
assertThat(result.getFraction()).isCloseTo(42F, within(0F));
assertThat(result.getArray()).isEqualTo(new String[] {"Foo", "Bar"});
assertThat(result.isBool()).isTrue();
assertThat(result.getBytes()).isEqualTo(new byte[] {0x1, 0x2});
MockHttpOutputMessage outputMessage = new MockHttpOutputMessage();
converter.write(results, baseList.getType(), new MediaType("application", "json"), outputMessage);
JSONAssert.assertEquals(body, outputMessage.getBodyAsString(StandardCharsets.UTF_8), true);
}
@Test
void prefixJson() throws IOException {
MockHttpOutputMessage outputMessage = new MockHttpOutputMessage();
// Enabling the default prefix prepends the standard ")]}', " anti-hijacking guard.
this.converter.setPrefixJson(true);
this.converter.writeInternal("foo", null, outputMessage);
assertThat(outputMessage.getBodyAsString(StandardCharsets.UTF_8)).isEqualTo(")]}', \"foo\"");
}
@Test
void prefixJsonCustom() throws IOException {
MockHttpOutputMessage outputMessage = new MockHttpOutputMessage();
// A caller-supplied prefix replaces the default one verbatim.
this.converter.setJsonPrefix(")))");
this.converter.writeInternal("foo", null, outputMessage);
assertThat(outputMessage.getBodyAsString(StandardCharsets.UTF_8)).isEqualTo(")))\"foo\"");
}
public static
|
JsonbHttpMessageConverterTests
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/InjectOnBugCheckers.java
|
{
"start": 1846,
"end": 3016
}
|
class ____ extends BugChecker implements MethodTreeMatcher {
    /**
     * Flags hand-written constructors of {@code @BugPattern}-annotated {@link BugChecker}
     * subclasses that take only {@code ErrorProneFlags} parameters but lack {@code @Inject},
     * and suggests adding the annotation.
     */
    @Override
    public Description matchMethod(MethodTree tree, VisitorState state) {
        var methodSymbol = getSymbol(tree);
        // Only hand-written constructors are candidates.
        if (!methodSymbol.isConstructor() || isGeneratedConstructor(tree)) {
            return NO_MATCH;
        }
        // Already annotated — nothing to do.
        if (hasDirectAnnotationWithSimpleName(tree, "Inject")) {
            return NO_MATCH;
        }
        // The owning class must be a @BugPattern-annotated BugChecker subtype.
        var bugCheckerType = state.getTypeFromString(BugChecker.class.getCanonicalName());
        boolean ownerIsAnnotatedChecker = isSubtype(methodSymbol.owner.type, bugCheckerType, state)
            && hasAnnotation(methodSymbol.owner, BUG_PATTERN_ANNOTATION, state);
        if (!ownerIsAnnotatedChecker) {
            return NO_MATCH;
        }
        // Every parameter must be an ErrorProneFlags; no-arg constructors are exempt.
        if (tree.getParameters().isEmpty()) {
            return NO_MATCH;
        }
        var flagsType = state.getTypeFromString(ErrorProneFlags.class.getCanonicalName());
        for (var parameter : tree.getParameters()) {
            if (!isSubtype(getType(parameter), flagsType, state)) {
                return NO_MATCH;
            }
        }
        // Suggest prefixing the constructor with a (possibly qualified) @Inject.
        var fix = SuggestedFix.builder();
        return describeMatch(
            tree,
            fix.prefixWith(tree, "@" + qualifyType(state, fix, "javax.inject.Inject") + " ").build());
    }
}
|
InjectOnBugCheckers
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/stats/common/FoldValuesTests.java
|
{
"start": 590,
"end": 1908
}
|
class ____ extends AbstractBWCSerializationTestCase<FoldValues> {
// Chosen randomly per test; when true, parsing tolerates unknown fields.
private boolean lenient;
@Before
public void chooseLenient() {
lenient = randomBoolean();
}
@Override
protected boolean supportsUnknownFields() {
return lenient;
}
@Override
protected FoldValues doParseInstance(XContentParser parser) throws IOException {
return FoldValues.fromXContent(parser, lenient);
}
@Override
protected Writeable.Reader<FoldValues> instanceReader() {
return FoldValues::new;
}
@Override
protected FoldValues createTestInstance() {
return createRandom();
}
@Override
protected FoldValues mutateInstance(FoldValues instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
// Builds a FoldValues with a random non-negative fold index and 0-10 random doubles.
public static FoldValues createRandom() {
int valuesSize = randomIntBetween(0, 10);
double[] values = new double[valuesSize];
for (int i = 0; i < valuesSize; i++) {
values[i] = randomDouble();
}
return new FoldValues(randomIntBetween(0, Integer.MAX_VALUE), values);
}
@Override
protected FoldValues mutateInstanceForVersion(FoldValues instance, TransportVersion version) {
// No per-version wire differences to simulate for this type.
return instance;
}
}
|
FoldValuesTests
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-ws/src/test/java/org/apache/camel/component/spring/ws/addressing/ConsumerWSASameChannelParamsToTests.java
|
{
"start": 1281,
"end": 1885
}
|
class ____ extends AbstractConsumerTests {
@Override
public ActionCallback channelIn(String actionUri) throws URISyntaxException {
// same channel
return to(actionUri);
}
@Override
MessageAddressingProperties channelOut() {
// The reply is expected on the same channel the request used.
return sameChannelParams();
}
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
// NOTE(review): "addresing" looks misspelled — confirm it matches the actual resource path before changing.
return new ClassPathXmlApplicationContext(
new String[] { "org/apache/camel/component/spring/ws/addresing/ConsumerWSAParamsTOTests-context.xml" });
}
}
|
ConsumerWSASameChannelParamsToTests
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/client/protocol/decoder/ObjectMapEntryReplayDecoder.java
|
{
"start": 930,
"end": 1643
}
|
class ____ implements MultiDecoder<Set<Entry<Object, Object>>> {

    /**
     * Alternates decoders by position: even-indexed parameters are map keys,
     * odd-indexed parameters are map values.
     */
    @Override
    public Decoder<Object> getDecoder(Codec codec, int paramNum, State state, long size) {
        boolean valuePosition = (paramNum % 2 != 0);
        return valuePosition ? codec.getMapValueDecoder() : codec.getMapKeyDecoder();
    }

    /**
     * Pairs consecutive elements of {@code parts} into map entries
     * (key at i, value at i+1), preserving encounter order.
     */
    @Override
    public Set<Entry<Object, Object>> decode(List<Object> parts, State state) {
        Map<Object, Object> entries = MultiDecoder.newLinkedHashMap(parts.size() / 2);
        for (int valueIndex = 1; valueIndex < parts.size(); valueIndex += 2) {
            entries.put(parts.get(valueIndex - 1), parts.get(valueIndex));
        }
        return entries.entrySet();
    }
}
|
ObjectMapEntryReplayDecoder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/id/array/ByteArrayIdTest.java
|
{
"start": 3952,
"end": 4063
}
|
class ____ {
// Identifier stored as a boxed byte array; a custom JavaType controls its JDBC mapping.
@Id
@JavaType( ByteArrayJavaType.class )
public Byte[] id;
public String name;
}
}
|
DemoEntity
|
java
|
apache__camel
|
components/camel-wordpress/src/main/java/org/apache/camel/component/wordpress/api/model/ClassifierSearchCriteria.java
|
{
"start": 876,
"end": 1755
}
|
class ____ extends SearchCriteria {
private static final long serialVersionUID = -4265001661257396589L;
// When true, classifiers with no associated posts are omitted from results.
private boolean hideEmpty;
// Restricts results to classifiers assigned to this post, when set.
private Integer postId;
// URL-friendly identifier filter.
private String slug;
// Request context filter; semantics defined by the Context enum — confirm against the WordPress API.
private Context context;
public ClassifierSearchCriteria() {
}
public boolean isHideEmpty() {
return hideEmpty;
}
public void setHideEmpty(boolean hideEmpty) {
this.hideEmpty = hideEmpty;
}
public Integer getPostId() {
return postId;
}
public void setPostId(Integer postId) {
this.postId = postId;
}
public String getSlug() {
return slug;
}
public void setSlug(String slug) {
this.slug = slug;
}
public Context getContext() {
return context;
}
public void setContext(Context context) {
this.context = context;
}
}
|
ClassifierSearchCriteria
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/async/utils/AsyncClass.java
|
{
"start": 2889,
"end": 3230
}
|
class ____ as an example of how to transform synchronous
* operations into asynchronous ones using the AsyncUtil tools,
* which can be applied to other parts of the HDFS Federation
* router or similar systems to improve concurrency and
* performance.
* </p>
*
* @see SyncClass
* @see AsyncUtil
* @see CompletableFuture
*/
public
|
serves
|
java
|
google__guice
|
core/test/com/google/inject/ImplicitBindingTest.java
|
{
"start": 12461,
"end": 12656
}
|
class ____ implements Provider<NonEmptyEnum> {
// Always supplies the HEARTS constant.
@Override
public NonEmptyEnum get() {
return NonEmptyEnum.HEARTS;
}
}
@ProvidedBy(EmptyEnumProvider.class)
|
NonEmptyEnumProvider
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
|
{
"start": 18023,
"end": 18395
}
|
class ____ extends IntegerParam {
/**
* Parameter name.
*/
public static final String NAME = HttpFSFileSystem.SNAPSHOT_DIFF_INDEX;
/**
* Constructor.
*/
public SnapshotDiffIndexParam() {
// Null default: an absent parameter stays unset rather than defaulting to a number.
super(NAME, null);
}
}
/**
* Class for FsAction parameter.
*/
@InterfaceAudience.Private
public static
|
SnapshotDiffIndexParam
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/expression/EnvironmentAccessor.java
|
{
"start": 1207,
"end": 2360
}
|
class ____ implements PropertyAccessor {

    // This accessor only ever targets Environment instances.
    @Override
    public Class<?>[] getSpecificTargetClasses() {
        return new Class<?>[] {Environment.class};
    }

    // Any property name is readable as long as the target is an Environment.
    @Override
    public boolean canRead(EvaluationContext context, @Nullable Object target, String name) throws AccessException {
        return (target instanceof Environment);
    }

    /**
     * Access the given target object by resolving the given property name against
     * the given target environment.
     */
    @Override
    public TypedValue read(EvaluationContext context, @Nullable Object target, String name) throws AccessException {
        Assert.state(target instanceof Environment, "Target must be of type Environment");
        Environment environment = (Environment) target;
        return new TypedValue(environment.getProperty(name));
    }

    /**
     * Read-only: returns {@code false}.
     */
    @Override
    public boolean canWrite(EvaluationContext context, @Nullable Object target, String name) throws AccessException {
        return false;
    }

    // Writing is never allowed; always signals the read-only nature of the Environment.
    @Override
    public void write(EvaluationContext context, @Nullable Object target, String name, @Nullable Object newValue)
            throws AccessException {
        throw new AccessException("The Environment is read-only");
    }
}
|
EnvironmentAccessor
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java
|
{
"start": 27239,
"end": 28328
}
|
class ____ extends TransportResponse {
// Delegated ref-counting; release() runs exactly once, when the count drops to zero.
private final RefCounted refs = AbstractRefCounted.of(this::release);
protected final BlockFactory blockFactory;
// Bytes reserved against the circuit breaker; handed back on release.
protected long reservedBytes = 0;
LookupResponse(BlockFactory blockFactory) {
this.blockFactory = blockFactory;
}
// Transfers ownership of the result pages to the caller.
protected abstract List<Page> takePages();
private void release() {
// Return the reserved bytes without tripping the breaker, then let the subclass clean up.
blockFactory.breaker().addWithoutBreaking(-reservedBytes);
innerRelease();
}
protected abstract void innerRelease();
@Override
public void incRef() {
refs.incRef();
}
@Override
public boolean tryIncRef() {
return refs.tryIncRef();
}
@Override
public boolean decRef() {
return refs.decRef();
}
@Override
public boolean hasReferences() {
return refs.hasReferences();
}
}
/**
* Create a {@link LookupShardContext} for a locally allocated {@link ShardId}.
*/
public
|
LookupResponse
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/reference/ReferenceBeanSupport.java
|
{
"start": 2811,
"end": 3851
}
|
interface ____
String interfaceName = (String) attributes.get(ReferenceAttributes.INTERFACE);
if (interfaceName == null) {
interfaceName = (String) attributes.get(ReferenceAttributes.INTERFACE_NAME);
}
if (interfaceName == null) {
Object interfaceClassValue = attributes.get(ReferenceAttributes.INTERFACE_CLASS);
if (interfaceClassValue instanceof Class) {
interfaceName = ((Class<?>) interfaceClassValue).getName();
} else if (interfaceClassValue instanceof String) {
if (interfaceClassValue.equals("void")) {
attributes.remove(ReferenceAttributes.INTERFACE_CLASS);
} else {
interfaceName = (String) interfaceClassValue;
}
}
}
if (interfaceName == null && defaultInterfaceClass != GenericService.class) {
interfaceName = defaultInterfaceClass.getName();
}
Assert.notEmptyString(interfaceName, "The
|
class
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/HealthApiFeatureSetUsage.java
|
{
"start": 2236,
"end": 5551
}
|
class ____ extends XPackFeatureUsage {

    private final Map<String, Object> usageStats;

    public HealthApiFeatureSetUsage(StreamInput in) throws IOException {
        super(in);
        usageStats = in.readGenericMap();
    }

    public HealthApiFeatureSetUsage(boolean available, boolean enabled, @Nullable Counters stats) {
        super(XPackField.HEALTH_API, available, enabled);
        if (stats != null) {
            usageStats = stats.toMutableNestedMap();
            enrichUsageStatsWithValues(usageStats);
        } else {
            usageStats = Map.of();
        }
    }

    // This method enriches the stats map with a list of encountered values for the statuses, the indicators and the diagnoses stats.
    // Visible for testing
    @SuppressWarnings("unchecked")
    static void enrichUsageStatsWithValues(Map<String, Object> usageStats) {
        // "statuses" is a flat map; "indicators" and "diagnoses" are nested per status.
        if (usageStats.containsKey("statuses")) {
            addSortedValues((Map<String, Object>) usageStats.get("statuses"));
        }
        addSortedValuesPerStatus(usageStats, "indicators");
        addSortedValuesPerStatus(usageStats, "diagnoses");
    }

    // Adds a sorted "values" list of the map's keys, when the map is non-empty.
    // The key list is collected before "values" is inserted, so it never includes itself.
    private static void addSortedValues(Map<String, Object> stats) {
        if (stats.isEmpty() == false) {
            stats.put("values", stats.keySet().stream().sorted().collect(Collectors.toList()));
        }
    }

    // Applies addSortedValues to every per-status sub-map under the given top-level key.
    @SuppressWarnings("unchecked")
    private static void addSortedValuesPerStatus(Map<String, Object> usageStats, String key) {
        if (usageStats.containsKey(key)) {
            Map<String, Map<String, Object>> statsByStatus = (Map<String, Map<String, Object>>) usageStats.get(key);
            for (Map<String, Object> stats : statsByStatus.values()) {
                addSortedValues(stats);
            }
        }
    }

    @Override
    public TransportVersion getMinimalSupportedVersion() {
        return TransportVersions.V_8_7_0;
    }

    public Map<String, Object> stats() {
        return usageStats;
    }

    @Override
    protected void innerXContent(XContentBuilder builder, Params params) throws IOException {
        super.innerXContent(builder, params);
        for (Map.Entry<String, Object> entry : usageStats.entrySet()) {
            builder.field(entry.getKey(), entry.getValue());
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeGenericMap(usageStats);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        HealthApiFeatureSetUsage that = (HealthApiFeatureSetUsage) o;
        return Objects.equals(usageStats, that.usageStats);
    }

    @Override
    public int hashCode() {
        return Objects.hash(usageStats);
    }
}
|
HealthApiFeatureSetUsage
|
java
|
quarkusio__quarkus
|
extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/customized/QuarkusProxyFactory.java
|
{
"start": 1107,
"end": 3083
}
|
class ____ implements ProxyFactory {
private static final CoreMessageLogger LOG = messageLogger(QuarkusProxyFactory.class);
private final ProxyDefinitions proxyClassDefinitions;
private Class<?> persistentClass;
private String entityName;
private Class<?>[] interfaces;
private Method getIdentifierMethod;
private Method setIdentifierMethod;
private CompositeType componentIdType;
// Following have been computed upfront during Augmentation:
private boolean overridesEquals;
private Constructor constructor;
public QuarkusProxyFactory(ProxyDefinitions proxyClassDefinitions) {
this.proxyClassDefinitions = proxyClassDefinitions;
}
@Override
public void postInstantiate(String entityName, Class<?> persistentClass, Set<Class<?>> interfaces,
Method getIdentifierMethod,
Method setIdentifierMethod, CompositeType componentIdType) throws HibernateException {
this.entityName = entityName;
this.persistentClass = persistentClass;
this.interfaces = toArray(interfaces);
this.getIdentifierMethod = getIdentifierMethod;
this.setIdentifierMethod = setIdentifierMethod;
this.componentIdType = componentIdType;
ProxyDefinitions.ProxyClassDetailsHolder detailsHolder = proxyClassDefinitions.getProxyForClass(persistentClass);
if (detailsHolder == null) {
String reason = null;
// Some Envers entity classes are final, e.g. org.hibernate.envers.DefaultRevisionEntity
// There's nothing users can do about it, so let's not fail in those cases.
if (persistentClass.getName().startsWith("org.hibernate.")) {
reason = "this is a limitation of this particular Hibernate class.";
}
// See also ProxyBuildingHelper#isProxiable
else if (Modifier.isFinal(persistentClass.getModifiers())) {
reason = "this
|
QuarkusProxyFactory
|
java
|
spring-projects__spring-boot
|
module/spring-boot-elasticsearch/src/main/java/org/springframework/boot/elasticsearch/autoconfigure/ElasticsearchConnectionDetails.java
|
{
"start": 3240,
"end": 3754
}
|
enum ____ {
/**
* HTTP.
*/
HTTP("http"),
/**
* HTTPS.
*/
HTTPS("https");
// URI scheme string associated with the protocol.
private final String scheme;
Protocol(String scheme) {
this.scheme = scheme;
}
String getScheme() {
return this.scheme;
}
// Resolves a scheme string to its constant; rejects anything that is not "http" or "https".
static Protocol forScheme(String scheme) {
for (Protocol protocol : values()) {
if (protocol.scheme.equals(scheme)) {
return protocol;
}
}
throw new IllegalArgumentException("Unknown scheme '" + scheme + "'");
}
}
}
}
|
Protocol
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/odps/OdpsIdentifierLocationTest.java
|
{
"start": 349,
"end": 1760
}
|
class ____ extends TestCase {
    /**
     * Verifies that with KeepSourceLocation enabled the parser records the
     * table identifier's source column and line for an ODPS CREATE TABLE
     * statement preceded by comment lines.
     */
    public void test_0() throws Exception {
        String sql = "--odps sql\n" +
                "--********************************************************************--\n" +
                "--author:dw_on_emr_qa3_testcloud_com\n" +
                "--create time:2025-08-11 17:08:41\n" +
                "--********************************************************************--\n" +
                "\n" +
                "CREATE TABLE IF NOT EXISTS partition_table1\n" +
                "(\n" +
                "    a STRING COMMENT 'FIELD'\n" +
                "    ,b STRING COMMENT 'FIELD'\n" +
                ")\n" +
                "COMMENT 'TABLE COMMENT'\n" +
                "PARTITIONED BY (ds STRING COMMENT '分区')\n" +
                "LIFECYCLE 70;";
        SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(
                sql,
                DbType.odps,
                SQLParserFeature.KeepSourceLocation,
                SQLParserFeature.KeepComments);
        OdpsCreateTableStatement sqlCreateTableStatement = (OdpsCreateTableStatement) parser.parseStatement();
        int column = sqlCreateTableStatement.getTableSource().getExpr().getSourceColumn();
        int line = sqlCreateTableStatement.getTableSource().getExpr().getSourceLine();
        // assertEquals takes (expected, actual); the original had them reversed,
        // which produced misleading failure messages.
        assertEquals(28, column);
        assertEquals(7, line);
    }
}
|
OdpsIdentifierLocationTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java
|
{
"start": 4226,
"end": 6290
}
|
class ____ extends AddPage {
// Per-key-spec state for the page being added.
final Group[] groups;
final int positionCount;
// Current position within the page; shared by the add* helpers.
int position;
AddWork(Page page, GroupingAggregatorFunction.AddInput addInput, int batchSize) {
super(blockFactory, emitBatchSize, addInput);
this.groups = specs.stream().map(s -> new Group(s, page, batchSize)).toArray(Group[]::new);
this.positionCount = page.getPositionCount();
}
/**
* Encodes one permutation of the keys at time into {@link #bytes} and adds it
* to the {@link #bytesRefHash}. The encoding is mostly provided by
* {@link BatchEncoder} with nulls living in a bit mask at the front of the bytes.
*/
void add() {
for (position = 0; position < positionCount; position++) {
boolean singleEntry = startPosition(groups);
if (singleEntry) {
addSingleEntry();
} else {
addMultipleEntries();
}
}
flushRemaining();
}
// Fast path: every group has exactly one value at this position.
private void addSingleEntry() {
fillBytesSv(groups);
appendOrdSv(position, Math.toIntExact(hashOrdToGroup(bytesRefHash.add(bytes.get()))));
}
// Slow path: emit one hash entry per combination of multi-valued keys at this position.
private void addMultipleEntries() {
int g = 0;
do {
fillBytesMv(groups, g);
appendOrdInMv(position, Math.toIntExact(hashOrdToGroup(bytesRefHash.add(bytes.get()))));
g = rewindKeys(groups);
} while (g >= 0);
finishMv();
// Advance each group's cursor past the values consumed at this position.
for (Group group : groups) {
group.valueOffset += group.valueCount;
}
}
@Override
public void close() {
Releasables.closeExpectNoException(super::close, Releasables.wrap(groups));
}
}
// Streams ordinal blocks for the page's keys, bounded by the requested block size.
@Override
public ReleasableIterator<IntBlock> lookup(Page page, ByteSizeValue targetBlockSize) {
return new LookupWork(page, targetBlockSize.getBytes(), DEFAULT_BATCH_SIZE);
}
|
AddWork
|
java
|
elastic__elasticsearch
|
x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/pagerduty/ExecutablePagerDutyAction.java
|
{
"start": 1089,
"end": 2543
}
|
class ____ extends ExecutableAction<PagerDutyAction> {
private final TextTemplateEngine templateEngine;
private final PagerDutyService pagerDutyService;
public ExecutablePagerDutyAction(
PagerDutyAction action,
Logger logger,
PagerDutyService pagerDutyService,
TextTemplateEngine templateEngine
) {
super(action, logger);
this.pagerDutyService = pagerDutyService;
this.templateEngine = templateEngine;
}
// Renders the incident event for the configured account and either simulates or sends it.
@Override
public Action.Result execute(final String actionId, WatchExecutionContext ctx, Payload payload) throws Exception {
PagerDutyAccount account = pagerDutyService.getAccount(action.event.account);
if (account == null) {
// the account associated with this action was deleted
throw new IllegalStateException("account [" + action.event.account + "] was not found. perhaps it was deleted");
}
Map<String, Object> model = Variables.createCtxParamsMap(ctx, payload);
IncidentEvent event = action.event.render(ctx.watch().id(), actionId, templateEngine, model, account.getDefaults());
// In simulation mode nothing is sent; the rendered event is returned for inspection.
if (ctx.simulateAction(actionId)) {
return new PagerDutyAction.Result.Simulated(event);
}
SentEvent sentEvent = account.send(event, payload, ctx.id().watchId());
return new PagerDutyAction.Result.Executed(account.getName(), sentEvent);
}
}
|
ExecutablePagerDutyAction
|
java
|
quarkusio__quarkus
|
extensions/quartz/deployment/src/test/java/io/quarkus/quartz/test/NonconcurrentProgrammaticTest.java
|
{
"start": 868,
"end": 2084
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot(root -> root
.addClasses(Jobs.class))
.overrideConfigKey("quarkus.scheduler.start-mode", "halted");
@Inject
QuartzScheduler scheduler;
@Test
public void testExecution() throws SchedulerException, InterruptedException {
JobDetail job = JobBuilder.newJob(Jobs.class)
.withIdentity("foo", Scheduler.class.getName())
.build();
Trigger trigger = TriggerBuilder.newTrigger()
.withIdentity("foo", Scheduler.class.getName())
.startNow()
.withSchedule(SimpleScheduleBuilder.simpleSchedule()
.withIntervalInSeconds(1)
.repeatForever())
.build();
scheduler.getScheduler().scheduleJob(job, trigger);
scheduler.resume();
assertTrue(Jobs.NONCONCURRENT_LATCH.await(10, TimeUnit.SECONDS),
String.format("nonconcurrent() executed: %sx", Jobs.NONCONCURRENT_COUNTER.get()));
}
@DisallowConcurrentExecution
static
|
NonconcurrentProgrammaticTest
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterInfoSimulatorTests.java
|
{
"start": 33481,
"end": 34065
}
|
class ____ {
    // Accumulates per-shard snapshot sizes keyed by (snapshot, index, shard).
    private final Map<InternalSnapshotsInfoService.SnapshotShard, Long> sizes = new HashMap<>();
    /** Records the size of one snapshot shard; returns this builder for chaining. */
    public SnapshotShardSizeInfoTestBuilder withShard(Snapshot snapshot, IndexId indexId, ShardId shardId, long size) {
        sizes.put(new InternalSnapshotsInfoService.SnapshotShard(snapshot, indexId, shardId), size);
        return this;
    }
    /** Builds the immutable size-info view over everything recorded so far. */
    public SnapshotShardSizeInfo build() {
        return new SnapshotShardSizeInfo(sizes);
    }
}
private static
|
SnapshotShardSizeInfoTestBuilder
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/qualifiers/defaultvalues/QualifierDefaultValuesTest.java
|
{
"start": 792,
"end": 884
}
|
class ____ {
    // Injection point that relies on @AnimalQualifier's default member values
    // to select the Animal bean.
    @Inject
    @AnimalQualifier
    Animal animal;
}
}
|
Consumer
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/vetoed/VetoedBean2.java
|
{
"start": 212,
"end": 284
}
|
class ____ {
    // Container-injected bean context; public so the test can inspect it directly.
    @Inject
    public BeanContext beanContext;
}
|
VetoedBean2
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/support/AbstractRefreshableApplicationContext.java
|
{
"start": 3114,
"end": 9679
}
|
class ____ extends AbstractApplicationContext {

    private @Nullable Boolean allowBeanDefinitionOverriding;

    private @Nullable Boolean allowCircularReferences;

    /** The internal bean factory, (re-)created on each refresh and cleared on close. */
    private volatile @Nullable DefaultListableBeanFactory internalBeanFactory;

    /**
     * Create a new AbstractRefreshableApplicationContext with no parent.
     */
    public AbstractRefreshableApplicationContext() {
    }

    /**
     * Create a new AbstractRefreshableApplicationContext with the given parent context.
     * @param parent the parent context
     */
    public AbstractRefreshableApplicationContext(@Nullable ApplicationContext parent) {
        super(parent);
    }

    /**
     * Set whether registering a bean definition under an already-used name may
     * silently replace the former definition ({@code true}, the default) or must
     * fail with an exception ({@code false}).
     * @see org.springframework.beans.factory.support.DefaultListableBeanFactory#setAllowBeanDefinitionOverriding
     */
    public void setAllowBeanDefinitionOverriding(boolean allowBeanDefinitionOverriding) {
        this.allowBeanDefinitionOverriding = allowBeanDefinitionOverriding;
    }

    /**
     * Set whether circular references between beans should be resolved
     * automatically ({@code true}, the default) or rejected with an exception.
     * @see org.springframework.beans.factory.support.DefaultListableBeanFactory#setAllowCircularReferences
     */
    public void setAllowCircularReferences(boolean allowCircularReferences) {
        this.allowCircularReferences = allowCircularReferences;
    }

    /**
     * Perform the actual refresh of this context's underlying bean factory:
     * tear down any existing factory, then create, customize and populate a
     * fresh one for the next phase of the context's lifecycle.
     */
    @Override
    protected final void refreshBeanFactory() throws BeansException {
        if (hasBeanFactory()) {
            destroyBeans();
            closeBeanFactory();
        }
        try {
            DefaultListableBeanFactory freshFactory = createBeanFactory();
            freshFactory.setSerializationId(getId());
            freshFactory.setApplicationStartup(getApplicationStartup());
            customizeBeanFactory(freshFactory);
            loadBeanDefinitions(freshFactory);
            this.internalBeanFactory = freshFactory;
        }
        catch (IOException ex) {
            throw new ApplicationContextException("I/O error parsing bean definition source for " + getDisplayName(), ex);
        }
    }

    @Override
    protected void cancelRefresh(Throwable ex) {
        DefaultListableBeanFactory factory = this.internalBeanFactory;
        if (factory != null) {
            // Detach the factory from serialization lookup before cancelling.
            factory.setSerializationId(null);
        }
        super.cancelRefresh(ex);
    }

    @Override
    protected final void closeBeanFactory() {
        DefaultListableBeanFactory factory = this.internalBeanFactory;
        if (factory != null) {
            factory.setSerializationId(null);
            this.internalBeanFactory = null;
        }
    }

    /**
     * Return whether this context currently holds a bean factory, i.e. has been
     * refreshed at least once and not been closed yet.
     */
    protected final boolean hasBeanFactory() {
        return this.internalBeanFactory != null;
    }

    @Override
    public final ConfigurableListableBeanFactory getBeanFactory() {
        DefaultListableBeanFactory factory = this.internalBeanFactory;
        if (factory == null) {
            throw new IllegalStateException("BeanFactory not initialized or already closed - " +
                    "call 'refresh' before accessing beans via the ApplicationContext");
        }
        return factory;
    }

    /**
     * Overridden as a no-op: {@link #getBeanFactory()} already serves as a
     * strong assertion for an active context.
     */
    @Override
    protected void assertBeanFactoryActive() {
    }

    /**
     * Create the internal bean factory for this context, called for each
     * {@link #refresh()} attempt. The default implementation creates a
     * {@link org.springframework.beans.factory.support.DefaultListableBeanFactory}
     * parented on the {@linkplain #getInternalParentBeanFactory() internal bean factory}
     * of this context's parent. Subclasses may override to customize the
     * factory's settings.
     * @return the bean factory for this context
     */
    protected DefaultListableBeanFactory createBeanFactory() {
        return new DefaultListableBeanFactory(getInternalParentBeanFactory());
    }

    /**
     * Customize the freshly created internal bean factory. The default
     * implementation applies this context's
     * {@linkplain #setAllowBeanDefinitionOverriding "allowBeanDefinitionOverriding"}
     * and {@linkplain #setAllowCircularReferences "allowCircularReferences"}
     * settings, if specified.
     * @param beanFactory the newly created bean factory for this context
     */
    protected void customizeBeanFactory(DefaultListableBeanFactory beanFactory) {
        Boolean overriding = this.allowBeanDefinitionOverriding;
        if (overriding != null) {
            beanFactory.setAllowBeanDefinitionOverriding(overriding);
        }
        Boolean circular = this.allowCircularReferences;
        if (circular != null) {
            beanFactory.setAllowCircularReferences(circular);
        }
    }

    /**
     * Load bean definitions into the given bean factory, typically by
     * delegating to one or more bean definition readers.
     * @param beanFactory the bean factory to load bean definitions into
     * @throws BeansException if parsing of the bean definitions failed
     * @throws IOException if loading of bean definition files failed
     * @see org.springframework.beans.factory.xml.XmlBeanDefinitionReader
     */
    protected abstract void loadBeanDefinitions(DefaultListableBeanFactory beanFactory)
            throws BeansException, IOException;
}
|
AbstractRefreshableApplicationContext
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/onexception/OnExceptionRetryUntilTest.java
|
{
"start": 1369,
"end": 3212
}
|
class ____ extends ContextTestSupport {
private static int invoked;
@Override
protected Registry createCamelRegistry() throws Exception {
Registry jndi = super.createCamelRegistry();
jndi.bind("myRetryHandler", new MyRetryBean());
return jndi;
}
    @Test
    public void testRetryUntil() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() {
                // as its based on a unit test we do not have any delays between
                // and do not log the stack trace
                errorHandler(deadLetterChannel("mock:error").maximumRedeliveries(1).redeliveryDelay(0).logStackTrace(false));
                // START SNIPPET: e1
                // we want to use a predicate for retries so we can determine in
                // our bean
                // when retry should stop, notice it will overrule the global
                // error handler
                // where we defined at most 1 redelivery attempt. Here we will
                // continue until
                // the predicate returns false
                onException(MyFunctionalException.class).retryWhile(method("myRetryHandler")).handled(true).transform()
                        .constant("Sorry");
                // END SNIPPET: e1
                from("direct:start").process(new Processor() {
                    public void process(Exchange exchange) throws Exception {
                        throw new MyFunctionalException("Sorry you cannot do this");
                    }
                });
            }
        });
        // MyRetryBean's predicate governs the redeliveries; the assertions below pin
        // three total processor invocations before the failure is handled as "Sorry".
        Object out = template.requestBody("direct:start", "Hello World");
        assertEquals("Sorry", out);
        assertEquals(3, invoked);
    }
// START SNIPPET: e2
public static
|
OnExceptionRetryUntilTest
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/util/OptionConverter.java
|
{
"start": 10362,
"end": 15497
}
|
class ____ which the new object should belong.
* @param defaultValue The object to return in case of non-fulfillment
* @return The created object.
*/
public static Object instantiateByClassName(
final String className, final Class<?> superClass, final Object defaultValue) {
if (className != null) {
try {
final Class<?> classObj = Loader.loadClass(className);
if (!superClass.isAssignableFrom(classObj)) {
LOGGER.error(
"A \"{}\" object is not assignable to a \"{}\" variable.", className, superClass.getName());
LOGGER.error(
"The class \"{}\" was loaded by [{}] whereas object of type [{}] was loaded by [{}].",
superClass.getName(),
superClass.getClassLoader(),
classObj.getTypeName(),
classObj.getName());
return defaultValue;
}
return LoaderUtil.newInstanceOf(classObj);
} catch (final Exception e) {
LOGGER.error("Could not instantiate class [{}].", className, e);
}
}
return defaultValue;
}
/**
* Perform variable substitution in string <code>val</code> from the
* values of keys found in the system propeties.
*
* <p>The variable substitution delimiters are <b>${</b> and <b>}</b>.</p>
*
* <p>For example, if the System properties contains "key=value", then
* the call</p>
* <pre>
* String s = OptionConverter.substituteVars("Value of key is ${key}.");
* </pre>
* <p>
* will set the variable <code>s</code> to "Value of key is value.".
* </p>
* <p>If no value could be found for the specified key, then the
* <code>props</code> parameter is searched, if the value could not
* be found there, then substitution defaults to the empty string.</p>
*
* <p>For example, if system properties contains no value for the key
* "inexistentKey", then the call
* </p>
* <pre>
* String s = OptionConverter.subsVars("Value of inexistentKey is [${inexistentKey}]");
* </pre>
* <p>
* will set <code>s</code> to "Value of inexistentKey is []"
* </p>
* <p>An {@link java.lang.IllegalArgumentException} is thrown if
* <code>val</code> contains a start delimeter "${" which is not
* balanced by a stop delimeter "}". </p>
*
* @param val The string on which variable substitution is performed.
* @param props The properties to use for substitution.
* @return The String after substitution.
* @throws IllegalArgumentException if <code>val</code> is malformed.
*/
    public static String substVars(final String val, final Properties props) throws IllegalArgumentException {
        // Start with an empty key trail; the worker uses it to detect recursive substitutions.
        return substVars(val, props, new ArrayList<>());
    }
    // Recursive worker for substVars: "keys" carries the chain of variable names
    // currently being expanded so self-referential definitions terminate instead
    // of recursing forever.
    private static String substVars(final String val, final Properties props, final List<String> keys)
            throws IllegalArgumentException {
        final StringBuilder sbuf = new StringBuilder();
        int i = 0; // scan cursor into val
        int j; // index of the next "${"
        int k; // index of the matching "}"
        while (true) {
            j = val.indexOf(DELIM_START, i);
            if (j == -1) {
                // no more variables
                if (i == 0) { // this is a simple string
                    return val;
                }
                // add the tail string which contains no variables and return the result.
                sbuf.append(val.substring(i, val.length()));
                return sbuf.toString();
            }
            sbuf.append(val.substring(i, j));
            k = val.indexOf(DELIM_STOP, j);
            if (k == -1) {
                throw new IllegalArgumentException(
                        Strings.dquote(val) + " has no closing brace. Opening brace at position " + j + '.');
            }
            j += DELIM_START_LEN;
            final String key = val.substring(j, k);
            // first try in System properties
            String replacement = PropertiesUtil.getProperties().getStringProperty(key, null);
            // then try props parameter
            if (replacement == null && props != null) {
                replacement = props.getProperty(key);
            }
            if (replacement != null) {
                // Do variable substitution on the replacement string
                // such that we can solve "Hello ${x2}" as "Hello p1"
                // where the properties are
                // x1=p1
                // x2=${x1}
                if (!keys.contains(key)) {
                    final List<String> usedKeys = new ArrayList<>(keys);
                    usedKeys.add(key);
                    final String recursiveReplacement = substVars(replacement, props, usedKeys);
                    sbuf.append(recursiveReplacement);
                } else {
                    // Already expanding this key further up the chain: append the raw
                    // replacement to break the cycle instead of recursing infinitely.
                    sbuf.append(replacement);
                }
            }
            i = k + DELIM_STOP_LEN;
        }
    }
}
|
to
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/analysis/LowercaseNormalizer.java
|
{
"start": 813,
"end": 1261
}
|
class ____ extends Analyzer {
    /** Treats the whole input as a single token and lowercases it. */
    @Override
    protected TokenStreamComponents createComponents(String s) {
        final Tokenizer source = new KeywordTokenizer();
        return new TokenStreamComponents(source, new LowerCaseFilter(source));
    }

    /** Normalization path: lowercase without tokenizing. */
    @Override
    protected TokenStream normalize(String fieldName, TokenStream in) {
        return new LowerCaseFilter(in);
    }
}
|
LowercaseNormalizer
|
java
|
apache__camel
|
components/camel-consul/src/generated/java/org/apache/camel/component/consul/endpoint/ConsulSessionProducerInvokeOnHeaderFactory.java
|
{
"start": 419,
"end": 1330
}
|
class ____ implements InvokeOnHeaderStrategy {
    // Generated dispatch table: routes an invoke-on-header key (case variants
    // included) to the matching ConsulSessionProducer operation. Unknown keys
    // fall through and return null. Do not hand-edit; regenerate instead.
    @Override
    public Object invoke(Object obj, String key, Exchange exchange, AsyncCallback callback) throws Exception {
        org.apache.camel.component.consul.endpoint.ConsulSessionProducer target = (org.apache.camel.component.consul.endpoint.ConsulSessionProducer) obj;
        switch (key) {
        case "create":
        case "CREATE": target.create(exchange.getMessage()); return null;
        case "destroy":
        case "DESTROY": target.destroy(exchange.getMessage()); return null;
        case "info":
        case "INFO": target.info(exchange.getMessage()); return null;
        case "list":
        case "LIST": target.list(exchange.getMessage()); return null;
        case "renew":
        case "RENEW": target.renew(exchange.getMessage()); return null;
        default: return null;
        }
    }
}
|
ConsulSessionProducerInvokeOnHeaderFactory
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/FieldCanBeLocalTest.java
|
{
"start": 4576,
"end": 4995
}
|
class ____ {
int foo() {
@Field int a = 1;
return a;
}
}
""")
.doTest();
}
@Test
public void multipleVariableAnnotations() {
refactoringTestHelper
.addInputLines(
"Test.java",
"""
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
|
Test
|
java
|
apache__flink
|
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/ddl/DropPartitionsOperation.java
|
{
"start": 1453,
"end": 3401
}
|
class ____ extends AlterTableOperation {
    private final boolean ignoreIfPartitionNotExists;
    private final List<CatalogPartitionSpec> partitionSpecs;

    public DropPartitionsOperation(
            ObjectIdentifier tableIdentifier,
            boolean ignoreIfPartitionNotExists,
            List<CatalogPartitionSpec> partitionSpecs) {
        super(tableIdentifier, false);
        this.ignoreIfPartitionNotExists = ignoreIfPartitionNotExists;
        this.partitionSpecs = partitionSpecs;
    }

    public boolean ignoreIfPartitionNotExists() {
        return ignoreIfPartitionNotExists;
    }

    public List<CatalogPartitionSpec> getPartitionSpecs() {
        return partitionSpecs;
    }

    /** Builds e.g. {@code ALTER TABLE t DROP IF EXISTS PARTITION (k=v)}. */
    @Override
    public String asSummaryString() {
        StringBuilder summary =
                new StringBuilder("ALTER TABLE ")
                        .append(tableIdentifier.asSummaryString())
                        .append(" DROP");
        if (ignoreIfPartitionNotExists) {
            summary.append(" IF EXISTS");
        }
        for (CatalogPartitionSpec partitionSpec : partitionSpecs) {
            summary.append(" PARTITION (")
                    .append(OperationUtils.formatPartitionSpec(partitionSpec))
                    .append(')');
        }
        return summary.toString();
    }

    @Override
    public TableResultInternal execute(Context ctx) {
        ObjectPath tablePath = getTableIdentifier().toObjectPath();
        Catalog catalog =
                ctx.getCatalogManager()
                        .getCatalogOrThrowException(getTableIdentifier().getCatalogName());
        try {
            // Drop each requested partition; any failure aborts the whole statement.
            for (CatalogPartitionSpec partitionSpec : getPartitionSpecs()) {
                catalog.dropPartition(tablePath, partitionSpec, ignoreIfPartitionNotExists());
            }
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (Exception e) {
            throw new TableException(String.format("Could not execute %s", asSummaryString()), e);
        }
    }
}
|
DropPartitionsOperation
|
java
|
apache__kafka
|
group-coordinator/src/test/java/org/apache/kafka/coordinator/group/streams/StreamsGroupTest.java
|
{
"start": 4501,
"end": 56658
}
|
class ____ {
private static final LogContext LOG_CONTEXT = new LogContext();
private StreamsGroup createStreamsGroup(String groupId) {
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(LOG_CONTEXT);
return new StreamsGroup(
LOG_CONTEXT,
snapshotRegistry,
groupId
);
}
    @Test
    public void testGetOrCreateUninitializedMember() {
        // First call returns a bare member; after updateMember, subsequent calls
        // return the stored (updated) member instead of a fresh one.
        StreamsGroup streamsGroup = createStreamsGroup("foo");
        StreamsGroupMember uninitializedMember = new StreamsGroupMember.Builder("member-id").build();
        StreamsGroupMember member = streamsGroup.getOrCreateUninitializedMember("member-id");
        assertEquals(uninitializedMember, member);
        StreamsGroupMember updatedMember = new StreamsGroupMember.Builder(member).setInstanceId("unique-new-id").build();
        streamsGroup.updateMember(updatedMember);
        assertEquals(updatedMember, streamsGroup.getOrCreateUninitializedMember("member-id"));
        assertNotEquals(uninitializedMember, streamsGroup.getOrCreateUninitializedMember("member-id"));
    }
    @Test
    public void testGetOrCreateDefaultMember() {
        // Same contract as getOrCreateUninitializedMember, but the first call
        // returns a member built with default values.
        StreamsGroup streamsGroup = createStreamsGroup("foo");
        StreamsGroupMember defaultMember = StreamsGroupMember.Builder.withDefaults("member-id").build();
        StreamsGroupMember member = streamsGroup.getOrCreateDefaultMember("member-id");
        assertEquals(defaultMember, member);
        StreamsGroupMember updatedMember = new StreamsGroupMember.Builder(member).setInstanceId("unique-new-id").build();
        streamsGroup.updateMember(updatedMember);
        assertEquals(updatedMember, streamsGroup.getOrCreateDefaultMember("member-id"));
        assertNotEquals(defaultMember, streamsGroup.getOrCreateDefaultMember("member-id"));
    }
    @Test
    public void testGetMemberOrThrow() {
        StreamsGroup streamsGroup = createStreamsGroup("foo");
        StreamsGroupMember member;
        // Create a member.
        member = streamsGroup.getOrCreateDefaultMember("member-id");
        assertEquals("member-id", member.memberId());
        // Add member to the group.
        streamsGroup.updateMember(member);
        // Get that member back.
        member = streamsGroup.getMemberOrThrow("member-id");
        assertEquals("member-id", member.memberId());
        // An unknown id must raise rather than auto-create.
        assertThrows(UnknownMemberIdException.class, () ->
            streamsGroup.getMemberOrThrow("does-not-exist"));
    }
    @Test
    public void testUpdateMember() {
        // Updating with a rebuilt (equal) member stores it and it reads back intact.
        StreamsGroup streamsGroup = createStreamsGroup("foo");
        StreamsGroupMember member;
        member = streamsGroup.getOrCreateDefaultMember("member");
        member = new StreamsGroupMember.Builder(member).build();
        streamsGroup.updateMember(member);
        assertEquals(member, streamsGroup.getMemberOrThrow("member"));
    }
    @Test
    public void testNoStaticMember() {
        StreamsGroup streamsGroup = createStreamsGroup("foo");
        // Create a new member which is not static
        streamsGroup.getOrCreateDefaultMember("member");
        // No instance id was set, so no static lookup should match.
        assertNull(streamsGroup.staticMember("instance-id"));
    }
    @Test
    public void testGetStaticMemberByInstanceId() {
        // A member with an instance id is reachable both by member id and by
        // static (instance) id.
        StreamsGroup streamsGroup = createStreamsGroup("foo");
        StreamsGroupMember member;
        member = streamsGroup.getOrCreateDefaultMember("member");
        member = new StreamsGroupMember.Builder(member)
            .setInstanceId("instance")
            .build();
        streamsGroup.updateMember(member);
        assertEquals(member, streamsGroup.staticMember("instance"));
        assertEquals(member, streamsGroup.getMemberOrThrow("member"));
        assertEquals(member.memberId(), streamsGroup.staticMemberId("instance"));
    }
    @Test
    public void testRemoveMember() {
        // hasMember flips from true to false after removal.
        StreamsGroup streamsGroup = createStreamsGroup("foo");
        StreamsGroupMember member = streamsGroup.getOrCreateDefaultMember("member");
        streamsGroup.updateMember(member);
        assertTrue(streamsGroup.hasMember("member"));
        streamsGroup.removeMember("member");
        assertFalse(streamsGroup.hasMember("member"));
    }
    @Test
    public void testRemoveStaticMember() {
        // Removing a static member also clears its instance-id mappings.
        StreamsGroup streamsGroup = createStreamsGroup("foo");
        StreamsGroupMember member = new StreamsGroupMember.Builder("member")
            .setInstanceId("instance")
            .build();
        streamsGroup.updateMember(member);
        assertTrue(streamsGroup.hasMember("member"));
        streamsGroup.removeMember("member");
        assertFalse(streamsGroup.hasMember("member"));
        assertNull(streamsGroup.staticMember("instance"));
        assertNull(streamsGroup.staticMemberId("instance"));
    }
    @Test
    public void testUpdatingMemberUpdatesProcessId() {
        // Updating a member with a new process id must rewrite the task-to-process
        // mappings for all assigned and pending-revocation tasks, while tasks that
        // were never assigned (zar) stay unmapped throughout.
        String fooSubtopology = "foo-sub";
        String barSubtopology = "bar-sub";
        String zarSubtopology = "zar-sub";
        StreamsGroup streamsGroup = createStreamsGroup("foo");
        StreamsGroupMember member;
        member = new StreamsGroupMember.Builder("member")
            .setProcessId("process")
            .setAssignedTasks(
                new TasksTupleWithEpochs(
                    mkTasksPerSubtopologyWithCommonEpoch(10, mkTasks(fooSubtopology, 1)),
                    mkTasksPerSubtopology(mkTasks(fooSubtopology, 2)),
                    mkTasksPerSubtopology(mkTasks(fooSubtopology, 3))
                )
            )
            .setTasksPendingRevocation(
                new TasksTupleWithEpochs(
                    mkTasksPerSubtopologyWithCommonEpoch(10, mkTasks(barSubtopology, 4)),
                    mkTasksPerSubtopology(mkTasks(barSubtopology, 5)),
                    mkTasksPerSubtopology(mkTasks(barSubtopology, 6))
                )
            )
            .build();
        streamsGroup.updateMember(member);
        // Initial assignment: everything maps to "process".
        assertEquals("process", streamsGroup.currentActiveTaskProcessId(fooSubtopology, 1));
        assertEquals(Set.of("process"),
            streamsGroup.currentStandbyTaskProcessIds(fooSubtopology, 2));
        assertEquals(Set.of("process"),
            streamsGroup.currentWarmupTaskProcessIds(fooSubtopology, 3));
        assertEquals("process", streamsGroup.currentActiveTaskProcessId(barSubtopology, 4));
        assertEquals(Set.of("process"),
            streamsGroup.currentStandbyTaskProcessIds(barSubtopology, 5));
        assertEquals(Set.of("process"),
            streamsGroup.currentWarmupTaskProcessIds(barSubtopology, 6));
        assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 7));
        assertEquals(Set.of(),
            streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 8));
        assertEquals(Set.of(),
            streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 9));
        member = new StreamsGroupMember.Builder(member)
            .setProcessId("process1")
            .setAssignedTasks(
                new TasksTupleWithEpochs(
                    mkTasksPerSubtopologyWithCommonEpoch(10, mkTasks(fooSubtopology, 1)),
                    mkTasksPerSubtopology(mkTasks(fooSubtopology, 2)),
                    mkTasksPerSubtopology(mkTasks(fooSubtopology, 3))
                )
            )
            .setTasksPendingRevocation(
                new TasksTupleWithEpochs(
                    mkTasksPerSubtopologyWithCommonEpoch(10, mkTasks(barSubtopology, 4)),
                    mkTasksPerSubtopology(mkTasks(barSubtopology, 5)),
                    mkTasksPerSubtopology(mkTasks(barSubtopology, 6))
                )
            )
            .build();
        streamsGroup.updateMember(member);
        // After the process-id change: everything now maps to "process1".
        assertEquals("process1", streamsGroup.currentActiveTaskProcessId(fooSubtopology, 1));
        assertEquals(Set.of("process1"),
            streamsGroup.currentStandbyTaskProcessIds(fooSubtopology, 2));
        assertEquals(Set.of("process1"),
            streamsGroup.currentWarmupTaskProcessIds(fooSubtopology, 3));
        assertEquals("process1", streamsGroup.currentActiveTaskProcessId(barSubtopology, 4));
        assertEquals(Set.of("process1"),
            streamsGroup.currentStandbyTaskProcessIds(barSubtopology, 5));
        assertEquals(Set.of("process1"),
            streamsGroup.currentWarmupTaskProcessIds(barSubtopology, 6));
        assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 7));
        assertEquals(Set.of(),
            streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 8));
        assertEquals(Set.of(),
            streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 9));
    }
    @Test
    public void testUpdatingMemberUpdatesTaskProcessIdWhenPartitionIsReassignedBeforeBeingRevoked() {
        // A task that moves from tasksPendingRevocation into assignedTasks in the
        // same update (with a new process id) must end up mapped to the new process.
        String fooSubtopologyId = "foo-sub";
        StreamsGroup streamsGroup = createStreamsGroup("foo");
        StreamsGroupMember member;
        member = new StreamsGroupMember.Builder("member")
            .setProcessId("process")
            .setAssignedTasks(TasksTupleWithEpochs.EMPTY)
            .setTasksPendingRevocation(
                new TasksTupleWithEpochs(
                    mkTasksPerSubtopologyWithCommonEpoch(10, mkTasks(fooSubtopologyId, 1)),
                    mkTasksPerSubtopology(mkTasks(fooSubtopologyId, 2)),
                    mkTasksPerSubtopology(mkTasks(fooSubtopologyId, 3))
                )
            )
            .build();
        streamsGroup.updateMember(member);
        assertEquals("process", streamsGroup.currentActiveTaskProcessId(fooSubtopologyId, 1));
        member = new StreamsGroupMember.Builder(member)
            .setProcessId("process1")
            .setAssignedTasks(
                new TasksTupleWithEpochs(
                    mkTasksPerSubtopologyWithCommonEpoch(10, mkTasks(fooSubtopologyId, 1)),
                    mkTasksPerSubtopology(mkTasks(fooSubtopologyId, 2)),
                    mkTasksPerSubtopology(mkTasks(fooSubtopologyId, 3))
                )
            )
            .setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
            .build();
        streamsGroup.updateMember(member);
        assertEquals("process1", streamsGroup.currentActiveTaskProcessId(fooSubtopologyId, 1));
    }
    @Test
    public void testUpdatingMemberUpdatesTaskProcessIdWhenPartitionIsNotReleased() {
        // Two members may not claim the same active task at the same time.
        String fooSubtopologyId = "foo-sub";
        StreamsGroup streamsGroup = createStreamsGroup("foo");
        StreamsGroupMember m1 = new StreamsGroupMember.Builder("m1")
            .setProcessId("process")
            .setAssignedTasks(
                new TasksTupleWithEpochs(
                    mkTasksPerSubtopologyWithCommonEpoch(10, mkTasks(fooSubtopologyId, 1)),
                    Map.of(),
                    Map.of()
                )
            )
            .build();
        streamsGroup.updateMember(m1);
        StreamsGroupMember m2 = new StreamsGroupMember.Builder("m2")
            .setProcessId("process")
            .setAssignedTasks(
                new TasksTupleWithEpochs(
                    mkTasksPerSubtopologyWithCommonEpoch(10, mkTasks(fooSubtopologyId, 1)),
                    Map.of(),
                    Map.of()
                )
            )
            .build();
        // m2 should not be able to acquire foo-1 because the partition is
        // still owned by another member.
        assertThrows(IllegalStateException.class, () -> streamsGroup.updateMember(m2));
    }
    @ParameterizedTest
    @EnumSource(TaskRole.class)
    public void testRemoveTaskProcessIds(TaskRole taskRole) {
        // removeTaskProcessIds must validate that the mapping exists and that the
        // caller's process id matches the recorded owner.
        String fooSubtopologyId = "foo-sub";
        StreamsGroup streamsGroup = createStreamsGroup("foo");
        // Removing should fail because there is no epoch set.
        assertThrows(IllegalStateException.class, () -> streamsGroup.removeTaskProcessIds(
            TaskAssignmentTestUtil.mkTasksTupleWithCommonEpoch(taskRole, 10, mkTasks(fooSubtopologyId, 1)),
            "process"
        ));
        StreamsGroupMember m1 = new StreamsGroupMember.Builder("m1")
            .setProcessId("process")
            .setAssignedTasks(TaskAssignmentTestUtil.mkTasksTupleWithCommonEpoch(taskRole, 10, mkTasks(fooSubtopologyId, 1)))
            .build();
        streamsGroup.updateMember(m1);
        // Removing should fail because the expected epoch is incorrect.
        assertThrows(IllegalStateException.class, () -> streamsGroup.removeTaskProcessIds(
            TaskAssignmentTestUtil.mkTasksTupleWithCommonEpoch(taskRole, 10, mkTasks(fooSubtopologyId, 1)),
            "process1"
        ));
    }
    @Test
    public void testAddTaskProcessIds() {
        // Adding a mapping for a task that already has an owner must fail until
        // the current owner removes it first.
        String fooSubtopologyId = "foo-sub";
        StreamsGroup streamsGroup = createStreamsGroup("foo");
        streamsGroup.addTaskProcessId(
            new TasksTupleWithEpochs(
                mkTasksPerSubtopologyWithCommonEpoch(10, mkTasks(fooSubtopologyId, 1)),
                mkTasksPerSubtopology(mkTasks(fooSubtopologyId, 2)),
                mkTasksPerSubtopology(mkTasks(fooSubtopologyId, 3))
            ),
            "process"
        );
        // Changing the epoch should fail because the owner of the partition
        // should remove it first.
        assertThrows(IllegalStateException.class, () -> streamsGroup.addTaskProcessId(
            new TasksTupleWithEpochs(
                mkTasksPerSubtopologyWithCommonEpoch(10, mkTasks(fooSubtopologyId, 1)),
                mkTasksPerSubtopology(mkTasks(fooSubtopologyId, 2)),
                mkTasksPerSubtopology(mkTasks(fooSubtopologyId, 3))
            ),
            "process"
        ));
    }
@Test
public void testDeletingMemberRemovesProcessId() {
String fooSubtopology = "foo-sub";
String barSubtopology = "bar-sub";
String zarSubtopology = "zar-sub";
StreamsGroup streamsGroup = createStreamsGroup("foo");
StreamsGroupMember member;
member = new StreamsGroupMember.Builder("member")
.setProcessId("process")
.setAssignedTasks(
new TasksTupleWithEpochs(
mkTasksPerSubtopologyWithCommonEpoch(10, mkTasks(fooSubtopology, 1)),
mkTasksPerSubtopology(mkTasks(fooSubtopology, 2)),
mkTasksPerSubtopology(mkTasks(fooSubtopology, 3))
)
)
.setTasksPendingRevocation(
new TasksTupleWithEpochs(
mkTasksPerSubtopologyWithCommonEpoch(10, mkTasks(barSubtopology, 4)),
mkTasksPerSubtopology(mkTasks(barSubtopology, 5)),
mkTasksPerSubtopology(mkTasks(barSubtopology, 6))
)
)
.build();
streamsGroup.updateMember(member);
assertEquals("process", streamsGroup.currentActiveTaskProcessId(fooSubtopology, 1));
assertEquals(Set.of("process"), streamsGroup.currentStandbyTaskProcessIds(fooSubtopology, 2));
assertEquals(Set.of("process"), streamsGroup.currentWarmupTaskProcessIds(fooSubtopology, 3));
assertEquals("process", streamsGroup.currentActiveTaskProcessId(barSubtopology, 4));
assertEquals(Set.of("process"), streamsGroup.currentStandbyTaskProcessIds(barSubtopology, 5));
assertEquals(Set.of("process"), streamsGroup.currentWarmupTaskProcessIds(barSubtopology, 6));
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 7));
assertEquals(Set.of(), streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 8));
assertEquals(Set.of(), streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 9));
streamsGroup.removeMember(member.memberId());
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 1));
assertEquals(Set.of(), streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 2));
assertEquals(Set.of(), streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 3));
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 3));
assertEquals(Set.of(), streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 4));
assertEquals(Set.of(), streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 5));
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 7));
assertEquals(Set.of(), streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 8));
assertEquals(Set.of(), streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 9));
}
    @Test
    public void testGroupState() {
        // Walks the group state machine:
        // EMPTY -> NOT_READY -> ASSIGNING -> RECONCILING -> STABLE -> EMPTY.
        StreamsGroup streamsGroup = createStreamsGroup("foo");
        assertEquals(StreamsGroupState.EMPTY, streamsGroup.state());
        StreamsGroupMember member1 = new StreamsGroupMember.Builder("member1")
            .setState(MemberState.STABLE)
            .setMemberEpoch(1)
            .setPreviousMemberEpoch(0)
            .build();
        // A member exists but no validated topology yet -> NOT_READY.
        streamsGroup.updateMember(member1);
        streamsGroup.setGroupEpoch(1);
        assertEquals(MemberState.STABLE, member1.state());
        assertEquals(StreamsGroup.StreamsGroupState.NOT_READY, streamsGroup.state());
        // Setting and validating the topology moves the group to ASSIGNING
        // (the group epoch is ahead of the target assignment epoch).
        streamsGroup.setTopology(new StreamsTopology(1, Map.of()));
        streamsGroup.setConfiguredTopology(new ConfiguredTopology(1, 0, Optional.of(new TreeMap<>()), Map.of(), Optional.empty()));
        streamsGroup.setValidatedTopologyEpoch(1);
        assertEquals(MemberState.STABLE, member1.state());
        assertEquals(StreamsGroup.StreamsGroupState.ASSIGNING, streamsGroup.state());
        StreamsGroupMember member2 = new StreamsGroupMember.Builder("member2")
            .setState(MemberState.STABLE)
            .setMemberEpoch(1)
            .setPreviousMemberEpoch(0)
            .build();
        streamsGroup.updateMember(member2);
        streamsGroup.setGroupEpoch(2);
        assertEquals(MemberState.STABLE, member2.state());
        assertEquals(StreamsGroup.StreamsGroupState.ASSIGNING, streamsGroup.state());
        // Once the target assignment catches up with the group epoch the group reconciles.
        streamsGroup.setTargetAssignmentEpoch(2);
        assertEquals(StreamsGroup.StreamsGroupState.RECONCILING, streamsGroup.state());
        member1 = new StreamsGroupMember.Builder(member1)
            .setState(MemberState.STABLE)
            .setMemberEpoch(2)
            .setPreviousMemberEpoch(1)
            .build();
        streamsGroup.updateMember(member1);
        assertEquals(MemberState.STABLE, member1.state());
        // Member 2 has not reached epoch 2 yet, so the group keeps reconciling.
        assertEquals(StreamsGroup.StreamsGroupState.RECONCILING, streamsGroup.state());
        // Member 2 is not stable so the group stays in reconciling state.
        member2 = new StreamsGroupMember.Builder(member2)
            .setState(MemberState.UNREVOKED_TASKS)
            .setMemberEpoch(2)
            .setPreviousMemberEpoch(1)
            .build();
        streamsGroup.updateMember(member2);
        assertEquals(MemberState.UNREVOKED_TASKS, member2.state());
        assertEquals(StreamsGroup.StreamsGroupState.RECONCILING, streamsGroup.state());
        member2 = new StreamsGroupMember.Builder(member2)
            .setState(MemberState.STABLE)
            .setMemberEpoch(2)
            .setPreviousMemberEpoch(1)
            .build();
        streamsGroup.updateMember(member2);
        assertEquals(MemberState.STABLE, member2.state());
        // All members stable at the target epoch -> STABLE.
        assertEquals(StreamsGroup.StreamsGroupState.STABLE, streamsGroup.state());
        streamsGroup.removeMember("member1");
        streamsGroup.removeMember("member2");
        assertEquals(StreamsGroup.StreamsGroupState.EMPTY, streamsGroup.state());
    }
    @Test
    public void testMetadataRefreshDeadline() {
        // Verifies the deadline-based metadata refresh logic: a deadline is only
        // honored while it has not passed AND its epoch matches the group epoch.
        MockTime time = new MockTime();
        StreamsGroup group = createStreamsGroup("group-foo");
        // Group epoch starts at 0.
        assertEquals(0, group.groupEpoch());
        // The refresh time deadline should be empty when the group is created or loaded.
        assertTrue(group.hasMetadataExpired(time.milliseconds()));
        assertEquals(0L, group.metadataRefreshDeadline().deadlineMs);
        assertEquals(0, group.metadataRefreshDeadline().epoch);
        // Set the refresh deadline. The metadata remains valid because the deadline
        // has not passed and the group epoch is correct.
        group.setMetadataRefreshDeadline(time.milliseconds() + 1000, group.groupEpoch());
        assertFalse(group.hasMetadataExpired(time.milliseconds()));
        assertEquals(time.milliseconds() + 1000, group.metadataRefreshDeadline().deadlineMs);
        assertEquals(group.groupEpoch(), group.metadataRefreshDeadline().epoch);
        // Advance past the deadline. The metadata should have expired.
        time.sleep(1001L);
        assertTrue(group.hasMetadataExpired(time.milliseconds()));
        // Set the refresh time deadline with a higher group epoch. The metadata is considered
        // as expired because the group epoch attached to the deadline is higher than the
        // current group epoch.
        group.setMetadataRefreshDeadline(time.milliseconds() + 1000, group.groupEpoch() + 1);
        assertTrue(group.hasMetadataExpired(time.milliseconds()));
        assertEquals(time.milliseconds() + 1000, group.metadataRefreshDeadline().deadlineMs);
        assertEquals(group.groupEpoch() + 1, group.metadataRefreshDeadline().epoch);
        // Advance the group epoch.
        group.setGroupEpoch(group.groupEpoch() + 1);
        // Set the refresh deadline. The metadata remains valid because the deadline
        // has not passed and the group epoch is correct.
        group.setMetadataRefreshDeadline(time.milliseconds() + 1000, group.groupEpoch());
        assertFalse(group.hasMetadataExpired(time.milliseconds()));
        assertEquals(time.milliseconds() + 1000, group.metadataRefreshDeadline().deadlineMs);
        assertEquals(group.groupEpoch(), group.metadataRefreshDeadline().epoch);
        // Request metadata refresh. The metadata expires immediately (deadline reset to 0).
        group.requestMetadataRefresh();
        assertTrue(group.hasMetadataExpired(time.milliseconds()));
        assertEquals(0L, group.metadataRefreshDeadline().deadlineMs);
        assertEquals(0, group.metadataRefreshDeadline().epoch);
    }
    @ParameterizedTest
    @ApiKeyVersionsSource(apiKey = ApiKeys.TXN_OFFSET_COMMIT)
    public void testValidateTransactionalOffsetCommit(short version) {
        // Validates member id / epoch checks for transactional commits across every
        // TXN_OFFSET_COMMIT version.
        boolean isTransactional = true;
        StreamsGroup group = createStreamsGroup("group-foo");
        // Simulate a call from the admin client without member ID and member epoch.
        // This should pass only if the group is empty.
        group.validateOffsetCommit("", "", -1, isTransactional, version);
        // The member does not exist.
        assertThrows(UnknownMemberIdException.class, () ->
            group.validateOffsetCommit("member-id", null, 0, isTransactional, version));
        // Create a member.
        group.updateMember(new StreamsGroupMember.Builder("member-id").setMemberEpoch(0).build());
        // A call from the admin client should fail as the group is not empty.
        assertThrows(UnknownMemberIdException.class, () ->
            group.validateOffsetCommit("", "", -1, isTransactional, version));
        // The member epoch is stale.
        assertThrows(StaleMemberEpochException.class, () ->
            group.validateOffsetCommit("member-id", "", 10, isTransactional, version));
        // This should succeed.
        group.validateOffsetCommit("member-id", "", 0, isTransactional, version);
        // This should succeed.
        // NOTE(review): unlike the ("", "", -1) admin call above, an empty member id with
        // a NULL group instance id passes even though the group is non-empty — confirm
        // this asymmetry is intended for transactional commits.
        group.validateOffsetCommit("", null, -1, isTransactional, version);
    }
    @ParameterizedTest
    @ApiKeyVersionsSource(apiKey = ApiKeys.OFFSET_COMMIT)
    public void testValidateOffsetCommit(short version) {
        // Validates non-transactional commits across every OFFSET_COMMIT version.
        // Versions below 9 do not carry a member epoch, so commits from a
        // new-protocol member are rejected with UnsupportedVersionException.
        boolean isTransactional = false;
        StreamsGroup group = createStreamsGroup("group-foo");
        // Simulate a call from the admin client without member ID and member epoch.
        // This should pass only if the group is empty.
        group.validateOffsetCommit("", "", -1, isTransactional, version);
        // The member does not exist.
        assertThrows(UnknownMemberIdException.class, () ->
            group.validateOffsetCommit("member-id", null, 0, isTransactional, version));
        // Create members.
        group.updateMember(
            new StreamsGroupMember
                .Builder("new-protocol-member-id").setMemberEpoch(0).build()
        );
        // A call from the admin client should fail as the group is not empty.
        assertThrows(UnknownMemberIdException.class, () ->
            group.validateOffsetCommit("", "", -1, isTransactional, version));
        assertThrows(UnknownMemberIdException.class, () ->
            group.validateOffsetCommit("", null, -1, isTransactional, version));
        // The member epoch is stale (newer than current).
        if (version >= 9) {
            assertThrows(StaleMemberEpochException.class, () ->
                group.validateOffsetCommit("new-protocol-member-id", "", 10, isTransactional, version));
        } else {
            assertThrows(UnsupportedVersionException.class, () ->
                group.validateOffsetCommit("new-protocol-member-id", "", 10, isTransactional, version));
        }
        // This should succeed (matching member epoch).
        if (version >= 9) {
            group.validateOffsetCommit("new-protocol-member-id", "", 0, isTransactional, version);
        } else {
            assertThrows(UnsupportedVersionException.class, () ->
                group.validateOffsetCommit("new-protocol-member-id", "", 0, isTransactional, version));
        }
    }
@Test
public void testValidateOffsetCommitWithOlderEpoch() {
StreamsGroup group = createStreamsGroup("group-foo");
group.setTopology(new StreamsTopology(1, Map.of("0", new StreamsGroupTopologyValue.Subtopology()
.setSubtopologyId("0")
.setSourceTopics(List.of("input-topic")))));
group.updateMember(new StreamsGroupMember.Builder("member-1")
.setMemberEpoch(2)
.setAssignedTasks(new TasksTupleWithEpochs(
Map.of("0", Map.of(0, 2, 1, 1)),
Map.of(), Map.of()))
.build());
CommitPartitionValidator validator = group.validateOffsetCommit(
"member-1", "", 1, false, ApiKeys.OFFSET_COMMIT.latestVersion());
// Received epoch (1) < assignment epoch (2) should throw
assertThrows(StaleMemberEpochException.class, () ->
validator.validate("input-topic", Uuid.ZERO_UUID, 0));
}
@Test
public void testValidateOffsetCommitWithOlderEpochMissingTopology() {
StreamsGroup group = createStreamsGroup("group-foo");
group.updateMember(new StreamsGroupMember.Builder("member-1")
.setMemberEpoch(2)
.build());
// Topology is retrieved when creating validator, so exception is thrown here
assertThrows(StaleMemberEpochException.class, () ->
group.validateOffsetCommit("member-1", "", 1, false, ApiKeys.OFFSET_COMMIT.latestVersion()));
}
@Test
public void testValidateOffsetCommitWithOlderEpochMissingSubtopology() {
StreamsGroup group = createStreamsGroup("group-foo");
group.setTopology(new StreamsTopology(1, Map.of("0", new StreamsGroupTopologyValue.Subtopology()
.setSubtopologyId("0")
.setSourceTopics(List.of("input-topic")))));
group.updateMember(new StreamsGroupMember.Builder("member-1")
.setMemberEpoch(2)
.build());
CommitPartitionValidator validator = group.validateOffsetCommit(
"member-1", "", 1, false, ApiKeys.OFFSET_COMMIT.latestVersion());
assertThrows(StaleMemberEpochException.class, () ->
validator.validate("unknown-topic", Uuid.ZERO_UUID, 0));
}
@Test
public void testValidateOffsetCommitWithOlderEpochUnassignedPartition() {
StreamsGroup group = createStreamsGroup("group-foo");
group.setTopology(new StreamsTopology(1, Map.of("0", new StreamsGroupTopologyValue.Subtopology()
.setSubtopologyId("0")
.setSourceTopics(List.of("input-topic")))));
group.updateMember(new StreamsGroupMember.Builder("member-1")
.setMemberEpoch(2)
.setAssignedTasks(new TasksTupleWithEpochs(
Map.of("0", Map.of(0, 1)),
Map.of(), Map.of()))
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build());
CommitPartitionValidator validator = group.validateOffsetCommit(
"member-1", "", 1, false, ApiKeys.OFFSET_COMMIT.latestVersion());
// Partition 1 not assigned should throw
assertThrows(StaleMemberEpochException.class, () ->
validator.validate("input-topic", Uuid.ZERO_UUID, 1));
}
@Test
public void testValidateOffsetCommitWithOlderEpochValidAssignment() {
StreamsGroup group = createStreamsGroup("group-foo");
group.setTopology(new StreamsTopology(1, Map.of("0", new StreamsGroupTopologyValue.Subtopology()
.setSubtopologyId("0")
.setSourceTopics(List.of("input-topic")))));
group.updateMember(new StreamsGroupMember.Builder("member-1")
.setMemberEpoch(5)
.setAssignedTasks(new TasksTupleWithEpochs(
Map.of("0", Map.of(0, 2, 1, 2)),
Map.of(), Map.of()))
.build());
CommitPartitionValidator validator = group.validateOffsetCommit(
"member-1", "", 2, false, ApiKeys.OFFSET_COMMIT.latestVersion());
// Received epoch 2 == assignment epoch 2 should succeed
validator.validate("input-topic", Uuid.ZERO_UUID, 0);
validator.validate("input-topic", Uuid.ZERO_UUID, 1);
}
@Test
public void testAsListedGroup() {
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(LOG_CONTEXT);
StreamsGroup group = new StreamsGroup(
LOG_CONTEXT,
snapshotRegistry,
"group-foo"
);
group.setGroupEpoch(1);
group.setTopology(new StreamsTopology(1, Map.of()));
group.setValidatedTopologyEpoch(1);
group.setConfiguredTopology(new ConfiguredTopology(1, 0, Optional.of(new TreeMap<>()), Map.of(), Optional.empty()));
group.setTargetAssignmentEpoch(1);
group.updateMember(new StreamsGroupMember.Builder("member1")
.setMemberEpoch(1)
.build());
snapshotRegistry.idempotentCreateSnapshot(1);
ListGroupsResponseData.ListedGroup listedGroup = group.asListedGroup(1);
assertEquals("group-foo", listedGroup.groupId());
assertEquals("streams", listedGroup.protocolType());
assertEquals("Reconciling", listedGroup.groupState());
assertEquals("streams", listedGroup.groupType());
}
    @Test
    public void testValidateOffsetFetch() {
        // Offset fetch validation is snapshot-aware: membership is evaluated at the
        // given last committed offset, not just at the latest state.
        SnapshotRegistry snapshotRegistry = new SnapshotRegistry(LOG_CONTEXT);
        StreamsGroup group = new StreamsGroup(
            LOG_CONTEXT,
            snapshotRegistry,
            "group-foo"
        );
        // Simulate a call from the admin client without member ID and member epoch.
        group.validateOffsetFetch(null, -1, Long.MAX_VALUE);
        // The member does not exist.
        assertThrows(UnknownMemberIdException.class, () ->
            group.validateOffsetFetch("member-id", 0, Long.MAX_VALUE));
        // Create a member.
        snapshotRegistry.idempotentCreateSnapshot(0);
        group.updateMember(new StreamsGroupMember.Builder("member-id").setMemberEpoch(0).build());
        // The member does not exist at last committed offset 0.
        assertThrows(UnknownMemberIdException.class, () ->
            group.validateOffsetFetch("member-id", 0, 0));
        // The member exists but the epoch is stale when the last committed offset is not considered.
        assertThrows(StaleMemberEpochException.class, () ->
            group.validateOffsetFetch("member-id", 10, Long.MAX_VALUE));
        // This should succeed.
        group.validateOffsetFetch("member-id", 0, Long.MAX_VALUE);
    }
    @Test
    public void testValidateDeleteGroup() {
        // A streams group may only be deleted while EMPTY; every non-empty state
        // throws GroupNotEmptyException.
        StreamsGroup streamsGroup = createStreamsGroup("foo");
        assertEquals(StreamsGroupState.EMPTY, streamsGroup.state());
        assertDoesNotThrow(streamsGroup::validateDeleteGroup);
        StreamsGroupMember member1 = new StreamsGroupMember.Builder("member1")
            .setMemberEpoch(1)
            .setPreviousMemberEpoch(0)
            .setState(MemberState.STABLE)
            .build();
        streamsGroup.updateMember(member1);
        assertEquals(StreamsGroup.StreamsGroupState.NOT_READY, streamsGroup.state());
        assertThrows(GroupNotEmptyException.class, streamsGroup::validateDeleteGroup);
        streamsGroup.setTopology(new StreamsTopology(1, Map.of()));
        streamsGroup.setConfiguredTopology(new ConfiguredTopology(1, 0, Optional.of(new TreeMap<>()), Map.of(), Optional.empty()));
        streamsGroup.setValidatedTopologyEpoch(1);
        assertEquals(StreamsGroup.StreamsGroupState.RECONCILING, streamsGroup.state());
        assertThrows(GroupNotEmptyException.class, streamsGroup::validateDeleteGroup);
        streamsGroup.setGroupEpoch(1);
        assertEquals(StreamsGroup.StreamsGroupState.ASSIGNING, streamsGroup.state());
        assertThrows(GroupNotEmptyException.class, streamsGroup::validateDeleteGroup);
        streamsGroup.setTargetAssignmentEpoch(1);
        assertEquals(StreamsGroup.StreamsGroupState.STABLE, streamsGroup.state());
        assertThrows(GroupNotEmptyException.class, streamsGroup::validateDeleteGroup);
        // Removing the last member makes the group deletable again.
        streamsGroup.removeMember("member1");
        assertEquals(StreamsGroup.StreamsGroupState.EMPTY, streamsGroup.state());
        assertDoesNotThrow(streamsGroup::validateDeleteGroup);
    }
@Test
public void testOffsetExpirationCondition() {
long currentTimestamp = 30000L;
long commitTimestamp = 20000L;
long offsetsRetentionMs = 10000L;
OffsetAndMetadata offsetAndMetadata = new OffsetAndMetadata(15000L, OptionalInt.empty(), "", commitTimestamp, OptionalLong.empty(), Uuid.ZERO_UUID);
StreamsGroup group = new StreamsGroup(LOG_CONTEXT, new SnapshotRegistry(LOG_CONTEXT), "group-id");
Optional<OffsetExpirationCondition> offsetExpirationCondition = group.offsetExpirationCondition();
assertTrue(offsetExpirationCondition.isPresent());
OffsetExpirationConditionImpl condition = (OffsetExpirationConditionImpl) offsetExpirationCondition.get();
assertEquals(commitTimestamp, condition.baseTimestamp().apply(offsetAndMetadata));
assertTrue(condition.isOffsetExpired(offsetAndMetadata, currentTimestamp, offsetsRetentionMs));
}
@Test
public void testAsDescribedGroup() {
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext());
StreamsGroup group = new StreamsGroup(LOG_CONTEXT, snapshotRegistry, "group-id-1");
snapshotRegistry.idempotentCreateSnapshot(0);
assertEquals(StreamsGroup.StreamsGroupState.EMPTY.toString(), group.stateAsString(0));
group.setGroupEpoch(1);
group.setTopology(new StreamsTopology(1, Map.of()));
group.setConfiguredTopology(new ConfiguredTopology(1, 0, Optional.of(new TreeMap<>()), Map.of(), Optional.empty()));
group.setValidatedTopologyEpoch(1);
group.setTargetAssignmentEpoch(1);
group.updateMember(new StreamsGroupMember.Builder("member1")
.setMemberEpoch(1)
.setPreviousMemberEpoch(0)
.setState(MemberState.STABLE)
.setInstanceId("instance1")
.setRackId("rack1")
.setClientId("client1")
.setClientHost("host1")
.setRebalanceTimeoutMs(1000)
.setTopologyEpoch(1)
.setProcessId("process1")
.setUserEndpoint(new StreamsGroupMemberMetadataValue.Endpoint().setHost("host1").setPort(9092))
.setClientTags(Map.of("tag1", "value1"))
.setAssignedTasks(TasksTupleWithEpochs.EMPTY)
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build());
group.updateMember(new StreamsGroupMember.Builder("member2")
.setMemberEpoch(1)
.setPreviousMemberEpoch(0)
.setState(MemberState.STABLE)
.setInstanceId("instance2")
.setRackId("rack2")
.setClientId("client2")
.setClientHost("host2")
.setRebalanceTimeoutMs(1000)
.setTopologyEpoch(1)
.setProcessId("process2")
.setUserEndpoint(new StreamsGroupMemberMetadataValue.Endpoint().setHost("host2").setPort(9092))
.setClientTags(Map.of("tag2", "value2"))
.setAssignedTasks(TasksTupleWithEpochs.EMPTY)
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build());
snapshotRegistry.idempotentCreateSnapshot(1);
StreamsGroupDescribeResponseData.DescribedGroup expected = new StreamsGroupDescribeResponseData.DescribedGroup()
.setGroupId("group-id-1")
.setGroupState(StreamsGroup.StreamsGroupState.STABLE.toString())
.setGroupEpoch(1)
.setTopology(new StreamsGroupDescribeResponseData.Topology().setEpoch(1).setSubtopologies(List.of()))
.setAssignmentEpoch(1)
.setMembers(Arrays.asList(
new StreamsGroupDescribeResponseData.Member()
.setMemberId("member1")
.setMemberEpoch(1)
.setInstanceId("instance1")
.setRackId("rack1")
.setClientId("client1")
.setClientHost("host1")
.setTopologyEpoch(1)
.setProcessId("process1")
.setUserEndpoint(new StreamsGroupDescribeResponseData.Endpoint().setHost("host1").setPort(9092))
.setClientTags(List.of(new StreamsGroupDescribeResponseData.KeyValue().setKey("tag1").setValue("value1")))
.setAssignment(new StreamsGroupDescribeResponseData.Assignment())
.setTargetAssignment(new StreamsGroupDescribeResponseData.Assignment()),
new StreamsGroupDescribeResponseData.Member()
.setMemberId("member2")
.setMemberEpoch(1)
.setInstanceId("instance2")
.setRackId("rack2")
.setClientId("client2")
.setClientHost("host2")
.setTopologyEpoch(1)
.setProcessId("process2")
.setUserEndpoint(new StreamsGroupDescribeResponseData.Endpoint().setHost("host2").setPort(9092))
.setClientTags(List.of(new StreamsGroupDescribeResponseData.KeyValue().setKey("tag2").setValue("value2")))
.setAssignment(new StreamsGroupDescribeResponseData.Assignment())
.setTargetAssignment(new StreamsGroupDescribeResponseData.Assignment())
));
StreamsGroupDescribeResponseData.DescribedGroup actual = group.asDescribedGroup(1);
assertEquals(expected, actual);
}
    @Test
    public void testIsInStatesCaseInsensitiveAndUnderscored() {
        // NOTE(review): despite the test name, the assertions show matching is
        // case-SENSITIVE on the lowercase, underscored form ("empty" matches,
        // "Empty" does not) — confirm whether the name or the behavior is intended.
        SnapshotRegistry snapshotRegistry = new SnapshotRegistry(LOG_CONTEXT);
        StreamsGroup group = new StreamsGroup(LOG_CONTEXT, snapshotRegistry, "group-foo");
        snapshotRegistry.idempotentCreateSnapshot(0);
        assertTrue(group.isInStates(Set.of("empty"), 0));
        assertFalse(group.isInStates(Set.of("Empty"), 0));
        group.updateMember(new StreamsGroupMember.Builder("member1")
            .build());
        snapshotRegistry.idempotentCreateSnapshot(1);
        // State queries are evaluated at the given committed offset: at offset 0 the
        // group is still empty, at offset 1 it is not_ready.
        assertTrue(group.isInStates(Set.of("empty"), 0));
        assertTrue(group.isInStates(Set.of("not_ready"), 1));
        assertFalse(group.isInStates(Set.of("empty"), 1));
    }
    @Test
    public void testComputeMetadataHash() {
        // The metadata hash must reflect the topics required by the topology.
        SnapshotRegistry snapshotRegistry = new SnapshotRegistry(LOG_CONTEXT);
        StreamsGroup streamsGroup = new StreamsGroup(
            LOG_CONTEXT,
            snapshotRegistry,
            "group-foo"
        );
        MetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(Uuid.randomUuid(), "topic1", 1)
            .build();
        StreamsTopology topology = mock(StreamsTopology.class);
        when(topology.requiredTopics()).thenReturn(Set.of("topic1"));
        long metadataHash = streamsGroup.computeMetadataHash(new KRaftCoordinatorMetadataImage(metadataImage), new HashMap<>(), topology);
        // The hash is non-zero because topic1 contributes to it (zero would mean no topics).
        assertNotEquals(0, metadataHash);
    }
@Test
void testCreateGroupTombstoneRecords() {
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(LOG_CONTEXT);
StreamsGroup streamsGroup = new StreamsGroup(
LOG_CONTEXT,
snapshotRegistry,
"test-group"
);
streamsGroup.updateMember(new StreamsGroupMember.Builder("member1")
.setMemberEpoch(1)
.build());
List<CoordinatorRecord> records = new ArrayList<>();
streamsGroup.createGroupTombstoneRecords(records);
assertEquals(6, records.size());
for (CoordinatorRecord record : records) {
assertNotNull(record.key());
assertNull(record.value());
}
final Set<ApiMessage> keys = records.stream().map(CoordinatorRecord::key).collect(Collectors.toSet());
assertTrue(keys.contains(new StreamsGroupMetadataKey().setGroupId("test-group")));
assertTrue(keys.contains(new StreamsGroupTargetAssignmentMetadataKey().setGroupId("test-group")));
assertTrue(keys.contains(new StreamsGroupTopologyKey().setGroupId("test-group")));
assertTrue(keys.contains(new StreamsGroupMemberMetadataKey().setGroupId("test-group").setMemberId("member1")));
assertTrue(keys.contains(new StreamsGroupTargetAssignmentMemberKey().setGroupId("test-group").setMemberId("member1")));
assertTrue(keys.contains(new StreamsGroupCurrentMemberAssignmentKey().setGroupId("test-group").setMemberId("member1")));
}
    @Test
    public void testIsSubscribedToTopic() {
        // A streams group is subscribed to a topic only when a topology references it
        // AND the group has at least one member.
        LogContext logContext = new LogContext();
        SnapshotRegistry snapshotRegistry = new SnapshotRegistry(logContext);
        StreamsGroup streamsGroup = new StreamsGroup(logContext, snapshotRegistry, "test-group");
        // No topology, no members -> not subscribed to anything.
        assertFalse(streamsGroup.isSubscribedToTopic("test-topic1"));
        assertFalse(streamsGroup.isSubscribedToTopic("test-topic2"));
        assertFalse(streamsGroup.isSubscribedToTopic("non-existent-topic"));
        StreamsTopology topology = new StreamsTopology(1,
            Map.of("test-subtopology",
                new StreamsGroupTopologyValue.Subtopology()
                    .setSubtopologyId("test-subtopology")
                    .setSourceTopics(List.of("test-topic1"))
                    .setRepartitionSourceTopics(List.of(new StreamsGroupTopologyValue.TopicInfo().setName("test-topic2")))
                    .setRepartitionSinkTopics(List.of("test-topic2"))
            ));
        streamsGroup.setTopology(topology);
        streamsGroup.updateMember(streamsGroup.getOrCreateDefaultMember("member-id"));
        // Both source and repartition-source topics count as subscriptions.
        assertTrue(streamsGroup.isSubscribedToTopic("test-topic1"));
        assertTrue(streamsGroup.isSubscribedToTopic("test-topic2"));
        assertFalse(streamsGroup.isSubscribedToTopic("non-existent-topic"));
        // Removing the last member clears the subscription again.
        streamsGroup.removeMember("member-id");
        assertFalse(streamsGroup.isSubscribedToTopic("test-topic1"));
        assertFalse(streamsGroup.isSubscribedToTopic("test-topic2"));
        assertFalse(streamsGroup.isSubscribedToTopic("non-existent-topic"));
    }
    @Test
    public void testShutdownRequestedMethods() {
        // The shutdown request records the FIRST requesting member and sticks until
        // the group becomes empty.
        String memberId1 = "test-member-id1";
        String memberId2 = "test-member-id2";
        LogContext logContext = new LogContext();
        SnapshotRegistry snapshotRegistry = new SnapshotRegistry(logContext);
        StreamsGroup streamsGroup = new StreamsGroup(logContext, snapshotRegistry, "test-group");
        streamsGroup.updateMember(streamsGroup.getOrCreateDefaultMember(memberId1));
        streamsGroup.updateMember(streamsGroup.getOrCreateDefaultMember(memberId2));
        // Initially, shutdown should not be requested
        assertTrue(streamsGroup.getShutdownRequestMemberId().isEmpty());
        // Set shutdown requested
        streamsGroup.setShutdownRequestMemberId(memberId1);
        assertEquals(Optional.of(memberId1), streamsGroup.getShutdownRequestMemberId());
        // Setting shutdown requested again will be ignored
        streamsGroup.setShutdownRequestMemberId(memberId2);
        assertEquals(Optional.of(memberId1), streamsGroup.getShutdownRequestMemberId());
        // As long as group not empty, remain in shutdown requested state
        streamsGroup.removeMember(memberId1);
        assertEquals(Optional.of(memberId1), streamsGroup.getShutdownRequestMemberId());
        // As soon as the group is empty, clear the shutdown requested state
        streamsGroup.removeMember(memberId2);
        assertEquals(Optional.empty(), streamsGroup.getShutdownRequestMemberId());
    }
@Test
public void testAsDescribedGroupWithStreamsTopologyHavingSubtopologies() {
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext());
StreamsGroup group = new StreamsGroup(LOG_CONTEXT, snapshotRegistry, "group-id-with-topology");
snapshotRegistry.idempotentCreateSnapshot(0);
// Create a topology with subtopologies
Map<String, StreamsGroupTopologyValue.Subtopology> subtopologies = Map.of(
"sub-1", new StreamsGroupTopologyValue.Subtopology()
.setSubtopologyId("sub-1")
.setSourceTopics(List.of("input-topic"))
.setRepartitionSourceTopics(List.of(
new StreamsGroupTopologyValue.TopicInfo().setName("repartition-topic")
))
.setStateChangelogTopics(List.of(
new StreamsGroupTopologyValue.TopicInfo().setName("changelog-topic")
))
);
group.setGroupEpoch(2);
group.setTopology(new StreamsTopology(2, subtopologies));
group.setTargetAssignmentEpoch(2);
group.updateMember(new StreamsGroupMember.Builder("member1")
.setMemberEpoch(2)
.setPreviousMemberEpoch(1)
.setState(MemberState.STABLE)
.setInstanceId("instance1")
.setRackId("rack1")
.setClientId("client1")
.setClientHost("host1")
.setRebalanceTimeoutMs(1000)
.setTopologyEpoch(2)
.setProcessId("process1")
.setUserEndpoint(new StreamsGroupMemberMetadataValue.Endpoint().setHost("host1").setPort(9092))
.setClientTags(Map.of("tag1", "value1"))
.setAssignedTasks(TasksTupleWithEpochs.EMPTY)
.setTasksPendingRevocation(TasksTupleWithEpochs.EMPTY)
.build());
snapshotRegistry.idempotentCreateSnapshot(1);
StreamsGroupDescribeResponseData.DescribedGroup describedGroup = group.asDescribedGroup(1);
assertEquals("group-id-with-topology", describedGroup.groupId());
assertEquals(StreamsGroup.StreamsGroupState.NOT_READY.toString(), describedGroup.groupState());
assertEquals(2, describedGroup.groupEpoch());
assertEquals(2, describedGroup.assignmentEpoch());
// Verify topology is correctly described
assertNotNull(describedGroup.topology());
assertEquals(2, describedGroup.topology().epoch());
assertEquals(1, describedGroup.topology().subtopologies().size());
StreamsGroupDescribeResponseData.Subtopology subtopology = describedGroup.topology().subtopologies().get(0);
assertEquals("sub-1", subtopology.subtopologyId());
assertEquals(List.of("input-topic"), subtopology.sourceTopics());
assertEquals(1, subtopology.repartitionSourceTopics().size());
assertEquals("repartition-topic", subtopology.repartitionSourceTopics().get(0).name());
assertEquals(1, subtopology.stateChangelogTopics().size());
assertEquals("changelog-topic", subtopology.stateChangelogTopics().get(0).name());
assertEquals(1, describedGroup.members().size());
assertEquals("member1", describedGroup.members().get(0).memberId());
}
@Test
public void testAsDescribedGroupPrefersConfiguredTopologyOverStreamsTopology() {
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext());
StreamsGroup group = new StreamsGroup(LOG_CONTEXT, snapshotRegistry, "group-id-configured");
snapshotRegistry.idempotentCreateSnapshot(0);
// Create both StreamsTopology and ConfiguredTopology
Map<String, StreamsGroupTopologyValue.Subtopology> subtopologies = Map.of(
"sub-1", new StreamsGroupTopologyValue.Subtopology()
.setSubtopologyId("sub-1")
.setSourceTopics(List.of("streams-topic"))
);
group.setGroupEpoch(3);
group.setTopology(new StreamsTopology(2, subtopologies));
group.setConfiguredTopology(new ConfiguredTopology(3, 0, Optional.of(new TreeMap<>()), Map.of(), Optional.empty()));
group.setTargetAssignmentEpoch(3);
snapshotRegistry.idempotentCreateSnapshot(1);
StreamsGroupDescribeResponseData.DescribedGroup describedGroup = group.asDescribedGroup(1);
// Should prefer ConfiguredTopology over StreamsTopology
assertNotNull(describedGroup.topology());
assertEquals(3, describedGroup.topology().epoch()); // ConfiguredTopology epoch
assertEquals(0, describedGroup.topology().subtopologies().size()); // Empty configured topology
}
@Test
public void testAsDescribedGroupFallbackToStreamsTopologyWhenConfiguredTopologyEmpty() {
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext());
StreamsGroup group = new StreamsGroup(LOG_CONTEXT, snapshotRegistry, "group-id-fallback");
snapshotRegistry.idempotentCreateSnapshot(0);
// Create StreamsTopology with subtopologies
Map<String, StreamsGroupTopologyValue.Subtopology> subtopologies = Map.of(
"sub-1", new StreamsGroupTopologyValue.Subtopology()
.setSubtopologyId("sub-1")
.setSourceTopics(List.of("fallback-topic"))
);
group.setGroupEpoch(4);
group.setTopology(new StreamsTopology(4, subtopologies));
// No ConfiguredTopology set, so should fallback to StreamsTopology
group.setTargetAssignmentEpoch(4);
snapshotRegistry.idempotentCreateSnapshot(1);
StreamsGroupDescribeResponseData.DescribedGroup describedGroup = group.asDescribedGroup(1);
// Should use StreamsTopology when ConfiguredTopology is not available
assertNotNull(describedGroup.topology());
assertEquals(4, describedGroup.topology().epoch()); // StreamsTopology epoch
assertEquals(1, describedGroup.topology().subtopologies().size());
assertEquals("sub-1", describedGroup.topology().subtopologies().get(0).subtopologyId());
assertEquals(List.of("fallback-topic"), describedGroup.topology().subtopologies().get(0).sourceTopics());
}
@Test
public void testCancelTimers() {
StreamsGroup streamsGroup = createStreamsGroup("test-group");
CoordinatorTimer<Void, CoordinatorRecord> timer = mock(CoordinatorTimer.class);
streamsGroup.cancelTimers(timer);
verify(timer).cancel("initial-rebalance-timeout-test-group");
}
}
|
StreamsGroupTest
|
java
|
spring-projects__spring-framework
|
spring-expression/src/main/java/org/springframework/expression/spel/ast/OpLE.java
|
{
"start": 1236,
"end": 4169
}
|
class ____ extends Operator {
public OpLE(int startPos, int endPos, SpelNodeImpl... operands) {
super("<=", startPos, endPos, operands);
this.exitTypeDescriptor = "Z";
}
@Override
public BooleanTypedValue getValueInternal(ExpressionState state) throws EvaluationException {
Object left = getLeftOperand().getValueInternal(state).getValue();
Object right = getRightOperand().getValueInternal(state).getValue();
this.leftActualDescriptor = CodeFlow.toDescriptorFromObject(left);
this.rightActualDescriptor = CodeFlow.toDescriptorFromObject(right);
if (left instanceof Number leftNumber && right instanceof Number rightNumber) {
if (leftNumber instanceof BigDecimal || rightNumber instanceof BigDecimal) {
BigDecimal leftBigDecimal = NumberUtils.convertNumberToTargetClass(leftNumber, BigDecimal.class);
BigDecimal rightBigDecimal = NumberUtils.convertNumberToTargetClass(rightNumber, BigDecimal.class);
return BooleanTypedValue.forValue(leftBigDecimal.compareTo(rightBigDecimal) <= 0);
}
else if (leftNumber instanceof Double || rightNumber instanceof Double) {
return BooleanTypedValue.forValue(leftNumber.doubleValue() <= rightNumber.doubleValue());
}
else if (leftNumber instanceof Float || rightNumber instanceof Float) {
return BooleanTypedValue.forValue(leftNumber.floatValue() <= rightNumber.floatValue());
}
else if (leftNumber instanceof BigInteger || rightNumber instanceof BigInteger) {
BigInteger leftBigInteger = NumberUtils.convertNumberToTargetClass(leftNumber, BigInteger.class);
BigInteger rightBigInteger = NumberUtils.convertNumberToTargetClass(rightNumber, BigInteger.class);
return BooleanTypedValue.forValue(leftBigInteger.compareTo(rightBigInteger) <= 0);
}
else if (leftNumber instanceof Long || rightNumber instanceof Long) {
return BooleanTypedValue.forValue(leftNumber.longValue() <= rightNumber.longValue());
}
else if (leftNumber instanceof Integer || rightNumber instanceof Integer) {
return BooleanTypedValue.forValue(leftNumber.intValue() <= rightNumber.intValue());
}
else if (leftNumber instanceof Short || rightNumber instanceof Short) {
return BooleanTypedValue.forValue(leftNumber.shortValue() <= rightNumber.shortValue());
}
else if (leftNumber instanceof Byte || rightNumber instanceof Byte) {
return BooleanTypedValue.forValue(leftNumber.byteValue() <= rightNumber.byteValue());
}
else {
// Unknown Number subtypes -> best guess is double comparison
return BooleanTypedValue.forValue(leftNumber.doubleValue() <= rightNumber.doubleValue());
}
}
return BooleanTypedValue.forValue(state.getTypeComparator().compare(left, right) <= 0);
}
	@Override
	public boolean isCompilable() {
		// Delegates to the shared operator check: compilable only when both
		// operands resolve to numeric types suitable for direct bytecode comparison.
		return isCompilableOperatorUsingNumerics();
	}
	@Override
	public void generateCode(MethodVisitor mv, CodeFlow cf) {
		// Emit the comparison with inverted jump opcodes: branch away when the
		// left operand is GREATER than the right (IFGT after a cmp instruction,
		// IF_ICMPGT for ints), which leaves "true" for the <= case.
		generateComparisonCode(mv, cf, IFGT, IF_ICMPGT);
	}
}
|
OpLE
|
java
|
google__guava
|
guava/src/com/google/common/collect/RegularImmutableSortedSet.java
|
{
"start": 1402,
"end": 9203
}
|
/**
 * An immutable sorted set backed by a sorted {@code ImmutableList} of its elements.
 * Membership tests and navigation methods use binary search against the set's comparator.
 */
class ____<E> extends ImmutableSortedSet<E> {
  static final RegularImmutableSortedSet<Comparable> NATURAL_EMPTY_SET =
      new RegularImmutableSortedSet<>(ImmutableList.of(), Ordering.natural());
  // Elements in ascending order according to this set's comparator.
  private final transient ImmutableList<E> elements;
  RegularImmutableSortedSet(ImmutableList<E> elements, Comparator<? super E> comparator) {
    super(comparator);
    this.elements = elements;
  }
  @Override
  Object @Nullable [] internalArray() {
    return elements.internalArray();
  }
  @Override
  int internalArrayStart() {
    return elements.internalArrayStart();
  }
  @Override
  int internalArrayEnd() {
    return elements.internalArrayEnd();
  }
  @Override
  public UnmodifiableIterator<E> iterator() {
    return elements.iterator();
  }
  @GwtIncompatible // NavigableSet
  @Override
  public UnmodifiableIterator<E> descendingIterator() {
    return elements.reverse().iterator();
  }
  @Override
  public Spliterator<E> spliterator() {
    return asList().spliterator();
  }
  @Override
  public void forEach(Consumer<? super E> action) {
    elements.forEach(action);
  }
  @Override
  public int size() {
    return elements.size();
  }
  @Override
  public boolean contains(@Nullable Object o) {
    // Binary search; an incomparable argument (CCE) simply means "not a member".
    try {
      return o != null && unsafeBinarySearch(o) >= 0;
    } catch (ClassCastException e) {
      return false;
    }
  }
  @Override
  public boolean containsAll(Collection<?> targets) {
    // TODO(jlevy): For optimal performance, use a binary search when
    // targets.size() < size() / log(size())
    // TODO(kevinb): see if we can share code with OrderedIterator after it
    // graduates from labs.
    if (targets instanceof Multiset) {
      targets = ((Multiset<?>) targets).elementSet();
    }
    // Fall back to the generic implementation unless targets is sorted the same
    // way as this set and large enough for the merge-style scan to pay off.
    if (!SortedIterables.hasSameComparator(comparator(), targets) || (targets.size() <= 1)) {
      return super.containsAll(targets);
    }
    /*
     * If targets is a sorted set with the same comparator, containsAll can run
     * in O(n) time stepping through the two collections.
     */
    Iterator<E> thisIterator = iterator();
    Iterator<?> thatIterator = targets.iterator();
    // known nonempty since we checked targets.size() > 1
    if (!thisIterator.hasNext()) {
      return false;
    }
    Object target = thatIterator.next();
    E current = thisIterator.next();
    try {
      // Two-pointer merge walk: advance this set while behind the target,
      // advance targets on a match, fail as soon as a target is overshot.
      while (true) {
        int cmp = unsafeCompare(current, target);
        if (cmp < 0) {
          if (!thisIterator.hasNext()) {
            return false;
          }
          current = thisIterator.next();
        } else if (cmp == 0) {
          if (!thatIterator.hasNext()) {
            return true;
          }
          target = thatIterator.next();
        } else if (cmp > 0) {
          return false;
        }
      }
    } catch (NullPointerException | ClassCastException e) {
      // A null or incomparable target can never be contained.
      return false;
    }
  }
  // Binary search using the unchecked comparator; propagates CCE for incomparable keys.
  private int unsafeBinarySearch(Object key) throws ClassCastException {
    return Collections.binarySearch(elements, key, unsafeComparator());
  }
  @Override
  boolean isPartialView() {
    return elements.isPartialView();
  }
  @Override
  int copyIntoArray(@Nullable Object[] dst, int offset) {
    return elements.copyIntoArray(dst, offset);
  }
  @Override
  public boolean equals(@Nullable Object object) {
    if (object == this) {
      return true;
    }
    if (!(object instanceof Set)) {
      return false;
    }
    Set<?> that = (Set<?>) object;
    if (size() != that.size()) {
      return false;
    } else if (isEmpty()) {
      return true;
    }
    // Same ordering: compare element-by-element in a single linear pass.
    if (SortedIterables.hasSameComparator(comparator, that)) {
      Iterator<?> otherIterator = that.iterator();
      try {
        Iterator<E> iterator = iterator();
        while (iterator.hasNext()) {
          Object element = iterator.next();
          Object otherElement = otherIterator.next();
          if (otherElement == null || unsafeCompare(element, otherElement) != 0) {
            return false;
          }
        }
        return true;
      } catch (ClassCastException e) {
        return false;
      } catch (NoSuchElementException e) {
        return false; // concurrent change to other set
      }
    }
    // Different ordering: sizes already match, so mutual containment reduces to this.
    return containsAll(that);
  }
  @Override
  public E first() {
    if (isEmpty()) {
      throw new NoSuchElementException();
    }
    return elements.get(0);
  }
  @Override
  public E last() {
    if (isEmpty()) {
      throw new NoSuchElementException();
    }
    return elements.get(size() - 1);
  }
  @Override
  public @Nullable E lower(E element) {
    int index = headIndex(element, false) - 1;
    return (index == -1) ? null : elements.get(index);
  }
  @Override
  public @Nullable E floor(E element) {
    int index = headIndex(element, true) - 1;
    return (index == -1) ? null : elements.get(index);
  }
  @Override
  public @Nullable E ceiling(E element) {
    int index = tailIndex(element, true);
    return (index == size()) ? null : elements.get(index);
  }
  @Override
  public @Nullable E higher(E element) {
    int index = tailIndex(element, false);
    return (index == size()) ? null : elements.get(index);
  }
  @Override
  ImmutableSortedSet<E> headSetImpl(E toElement, boolean inclusive) {
    return getSubSet(0, headIndex(toElement, inclusive));
  }
  // Exclusive upper bound (index) of the head set ending at toElement.
  int headIndex(E toElement, boolean inclusive) {
    int index = Collections.binarySearch(elements, checkNotNull(toElement), comparator());
    if (index >= 0) {
      return inclusive ? index + 1 : index;
    } else {
      // binarySearch returns -(insertionPoint + 1) on a miss; ~index recovers it.
      return ~index;
    }
  }
  @Override
  ImmutableSortedSet<E> subSetImpl(
      E fromElement, boolean fromInclusive, E toElement, boolean toInclusive) {
    return tailSetImpl(fromElement, fromInclusive).headSetImpl(toElement, toInclusive);
  }
  @Override
  ImmutableSortedSet<E> tailSetImpl(E fromElement, boolean inclusive) {
    return getSubSet(tailIndex(fromElement, inclusive), size());
  }
  // Inclusive lower bound (index) of the tail set starting at fromElement.
  int tailIndex(E fromElement, boolean inclusive) {
    int index = Collections.binarySearch(elements, checkNotNull(fromElement), comparator());
    if (index >= 0) {
      return inclusive ? index : index + 1;
    } else {
      // binarySearch returns -(insertionPoint + 1) on a miss; ~index recovers it.
      return ~index;
    }
  }
  // Pretend the comparator can compare anything. If it turns out it can't
  // compare two elements, it'll throw a CCE. Only methods that are specified to
  // throw CCE should call this.
  @SuppressWarnings("unchecked")
  Comparator<Object> unsafeComparator() {
    return (Comparator<Object>) comparator;
  }
  // Returns this set for the full range, an empty set for an empty range,
  // and otherwise a view backed by the sublist of elements.
  RegularImmutableSortedSet<E> getSubSet(int newFromIndex, int newToIndex) {
    if (newFromIndex == 0 && newToIndex == size()) {
      return this;
    } else if (newFromIndex < newToIndex) {
      return new RegularImmutableSortedSet<>(
          elements.subList(newFromIndex, newToIndex), comparator);
    } else {
      return emptySet(comparator);
    }
  }
  // Position of target in the backing list, or -1 if null, absent, or incomparable.
  @Override
  int indexOf(@Nullable Object target) {
    if (target == null) {
      return -1;
    }
    int position;
    try {
      position = Collections.binarySearch(elements, target, unsafeComparator());
    } catch (ClassCastException e) {
      return -1;
    }
    return (position >= 0) ? position : -1;
  }
  @Override
  ImmutableList<E> createAsList() {
    return (size() <= 1) ? elements : new ImmutableSortedAsList<E>(this, elements);
  }
  @Override
  ImmutableSortedSet<E> createDescendingSet() {
    Comparator<? super E> reversedOrder = Collections.reverseOrder(comparator);
    return isEmpty()
        ? emptySet(reversedOrder)
        : new RegularImmutableSortedSet<E>(elements.reverse(), reversedOrder);
  }
  // redeclare to help optimizers with b/310253115
  @SuppressWarnings("RedundantOverride")
  @Override
  @J2ktIncompatible
  @GwtIncompatible
  Object writeReplace() {
    return super.writeReplace();
  }
}
|
RegularImmutableSortedSet
|
java
|
hibernate__hibernate-orm
|
tooling/metamodel-generator/src/main/java/org/hibernate/processor/annotation/LifecycleMethod.java
|
{
"start": 1402,
"end": 1900
}
|
class ____ extends AbstractAnnotatedMethod {
private final String entity;
private final String actualEntity;
private final String methodName;
private final String parameterName;
private final String operationName;
private final boolean addNonnullAnnotation;
private final ParameterKind parameterKind;
private final boolean returnArgument;
private final boolean hasGeneratedId;
private final Collection<String> methodTypeParameters;
private final TypeElement element;
public
|
LifecycleMethod
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestConnCache.java
|
{
"start": 1286,
"end": 1373
}
|
class ____ the client connection caching in a single node
* mini-cluster.
*/
public
|
tests
|
java
|
google__dagger
|
javatests/dagger/functional/producers/subcomponent/ModuleSubcomponentsInterop.java
|
{
"start": 1541,
"end": 1598
}
|
interface ____ {
@Subcomponent.Builder
|
ProvisionChild
|
java
|
spring-projects__spring-framework
|
spring-orm/src/main/java/org/springframework/orm/jpa/LocalContainerEntityManagerFactoryBean.java
|
{
"start": 4864,
"end": 17308
}
|
class ____ extends AbstractEntityManagerFactoryBean
implements ResourceLoaderAware, LoadTimeWeaverAware {
private @Nullable PersistenceUnitManager persistenceUnitManager;
private final DefaultPersistenceUnitManager internalPersistenceUnitManager = new DefaultPersistenceUnitManager();
private @Nullable PersistenceUnitInfo persistenceUnitInfo;
/**
* Set the PersistenceUnitManager to use for obtaining the JPA persistence unit
* that this FactoryBean is supposed to build an EntityManagerFactory for.
* <p>The default is to rely on the local settings specified on this FactoryBean,
* such as "persistenceXmlLocation", "dataSource" and "loadTimeWeaver".
* <p>For reuse of existing persistence unit configuration or more advanced forms
* of custom persistence unit handling, consider defining a separate
* PersistenceUnitManager bean (typically a DefaultPersistenceUnitManager instance)
* and linking it in here. {@code persistence.xml} location, DataSource
* configuration and LoadTimeWeaver will be defined on that separate
* DefaultPersistenceUnitManager bean in such a scenario.
* @see #setPersistenceXmlLocation
* @see #setDataSource
* @see #setLoadTimeWeaver
* @see org.springframework.orm.jpa.persistenceunit.DefaultPersistenceUnitManager
*/
public void setPersistenceUnitManager(PersistenceUnitManager persistenceUnitManager) {
this.persistenceUnitManager = persistenceUnitManager;
}
/**
* Set the location of the {@code persistence.xml} file
* we want to use. This is a Spring resource location.
* <p>Default is "classpath:META-INF/persistence.xml".
* <p><b>NOTE: Only applied if no external PersistenceUnitManager specified.</b>
* @param persistenceXmlLocation a Spring resource String
* identifying the location of the {@code persistence.xml} file
* that this LocalContainerEntityManagerFactoryBean should parse
* @see #setPersistenceUnitManager
*/
public void setPersistenceXmlLocation(String persistenceXmlLocation) {
this.internalPersistenceUnitManager.setPersistenceXmlLocation(persistenceXmlLocation);
}
/**
* Uses the specified persistence unit name as the name of the default
* persistence unit, if applicable.
* <p><b>NOTE: Only applied if no external PersistenceUnitManager specified.</b>
* @see DefaultPersistenceUnitManager#setDefaultPersistenceUnitName
*/
@Override
public void setPersistenceUnitName(@Nullable String persistenceUnitName) {
super.setPersistenceUnitName(persistenceUnitName);
if (persistenceUnitName != null) {
this.internalPersistenceUnitManager.setDefaultPersistenceUnitName(persistenceUnitName);
}
}
/**
* Set a persistence unit root location for the default persistence unit.
* <p>Default is "classpath:", that is, the root of the current classpath
* (nearest root directory). To be overridden if unit-specific resolution
* does not work and the classpath root is not appropriate either.
* <p><b>NOTE: Only applied if no external PersistenceUnitManager specified.</b>
* @since 4.3.3
* @see DefaultPersistenceUnitManager#setDefaultPersistenceUnitRootLocation
*/
public void setPersistenceUnitRootLocation(String defaultPersistenceUnitRootLocation) {
this.internalPersistenceUnitManager.setDefaultPersistenceUnitRootLocation(defaultPersistenceUnitRootLocation);
}
/**
* Set a local JPA 3.2 {@link PersistenceConfiguration} to use for this
* persistence unit.
* <p>Note: {@link PersistenceConfiguration} includes a persistence unit name,
* so this effectively overrides the {@link #setPersistenceUnitName} method.
* In contrast, all other settings will be merged with the settings in the
* {@code PersistenceConfiguration} instance.
* @since 7.0
* @see DefaultPersistenceUnitManager#setPersistenceConfiguration
*/
public void setPersistenceConfiguration(PersistenceConfiguration configuration) {
Assert.notNull(configuration, "PersistenceConfiguration must not be null");
super.setPersistenceUnitName(configuration.name());
this.internalPersistenceUnitManager.setPersistenceConfiguration(configuration);
}
/**
* Set the {@link PersistenceManagedTypes} to use to build the list of managed types
* as an alternative to entity scanning.
* @param managedTypes the managed types
* @since 6.0
* @see DefaultPersistenceUnitManager#setManagedTypes(PersistenceManagedTypes)
*/
public void setManagedTypes(PersistenceManagedTypes managedTypes) {
this.internalPersistenceUnitManager.setManagedTypes(managedTypes);
}
/**
* Set whether to use Spring-based scanning for entity classes in the classpath
* instead of using JPA's standard scanning of jar files with {@code persistence.xml}
* markers in them. In case of Spring-based scanning, no {@code persistence.xml}
* is necessary; all you need to do is to specify base packages to search here.
* <p>Default is none. Specify packages to search for autodetection of your entity
* classes in the classpath. This is analogous to Spring's component-scan feature
* ({@link org.springframework.context.annotation.ClassPathBeanDefinitionScanner}).
* <p>Consider setting a {@link PersistenceManagedTypes} instead that allows the
* scanning logic to be optimized by AOT processing.
* <p><b>Note: There may be limitations in comparison to regular JPA scanning.</b>
* In particular, JPA providers may pick up annotated packages for provider-specific
* annotations only when driven by {@code persistence.xml}. As of 4.1, Spring's
* scan can detect annotated packages as well if supported by the given
* {@link JpaVendorAdapter} (for example, for Hibernate).
* <p>If no explicit {@link #setMappingResources mapping resources} have been
* specified in addition to these packages, Spring's setup looks for a default
* {@code META-INF/orm.xml} file in the classpath, registering it as a mapping
* resource for the default unit if the mapping file is not co-located with a
* {@code persistence.xml} file (in which case we assume it is only meant to be
* used with the persistence units defined there, like in standard JPA).
* <p><b>NOTE: Only applied if no external PersistenceUnitManager specified.</b>
* @param packagesToScan one or more base packages to search, analogous to
* Spring's component-scan configuration for regular Spring components
* @see #setPersistenceUnitManager
* @see DefaultPersistenceUnitManager#setPackagesToScan
*/
public void setPackagesToScan(String... packagesToScan) {
this.internalPersistenceUnitManager.setPackagesToScan(packagesToScan);
}
/**
* Set the {@link ManagedClassNameFilter} to apply on entity classes discovered
* using {@linkplain #setPackagesToScan(String...) classpath scanning}.
* @param managedClassNameFilter a predicate to filter entity classes
* @since 6.1.4
* @see DefaultPersistenceUnitManager#setManagedClassNameFilter
*/
public void setManagedClassNameFilter(ManagedClassNameFilter managedClassNameFilter) {
this.internalPersistenceUnitManager.setManagedClassNameFilter(managedClassNameFilter);
}
/**
* Specify one or more mapping resources (equivalent to {@code <mapping-file>}
* entries in {@code persistence.xml}) for the default persistence unit.
* Can be used on its own or in combination with entity scanning in the classpath,
* in both cases avoiding {@code persistence.xml}.
* <p>Note that mapping resources must be relative to the classpath root,
* for example, "META-INF/mappings.xml" or "com/mycompany/repository/mappings.xml",
* so that they can be loaded through {@code ClassLoader.getResource}.
* <p>If no explicit mapping resources have been specified next to
* {@link #setPackagesToScan packages to scan}, Spring's setup looks for a default
* {@code META-INF/orm.xml} file in the classpath, registering it as a mapping
* resource for the default unit if the mapping file is not co-located with a
* {@code persistence.xml} file (in which case we assume it is only meant to be
* used with the persistence units defined there, like in standard JPA).
* <p>Note that specifying an empty array/list here suppresses the default
* {@code META-INF/orm.xml} check. On the other hand, explicitly specifying
* {@code META-INF/orm.xml} here will register that file even if it happens
* to be co-located with a {@code persistence.xml} file.
* <p><b>NOTE: Only applied if no external PersistenceUnitManager specified.</b>
* @see #setPersistenceUnitManager
* @see DefaultPersistenceUnitManager#setMappingResources
*/
public void setMappingResources(String... mappingResources) {
this.internalPersistenceUnitManager.setMappingResources(mappingResources);
}
/**
* Specify the JPA 2.0 shared cache mode for this persistence unit,
* overriding a value in {@code persistence.xml} if set.
* <p><b>NOTE: Only applied if no external PersistenceUnitManager specified.</b>
* @since 4.0
* @see jakarta.persistence.spi.PersistenceUnitInfo#getSharedCacheMode()
* @see #setPersistenceUnitManager
*/
public void setSharedCacheMode(SharedCacheMode sharedCacheMode) {
this.internalPersistenceUnitManager.setSharedCacheMode(sharedCacheMode);
}
/**
* Specify the JPA 2.0 validation mode for this persistence unit,
* overriding a value in {@code persistence.xml} if set.
* <p><b>NOTE: Only applied if no external PersistenceUnitManager specified.</b>
* @since 4.0
* @see jakarta.persistence.spi.PersistenceUnitInfo#getValidationMode()
* @see #setPersistenceUnitManager
*/
public void setValidationMode(ValidationMode validationMode) {
this.internalPersistenceUnitManager.setValidationMode(validationMode);
}
/**
* Specify the JDBC DataSource that the JPA persistence provider is supposed
* to use for accessing the database. This is an alternative to keeping the
* JDBC configuration in {@code persistence.xml}, passing in a Spring-managed
* DataSource instead.
* <p>In JPA speak, a DataSource passed in here will be used as "nonJtaDataSource"
* on the PersistenceUnitInfo passed to the PersistenceProvider, as well as
* overriding data source configuration in {@code persistence.xml} (if any).
* Note that this variant typically works for JTA transaction management as well;
* if it does not, consider using the explicit {@link #setJtaDataSource} instead.
* <p><b>NOTE: Only applied if no external PersistenceUnitManager specified.</b>
* @see jakarta.persistence.spi.PersistenceUnitInfo#getNonJtaDataSource()
* @see #setPersistenceUnitManager
*/
public void setDataSource(@Nullable DataSource dataSource) {
this.internalPersistenceUnitManager.setDataSourceLookup(
dataSource != null ? new SingleDataSourceLookup(dataSource) : null);
this.internalPersistenceUnitManager.setDefaultDataSource(dataSource);
}
/**
* Specify the JDBC DataSource that the JPA persistence provider is supposed
* to use for accessing the database. This is an alternative to keeping the
* JDBC configuration in {@code persistence.xml}, passing in a Spring-managed
* DataSource instead.
* <p>In JPA speak, a DataSource passed in here will be used as "jtaDataSource"
* on the PersistenceUnitInfo passed to the PersistenceProvider, as well as
* overriding data source configuration in {@code persistence.xml} (if any).
* <p><b>NOTE: Only applied if no external PersistenceUnitManager specified.</b>
* @see jakarta.persistence.spi.PersistenceUnitInfo#getJtaDataSource()
* @see #setPersistenceUnitManager
*/
public void setJtaDataSource(@Nullable DataSource jtaDataSource) {
this.internalPersistenceUnitManager.setDataSourceLookup(
jtaDataSource != null ? new SingleDataSourceLookup(jtaDataSource) : null);
this.internalPersistenceUnitManager.setDefaultJtaDataSource(jtaDataSource);
}
/**
* Set the PersistenceUnitPostProcessors to be applied to the
* PersistenceUnitInfo used for creating this EntityManagerFactory.
* <p>Such post-processors can, for example, register further entity
* classes and jar files, in addition to the metadata read from
* {@code persistence.xml}.
* <p><b>NOTE: Only applied if no external PersistenceUnitManager specified.</b>
* @see #setPersistenceUnitManager
*/
public void setPersistenceUnitPostProcessors(PersistenceUnitPostProcessor... postProcessors) {
this.internalPersistenceUnitManager.setPersistenceUnitPostProcessors(postProcessors);
}
/**
* Specify the Spring LoadTimeWeaver to use for
|
LocalContainerEntityManagerFactoryBean
|
java
|
apache__kafka
|
connect/runtime/src/main/java/org/apache/kafka/connect/runtime/StateTracker.java
|
{
"start": 1096,
"end": 2308
}
|
class ____ {
private final AtomicReference<StateChange> lastState = new AtomicReference<>(new StateChange());
/**
* Change the current state.
*
* @param newState the current state; may not be null
* @param now the current time in milliseconds
*/
public void changeState(State newState, long now) {
lastState.getAndUpdate(oldState -> oldState.newState(newState, now));
}
/**
* Calculate the ratio of time spent in the specified state.
*
* @param ratioState the state for which the ratio is to be calculated; may not be null
* @param now the current time in milliseconds
* @return the ratio of time spent in the specified state to the time spent in all states
*/
public double durationRatio(State ratioState, long now) {
return lastState.get().durationRatio(ratioState, now);
}
/**
* Get the current state.
*
* @return the current state; may be null if no state change has been recorded
*/
public State currentState() {
return lastState.get().state;
}
/**
* An immutable record of the accumulated times at the most recent state change. This
|
StateTracker
|
java
|
quarkusio__quarkus
|
integration-tests/gradle/src/test/java/io/quarkus/gradle/DevDepsLeakIntoProdClaspathTest.java
|
{
"start": 93,
"end": 432
}
|
/**
 * Gradle integration test for project {@code dev-deps-leak-into-prod-48992}; the
 * directory name references issue 48992 (dev-only dependencies leaking into the
 * production classpath -- TODO confirm against the issue tracker).
 */
class ____ extends QuarkusGradleWrapperTestBase {
    @Test
    public void test() throws Exception {
        final File projectDir = getProjectDir("dev-deps-leak-into-prod-48992");
        // Write the dependency lock files first, then build; the test passes if
        // both Gradle invocations complete without error.
        runGradleWrapper(projectDir, "dependencies", "--write-locks");
        runGradleWrapper(projectDir, "assemble");
    }
}
|
DevDepsLeakIntoProdClaspathTest
|
java
|
google__guice
|
core/test/com/google/inject/ModuleTest.java
|
{
"start": 1255,
"end": 1355
}
|
class ____ {}
  public void testDiamond() throws Exception {
    // Building an injector with module A (defined elsewhere in this file) must not
    // throw -- presumably A reaches the same module via two install paths (a
    // "diamond"); only successful injector creation is asserted. TODO confirm.
    Guice.createInjector(new A());
  }
}
|
W
|
java
|
mapstruct__mapstruct
|
processor/src/main/java/org/mapstruct/ap/internal/model/common/ImplementationType.java
|
{
"start": 396,
"end": 2920
}
|
/**
 * Immutable descriptor pairing a concrete {@link Type} with the flags that govern
 * how an instance of it is created: via the default constructor, an initial-capacity
 * constructor (optionally load-factor adjusted), or a named static factory method.
 */
class ____ {

    private final Type type;
    private final boolean initialCapacityConstructor;
    private final boolean loadFactorAdjustment;
    private final String factoryMethodName;

    private ImplementationType(Type type, boolean initialCapacityConstructor,
                               boolean loadFactorAdjustment, String factoryMethodName) {
        this.type = type;
        this.initialCapacityConstructor = initialCapacityConstructor;
        this.loadFactorAdjustment = loadFactorAdjustment;
        this.factoryMethodName = factoryMethodName;
    }

    /** Descriptor for a type instantiated through its no-args constructor. */
    public static ImplementationType withDefaultConstructor(Type type) {
        return new ImplementationType(type, false, false, null);
    }

    /** Descriptor for a type whose constructor accepts an {@code int} initial capacity. */
    public static ImplementationType withInitialCapacity(Type type) {
        return new ImplementationType(type, true, false, null);
    }

    /** Like {@link #withInitialCapacity(Type)}, but the capacity needs load-factor adjustment. */
    public static ImplementationType withLoadFactorAdjustment(Type type) {
        return new ImplementationType(type, true, true, null);
    }

    /** Descriptor for a type created via the given static factory method. */
    public static ImplementationType withFactoryMethod(Type type, String factoryMethodName) {
        return new ImplementationType(type, true, false, factoryMethodName);
    }

    /**
     * Creates new {@link ImplementationType} that has the same {@link #initialCapacityConstructor} and
     * {@link #loadFactorAdjustment}, but a different underlying {@link Type}
     *
     * @param type to be replaced
     *
     * @return a new implementation type with the given {@code type}
     */
    public ImplementationType createNew(Type type) {
        return new ImplementationType(type, initialCapacityConstructor, loadFactorAdjustment, factoryMethodName);
    }

    /** @return the underlying {@link Type} */
    public Type getType() {
        return type;
    }

    /** @return {@code true} if the type has an {@code int} initial-capacity constructor */
    public boolean hasInitialCapacityConstructor() {
        return initialCapacityConstructor;
    }

    /**
     * @return {@code true} if the initial capacity must be load-factor adjusted;
     * when {@code true}, {@link #hasInitialCapacityConstructor()} is also {@code true}
     */
    public boolean isLoadFactorAdjustment() {
        return loadFactorAdjustment;
    }

    /** @return the static factory method name, or {@code null} if a constructor is used */
    public String getFactoryMethodName() {
        return factoryMethodName;
    }
}
|
ImplementationType
|
java
|
quarkusio__quarkus
|
integration-tests/main/src/test/java/io/quarkus/it/main/EnabledIfTest.java
|
{
"start": 912,
"end": 1092
}
|
class ____ {
@Test
@EnabledIf(R5Enabled.class)
void testInjectionWorksProperly() {
// empty test, nothing to test here
}
public static final
|
EnabledIfTest
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/checkpointing/StatefulJobWBroadcastStateMigrationITCase.java
|
{
"start": 21804,
"end": 23543
}
|
/**
 * Verification function used after restoring from a savepoint: for every keyed
 * element it asserts that the restored broadcast state ("broadcast-state-3")
 * equals the expected map, then forwards the element unchanged.
 */
class ____
        extends KeyedBroadcastProcessFunction<
                Long, Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>> {
    private static final long serialVersionUID = 1333992081671604521L;
    // Broadcast-state contents expected to have been restored.
    private final Map<Long, String> expectedState;
    // Descriptor of the broadcast state under test; initialized in open().
    private MapStateDescriptor<Long, String> stateDesc;
    CheckingKeyedSingleBroadcastFunction(Map<Long, String> state) {
        this.expectedState = state;
    }
    @Override
    public void open(OpenContext openContext) throws Exception {
        super.open(openContext);
        stateDesc =
                new MapStateDescriptor<>(
                        "broadcast-state-3",
                        BasicTypeInfo.LONG_TYPE_INFO,
                        BasicTypeInfo.STRING_TYPE_INFO);
    }
    @Override
    public void processElement(
            Tuple2<Long, Long> value, ReadOnlyContext ctx, Collector<Tuple2<Long, Long>> out)
            throws Exception {
        // Snapshot the broadcast state into a plain map and compare to expectations.
        final Map<Long, String> actualState = new HashMap<>();
        for (Map.Entry<Long, String> entry :
                ctx.getBroadcastState(stateDesc).immutableEntries()) {
            actualState.put(entry.getKey(), entry.getValue());
        }
        Assert.assertEquals(expectedState, actualState);
        out.collect(value);
    }
    @Override
    public void processBroadcastElement(
            Tuple2<Long, Long> value, Context ctx, Collector<Tuple2<Long, Long>> out)
            throws Exception {
        // now we do nothing as we just want to verify the contents of the broadcast state.
    }
}
}
|
CheckingKeyedSingleBroadcastFunction
|
java
|
spring-projects__spring-framework
|
spring-aop/src/test/java/org/springframework/aop/aspectj/annotation/ReflectiveAspectJAdvisorFactoryTests.java
|
{
"start": 846,
"end": 1055
}
|
/**
 * Runs the shared {@code AbstractAspectJAdvisorFactoryTests} suite against the
 * reflection-based {@code ReflectiveAspectJAdvisorFactory} implementation.
 */
class ____ extends AbstractAspectJAdvisorFactoryTests {
	@Override
	protected AspectJAdvisorFactory getAdvisorFactory() {
		// Factory under test: a fresh instance per call.
		return new ReflectiveAspectJAdvisorFactory();
	}
}
|
ReflectiveAspectJAdvisorFactoryTests
|
java
|
quarkusio__quarkus
|
extensions/cache/deployment/src/main/java/io/quarkus/cache/deployment/exception/KeyGeneratorConstructorException.java
|
{
"start": 91,
"end": 477
}
|
/**
 * Thrown when a cache key generator class does not declare the required
 * default (no-args) constructor.
 */
class ____ extends RuntimeException {

    /** The key generator class missing a default constructor; fixed at construction. */
    private final ClassInfo classInfo;

    /**
     * @param classInfo the cache key generator class that lacks a default constructor
     */
    public KeyGeneratorConstructorException(ClassInfo classInfo) {
        super("No default constructor found in cache key generator [class=" + classInfo.name() + "]");
        this.classInfo = classInfo;
    }

    /** @return the offending key generator class */
    public ClassInfo getClassInfo() {
        return classInfo;
    }
}
|
KeyGeneratorConstructorException
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/collection/map/Parent.java
|
{
"start": 259,
"end": 917
}
|
/**
 * Test entity owning a map of {@code Child} instances keyed by each child's name.
 */
class ____ {

    private String name;
    private Map<String, Child> children = new HashMap<>();

    public Parent() {
    }

    public Parent(String name) {
        this.name = name;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Map<String, Child> getChildren() {
        return children;
    }

    public void setChildren(Map<String, Child> children) {
        this.children = children;
    }

    /** Creates a child with the given name, attaches it to this parent, and returns it. */
    public Child addChild(String name) {
        Child created = new Child(name);
        addChild(created);
        return created;
    }

    /** Attaches an existing child: sets its back-reference and registers it under its name. */
    public void addChild(Child child) {
        child.setParent(this);
        getChildren().put(child.getName(), child);
    }
}
|
Parent
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/junit/jupiter/TestConstructorIntegrationTests.java
|
{
"start": 3389,
"end": 3623
}
|
class ____ {
private final String foo;
AutomaticallyAutowiredTestCase(String foo) {
this.foo = foo;
}
@Test
void test() {
assertThat(foo).isEqualTo("bar");
}
@Configuration
static
|
AutomaticallyAutowiredTestCase
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/floats/Floats_assertIsNegative_Test.java
|
{
"start": 1221,
"end": 2582
}
|
/**
 * Tests for {@code Floats.assertIsNegative}: a value passes only when strictly
 * less than zero under the active comparison strategy (the expected failure
 * messages compare the actual value against {@code 0.0f}).
 */
class ____ extends FloatsBaseTest {
  @Test
  void should_succeed_since_actual_is_negative() {
    floats.assertIsNegative(someInfo(), -6.0f);
  }
  @Test
  void should_fail_since_actual_is_not_negative() {
    // WHEN
    var assertionError = expectAssertionError(() -> floats.assertIsNegative(someInfo(), 6.0f));
    // THEN
    then(assertionError).hasMessage(shouldBeLess(6.0f, 0.0f).create());
  }
  @Test
  void should_fail_since_actual_is_zero() {
    // Zero is not strictly negative.
    // WHEN
    var assertionError = expectAssertionError(() -> floats.assertIsNegative(someInfo(), 0.0f));
    // THEN
    then(assertionError).hasMessage(shouldBeLess(0.0f, 0.0f).create());
  }
  @Test
  void should_fail_since_actual_can_not_be_negative_according_to_custom_comparison_strategy() {
    // WHEN
    var error = expectAssertionError(() -> floatsWithAbsValueComparisonStrategy.assertIsNegative(someInfo(), 6.0f));
    // THEN
    then(error).hasMessage(shouldBeLess(6.0f, 0.0f, absValueComparisonStrategy).create());
  }
  @Test
  void should_fail_since_actual_is_not_negative_according_to_custom_comparison_strategy() {
    // Even -1.0f fails here: under the absolute-value strategy its magnitude
    // apparently does not compare below zero -- see absValueComparisonStrategy.
    // WHEN
    var error = expectAssertionError(() -> floatsWithAbsValueComparisonStrategy.assertIsNegative(someInfo(), -1.0f));
    // THEN
    then(error).hasMessage(shouldBeLess(-1.0f, 0.0f, absValueComparisonStrategy).create());
  }
}
|
Floats_assertIsNegative_Test
|
java
|
mapstruct__mapstruct
|
processor/src/main/java/org/mapstruct/ap/internal/model/common/TypeFactory.java
|
{
"start": 2913,
"end": 16170
}
|
class ____ {
private static final Extractor<BuilderInfo, String> BUILDER_INFO_CREATION_METHOD_EXTRACTOR =
builderInfo -> {
ExecutableElement builderCreationMethod = builderInfo.getBuilderCreationMethod();
StringBuilder sb = new StringBuilder( builderCreationMethod.getSimpleName() );
sb.append( '(' );
for ( VariableElement parameter : builderCreationMethod.getParameters() ) {
sb.append( parameter );
}
sb.append( ')' );
return sb.toString();
};
private static final String LINKED_HASH_SET_FACTORY_METHOD_NAME = "newLinkedHashSet";
private static final String LINKED_HASH_MAP_FACTORY_METHOD_NAME = "newLinkedHashMap";
private final ElementUtils elementUtils;
private final TypeUtils typeUtils;
private final FormattingMessager messager;
private final RoundContext roundContext;
private final TypeMirror iterableType;
private final TypeMirror collectionType;
private final TypeMirror mapType;
private final TypeMirror streamType;
private final Map<String, ImplementationType> implementationTypes = new HashMap<>();
private final Map<String, String> toBeImportedTypes = new HashMap<>();
private final Map<String, String> notToBeImportedTypes;
private final boolean loggingVerbose;
public TypeFactory(ElementUtils elementUtils, TypeUtils typeUtils, FormattingMessager messager,
RoundContext roundContext, Map<String, String> notToBeImportedTypes, boolean loggingVerbose,
VersionInformation versionInformation) {
this.elementUtils = elementUtils;
this.typeUtils = typeUtils;
this.messager = messager;
this.roundContext = roundContext;
this.notToBeImportedTypes = notToBeImportedTypes;
iterableType = typeUtils.erasure( elementUtils.getTypeElement( Iterable.class.getCanonicalName() ).asType() );
collectionType =
typeUtils.erasure( elementUtils.getTypeElement( Collection.class.getCanonicalName() ).asType() );
mapType = typeUtils.erasure( elementUtils.getTypeElement( Map.class.getCanonicalName() ).asType() );
TypeElement streamTypeElement = elementUtils.getTypeElement( JavaStreamConstants.STREAM_FQN );
streamType = streamTypeElement == null ? null : typeUtils.erasure( streamTypeElement.asType() );
implementationTypes.put( Iterable.class.getName(), withInitialCapacity( getType( ArrayList.class ) ) );
implementationTypes.put( Collection.class.getName(), withInitialCapacity( getType( ArrayList.class ) ) );
implementationTypes.put( List.class.getName(), withInitialCapacity( getType( ArrayList.class ) ) );
boolean sourceVersionAtLeast19 = versionInformation.isSourceVersionAtLeast19();
implementationTypes.put(
Set.class.getName(),
sourceVersionAtLeast19 ?
withFactoryMethod( getType( LinkedHashSet.class ), LINKED_HASH_SET_FACTORY_METHOD_NAME ) :
withLoadFactorAdjustment( getType( LinkedHashSet.class ) )
);
implementationTypes.put( SortedSet.class.getName(), withDefaultConstructor( getType( TreeSet.class ) ) );
implementationTypes.put( NavigableSet.class.getName(), withDefaultConstructor( getType( TreeSet.class ) ) );
implementationTypes.put(
Map.class.getName(),
sourceVersionAtLeast19 ?
withFactoryMethod( getType( LinkedHashMap.class ), LINKED_HASH_MAP_FACTORY_METHOD_NAME ) :
withLoadFactorAdjustment( getType( LinkedHashMap.class ) )
);
implementationTypes.put( SortedMap.class.getName(), withDefaultConstructor( getType( TreeMap.class ) ) );
implementationTypes.put( NavigableMap.class.getName(), withDefaultConstructor( getType( TreeMap.class ) ) );
implementationTypes.put(
ConcurrentMap.class.getName(),
withLoadFactorAdjustment( getType( ConcurrentHashMap.class ) )
);
implementationTypes.put(
ConcurrentNavigableMap.class.getName(),
withDefaultConstructor( getType( ConcurrentSkipListMap.class ) )
);
implementationTypes.put(
JavaCollectionConstants.SEQUENCED_SET_FQN,
sourceVersionAtLeast19 ?
withFactoryMethod( getType( LinkedHashSet.class ), LINKED_HASH_SET_FACTORY_METHOD_NAME ) :
withLoadFactorAdjustment( getType( LinkedHashSet.class ) )
);
implementationTypes.put(
JavaCollectionConstants.SEQUENCED_MAP_FQN,
sourceVersionAtLeast19 ?
withFactoryMethod( getType( LinkedHashMap.class ), LINKED_HASH_MAP_FACTORY_METHOD_NAME ) :
withLoadFactorAdjustment( getType( LinkedHashMap.class ) )
);
this.loggingVerbose = loggingVerbose;
}
public Type getTypeForLiteral(Class<?> type) {
return type.isPrimitive() ? getType( getPrimitiveType( type ), true )
: getType( type.getCanonicalName(), true );
}
public Type getType(Class<?> type) {
return type.isPrimitive() ? getType( getPrimitiveType( type ) ) : getType( type.getCanonicalName() );
}
public Type getType(String canonicalName) {
return getType( canonicalName, false );
}
private Type getType(String canonicalName, boolean isLiteral) {
TypeElement typeElement = elementUtils.getTypeElement( canonicalName );
if ( typeElement == null ) {
throw new AnnotationProcessingException(
"Couldn't find type " + canonicalName + ". Are you missing a dependency on your classpath?"
);
}
return getType( typeElement, isLiteral );
}
/**
* Determines if the type with the given full qualified name is part of the classpath
*
* @param canonicalName Name of the type to be checked for availability
* @return true if the type with the given full qualified name is part of the classpath.
*/
public boolean isTypeAvailable(String canonicalName) {
return null != elementUtils.getTypeElement( canonicalName );
}
public Type getWrappedType(Type type ) {
Type result = type;
if ( type.isPrimitive() ) {
PrimitiveType typeMirror = (PrimitiveType) type.getTypeMirror();
result = getType( typeUtils.boxedClass( typeMirror ) );
}
return result;
}
public Type getType(TypeElement typeElement) {
return getType( typeElement.asType(), false );
}
private Type getType(TypeElement typeElement, boolean isLiteral) {
return getType( typeElement.asType(), isLiteral );
}
public Type getType(TypeMirror mirror) {
return getType( mirror, false );
}
/**
* Return a type that is always going to be imported.
* This is useful when using it in {@code Mapper#imports}
* for types that should be used in expressions.
*
* @param mirror the type mirror for which we need a type
*
* @return the type
*/
public Type getAlwaysImportedType(TypeMirror mirror) {
return getType( mirror, false, true );
}
private Type getType(TypeMirror mirror, boolean isLiteral) {
return getType( mirror, isLiteral, null );
}
private Type getType(TypeMirror mirror, boolean isLiteral, Boolean alwaysImport) {
if ( !canBeProcessed( mirror ) ) {
throw new TypeHierarchyErroneousException( mirror );
}
ImplementationType implementationType = getImplementationType( mirror );
boolean isIterableType = typeUtils.isSubtypeErased( mirror, iterableType );
boolean isCollectionType = typeUtils.isSubtypeErased( mirror, collectionType );
boolean isMapType = typeUtils.isSubtypeErased( mirror, mapType );
boolean isStreamType = streamType != null && typeUtils.isSubtypeErased( mirror, streamType );
boolean isEnumType;
boolean isInterface;
String name;
String packageName;
String qualifiedName;
TypeElement typeElement;
Type componentType;
Boolean toBeImported = alwaysImport;
if ( mirror.getKind() == TypeKind.DECLARED ) {
DeclaredType declaredType = (DeclaredType) mirror;
isEnumType = declaredType.asElement().getKind() == ElementKind.ENUM;
isInterface = declaredType.asElement().getKind() == ElementKind.INTERFACE;
name = declaredType.asElement().getSimpleName().toString();
typeElement = (TypeElement) declaredType.asElement();
if ( typeElement != null ) {
packageName = elementUtils.getPackageOf( typeElement ).getQualifiedName().toString();
qualifiedName = typeElement.getQualifiedName().toString();
}
else {
packageName = null;
qualifiedName = name;
}
componentType = null;
}
else if ( mirror.getKind() == TypeKind.ARRAY ) {
TypeMirror componentTypeMirror = getComponentType( mirror );
StringBuilder builder = new StringBuilder("[]");
while ( componentTypeMirror.getKind() == TypeKind.ARRAY ) {
componentTypeMirror = getComponentType( componentTypeMirror );
builder.append( "[]" );
}
if ( componentTypeMirror.getKind() == TypeKind.DECLARED ) {
DeclaredType declaredType = (DeclaredType) componentTypeMirror;
TypeElement componentTypeElement = (TypeElement) declaredType.asElement();
String arraySuffix = builder.toString();
name = componentTypeElement.getSimpleName().toString() + arraySuffix;
packageName = elementUtils.getPackageOf( componentTypeElement ).getQualifiedName().toString();
qualifiedName = componentTypeElement.getQualifiedName().toString() + arraySuffix;
}
else if (componentTypeMirror.getKind().isPrimitive()) {
// When the component type is primitive and is annotated with ElementType.TYPE_USE then
// the typeMirror#toString returns (@CustomAnnotation :: byte) for the javac compiler
name = NativeTypes.getName( componentTypeMirror.getKind() ) + builder.toString();
packageName = null;
// for primitive types only name (e.g. byte, short..) required as qualified name
qualifiedName = name;
toBeImported = false;
}
else {
name = mirror.toString();
packageName = null;
qualifiedName = name;
toBeImported = false;
}
isEnumType = false;
isInterface = false;
typeElement = null;
componentType = getType( getComponentType( mirror ) );
}
else {
isEnumType = false;
isInterface = false;
// When the component type is primitive and is annotated with ElementType.TYPE_USE then
// the typeMirror#toString returns (@CustomAnnotation :: byte) for the javac compiler
if ( mirror.getKind().isPrimitive() ) {
name = NativeTypes.getName( mirror.getKind() );
}
// When the component type is type var and is annotated with ElementType.TYPE_USE then
// the typeMirror#toString returns (@CustomAnnotation T) for the errorprone javac compiler
else if ( mirror.getKind() == TypeKind.TYPEVAR ) {
name = ( (TypeVariable) mirror ).asElement().getSimpleName().toString();
}
else {
name = mirror.toString();
}
packageName = null;
qualifiedName = name;
typeElement = null;
componentType = null;
toBeImported = false;
}
return new Type(
typeUtils, elementUtils, this,
roundContext.getAnnotationProcessorContext().getAccessorNaming(),
mirror,
typeElement,
getTypeParameters( mirror, false ),
implementationType,
componentType,
packageName,
name,
qualifiedName,
isInterface,
isEnumType,
isIterableType,
isCollectionType,
isMapType,
isStreamType,
toBeImportedTypes,
notToBeImportedTypes,
toBeImported,
isLiteral,
loggingVerbose
);
}
/**
* Returns the Type that represents the declared Class type of the given type. For primitive types, the boxed class
* will be used. Examples:
* <ul>
* <li>If type represents {@code java.lang.Integer}, it will return the type that represents {@code Class<Integer>}.
* </li>
* <li>If type represents {@code int}, it will return the type that represents {@code Class<Integer>}.</li>
* </ul>
*
* @param type the type to return the declared
|
TypeFactory
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-globalpolicygenerator/src/main/java/org/apache/hadoop/yarn/server/globalpolicygenerator/GPGPolicyFacade.java
|
{
"start": 3007,
"end": 3296
}
|
class ____ a PolicyManager cache and a SubClusterPolicyConfiguration
* cache. The primary use for these caches are to serve reads, and to
* identify when the PolicyGenerator has actually changed the policy
* so unnecessary FederationStateStore policy writes can be avoided.
*/
public
|
uses
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/ExecNodeTypeIdResolver.java
|
{
"start": 1639,
"end": 2561
}
|
class ____ extends TypeIdResolverBase {
private JavaType superType;
@Override
public void init(JavaType baseType) {
superType = baseType;
}
@Override
public Id getMechanism() {
return Id.NAME;
}
@Override
public String idFromValue(Object obj) {
return idFromValueAndType(obj, obj.getClass());
}
@Override
public String idFromValueAndType(Object obj, Class<?> subType) {
return ((ExecNodeBase<?>) obj).getContextFromAnnotation().toString();
}
@Override
public JavaType typeFromId(DatabindContext context, String id) {
ExecNodeContext execNodeContext = new ExecNodeContext(id);
return context.constructSpecializedType(
superType,
ExecNodeMetadataUtil.retrieveExecNode(
execNodeContext.getName(), execNodeContext.getVersion()));
}
}
|
ExecNodeTypeIdResolver
|
java
|
quarkusio__quarkus
|
independent-projects/qute/core/src/main/java/io/quarkus/qute/CheckedTemplate.java
|
{
"start": 5109,
"end": 5210
}
|
class ____ {
*
* @CheckedTemplate(basePath = "items_v1")
* static
|
ItemResource
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webflux/src/test/java/org/springframework/boot/webflux/autoconfigure/WebFluxAutoConfigurationTests.java
|
{
"start": 48462,
"end": 48671
}
|
class ____ {
@Bean
HiddenHttpMethodFilter customHiddenHttpMethodFilter() {
return mock(HiddenHttpMethodFilter.class);
}
}
@Configuration(proxyBeanMethods = false)
static
|
CustomHiddenHttpMethodFilter
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/io/BlockingBinaryEncoder.java
|
{
"start": 1753,
"end": 2072
}
|
class ____ extends BufferedBinaryEncoder {
/*
* Implementation note:
*
* Blocking is complicated because of nesting. If a large, nested value
* overflows your buffer, you've got to do a lot of dancing around to output the
* blocks correctly.
*
* To handle this complexity, this
|
BlockingBinaryEncoder
|
java
|
quarkusio__quarkus
|
integration-tests/virtual-threads/resteasy-reactive-virtual-threads/src/main/java/io/quarkus/virtual/rr/ResourceOnClassImpl.java
|
{
"start": 237,
"end": 702
}
|
class ____ implements IResourceOnClass {
private final Counter counter;
ResourceOnClassImpl(Counter counter) {
this.counter = counter;
}
public String testGet() {
VirtualThreadsAssertions.assertEverything();
return "hello-" + counter.increment();
}
public String testPost(String body) {
VirtualThreadsAssertions.assertEverything();
return body + "-" + counter.increment();
}
}
|
ResourceOnClassImpl
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_3361/Issue3361Mapper.java
|
{
"start": 1706,
"end": 1959
}
|
class ____ {
private final String attribute;
public Source(String attribute) {
this.attribute = attribute;
}
public String getAttribute() {
return attribute;
}
}
public static
|
Source
|
java
|
elastic__elasticsearch
|
x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java
|
{
"start": 33716,
"end": 34688
}
|
class ____ a reference counted object that also tracks a flag for eviction of an instance.
* It is only inherited by CacheFileRegion to enable the use of a static var handle in on a non-static inner class.
* As long as the flag in {@link #evicted} is not set the instance's contents can be trusted. As soon as the flag is set, the contents
* of the instance can not be trusted. Thus, each read operation from a file region should be followed by a call to {@link #isEvicted()}
* to ensure that whatever bytes have been read are still valid.
* The reference count is used by write operations to a region on top of the eviction flag. Every write operation must first increment
* the reference count, then write to the region and then decrement it again. Only when the reference count reaches zero, will the
* region by moved to the {@link #freeRegions} list and becomes available for allocation again.
*/
private abstract static
|
models
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/read/ValueSourceReaderTypeConversionTests.java
|
{
"start": 91450,
"end": 91953
}
|
class ____ extends BlockToStringConverter {
BytesRef scratchPad = new BytesRef();
TestBytesRefToBytesRefConverter(DriverContext driverContext) {
super(driverContext);
}
@Override
BytesRef evalValue(Block container, int index) {
return convertByteRef(((BytesRefBlock) container).getBytesRef(index, scratchPad));
}
abstract BytesRef convertByteRef(BytesRef bytesRef);
}
private static
|
TestBytesRefToBytesRefConverter
|
java
|
netty__netty
|
codec-compression/src/main/java/io/netty/handler/codec/compression/CompressionException.java
|
{
"start": 816,
"end": 1441
}
|
class ____ extends EncoderException {
private static final long serialVersionUID = 5603413481274811897L;
/**
* Creates a new instance.
*/
public CompressionException() {
}
/**
* Creates a new instance.
*/
public CompressionException(String message, Throwable cause) {
super(message, cause);
}
/**
* Creates a new instance.
*/
public CompressionException(String message) {
super(message);
}
/**
* Creates a new instance.
*/
public CompressionException(Throwable cause) {
super(cause);
}
}
|
CompressionException
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/proxy/LoadANonExistingNotFoundBatchEntityTest.java
|
{
"start": 5190,
"end": 5562
}
|
class ____ {
@Id
private int id;
private String name;
@ManyToOne(fetch = FetchType.EAGER, cascade = CascadeType.ALL)
@JoinColumn(name = "employer_id", foreignKey = @ForeignKey(value = ConstraintMode.NO_CONSTRAINT))
@NotFound(action=NotFoundAction.IGNORE)
private Employer employer;
}
@Entity(name = "Employer")
@BatchSize(size = 10)
public static
|
Employee
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/script/ReindexMetadata.java
|
{
"start": 1197,
"end": 1328
}
|
class ____ appear as {@link Metadata}
* but handle the internal versioning scheme without scripts accessing the ctx map.
*/
public
|
to
|
java
|
quarkusio__quarkus
|
extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/applicationfieldaccess/RecordFieldAccessTest.java
|
{
"start": 6666,
"end": 7387
}
|
class ____ {
@Id
@GeneratedValue
public long id;
@Embedded
@AttributeOverride(name = "value", column = @Column(name = "value1"))
public MyRecordEmbeddableWithAdditionalGetters embeddedWithAdditionalGetters;
@Embedded
@AttributeOverride(name = "value", column = @Column(name = "value2"))
public MyRecordEmbeddableWithoutAdditionalGetters embeddedWithoutAdditionalGetters;
}
@Embeddable
public record MyRecordEmbeddableWithoutAdditionalGetters(Long value) {
}
@Embeddable
public record MyRecordEmbeddableWithAdditionalGetters(Long value) {
Long getValue() {
return value;
}
}
private
|
MyEntity
|
java
|
apache__flink
|
flink-connectors/flink-connector-base/src/test/java/org/apache/flink/connector/base/source/reader/SourceMetricsITCase.java
|
{
"start": 3005,
"end": 12699
}
|
class ____ {
private static final int DEFAULT_PARALLELISM = 4;
// since integration tests depend on wall clock time, use huge lags
private static final long EVENTTIME_LAG = Duration.ofDays(100).toMillis();
private static final long WATERMARK_LAG = Duration.ofDays(1).toMillis();
private static final long EVENTTIME_EPSILON = Duration.ofDays(20).toMillis();
// this basically is the time a build is allowed to be frozen before the test fails
private static final long WATERMARK_EPSILON = Duration.ofHours(6).toMillis();
@RegisterExtension SharedObjectsExtension sharedObjects = SharedObjectsExtension.create();
private static final InMemoryReporter reporter = InMemoryReporter.createWithRetainedMetrics();
@RegisterExtension
private static final MiniClusterExtension MINI_CLUSTER_RESOURCE =
new MiniClusterExtension(
new MiniClusterResourceConfiguration.Builder()
.setNumberTaskManagers(1)
.setNumberSlotsPerTaskManager(DEFAULT_PARALLELISM)
.setConfiguration(reporter.addToConfiguration(new Configuration()))
.build());
@Test
void testMetricsWithTimestamp() throws Exception {
long baseTime = System.currentTimeMillis() - EVENTTIME_LAG;
WatermarkStrategy<Integer> strategy =
WatermarkStrategy.forGenerator(
context -> new EagerBoundedOutOfOrdernessWatermarks())
.withTimestampAssigner(new LaggingTimestampAssigner(baseTime));
testMetrics(strategy, true);
}
@Test
void testMetricsWithoutTimestamp() throws Exception {
testMetrics(WatermarkStrategy.noWatermarks(), false);
}
private void testMetrics(WatermarkStrategy<Integer> strategy, boolean hasTimestamps)
throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
int numSplits = Math.max(1, env.getParallelism() - 2);
env.getConfig().setAutoWatermarkInterval(1L);
int numRecordsPerSplit = 10;
MockBaseSource source =
new MockBaseSource(numSplits, numRecordsPerSplit, Boundedness.BOUNDED);
// make sure all parallel instances have processed the same amount of records before
// validating metrics
SharedReference<CyclicBarrier> beforeBarrier =
sharedObjects.add(new CyclicBarrier(numSplits + 1));
SharedReference<CyclicBarrier> afterBarrier =
sharedObjects.add(new CyclicBarrier(numSplits + 1));
int stopAtRecord1 = 3;
int stopAtRecord2 = numRecordsPerSplit - 1;
DataStream<Integer> stream =
env.fromSource(source, strategy, "MetricTestingSource")
.addMetricVariable("foo", "42")
.map(
i -> {
if (i % numRecordsPerSplit == stopAtRecord1
|| i % numRecordsPerSplit == stopAtRecord2) {
beforeBarrier.get().await();
afterBarrier.get().await();
}
return i;
});
stream.sinkTo(new DiscardingSink<>());
JobClient jobClient = env.executeAsync();
final JobID jobId = jobClient.getJobID();
beforeBarrier.get().await();
assertSourceMetrics(
jobId,
reporter,
stopAtRecord1 + 1,
numRecordsPerSplit,
env.getParallelism(),
numSplits,
hasTimestamps);
afterBarrier.get().await();
beforeBarrier.get().await();
assertSourceMetrics(
jobId,
reporter,
stopAtRecord2 + 1,
numRecordsPerSplit,
env.getParallelism(),
numSplits,
hasTimestamps);
afterBarrier.get().await();
jobClient.getJobExecutionResult().get();
}
private void assertSourceMetrics(
JobID jobId,
InMemoryReporter reporter,
long processedRecordsPerSubtask,
long numTotalPerSubtask,
int parallelism,
int numSplits,
boolean hasTimestamps) {
List<OperatorMetricGroup> groups =
reporter.findOperatorMetricGroups(jobId, "MetricTestingSource");
assertThat(groups).hasSize(parallelism);
int subtaskWithMetrics = 0;
for (OperatorMetricGroup group : groups) {
assertThat(group.getAllVariables()).contains(entry("foo", "42"));
Map<String, Metric> metrics = reporter.getMetricsByGroup(group);
// there are only 2 splits assigned; so two groups will not update metrics
if (group.getIOMetricGroup().getNumRecordsInCounter().getCount() == 0) {
// assert that optional metrics are not initialized when no split assigned
assertThatGauge(metrics.get(MetricNames.CURRENT_EMIT_EVENT_TIME_LAG))
.isEqualTo(InternalSourceReaderMetricGroup.UNDEFINED);
assertThat(metrics.get(MetricNames.WATERMARK_LAG)).isNull();
continue;
}
subtaskWithMetrics++;
// I/O metrics
assertThatCounter(group.getIOMetricGroup().getNumRecordsInCounter())
.isEqualTo(processedRecordsPerSubtask);
assertThatCounter(group.getIOMetricGroup().getNumBytesInCounter())
.isEqualTo(processedRecordsPerSubtask * MockRecordEmitter.RECORD_SIZE_IN_BYTES);
assertThatCounter(group.getIOMetricGroup().getNumRecordsOutCounter())
.isEqualTo(processedRecordsPerSubtask);
assertThatCounter(group.getIOMetricGroup().getNumBytesOutCounter())
.isEqualTo(processedRecordsPerSubtask * MockRecordEmitter.RECORD_SIZE_IN_BYTES);
// MockRecordEmitter is just incrementing errors every even record
assertThatCounter(metrics.get(MetricNames.NUM_RECORDS_IN_ERRORS))
.isEqualTo(processedRecordsPerSubtask / 2);
if (hasTimestamps) {
// Timestamp assigner subtracting EVENTTIME_LAG from wall clock
assertThatGauge(metrics.get(MetricNames.CURRENT_EMIT_EVENT_TIME_LAG))
.isCloseTo(EVENTTIME_LAG, EVENTTIME_EPSILON);
// Watermark is derived from timestamp, so it has to be in the same order of
// magnitude
assertThatGauge(metrics.get(MetricNames.WATERMARK_LAG))
.isCloseTo(EVENTTIME_LAG, EVENTTIME_EPSILON);
// Calculate the additional watermark lag (on top of event time lag)
Long watermarkLag =
((Gauge<Long>) metrics.get(MetricNames.WATERMARK_LAG)).getValue()
- ((Gauge<Long>)
metrics.get(
MetricNames.CURRENT_EMIT_EVENT_TIME_LAG))
.getValue();
// That should correspond to the out-of-order boundedness
assertThat(watermarkLag)
.isGreaterThan(WATERMARK_LAG - WATERMARK_EPSILON)
.isLessThan(WATERMARK_LAG + WATERMARK_EPSILON);
} else {
// assert that optional metrics are not initialized when no timestamp assigned
assertThatGauge(metrics.get(MetricNames.CURRENT_EMIT_EVENT_TIME_LAG))
.isEqualTo(InternalSourceReaderMetricGroup.UNDEFINED);
assertThat(metrics.get(MetricNames.WATERMARK_LAG)).isNull();
}
long pendingRecords = numTotalPerSubtask - processedRecordsPerSubtask;
assertThatGauge(metrics.get(MetricNames.PENDING_RECORDS)).isEqualTo(pendingRecords);
assertThatGauge(metrics.get(MetricNames.PENDING_BYTES))
.isEqualTo(pendingRecords * MockRecordEmitter.RECORD_SIZE_IN_BYTES);
// test is keeping source idle time metric busy with the barrier
assertThatGauge(metrics.get(MetricNames.SOURCE_IDLE_TIME)).isEqualTo(0L);
}
assertThat(subtaskWithMetrics).isEqualTo(numSplits);
// Test operator I/O metrics are reused by task metrics
List<TaskMetricGroup> taskMetricGroups =
reporter.findTaskMetricGroups(jobId, "MetricTestingSource");
assertThat(taskMetricGroups).hasSize(parallelism);
int subtaskWithTaskMetrics = 0;
for (TaskMetricGroup taskMetricGroup : taskMetricGroups) {
// there are only 2 splits assigned; so two groups will not update metrics
if (taskMetricGroup.getIOMetricGroup().getNumRecordsInCounter().getCount() == 0) {
continue;
}
subtaskWithTaskMetrics++;
assertThatCounter(taskMetricGroup.getIOMetricGroup().getNumRecordsInCounter())
.isEqualTo(processedRecordsPerSubtask);
assertThatCounter(taskMetricGroup.getIOMetricGroup().getNumBytesInCounter())
.isEqualTo(processedRecordsPerSubtask * MockRecordEmitter.RECORD_SIZE_IN_BYTES);
}
assertThat(subtaskWithTaskMetrics).isEqualTo(numSplits);
}
private static
|
SourceMetricsITCase
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/codec/TrackingPostingsInMemoryBytesCodec.java
|
{
"start": 3557,
"end": 4889
}
|
class ____ extends FilterLeafReader.FilterFields {
final FieldInfos fieldInfos;
final IntHashSet seenFields;
final long[] totalBytes;
TrackingLengthFields(Fields in, FieldInfos fieldInfos, IntHashSet seenFields, long[] totalBytes) {
super(in);
this.seenFields = seenFields;
this.fieldInfos = fieldInfos;
this.totalBytes = totalBytes;
}
@Override
public Terms terms(String field) throws IOException {
Terms terms = super.terms(field);
if (terms == null) {
return null;
}
int fieldNum = fieldInfos.fieldInfo(field).number;
if (seenFields.add(fieldNum)) {
return new TrackingLengthTerms(terms, bytes -> totalBytes[0] += bytes);
} else {
// As far as I know only when bloom filter for _id filter gets written this method gets invoked twice for the same field.
// So maybe we can get rid of the seenFields here? And just keep track of whether _id field has been seen? However, this
// is fragile and could make us vulnerable to tricky bugs in the future if this is no longer the case.
return terms;
}
}
}
static final
|
TrackingLengthFields
|
java
|
hibernate__hibernate-orm
|
hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/function/array/GaussDBArrayConcatElementFunction.java
|
{
"start": 1097,
"end": 3596
}
|
class ____ extends ArrayConcatElementFunction {
public GaussDBArrayConcatElementFunction(boolean prepend) {
super( "", "||", "", prepend );
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> walker) {
final Expression firstArgument = (Expression) sqlAstArguments.get( 0 );
final Expression secondArgument = (Expression) sqlAstArguments.get( 1 );
final Expression arrayArgument;
final Expression elementArgument;
if ( prepend ) {
elementArgument = firstArgument;
arrayArgument = secondArgument;
}
else {
arrayArgument = firstArgument;
elementArgument = secondArgument;
}
final String elementCastType;
if ( needsElementCasting( elementArgument ) ) {
final JdbcMappingContainer arrayType = arrayArgument.getExpressionType();
final Size size = arrayType instanceof SqlTypedMapping ? ( (SqlTypedMapping) arrayType ).toSize() : null;
elementCastType = DdlTypeHelper.getCastTypeName(
( (BasicPluralType<?, ?>) returnType ).getElementType(),
size,
walker.getSessionFactory().getTypeConfiguration()
);
}
else {
elementCastType = null;
}
sqlAppender.append( "case when " );
walker.render( arrayArgument, SqlAstNodeRenderingMode.DEFAULT );
sqlAppender.append( " is not null then " );
if ( prepend && elementCastType != null) {
sqlAppender.append( "cast(" );
walker.render( firstArgument, SqlAstNodeRenderingMode.DEFAULT );
sqlAppender.append( " as " );
sqlAppender.append( elementCastType );
sqlAppender.append( ')' );
}
else {
walker.render( firstArgument, SqlAstNodeRenderingMode.DEFAULT );
}
sqlAppender.append( "||" );
if ( !prepend && elementCastType != null) {
sqlAppender.append( "cast(" );
walker.render( secondArgument, SqlAstNodeRenderingMode.DEFAULT );
sqlAppender.append( " as " );
sqlAppender.append( elementCastType );
sqlAppender.append( ')' );
}
else {
walker.render( secondArgument, SqlAstNodeRenderingMode.DEFAULT );
}
sqlAppender.append( " end" );
}
private static boolean needsElementCasting(Expression elementExpression) {
// GaussDB needs casting of null and string literal expressions
return elementExpression instanceof Literal && (
elementExpression.getExpressionType().getSingleJdbcMapping().getJdbcType().isString()
|| ( (Literal) elementExpression ).getLiteralValue() == null
);
}
}
|
GaussDBArrayConcatElementFunction
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/requests/FetchSnapshotResponse.java
|
{
"start": 1192,
"end": 3736
}
|
class ____ extends AbstractResponse {
private final FetchSnapshotResponseData data;
public FetchSnapshotResponse(FetchSnapshotResponseData data) {
super(ApiKeys.FETCH_SNAPSHOT);
this.data = data;
}
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errors = new EnumMap<>(Errors.class);
Errors topLevelError = Errors.forCode(data.errorCode());
if (topLevelError != Errors.NONE) {
errors.put(topLevelError, 1);
}
for (FetchSnapshotResponseData.TopicSnapshot topicResponse : data.topics()) {
for (FetchSnapshotResponseData.PartitionSnapshot partitionResponse : topicResponse.partitions()) {
errors.compute(Errors.forCode(partitionResponse.errorCode()),
(error, count) -> count == null ? 1 : count + 1);
}
}
return errors;
}
@Override
public int throttleTimeMs() {
return data.throttleTimeMs();
}
@Override
public void maybeSetThrottleTimeMs(int throttleTimeMs) {
data.setThrottleTimeMs(throttleTimeMs);
}
@Override
public FetchSnapshotResponseData data() {
return data;
}
/**
* Creates a FetchSnapshotResponseData with a top level error.
*
* @param error the top level error
* @return the created fetch snapshot response data
*/
public static FetchSnapshotResponseData withTopLevelError(Errors error) {
return new FetchSnapshotResponseData().setErrorCode(error.code());
}
/**
* Finds the PartitionSnapshot for a given topic partition.
*
* @param data the fetch snapshot response data
* @param topicPartition the topic partition to find
* @return the response partition snapshot if found, otherwise an empty Optional
*/
public static Optional<FetchSnapshotResponseData.PartitionSnapshot> forTopicPartition(
FetchSnapshotResponseData data,
TopicPartition topicPartition
) {
return data
.topics()
.stream()
.filter(topic -> topic.name().equals(topicPartition.topic()))
.flatMap(topic -> topic.partitions().stream())
.filter(partition -> partition.index() == topicPartition.partition())
.findAny();
}
public static FetchSnapshotResponse parse(Readable readable, short version) {
return new FetchSnapshotResponse(new FetchSnapshotResponseData(readable, version));
}
}
|
FetchSnapshotResponse
|
java
|
apache__kafka
|
server-common/src/main/java/org/apache/kafka/server/common/serialization/BytesApiMessageSerde.java
|
{
"start": 1184,
"end": 1517
}
|
class ____ conversion of {@code ApiMessageAndVersion} to bytes and vice versa. This can be used as serialization protocol for any
* metadata records derived of {@code ApiMessage}s. It internally uses {@link AbstractApiMessageSerde} for serialization/deserialization
* mechanism.
* <br><br>
* Implementors need to extend this
|
provides
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/java/typeutils/TypeExtractorTest.java
|
{
"start": 89321,
"end": 90134
}
|
class ____ implements MapFunction, ResultTypeQueryable {
private static final long serialVersionUID = 1L;
@Override
public TypeInformation getProducedType() {
return BasicTypeInfo.STRING_TYPE_INFO;
}
@Override
public Object map(Object value) throws Exception {
return null;
}
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
void testInputMismatchWithRawFuntion() {
MapFunction<?, ?> function = new MapWithResultTypeQueryable();
TypeInformation<?> ti =
TypeExtractor.getMapReturnTypes(
(MapFunction) function, BasicTypeInfo.INT_TYPE_INFO);
assertThat(ti).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
}
public static
|
MapWithResultTypeQueryable
|
java
|
apache__flink
|
flink-clients/src/main/java/org/apache/flink/client/program/PackagedProgramUtils.java
|
{
"start": 1967,
"end": 11179
}
|
enum ____ {
;
private static final String SQL_DRIVER_CLASS_NAME =
"org.apache.flink.table.runtime.application.SqlDriver";
private static final String PYTHON_GATEWAY_CLASS_NAME =
"org.apache.flink.client.python.PythonGatewayServer";
private static final String PYTHON_DRIVER_CLASS_NAME =
"org.apache.flink.client.python.PythonDriver";
/**
* Creates a {@link JobGraph} with a specified {@link JobID} from the given {@link
* PackagedProgram}.
*
* @param packagedProgram to extract the JobGraph from
* @param configuration to use for the optimizer and job graph generator
* @param defaultParallelism for the JobGraph
* @param jobID the pre-generated job id
* @return JobGraph extracted from the PackagedProgram
* @throws ProgramInvocationException if the JobGraph generation failed
*/
public static JobGraph createJobGraph(
PackagedProgram packagedProgram,
Configuration configuration,
int defaultParallelism,
@Nullable JobID jobID,
boolean suppressOutput)
throws ProgramInvocationException {
final Pipeline pipeline =
getPipelineFromProgram(
packagedProgram, configuration, defaultParallelism, suppressOutput);
final JobGraph jobGraph =
FlinkPipelineTranslationUtil.getJobGraphUnderUserClassLoader(
packagedProgram.getUserCodeClassLoader(),
pipeline,
configuration,
defaultParallelism);
if (jobID != null) {
jobGraph.setJobID(jobID);
}
jobGraph.addJars(packagedProgram.getJobJarAndDependencies());
jobGraph.setClasspaths(packagedProgram.getClasspaths());
jobGraph.setSavepointRestoreSettings(packagedProgram.getSavepointSettings());
return jobGraph;
}
/**
* Creates a {@link JobGraph} with a random {@link JobID} from the given {@link
* PackagedProgram}.
*
* @param packagedProgram to extract the JobGraph from
* @param configuration to use for the optimizer and job graph generator
* @param defaultParallelism for the JobGraph
* @param suppressOutput Whether to suppress stdout/stderr during interactive JobGraph creation.
* @return JobGraph extracted from the PackagedProgram
* @throws ProgramInvocationException if the JobGraph generation failed
*/
public static JobGraph createJobGraph(
PackagedProgram packagedProgram,
Configuration configuration,
int defaultParallelism,
boolean suppressOutput)
throws ProgramInvocationException {
return createJobGraph(
packagedProgram, configuration, defaultParallelism, null, suppressOutput);
}
public static Pipeline getPipelineFromProgram(
PackagedProgram program,
Configuration configuration,
int parallelism,
boolean suppressOutput)
throws ProgramInvocationException {
final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
Thread.currentThread().setContextClassLoader(program.getUserCodeClassLoader());
final PrintStream originalOut = System.out;
final PrintStream originalErr = System.err;
final ByteArrayOutputStream stdOutBuffer;
final ByteArrayOutputStream stdErrBuffer;
if (suppressOutput) {
// temporarily write STDERR and STDOUT to a byte array.
stdOutBuffer = new ByteArrayOutputStream();
System.setOut(new PrintStream(stdOutBuffer));
stdErrBuffer = new ByteArrayOutputStream();
System.setErr(new PrintStream(stdErrBuffer));
} else {
stdOutBuffer = null;
stdErrBuffer = null;
}
// temporary hack to support the optimizer plan preview
StreamPlanEnvironment senv =
new StreamPlanEnvironment(
configuration, program.getUserCodeClassLoader(), parallelism);
senv.setAsContext();
try {
program.invokeInteractiveModeForExecution();
} catch (Throwable t) {
if (senv.getPipeline() != null) {
return senv.getPipeline();
}
if (t instanceof ProgramInvocationException) {
throw t;
}
throw generateException(
program, "The program caused an error: ", t, stdOutBuffer, stdErrBuffer);
} finally {
senv.unsetAsContext();
if (suppressOutput) {
System.setOut(originalOut);
System.setErr(originalErr);
}
Thread.currentThread().setContextClassLoader(contextClassLoader);
}
throw generateException(
program,
"The program plan could not be fetched - the program aborted pre-maturely. "
+ "The root cause may be that the main method doesn't call `env.execute()` or `env.executeAsync()`.",
null,
stdOutBuffer,
stdErrBuffer);
}
public static Boolean isPython(String entryPointClassName) {
return (entryPointClassName != null)
&& (entryPointClassName.equals(PYTHON_DRIVER_CLASS_NAME)
|| entryPointClassName.equals(PYTHON_GATEWAY_CLASS_NAME));
}
public static boolean isPython(String[] programArguments) {
return CollectionUtils.containsAny(
Arrays.asList(programArguments),
Arrays.asList("-py", "-pym", "--python", "--pyModule"));
}
public static URL getPythonJar() {
return getOptJar("flink-python");
}
public static String getPythonDriverClassName() {
return PYTHON_DRIVER_CLASS_NAME;
}
public static boolean isSqlApplication(String entryPointClassName) {
return (entryPointClassName != null) && (entryPointClassName.equals(SQL_DRIVER_CLASS_NAME));
}
public static URL getSqlGatewayJar() {
return getOptJar("flink-sql-gateway");
}
public static URI resolveURI(String path) throws URISyntaxException {
final URI uri = new URI(path);
if (uri.getScheme() != null) {
return uri;
}
return new File(path).getAbsoluteFile().toURI();
}
public static boolean usingSystemClassPath(Configuration configuration) {
return configuration.get(PipelineOptions.JARS) == null;
}
private static ProgramInvocationException generateException(
PackagedProgram program,
String msg,
@Nullable Throwable cause,
@Nullable ByteArrayOutputStream stdoutBuffer,
@Nullable ByteArrayOutputStream stderrBuffer) {
checkState(
(stdoutBuffer != null) == (stderrBuffer != null),
"Stderr/Stdout should either both be set or both be null.");
final String stdout = (stdoutBuffer != null) ? stdoutBuffer.toString() : "";
final String stderr = (stderrBuffer != null) ? stderrBuffer.toString() : "";
return new ProgramInvocationException(
String.format(
"%s\n\nClasspath: %s\n\nSystem.out: %s\n\nSystem.err: %s",
msg,
program.getJobJarAndDependencies(),
stdout.length() == 0 ? "(none)" : stdout,
stderr.length() == 0 ? "(none)" : stderr),
cause);
}
private static URL getOptJar(String jarName) {
String flinkOptPath = System.getenv(ConfigConstants.ENV_FLINK_OPT_DIR);
final List<Path> optJarPath = new ArrayList<>();
try {
Files.walkFileTree(
FileSystems.getDefault().getPath(flinkOptPath),
new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
throws IOException {
FileVisitResult result = super.visitFile(file, attrs);
if (file.getFileName().toString().startsWith(jarName)) {
optJarPath.add(file);
}
return result;
}
});
} catch (IOException e) {
throw new RuntimeException(
"Exception encountered during finding the flink-python jar. This should not happen.",
e);
}
if (optJarPath.size() != 1) {
throw new RuntimeException(
String.format("Found " + optJarPath.size() + " %s jar.", jarName));
}
try {
return optJarPath.get(0).toUri().toURL();
} catch (MalformedURLException e) {
throw new RuntimeException("URL is invalid. This should not happen.", e);
}
}
}
|
PackagedProgramUtils
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/rowid/RowIdUpdateAndDeleteTest.java
|
{
"start": 6675,
"end": 7114
}
|
class ____ {
@Id
@Column( name = "primary_key" )
public Long primaryKey;
public String status;
public SimpleEntity() {
}
public SimpleEntity(Long primaryKey, String status) {
this.primaryKey = primaryKey;
this.status = status;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
}
@Entity( name = "ParentEntity" )
public static
|
SimpleEntity
|
java
|
apache__maven
|
its/core-it-suite/src/test/resources/mng-5640-lifecycleParticipant-afterSession/buildfailure-depmissing/src/test/java/org/apache/maven/its/mng5640/FailingTest.java
|
{
"start": 918,
"end": 1060
}
|
class ____ extends TestCase {
/**
* Rigourous Test :-)
*/
public void testApp() {
assertTrue(false);
}
}
|
FailingTest
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/cli/MultiCommandTests.java
|
{
"start": 8371,
"end": 9635
}
|
class ____ extends Command {
ErrorThrowingSubCommand() {
super("error throwing");
}
@Override
protected void execute(Terminal terminal, OptionSet options, ProcessInfo processInfo) throws Exception {
throw new UserException(1, "Dummy error");
}
}
public void testErrorDisplayedWithDefault() throws Exception {
multiCommand.subcommands.put("throw", new ErrorThrowingSubCommand());
executeMain("throw", "--silent");
assertThat(terminal.getOutput(), is(emptyString()));
assertThat(terminal.getErrorOutput().lines().collect(Collectors.joining()), containsString("ERROR: Dummy error"));
}
public void testNullErrorMessageSuppressesErrorOutput() throws Exception {
multiCommand.subcommands.put("throw", new ErrorThrowingSubCommand() {
@Override
protected void execute(Terminal terminal, OptionSet options, ProcessInfo processInfo) throws Exception {
throw new UserException(1, null);
}
});
executeMain("throw", "--silent");
assertThat(terminal.getOutput(), is(emptyString()));
assertThat(terminal.getErrorOutput(), is(emptyString()));
}
}
|
ErrorThrowingSubCommand
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/HandlerAdapter.java
|
{
"start": 1767,
"end": 2170
}
|
interface ____ be able to specify a sorting
* order (and thus a priority) for getting applied by the {@code DispatcherServlet}.
* Non-Ordered instances get treated as the lowest priority.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @see org.springframework.web.servlet.mvc.SimpleControllerHandlerAdapter
* @see org.springframework.web.servlet.handler.SimpleServletHandlerAdapter
*/
public
|
to
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/settings/Setting.java
|
{
"start": 70727,
"end": 70792
}
|
enum ____ must be uppercase.
*
* @param clazz the
|
constants
|
java
|
quarkusio__quarkus
|
test-framework/keycloak-server/src/main/java/io/quarkus/test/keycloak/client/KeycloakTestClient.java
|
{
"start": 769,
"end": 21089
}
|
class ____ implements DevServicesContext.ContextAware {
private final static String CLIENT_AUTH_SERVER_URL_PROP = "client.quarkus.oidc.auth-server-url";
private final static String AUTH_SERVER_URL_PROP = "quarkus.oidc.auth-server-url";
private final static String CLIENT_ID_PROP = "quarkus.oidc.client-id";
private final static String CLIENT_SECRET_PROP = "quarkus.oidc.credentials.secret";
private DevServicesContext testContext;
private final String authServerUrl;
private final Tls tls;
public KeycloakTestClient() {
this(null, null);
}
public KeycloakTestClient(Tls tls) {
this(null, tls);
}
public KeycloakTestClient(String authServerUrl) {
this(authServerUrl, null);
}
public KeycloakTestClient(String authServerUrl, Tls tls) {
this.authServerUrl = authServerUrl;
this.tls = tls;
}
/**
* Get an access token from the default tenant realm using a client_credentials grant.
* Realm name is set to `quarkus` unless it has been configured with the `quarkus.keycloak.devservices.realm-name` property.
* Client id is set to `quarkus-app` unless it has been configured with the `quarkus.oidc.client-id` property.
* Client secret is set to `secret` unless it has been configured with the `quarkus.oidc.credentials.secret` property.
*/
public String getClientAccessToken() {
return getClientAccessToken(getClientId());
}
/**
* Get an access token from the default tenant realm using a client_credentials grant with a
* the provided client id.
* Realm name is set to `quarkus` unless it has been configured with the `quarkus.keycloak.devservices.realm-name` property.
* Client secret will be to `secret` unless it has been configured with the `quarkus.oidc.credentials.secret` property.
*/
public String getClientAccessToken(String clientId) {
return getClientAccessToken(clientId, getClientSecret());
}
/**
* Get an access token from the default tenant realm using a client_credentials grant with a
* the provided client id and secret.
* Realm name is set to `quarkus` unless it has been configured with the `quarkus.keycloak.devservices.realm-name` property.
*/
public String getClientAccessToken(String clientId, String clientSecret) {
return getClientAccessToken(clientId, clientSecret, null);
}
/**
* Get an access token from the default tenant realm using a client_credentials grant with a
* the provided client id and secret, and scopes.
* Realm name is set to `quarkus` unless it has been configured with the `quarkus.keycloak.devservices.realm-name` property.
*/
public String getClientAccessToken(String clientId, String clientSecret, List<String> scopes) {
return getClientAccessTokenInternal(clientId, clientSecret, scopes, getAuthServerUrl());
}
/**
* Get an access token from the provided realm using a client_credentials grant.
* Client id is set to `quarkus-app` unless it has been configured with the `quarkus.oidc.client-id` property.
* Client secret is set to `secret` unless it has been configured with the `quarkus.oidc.credentials.secret` property.
*/
public String getRealmClientAccessToken(String realm) {
return getRealmClientAccessToken(realm, getClientId());
}
/**
* Get an access token from the provided realm using a client_credentials grant with a
* the provided client id.
* Client secret will be to `secret` unless it has been configured with the `quarkus.oidc.credentials.secret` property.
*/
public String getRealmClientAccessToken(String realm, String clientId) {
return getRealmClientAccessToken(realm, clientId, getClientSecret());
}
/**
* Get an access token from the provided realm using a client_credentials grant with a
* the provided client id and secret.
*/
public String getRealmClientAccessToken(String realm, String clientId, String clientSecret) {
return getRealmClientAccessToken(realm, clientId, clientSecret, null);
}
/**
* Get an access token from the provided realm using a client_credentials grant with a
* the provided client id and secret, and scopes.
*/
public String getRealmClientAccessToken(String realm, String clientId, String clientSecret, List<String> scopes) {
return getClientAccessTokenInternal(clientId, clientSecret, scopes, getAuthServerBaseUrl() + "/realms/" + realm);
}
/**
* Get an access token from the default tenant realm using a password grant with a provided user name.
* Realm name is set to `quarkus` unless it has been configured with the `quarkus.keycloak.devservices.realm-name` property.
* User secret will be the same as the user name.
* Client id will be set to `quarkus-app` unless it has been configured with the `quarkus.oidc.client-id` property.
* Client secret will be to `secret` unless it has been configured with the `quarkus.oidc.credentials.secret` property.
*/
public String getAccessToken(String userName) {
return getAccessToken(userName, getClientId());
}
/**
* Get an access token from the default tenant realm using a password grant with the provided user name and client id.
* Realm name is set to `quarkus` unless it has been configured with the `quarkus.keycloak.devservices.realm-name` property.
* User secret will be the same as the user name.
* Client secret will be to `secret` unless it has been configured with the `quarkus.oidc.credentials.secret` property.
*/
public String getAccessToken(String userName, String clientId) {
return getAccessToken(userName, userName, clientId);
}
/**
* Get an access token from the default tenant realm using a password grant with the provided user name, user secret and
* client id.
* Realm name is set to `quarkus` unless it has been configured with the `quarkus.keycloak.devservices.realm-name` property.
* Client secret will be set to `secret` unless it has been configured with the `quarkus.oidc.credentials.secret` propertys.
*/
public String getAccessToken(String userName, String userSecret, String clientId) {
return getAccessToken(userName, userSecret, clientId, getClientSecret());
}
/**
* Get an access token from the default tenant realm using a password grant with the provided user name, user secret, client
* id and secret.
* Realm name is set to `quarkus` unless it has been configured with the `quarkus.keycloak.devservices.realm-name` property.
*/
public String getAccessToken(String userName, String userSecret, String clientId, String clientSecret) {
return getAccessToken(userName, userSecret, clientId, clientSecret, null);
}
/**
* Get an access token from the default tenant realm using a password grant with the provided user name, user secret, client
* id and secret, and scopes.
*/
public String getAccessToken(String userName, String userSecret, String clientId, String clientSecret,
List<String> scopes) {
return getAccessTokenInternal(userName, userSecret, clientId, clientSecret, scopes, getAuthServerUrl());
}
/**
* Get a refresh token from the default tenant realm using a password grant with a provided user name.
* Realm name is set to `quarkus` unless it has been configured with the `quarkus.keycloak.devservices.realm-name` property.
* User secret will be the same as the user name.
* Client id will be set to `quarkus-app` unless it has been configured with the `quarkus.oidc.client-id` property.
* Client secret will be to `secret` unless it has been configured with the `quarkus.oidc.credentials.secret` property.
*/
public String getRefreshToken(String userName) {
return getRefreshToken(userName, getClientId());
}
/**
* Get a refresh token from the default tenant realm using a password grant with the provided user name and client id.
* Realm name is set to `quarkus` unless it has been configured with the `quarkus.keycloak.devservices.realm-name` property.
* User secret will be the same as the user name.
* Client secret will be to `secret` unless it has been configured with the `quarkus.oidc.credentials.secret` property.
*/
public String getRefreshToken(String userName, String clientId) {
return getRefreshToken(userName, userName, clientId);
}
/**
* Get a refresh token from the default tenant realm using a password grant with the provided user name, user secret and
* client id.
* Realm name is set to `quarkus` unless it has been configured with the `quarkus.keycloak.devservices.realm-name` property.
* Client secret will be set to `secret` unless it has been configured with the `quarkus.oidc.credentials.secret` propertys.
*/
public String getRefreshToken(String userName, String userSecret, String clientId) {
return getRefreshToken(userName, userSecret, clientId, getClientSecret());
}
/**
* Get a refresh token from the default tenant realm using a password grant with the provided user name, user secret, client
* id and secret.
* Realm name is set to `quarkus` unless it has been configured with the `quarkus.keycloak.devservices.realm-name` property.
*/
public String getRefreshToken(String userName, String userSecret, String clientId, String clientSecret) {
return getRefreshToken(userName, userSecret, clientId, clientSecret, null);
}
/**
* Get a refresh token from the default tenant realm using a password grant with the provided user name, user secret, client
* id and secret, and scopes.
*/
public String getRefreshToken(String userName, String userSecret, String clientId, String clientSecret,
List<String> scopes) {
return getRefreshTokenInternal(userName, userSecret, clientId, clientSecret, scopes, getAuthServerUrl());
}
/**
* Get a realm access token using a password grant with a provided user name.
* User secret will be the same as the user name.
* Client id will be set to `quarkus-app` unless it has been configured with the `quarkus.oidc.client-id` property.
* Client secret will be to `secret` unless it has been configured with the `quarkus.oidc.credentials.secret` property.
*/
public String getRealmAccessToken(String realm, String userName) {
return getRealmAccessToken(realm, userName, getClientId());
}
/**
* Get a realm access token using a password grant with the provided user name and client id.
* User secret will be the same as the user name.
* Client secret will be to `secret` unless it has been configured with the `quarkus.oidc.credentials.secret` property.
*/
public String getRealmAccessToken(String realm, String userName, String clientId) {
return getRealmAccessToken(realm, userName, userName, clientId);
}
/**
* Get a realm access token using a password grant with the provided user name, user secret and client id.
* Client secret will be to `secret` unless it has been configured with the `quarkus.oidc.credentials.secret` property.
*/
public String getRealmAccessToken(String realm, String userName, String userSecret, String clientId) {
return getRealmAccessToken(realm, userName, userSecret, clientId, getClientSecret());
}
/**
* Get a realm access token using a password grant with the provided user name, user secret, client id and secret.
* Set the client secret to an empty string or null if it is not required.
*/
public String getRealmAccessToken(String realm, String userName, String userSecret, String clientId, String clientSecret) {
return getRealmAccessToken(realm, userName, userSecret, clientId, clientSecret, null);
}
/**
* Get a realm access token using a password grant with the provided user name, user secret, client id and secret, and
* scopes.
* Set the client secret to an empty string or null if it is not required.
*/
public String getRealmAccessToken(String realm, String userName, String userSecret, String clientId, String clientSecret,
List<String> scopes) {
return getAccessTokenInternal(userName, userSecret, clientId, clientSecret, scopes,
getAuthServerBaseUrl() + "/realms/" + realm);
}
private String getAccessTokenInternal(String userName, String userSecret, String clientId, String clientSecret,
List<String> scopes, String authServerUrl) {
return getAccessTokenResponse(userName, userSecret, clientId, clientSecret, scopes, authServerUrl).getToken();
}
private String getRefreshTokenInternal(String userName, String userSecret, String clientId, String clientSecret,
List<String> scopes, String authServerUrl) {
return getAccessTokenResponse(userName, userSecret, clientId, clientSecret, scopes, authServerUrl).getRefreshToken();
}
private AccessTokenResponse getAccessTokenResponse(String userName, String userSecret, String clientId, String clientSecret,
List<String> scopes, String authServerUrl) {
RequestSpecification requestSpec = getSpec().param("grant_type", "password")
.param("username", userName)
.param("password", userSecret)
.param("client_id", clientId);
if (clientSecret != null && !clientSecret.isBlank()) {
requestSpec = requestSpec.param("client_secret", clientSecret);
}
if (scopes != null && !scopes.isEmpty()) {
requestSpec = requestSpec.param("scope", urlEncode(String.join(" ", scopes)));
}
return requestSpec.when().post(authServerUrl + "/protocol/openid-connect/token")
.as(AccessTokenResponse.class);
}
private String getClientAccessTokenInternal(String clientId, String clientSecret,
List<String> scopes, String authServerUrl) {
RequestSpecification requestSpec = getSpec().param("grant_type", "client_credentials")
.param("client_id", clientId);
if (clientSecret != null && !clientSecret.isBlank()) {
requestSpec = requestSpec.param("client_secret", clientSecret);
}
if (scopes != null && !scopes.isEmpty()) {
requestSpec = requestSpec.param("scope", urlEncode(String.join(" ", scopes)));
}
return requestSpec.when().post(authServerUrl + "/protocol/openid-connect/token")
.as(AccessTokenResponse.class).getToken();
}
private String getClientId() {
return getPropertyValue(CLIENT_ID_PROP, "quarkus-app");
}
private String getClientSecret() {
return getPropertyValue(CLIENT_SECRET_PROP, "secret");
}
/**
* Get an admin access token which can be used to create Keycloak realms and perform other Keycloak administration tasks.
*/
public String getAdminAccessToken() {
return getAccessTokenInternal("admin", "admin", "admin-cli", null, null, getAuthServerBaseUrl() + "/realms/master");
}
/**
* Return URL string pointing to a Keycloak base endpoint.
* For example: 'http://localhost:8081/auth'.
*/
public String getAuthServerBaseUrl() {
try {
var uri = new URI(getAuthServerUrl());
// Keycloak-X does not have the `/auth` path segment by default.
return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(),
(uri.getPath().startsWith("/auth") ? "/auth" : null), null, null)
.toString();
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
}
/**
* Return URL string pointing to a Keycloak authentication endpoint configured with a 'quarkus.oidc.auth-server'
* property.
* For example: 'http://localhost:8081/auth/realms/quarkus'.
*/
public String getAuthServerUrl() {
if (this.authServerUrl != null) {
return this.authServerUrl;
}
String authServerUrl = getPropertyValue(CLIENT_AUTH_SERVER_URL_PROP, null);
if (authServerUrl == null) {
authServerUrl = getPropertyValue(AUTH_SERVER_URL_PROP, null);
}
if (authServerUrl == null) {
throw new ConfigurationException(
String.format("Unable to obtain the Auth Server URL as neither '%s' or '%s' is set",
CLIENT_AUTH_SERVER_URL_PROP, AUTH_SERVER_URL_PROP));
}
return authServerUrl;
}
/**
* Create a realm.
*/
public void createRealm(RealmRepresentation realm) {
try {
getSpec()
.auth().oauth2(getAdminAccessToken())
.contentType("application/json")
.body(JsonSerialization.writeValueAsBytes(realm))
.when()
.post(getAuthServerBaseUrl() + "/admin/realms").then()
.statusCode(201);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Delete a realm
*/
public void deleteRealm(String realm) {
getSpec()
.auth().oauth2(getAdminAccessToken())
.when()
.delete(getAuthServerBaseUrl() + "/admin/realms/" + realm).then().statusCode(204);
}
/**
* Delete a realm
*/
public void deleteRealm(RealmRepresentation realm) {
deleteRealm(realm.getRealm());
}
public void createRealmFromPath(String path) {
RealmRepresentation representation = readRealmFile(path);
createRealm(representation);
}
public RealmRepresentation readRealmFile(String realmPath) {
try {
return readRealmFile(Path.of(realmPath).toUri().toURL(), realmPath);
} catch (MalformedURLException ex) {
// Will not happen as this method is called only when it is confirmed the file exists
throw new RuntimeException(ex);
}
}
public RealmRepresentation readRealmFile(URL url, String realmPath) {
try {
try (InputStream is = url.openStream()) {
return JsonSerialization.readValue(is, RealmRepresentation.class);
}
} catch (IOException ex) {
throw new RuntimeException("Realm " + realmPath + " resource can not be opened", ex);
}
}
private String getPropertyValue(String prop, String defaultValue) {
return ConfigProvider.getConfig().getOptionalValue(prop, String.class)
.orElseGet(() -> getDevProperty(prop, defaultValue));
}
private String getDevProperty(String prop, String defaultValue) {
String value = testContext == null ? null : testContext.devServicesProperties().get(prop);
return value == null ? defaultValue : value;
}
@Override
public void setIntegrationTestContext(DevServicesContext context) {
this.testContext = context;
}
private static String urlEncode(String value) {
try {
return URLEncoder.encode(value, StandardCharsets.UTF_8.name());
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
private RequestSpecification getSpec() {
RequestSpecification spec = RestAssured.given();
if (tls != null) {
spec = spec.keyStore(tls.keystore(), tls.keystorePassword())
.trustStore(tls.truststore(), tls.truststorePassword());
} else {
spec = spec.relaxedHTTPSValidation();
}
return spec;
}
public record Tls(String keystore, String keystorePassword,
String truststore, String truststorePassword) {
public Tls() {
this("client-keystore.p12", "password", "client-truststore.p12", "password");
}
public Tls(String keystore, String truststore) {
this(keystore, "password", truststore, "password");
}
};
}
|
KeycloakTestClient
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/LuceneEndpointBuilderFactory.java
|
{
"start": 11289,
"end": 12463
}
|
class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final LuceneHeaderNameBuilder INSTANCE = new LuceneHeaderNameBuilder();
/**
* The Lucene Query to performed on the index. The query may include
* wildcards and phrases.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code QUERY}.
*/
public String qUERY() {
return "QUERY";
}
/**
* Set this header to true to include the actual Lucene documentation
* when returning hit information.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code RETURN_LUCENE_DOCS}.
*/
public String returnLuceneDocs() {
return "RETURN_LUCENE_DOCS";
}
}
static LuceneEndpointBuilder endpointBuilder(String componentName, String path) {
|
LuceneHeaderNameBuilder
|
java
|
quarkusio__quarkus
|
extensions/azure-functions-http/deployment/src/main/java/io/quarkus/azure/functions/resteasy/deployment/AzureFunctionsHttpProcessor.java
|
{
"start": 500,
"end": 1295
}
|
class ____ {
private static final Logger log = Logger.getLogger(AzureFunctionsHttpProcessor.class);
@BuildStep
public RequireVirtualHttpBuildItem requestVirtualHttp(LaunchModeBuildItem launchMode) {
return launchMode.getLaunchMode().isProduction() ? RequireVirtualHttpBuildItem.MARKER : null;
}
@BuildStep
public void registerFunction(BuildProducer<AzureFunctionBuildItem> producer) {
Method functionMethod = null;
for (Method method : Function.class.getMethods()) {
if (method.getName().equals("run")) {
functionMethod = method;
break;
}
}
producer.produce(new AzureFunctionBuildItem(Function.QUARKUS_HTTP, Function.class, functionMethod));
}
}
|
AzureFunctionsHttpProcessor
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
|
{
"start": 8464,
"end": 11262
}
|
class ____ extends CommandHandler {
@Override
protected void checkArgs(String[] args) {
if (args.length != 1) {
throw new HadoopIllegalArgumentException(
"usage: " + Command.CONFKEY.getUsage());
}
}
@Override
int doWorkInternal(GetConf tool, String[] args) throws Exception {
this.key = args[0];
return super.doWorkInternal(tool, args);
}
}
private final PrintStream out; // Stream for printing command output
private final PrintStream err; // Stream for printing error
GetConf(Configuration conf) {
this(conf, System.out, System.err);
}
GetConf(Configuration conf, PrintStream out, PrintStream err) {
super(conf);
this.out = out;
this.err = err;
}
void printError(String message) {
err.println(message);
}
void printOut(String message) {
out.println(message);
}
void printMap(Map<String, Map<String, InetSocketAddress>> map) {
StringBuilder buffer = new StringBuilder();
List<ConfiguredNNAddress> cnns = DFSUtil.flattenAddressMap(map);
for (ConfiguredNNAddress cnn : cnns) {
InetSocketAddress address = cnn.getAddress();
if (buffer.length() > 0) {
buffer.append(" ");
}
buffer.append(address.getHostName());
}
printOut(buffer.toString());
}
void printSet(Set<String> journalnodes) {
StringBuilder buffer = new StringBuilder();
for (String journalnode : journalnodes) {
if (buffer.length() > 0) {
buffer.append(" ");
}
buffer.append(journalnode);
}
printOut(buffer.toString());
}
private void printUsage() {
printError(USAGE);
}
/**
* Main method that runs the tool for given arguments.
* @param args arguments
* @return return status of the command
*/
private int doWork(String[] args) {
if (args.length >= 1) {
CommandHandler handler = Command.getHandler(args[0]);
if (handler != null) {
return handler.doWork(this,
Arrays.copyOfRange(args, 1, args.length));
}
}
printUsage();
return -1;
}
@Override
public int run(final String[] args) throws Exception {
try {
return UserGroupInformation.getCurrentUser().doAs(
new PrivilegedExceptionAction<Integer>() {
@Override
public Integer run() throws Exception {
return doWork(args);
}
});
} catch (InterruptedException e) {
throw new IOException(e);
}
}
public static void main(String[] args) throws Exception {
if (DFSUtil.parseHelpArgument(args, USAGE, System.out, true)) {
System.exit(0);
}
int res = ToolRunner.run(new GetConf(new HdfsConfiguration()), args);
System.exit(res);
}
}
|
PrintConfKeyCommandHandler
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConversionServiceDeducerTests.java
|
{
"start": 7494,
"end": 7676
}
|
// Converter stub that declares String -> InputStream support; convert()
// always throws, so the test only needs the registration, never a real call.
class ____ implements Converter<String, InputStream> {
    @Override
    public InputStream convert(String source) {
        throw new UnsupportedOperationException();
    }
}
}
|
StringConverter
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/NodeAttributesInfo.java
|
{
"start": 1237,
"end": 1677
}
|
/**
 * JAXB-serializable wrapper around a list of {@code NodeAttributeInfo}
 * entries; each entry is emitted as a {@code nodeAttributeInfo} element.
 */
class ____ {
    @XmlElement(name = "nodeAttributeInfo")
    private ArrayList<NodeAttributeInfo> nodeAttributesInfo =
        new ArrayList<>();
    public NodeAttributesInfo() {
        // JAXB needs this
    }
    /** Appends one attribute entry to the wrapped list. */
    public void addNodeAttributeInfo(NodeAttributeInfo attributeInfo) {
        this.nodeAttributesInfo.add(attributeInfo);
    }
    /** @return the live backing list (not a defensive copy). */
    public ArrayList<NodeAttributeInfo> getNodeAttributesInfo() {
        return nodeAttributesInfo;
    }
}
|
NodeAttributesInfo
|
java
|
micronaut-projects__micronaut-core
|
test-suite/src/test/java/io/micronaut/docs/replaces/BookFactory.java
|
{
"start": 794,
"end": 1008
}
|
// Factory producing singleton Book/TextBook beans for the docs example.
class ____ {
    // Default Book bean.
    @Singleton
    Book novel() {
        return new Book("A Great Novel");
    }
    // TextBook is a distinct bean type with its own singleton instance.
    @Singleton
    TextBook textBook() {
        return new TextBook("Learning 101");
    }
}
// end::class[]
|
BookFactory
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/parser/EnumParserTest.java
|
{
"start": 225,
"end": 2461
}
|
class ____ extends TestCase {
// An enum value parses from its quoted constant name.
public void test_0() throws Exception {
    String text = "\"A\"";
    DefaultJSONParser parser = new DefaultJSONParser(text);
    Type type = parser.parseObject(Type.class);
    Assert.assertEquals(Type.A, type);
}
// An enum value also parses from its ordinal (0 -> first constant, A).
public void test_1() throws Exception {
    String text = "0";
    DefaultJSONParser parser = new DefaultJSONParser(text);
    Type type = parser.parseObject(Type.class);
    Assert.assertEquals(Type.A, type);
}
// An unknown constant name ("C") yields null rather than throwing.
public void test_error() throws Exception {
    String text = "\"C\"";
    DefaultJSONParser parser = new DefaultJSONParser(text);
    assertNull(parser.parseObject(Type.class));
}
// An out-of-range ordinal (4) must raise an exception for Type.
public void test_error_1() throws Exception {
    Exception error = null;
    try {
        String text = "4";
        DefaultJSONParser parser = new DefaultJSONParser(text);
        parser.parseObject(Type.class);
    } catch (Exception ex) {
        error = ex;
    }
    Assert.assertNotNull(error);
}
// Same out-of-range ordinal check for the TypeA enum.
public void test_error_2() throws Exception {
    Exception error = null;
    try {
        String text = "4";
        DefaultJSONParser parser = new DefaultJSONParser(text);
        parser.parseObject(TypeA.class);
    } catch (Exception ex) {
        error = ex;
    }
    Assert.assertNotNull(error);
}
// EnumDeserializer built over a non-enum class must fail on a numeric token.
public void test_error_3() throws Exception {
    Exception error = null;
    try {
        String text = "4";
        DefaultJSONParser parser = new DefaultJSONParser(text);
        new EnumDeserializer(Object.class).deserialze(parser, Object.class, null);
    } catch (Exception ex) {
        error = ex;
    }
    Assert.assertNotNull(error);
}
// EnumDeserializer built over a non-enum class must fail on a boolean token.
public void test_error_4() throws Exception {
    Exception error = null;
    try {
        String text = "true";
        DefaultJSONParser parser = new DefaultJSONParser(text);
        new EnumDeserializer(Object.class).deserialze(parser, Object.class, null);
    } catch (Exception ex) {
        error = ex;
    }
    Assert.assertNotNull(error);
}
public static
|
EnumParserTest
|
java
|
apache__camel
|
components/camel-thrift/src/test/java/org/apache/camel/component/thrift/generated/Calculator.java
|
{
"start": 185380,
"end": 185656
}
|
// Thrift-generated SchemeFactory: supplies echo_resultStandardScheme
// instances. Generated code — do not edit by hand.
class ____ implements org.apache.thrift.scheme.SchemeFactory {
    @Override
    public echo_resultStandardScheme getScheme() {
        return new echo_resultStandardScheme();
    }
}
private static
|
echo_resultStandardSchemeFactory
|
java
|
quarkusio__quarkus
|
extensions/funqy/funqy-amazon-lambda/maven-archetype/src/main/resources/archetype-resources/src/main/java/OutputObject.java
|
{
"start": 28,
"end": 250
}
|
/** Simple result holder with a fluent setter. */
class ____ {
    private String result;
    /** @return the stored result value (may be null if never set) */
    public String getResult() {
        return result;
    }
    /** Sets the result and returns this instance for call chaining. */
    public OutputObject setResult(String result) {
        this.result = result;
        return this;
    }
}
|
OutputObject
|
java
|
apache__camel
|
components/camel-snmp/src/test/java/org/apache/camel/component/snmp/TrapTest.java
|
{
"start": 1762,
"end": 5381
}
|
/**
 * End-to-end SNMP trap test: sends a trap PDU through a producer endpoint
 * and asserts that the matching consumer route for the same SNMP version
 * receives it with the expected OID/value payload.
 */
class ____ extends SnmpTestSupport {
    private static final Logger LOG = LoggerFactory.getLogger(TrapTest.class);
    @ParameterizedTest
    @MethodSource("supportedVersions")
    public void testSendReceiveTraps(int version) throws Exception {
        PDU trap = createTrap(version);
        // Send it
        LOG.info("Sending pdu {}", trap);
        Endpoint endpoint = context.getEndpoint("direct:snmptrapV" + version);
        Exchange exchange = endpoint.createExchange();
        exchange.getIn().setBody(trap);
        Producer producer = endpoint.createProducer();
        producer.process(exchange);
        // If all goes right it should come here
        MockEndpoint mock = getMockEndpoint("mock:resultV" + version);
        mock.expectedMessageCount(1);
        // wait a bit — delivery is asynchronous, so poll up to 2 seconds
        Awaitility.await().atMost(2, TimeUnit.SECONDS)
                .untilAsserted(() -> mock.assertIsSatisfied());
        Message in = mock.getReceivedExchanges().get(0).getIn();
        Assertions.assertTrue(in instanceof SnmpMessage, "Expected received object 'SnmpMessage.class'. Got: " + in.getClass());
        // The payload variable binding set in createTrap() must survive the round trip.
        String msg = in.getBody(String.class);
        String expected = "<oid>1.2.3.4.5</oid><value>some string</value>";
        Assertions.assertTrue(msg.contains(expected), "Expected string containing '" + expected + "'. Got: " + msg);
    }
    /**
     * Builds a trap PDU for the given SNMP version carrying the standard
     * trap bindings plus a known OID/value payload asserted above.
     */
    private PDU createTrap(int version) {
        PDU trap = SnmpHelper.createPDU(version);
        OID oid = new OID("1.2.3.4.5");
        trap.add(new VariableBinding(SnmpConstants.snmpTrapOID, oid));
        trap.add(new VariableBinding(SnmpConstants.sysUpTime, new TimeTicks(5000))); // put your uptime here
        trap.add(new VariableBinding(SnmpConstants.sysDescr, new OctetString("System Description")));
        if (version == 0) {
            // SNMPv1 traps additionally require an enterprise OID.
            ((PDUv1) trap).setEnterprise(oid); //?
        }
        //Add Payload
        Variable var = new OctetString("some string");
        trap.add(new VariableBinding(oid, var));
        return trap;
    }
    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            public void configure() {
                // generate free ports for trap consumers/producers
                int portV0 = AvailablePortFinder.getNextAvailable();
                int portV1 = AvailablePortFinder.getNextAvailable();
                int portV3 = AvailablePortFinder.getNextAvailable();
                // One producer/consumer pair per SNMP version under test.
                from("direct:snmptrapV0")
                        .log(LoggingLevel.INFO, "Sending Trap pdu ${body}")
                        .to("snmp:127.0.0.1:" + portV0 + "?protocol=udp&type=TRAP&snmpVersion=0");
                from("snmp:0.0.0.0:" + portV0 + "?protocol=udp&type=TRAP&snmpVersion=0")
                        .to("mock:resultV0");
                from("direct:snmptrapV1")
                        .log(LoggingLevel.INFO, "Sending Trap pdu ${body}")
                        .to("snmp:127.0.0.1:" + portV1 + "?protocol=udp&type=TRAP&snmpVersion=1");
                from("snmp:0.0.0.0:" + portV1 + "?protocol=udp&type=TRAP&snmpVersion=1")
                        .to("mock:resultV1");
                from("direct:snmptrapV3")
                        .log(LoggingLevel.INFO, "Sending Trap pdu ${body}")
                        .to("snmp:127.0.0.1:" + portV3
                            + "?securityName=test&securityLevel=1&protocol=udp&type=TRAP&snmpVersion=3");
                from("snmp:0.0.0.0:" + portV3 + "?securityName=test&securityLevel=1&protocol=udp&type=TRAP&snmpVersion=3")
                        .to("mock:resultV3");
            }
        };
    }
}
|
TrapTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/event/AnnotationDrivenEventListenerTests.java
|
{
"start": 28122,
"end": 28268
}
|
// Fixture with a deliberately invalid @EventListener signature (two
// parameters); as the method name states, it can never be invoked.
class ____ {
    @EventListener
    public void cannotBeCalled(String s, Integer what) {
    }
}
@Component
static
|
InvalidMethodSignatureEventListener
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/AnalyzerTests.java
|
{
"start": 1734,
"end": 7120
}
|
/**
 * Analyzer regression tests, focused on recursive/aliased filter-reference
 * resolution (historically a source of StackOverflowErrors, see the linked
 * issues). Each test asserts the exact shape of the resolved logical plan.
 */
class ____ extends ESTestCase {
    private final SqlParser parser = new SqlParser();
    private final AnalyzerContext context = new AnalyzerContext(
        SqlTestUtils.TEST_CFG,
        new SqlFunctionRegistry(),
        IndexResolution.valid(new EsIndex("test", loadMapping("mapping-basic.json")))
    );
    private final Analyzer analyzer = new Analyzer(context, new Verifier(new Metrics()));
    // Parses and analyzes a statement against the fixed "test" index mapping.
    private LogicalPlan analyze(String sql) {
        return analyzer.analyze(parser.createStatement(sql), false);
    }
    public void testResolveRecursiveFilterRefs() {
        // "e" in the WHERE clause must resolve to the alias "emp_no + 1",
        // even though "emp_no" itself is re-aliased in the same SELECT.
        LogicalPlan plan = analyze("SELECT emp_no * 10 emp_no, emp_no + 1 e FROM test WHERE e > 0");
        assertThat(plan, instanceOf(Project.class));
        Project p = (Project) plan;
        assertThat(p.child(), instanceOf(Filter.class));
        Filter f = (Filter) p.child();
        assertThat(f.condition(), instanceOf(GreaterThan.class));
        GreaterThan gt = (GreaterThan) f.condition();
        assertThat(gt.left().toString(), is("emp_no + 1"));
    }
    public void testResolveAlternatingRecursiveFilterRefs() {
        // Queries like the following used to cause a StackOverflowError in ResolveFilterRefs.
        // see https://github.com/elastic/elasticsearch/issues/81577
        LogicalPlan plan = analyze(
            "SELECT MAX(emp_no) emp_no, MAX(emp_no) emp_no, AVG(emp_no) e FROM test WHERE e > 0 GROUP BY emp_no HAVING e > 0"
        );
        assertThat(plan, instanceOf(Having.class));
        Having h = (Having) plan;
        assertThat(h.condition(), instanceOf(GreaterThan.class));
        GreaterThan gt = (GreaterThan) h.condition();
        assertThat(gt.left().toString(), startsWith("e{r}"));
        assertThat(h.child(), instanceOf(Aggregate.class));
        Aggregate a = (Aggregate) h.child();
        assertThat(a.child(), instanceOf(Filter.class));
        Filter f = (Filter) a.child();
        assertThat(f.condition(), instanceOf(GreaterThan.class));
        GreaterThan gtF = (GreaterThan) f.condition();
        // having the aggregation function in the where clause will cause an error later on
        assertThat(gtF.left().toString(), is("AVG(emp_no)"));
    }
    public void testResolveAlternatingRecursiveFilterRefs_WithCountInSubSelect() {
        // Queries like the following used to cause a StackOverflowError in Analyzer.
        // see https://github.com/elastic/elasticsearch/issues/81577
        // The query itself is not supported (using aggregates in a sub-select) but it shouldn't bring down ES
        LogicalPlan plan = analyze(
            "SELECT salary, ks AS salary FROM (SELECT COUNT(salary/1000) AS salary, salary/1000 ks FROM test GROUP BY salary/1000) "
                + "WHERE ks > 30 AND salary > 3"
        );
        // passing the analysis step should succeed
        LogicalPlan optimizedPlan = new Optimizer().optimize(plan);
        assertThat(optimizedPlan, instanceOf(Project.class));
        Project p = (Project) optimizedPlan;
        assertThat(p.child(), instanceOf(Filter.class));
        Filter f = (Filter) p.child();
        assertThat(f.condition(), instanceOf(GreaterThan.class));
        GreaterThan gt = (GreaterThan) f.condition();
        assertThat(gt.left(), instanceOf(Count.class));
        assertThat(gt.right(), instanceOf(Literal.class));
        assertThat(f.child(), instanceOf(Aggregate.class));
        Aggregate a = (Aggregate) f.child();
        assertThat(a.groupings().size(), is(1));
        assertThat(a.groupings().get(0), instanceOf(Div.class));
        assertThat(a.child(), instanceOf(Filter.class));
        Filter af = (Filter) a.child();
        assertThat(af.condition(), instanceOf(GreaterThan.class));
        gt = (GreaterThan) af.condition();
        assertThat(gt.left(), instanceOf(Div.class));
        assertThat(gt.right(), instanceOf(Literal.class));
    }
    public void testResolveAlternatingRecursiveFilterRefs_WithAvgInSubSelect() {
        // Queries like the following used to cause a StackOverflowError in Analyzer.
        // see https://github.com/elastic/elasticsearch/issues/81577
        // The query itself is not supported (using aggregates in a sub-select) but it shouldn't bring down ES
        LogicalPlan plan = analyze(
            "SELECT salary AS salary, salary AS s FROM (SELECT ROUND(AVG(salary)) AS salary FROM test GROUP BY gender) "
                + "WHERE s > 48000 OR salary > 46000"
        );
        // passing the analysis step should succeed
        LogicalPlan optimizedPlan = new Optimizer().optimize(plan);
        assertThat(optimizedPlan, instanceOf(Project.class));
        Project p = (Project) optimizedPlan;
        assertThat(p.child(), instanceOf(Filter.class));
        Filter f = (Filter) p.child();
        assertThat(f.condition(), instanceOf(GreaterThan.class));
        GreaterThan gt = (GreaterThan) f.condition();
        assertThat(gt.left(), instanceOf(Round.class));
        assertThat(gt.right(), instanceOf(Literal.class));
        assertThat(f.child(), instanceOf(Aggregate.class));
        Aggregate a = (Aggregate) f.child();
        assertThat(a.groupings().size(), is(1));
        assertThat(a.groupings().get(0), instanceOf(FieldAttribute.class));
        assertThat(a.child(), instanceOf(EsRelation.class));
        // the first filter (s > 48000) is removed by the optimizer
    }
}
|
AnalyzerTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformHealthIssueTests.java
|
{
"start": 679,
"end": 2290
}
|
/** Wire-serialization round-trip tests for {@code TransformHealthIssue}. */
class ____ extends AbstractWireSerializingTestCase<TransformHealthIssue> {
    // Randomized issue; details and firstOccurrence are intentionally nullable.
    public static TransformHealthIssue randomTransformHealthIssue() {
        return new TransformHealthIssue(
            randomAlphaOfLengthBetween(10, 200),
            randomAlphaOfLengthBetween(10, 200),
            randomBoolean() ? randomAlphaOfLengthBetween(10, 200) : null,
            randomIntBetween(1, 10),
            randomBoolean() ? null : Instant.ofEpochSecond(randomLongBetween(1, 100000), randomLongBetween(-999_999_999, 999_999_999))
        );
    }
    @Override
    protected Writeable.Reader<TransformHealthIssue> instanceReader() {
        return TransformHealthIssue::new;
    }
    @Override
    protected TransformHealthIssue createTestInstance() {
        return randomTransformHealthIssue();
    }
    @Override
    protected TransformHealthIssue mutateInstance(TransformHealthIssue instance) {
        // NOTE(review): returning null skips mutation-based equality testing;
        // left as-is pending the linked issue.
        return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
    }
    // When deserialized with a pre-8.8 transport version, the type field is
    // absent on the wire and must default to "unknown".
    public void testMissingTypePre88() throws IOException {
        TransformHealthIssue originalIssue = new TransformHealthIssue("some-type", "some-issue", null, 1, null);
        assertThat(originalIssue.getType(), is(equalTo("some-type")));
        TransformHealthIssue deserializedIssue = copyInstance(
            originalIssue,
            getNamedWriteableRegistry(),
            StreamOutput::writeWriteable,
            TransformHealthIssue::new,
            TransportVersions.V_8_7_0
        );
        assertThat(deserializedIssue.getType(), is(equalTo("unknown")));
    }
}
|
TransformHealthIssueTests
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/javadoc/MalformedInlineTagTest.java
|
{
"start": 989,
"end": 1431
}
|
class ____ {
private final BugCheckerRefactoringTestHelper helper =
BugCheckerRefactoringTestHelper.newInstance(MalformedInlineTag.class, getClass());
@Test
public void positive_allInlineTags() {
helper
.addInputLines(
"Test.java",
"""
/**
* Here are a list of malformed tags: @{code code} @{docRoot} @{inheritDoc} @{link Test} @{linkplain
* Test} @{literal literal} @{value Test}
*/
|
MalformedInlineTagTest
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/xml/BeanDefinitionParserDelegate.java
|
{
"start": 3626,
"end": 16192
}
|
class ____ {
public static final String BEANS_NAMESPACE_URI = "http://www.springframework.org/schema/beans";
public static final String MULTI_VALUE_ATTRIBUTE_DELIMITERS = ",; ";
/**
* Value of a T/F attribute that represents true.
* Anything else represents false.
*/
public static final String TRUE_VALUE = "true";
public static final String FALSE_VALUE = "false";
public static final String DEFAULT_VALUE = "default";
public static final String DESCRIPTION_ELEMENT = "description";
public static final String AUTOWIRE_NO_VALUE = "no";
public static final String AUTOWIRE_BY_NAME_VALUE = "byName";
public static final String AUTOWIRE_BY_TYPE_VALUE = "byType";
public static final String AUTOWIRE_CONSTRUCTOR_VALUE = "constructor";
public static final String AUTOWIRE_AUTODETECT_VALUE = "autodetect";
public static final String NAME_ATTRIBUTE = "name";
public static final String BEAN_ELEMENT = "bean";
public static final String META_ELEMENT = "meta";
public static final String ID_ATTRIBUTE = "id";
public static final String PARENT_ATTRIBUTE = "parent";
public static final String CLASS_ATTRIBUTE = "class";
public static final String ABSTRACT_ATTRIBUTE = "abstract";
public static final String SCOPE_ATTRIBUTE = "scope";
private static final String SINGLETON_ATTRIBUTE = "singleton";
public static final String LAZY_INIT_ATTRIBUTE = "lazy-init";
public static final String AUTOWIRE_ATTRIBUTE = "autowire";
public static final String AUTOWIRE_CANDIDATE_ATTRIBUTE = "autowire-candidate";
public static final String PRIMARY_ATTRIBUTE = "primary";
public static final String DEPENDS_ON_ATTRIBUTE = "depends-on";
public static final String INIT_METHOD_ATTRIBUTE = "init-method";
public static final String DESTROY_METHOD_ATTRIBUTE = "destroy-method";
public static final String FACTORY_METHOD_ATTRIBUTE = "factory-method";
public static final String FACTORY_BEAN_ATTRIBUTE = "factory-bean";
public static final String CONSTRUCTOR_ARG_ELEMENT = "constructor-arg";
public static final String INDEX_ATTRIBUTE = "index";
public static final String TYPE_ATTRIBUTE = "type";
public static final String VALUE_TYPE_ATTRIBUTE = "value-type";
public static final String KEY_TYPE_ATTRIBUTE = "key-type";
public static final String PROPERTY_ELEMENT = "property";
public static final String REF_ATTRIBUTE = "ref";
public static final String VALUE_ATTRIBUTE = "value";
public static final String LOOKUP_METHOD_ELEMENT = "lookup-method";
public static final String REPLACED_METHOD_ELEMENT = "replaced-method";
public static final String REPLACER_ATTRIBUTE = "replacer";
public static final String ARG_TYPE_ELEMENT = "arg-type";
public static final String ARG_TYPE_MATCH_ATTRIBUTE = "match";
public static final String REF_ELEMENT = "ref";
public static final String IDREF_ELEMENT = "idref";
public static final String BEAN_REF_ATTRIBUTE = "bean";
public static final String PARENT_REF_ATTRIBUTE = "parent";
public static final String VALUE_ELEMENT = "value";
public static final String NULL_ELEMENT = "null";
public static final String ARRAY_ELEMENT = "array";
public static final String LIST_ELEMENT = "list";
public static final String SET_ELEMENT = "set";
public static final String MAP_ELEMENT = "map";
public static final String ENTRY_ELEMENT = "entry";
public static final String KEY_ELEMENT = "key";
public static final String KEY_ATTRIBUTE = "key";
public static final String KEY_REF_ATTRIBUTE = "key-ref";
public static final String VALUE_REF_ATTRIBUTE = "value-ref";
public static final String PROPS_ELEMENT = "props";
public static final String PROP_ELEMENT = "prop";
public static final String MERGE_ATTRIBUTE = "merge";
public static final String QUALIFIER_ELEMENT = "qualifier";
public static final String QUALIFIER_ATTRIBUTE_ELEMENT = "attribute";
public static final String DEFAULT_LAZY_INIT_ATTRIBUTE = "default-lazy-init";
public static final String DEFAULT_MERGE_ATTRIBUTE = "default-merge";
public static final String DEFAULT_AUTOWIRE_ATTRIBUTE = "default-autowire";
public static final String DEFAULT_AUTOWIRE_CANDIDATES_ATTRIBUTE = "default-autowire-candidates";
public static final String DEFAULT_INIT_METHOD_ATTRIBUTE = "default-init-method";
public static final String DEFAULT_DESTROY_METHOD_ATTRIBUTE = "default-destroy-method";
protected final Log logger = LogFactory.getLog(getClass());
private final XmlReaderContext readerContext;
private final DocumentDefaultsDefinition defaults = new DocumentDefaultsDefinition();
private final ParseState parseState = new ParseState();
/**
* Stores all used bean names so we can enforce uniqueness on a per
* beans-element basis. Duplicate bean ids/names may not exist within the
* same level of beans element nesting, but may be duplicated across levels.
*/
private final Set<String> usedNames = new HashSet<>();
/**
* Create a new BeanDefinitionParserDelegate associated with the supplied
* {@link XmlReaderContext}.
*/
public BeanDefinitionParserDelegate(XmlReaderContext readerContext) {
Assert.notNull(readerContext, "XmlReaderContext must not be null");
this.readerContext = readerContext;
}
/**
* Get the {@link XmlReaderContext} associated with this helper instance.
*/
public final XmlReaderContext getReaderContext() {
return this.readerContext;
}
/**
* Invoke the {@link org.springframework.beans.factory.parsing.SourceExtractor}
* to pull the source metadata from the supplied {@link Element}.
*/
protected @Nullable Object extractSource(Element ele) {
return this.readerContext.extractSource(ele);
}
/**
* Report an error with the given message for the given source element.
*/
protected void error(String message, Node source) {
this.readerContext.error(message, source, this.parseState.snapshot());
}
/**
* Report an error with the given message for the given source element.
*/
protected void error(String message, Element source) {
this.readerContext.error(message, source, this.parseState.snapshot());
}
/**
* Report an error with the given message for the given source element.
*/
protected void error(String message, Element source, Throwable cause) {
this.readerContext.error(message, source, this.parseState.snapshot(), cause);
}
/**
* Initialize the default settings assuming a {@code null} parent delegate.
*/
public void initDefaults(Element root) {
initDefaults(root, null);
}
/**
* Initialize the default lazy-init, autowire, dependency check settings,
* init-method, destroy-method and merge settings. Support nested 'beans'
* element use cases by falling back to the given parent in case the
* defaults are not explicitly set locally.
* @see #populateDefaults(DocumentDefaultsDefinition, DocumentDefaultsDefinition, org.w3c.dom.Element)
* @see #getDefaults()
*/
public void initDefaults(Element root, @Nullable BeanDefinitionParserDelegate parent) {
populateDefaults(this.defaults, (parent != null ? parent.defaults : null), root);
this.readerContext.fireDefaultsRegistered(this.defaults);
}
/**
* Populate the given DocumentDefaultsDefinition instance with the default lazy-init,
* autowire, dependency check settings, init-method, destroy-method and merge settings.
* Support nested 'beans' element use cases by falling back to {@code parentDefaults}
* in case the defaults are not explicitly set locally.
* @param defaults the defaults to populate
* @param parentDefaults the parent BeanDefinitionParserDelegate (if any) defaults to fall back to
* @param root the root element of the current bean definition document (or nested beans element)
*/
protected void populateDefaults(DocumentDefaultsDefinition defaults, @Nullable DocumentDefaultsDefinition parentDefaults, Element root) {
String lazyInit = root.getAttribute(DEFAULT_LAZY_INIT_ATTRIBUTE);
if (isDefaultValue(lazyInit)) {
// Potentially inherited from outer <beans> sections, otherwise falling back to false.
lazyInit = (parentDefaults != null ? parentDefaults.getLazyInit() : FALSE_VALUE);
}
defaults.setLazyInit(lazyInit);
String merge = root.getAttribute(DEFAULT_MERGE_ATTRIBUTE);
if (isDefaultValue(merge)) {
// Potentially inherited from outer <beans> sections, otherwise falling back to false.
merge = (parentDefaults != null ? parentDefaults.getMerge() : FALSE_VALUE);
}
defaults.setMerge(merge);
String autowire = root.getAttribute(DEFAULT_AUTOWIRE_ATTRIBUTE);
if (isDefaultValue(autowire)) {
// Potentially inherited from outer <beans> sections, otherwise falling back to 'no'.
autowire = (parentDefaults != null ? parentDefaults.getAutowire() : AUTOWIRE_NO_VALUE);
}
defaults.setAutowire(autowire);
if (root.hasAttribute(DEFAULT_AUTOWIRE_CANDIDATES_ATTRIBUTE)) {
defaults.setAutowireCandidates(root.getAttribute(DEFAULT_AUTOWIRE_CANDIDATES_ATTRIBUTE));
}
else if (parentDefaults != null) {
defaults.setAutowireCandidates(parentDefaults.getAutowireCandidates());
}
if (root.hasAttribute(DEFAULT_INIT_METHOD_ATTRIBUTE)) {
defaults.setInitMethod(root.getAttribute(DEFAULT_INIT_METHOD_ATTRIBUTE));
}
else if (parentDefaults != null) {
defaults.setInitMethod(parentDefaults.getInitMethod());
}
if (root.hasAttribute(DEFAULT_DESTROY_METHOD_ATTRIBUTE)) {
defaults.setDestroyMethod(root.getAttribute(DEFAULT_DESTROY_METHOD_ATTRIBUTE));
}
else if (parentDefaults != null) {
defaults.setDestroyMethod(parentDefaults.getDestroyMethod());
}
defaults.setSource(this.readerContext.extractSource(root));
}
/**
* Return the defaults definition object.
*/
public DocumentDefaultsDefinition getDefaults() {
return this.defaults;
}
/**
* Return the default settings for bean definitions as indicated within
* the attributes of the top-level {@code <beans/>} element.
*/
public BeanDefinitionDefaults getBeanDefinitionDefaults() {
BeanDefinitionDefaults bdd = new BeanDefinitionDefaults();
bdd.setLazyInit(TRUE_VALUE.equalsIgnoreCase(this.defaults.getLazyInit()));
bdd.setAutowireMode(getAutowireMode(DEFAULT_VALUE));
bdd.setInitMethodName(this.defaults.getInitMethod());
bdd.setDestroyMethodName(this.defaults.getDestroyMethod());
return bdd;
}
/**
* Return any patterns provided in the 'default-autowire-candidates'
* attribute of the top-level {@code <beans/>} element.
*/
public String @Nullable [] getAutowireCandidatePatterns() {
String candidatePattern = this.defaults.getAutowireCandidates();
return (candidatePattern != null ? StringUtils.commaDelimitedListToStringArray(candidatePattern) : null);
}
/**
* Parses the supplied {@code <bean>} element. May return {@code null}
* if there were errors during parse. Errors are reported to the
* {@link org.springframework.beans.factory.parsing.ProblemReporter}.
*/
public @Nullable BeanDefinitionHolder parseBeanDefinitionElement(Element ele) {
return parseBeanDefinitionElement(ele, null);
}
/**
* Parses the supplied {@code <bean>} element. May return {@code null}
* if there were errors during parse. Errors are reported to the
* {@link org.springframework.beans.factory.parsing.ProblemReporter}.
*/
public @Nullable BeanDefinitionHolder parseBeanDefinitionElement(Element ele, @Nullable BeanDefinition containingBean) {
String id = ele.getAttribute(ID_ATTRIBUTE);
String nameAttr = ele.getAttribute(NAME_ATTRIBUTE);
List<String> aliases = new ArrayList<>();
if (StringUtils.hasLength(nameAttr)) {
String[] nameArr = StringUtils.tokenizeToStringArray(nameAttr, MULTI_VALUE_ATTRIBUTE_DELIMITERS);
aliases.addAll(Arrays.asList(nameArr));
}
String beanName = id;
if (!StringUtils.hasText(beanName) && !aliases.isEmpty()) {
beanName = aliases.remove(0);
if (logger.isTraceEnabled()) {
logger.trace("No XML 'id' specified - using '" + beanName +
"' as bean name and " + aliases + " as aliases");
}
}
if (containingBean == null) {
checkNameUniqueness(beanName, aliases, ele);
}
AbstractBeanDefinition beanDefinition = parseBeanDefinitionElement(ele, beanName, containingBean);
if (beanDefinition != null) {
if (!StringUtils.hasText(beanName)) {
try {
if (containingBean != null) {
beanName = BeanDefinitionReaderUtils.generateBeanName(
beanDefinition, this.readerContext.getRegistry(), true);
}
else {
beanName = this.readerContext.generateBeanName(beanDefinition);
// Register an alias for the plain bean
|
BeanDefinitionParserDelegate
|
java
|
apache__avro
|
lang/java/protobuf/src/test/java/org/apache/avro/protobuf/multiplefiles/Foo.java
|
{
"start": 287,
"end": 9986
}
|
class ____ extends com.google.protobuf.GeneratedMessage implements
// @@protoc_insertion_point(message_implements:org.apache.avro.protobuf.multiplefiles.Foo)
FooOrBuilder {
private static final long serialVersionUID = 0L;
static {
com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion(
com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, /* major= */ 4, /* minor= */ 26, /* patch= */ 1,
/* suffix= */ "", Foo.class.getName());
}
// Use Foo.newBuilder() to construct.
private Foo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
}
private Foo() {
string_ = "";
bytes_ = com.google.protobuf.ByteString.EMPTY;
enum_ = 3;
intArray_ = emptyIntList();
fooArray_ = java.util.Collections.emptyList();
syms_ = java.util.Collections.emptyList();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return org.apache.avro.protobuf.multiplefiles.TestMultipleFiles.internal_static_org_apache_avro_protobuf_multiplefiles_Foo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() {
return org.apache.avro.protobuf.multiplefiles.TestMultipleFiles.internal_static_org_apache_avro_protobuf_multiplefiles_Foo_fieldAccessorTable
.ensureFieldAccessorsInitialized(org.apache.avro.protobuf.multiplefiles.Foo.class,
org.apache.avro.protobuf.multiplefiles.Foo.Builder.class);
}
private int bitField0_;
public static final int INT32_FIELD_NUMBER = 1;
private int int32_ = 0;
/**
* <pre>
* all the primitive types
* </pre>
*
* <code>required int32 int32 = 1;</code>
*
* @return Whether the int32 field is set.
*/
@java.lang.Override
public boolean hasInt32() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* all the primitive types
* </pre>
*
* <code>required int32 int32 = 1;</code>
*
* @return The int32.
*/
@java.lang.Override
public int getInt32() {
return int32_;
}
public static final int INT64_FIELD_NUMBER = 2;
private long int64_ = 0L;
/**
* <code>optional int64 int64 = 2;</code>
*
* @return Whether the int64 field is set.
*/
@java.lang.Override
public boolean hasInt64() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <code>optional int64 int64 = 2;</code>
*
* @return The int64.
*/
@java.lang.Override
public long getInt64() {
return int64_;
}
public static final int UINT32_FIELD_NUMBER = 3;
private int uint32_ = 0;
/**
* <code>optional uint32 uint32 = 3;</code>
*
* @return Whether the uint32 field is set.
*/
@java.lang.Override
public boolean hasUint32() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
* <code>optional uint32 uint32 = 3;</code>
*
* @return The uint32.
*/
@java.lang.Override
public int getUint32() {
return uint32_;
}
public static final int UINT64_FIELD_NUMBER = 4;
private long uint64_ = 0L;
/**
* <code>optional uint64 uint64 = 4;</code>
*
* @return Whether the uint64 field is set.
*/
@java.lang.Override
public boolean hasUint64() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
* <code>optional uint64 uint64 = 4;</code>
*
* @return The uint64.
*/
@java.lang.Override
public long getUint64() {
return uint64_;
}
public static final int SINT32_FIELD_NUMBER = 5;
private int sint32_ = 0;
/**
* <code>optional sint32 sint32 = 5;</code>
*
* @return Whether the sint32 field is set.
*/
@java.lang.Override
public boolean hasSint32() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
* <code>optional sint32 sint32 = 5;</code>
*
* @return The sint32.
*/
@java.lang.Override
public int getSint32() {
return sint32_;
}
public static final int SINT64_FIELD_NUMBER = 6;
private long sint64_ = 0L;
/**
* <code>optional sint64 sint64 = 6;</code>
*
* @return Whether the sint64 field is set.
*/
@java.lang.Override
public boolean hasSint64() {
return ((bitField0_ & 0x00000020) != 0);
}
/**
* <code>optional sint64 sint64 = 6;</code>
*
* @return The sint64.
*/
@java.lang.Override
public long getSint64() {
return sint64_;
}
  // NOTE(review): protoc-generated accessors for the proto2 optional scalar fields
  // fixed32..bool (field numbers 7-13). Each field has a backing value field plus a
  // dedicated presence bit in bitField0_ (0x40, 0x80, 0x100, 0x200, 0x400, 0x800,
  // 0x1000 respectively). Do not edit by hand; regenerate from the .proto.
  public static final int FIXED32_FIELD_NUMBER = 7;
  private int fixed32_ = 0;
  /**
   * <code>optional fixed32 fixed32 = 7;</code>
   *
   * @return Whether the fixed32 field is set.
   */
  @java.lang.Override
  public boolean hasFixed32() {
    return ((bitField0_ & 0x00000040) != 0);
  }
  /**
   * <code>optional fixed32 fixed32 = 7;</code>
   *
   * @return The fixed32.
   */
  @java.lang.Override
  public int getFixed32() {
    return fixed32_;
  }
  public static final int FIXED64_FIELD_NUMBER = 8;
  private long fixed64_ = 0L;
  /**
   * <code>optional fixed64 fixed64 = 8;</code>
   *
   * @return Whether the fixed64 field is set.
   */
  @java.lang.Override
  public boolean hasFixed64() {
    return ((bitField0_ & 0x00000080) != 0);
  }
  /**
   * <code>optional fixed64 fixed64 = 8;</code>
   *
   * @return The fixed64.
   */
  @java.lang.Override
  public long getFixed64() {
    return fixed64_;
  }
  public static final int SFIXED32_FIELD_NUMBER = 9;
  private int sfixed32_ = 0;
  /**
   * <code>optional sfixed32 sfixed32 = 9;</code>
   *
   * @return Whether the sfixed32 field is set.
   */
  @java.lang.Override
  public boolean hasSfixed32() {
    return ((bitField0_ & 0x00000100) != 0);
  }
  /**
   * <code>optional sfixed32 sfixed32 = 9;</code>
   *
   * @return The sfixed32.
   */
  @java.lang.Override
  public int getSfixed32() {
    return sfixed32_;
  }
  public static final int SFIXED64_FIELD_NUMBER = 10;
  private long sfixed64_ = 0L;
  /**
   * <code>optional sfixed64 sfixed64 = 10;</code>
   *
   * @return Whether the sfixed64 field is set.
   */
  @java.lang.Override
  public boolean hasSfixed64() {
    return ((bitField0_ & 0x00000200) != 0);
  }
  /**
   * <code>optional sfixed64 sfixed64 = 10;</code>
   *
   * @return The sfixed64.
   */
  @java.lang.Override
  public long getSfixed64() {
    return sfixed64_;
  }
  public static final int FLOAT_FIELD_NUMBER = 11;
  private float float_ = 0F;
  /**
   * <code>optional float float = 11;</code>
   *
   * @return Whether the float field is set.
   */
  @java.lang.Override
  public boolean hasFloat() {
    return ((bitField0_ & 0x00000400) != 0);
  }
  /**
   * <code>optional float float = 11;</code>
   *
   * @return The float.
   */
  @java.lang.Override
  public float getFloat() {
    return float_;
  }
  public static final int DOUBLE_FIELD_NUMBER = 12;
  private double double_ = 0D;
  /**
   * <code>optional double double = 12;</code>
   *
   * @return Whether the double field is set.
   */
  @java.lang.Override
  public boolean hasDouble() {
    return ((bitField0_ & 0x00000800) != 0);
  }
  /**
   * <code>optional double double = 12;</code>
   *
   * @return The double.
   */
  @java.lang.Override
  public double getDouble() {
    return double_;
  }
  public static final int BOOL_FIELD_NUMBER = 13;
  private boolean bool_ = false;
  /**
   * <code>optional bool bool = 13;</code>
   *
   * @return Whether the bool field is set.
   */
  @java.lang.Override
  public boolean hasBool() {
    return ((bitField0_ & 0x00001000) != 0);
  }
  /**
   * <code>optional bool bool = 13;</code>
   *
   * @return The bool.
   */
  @java.lang.Override
  public boolean getBool() {
    return bool_;
  }
  // NOTE(review): protoc-generated accessors for `optional string string = 14`.
  // string_ holds EITHER a java.lang.String or a com.google.protobuf.ByteString;
  // conversion between the two is performed lazily and the result is cached back
  // into the field. The field is volatile so a cached conversion published by one
  // thread is visible to others. Do not edit by hand; regenerate from the .proto.
  public static final int STRING_FIELD_NUMBER = 14;
  @SuppressWarnings("serial")
  private volatile java.lang.Object string_ = "";
  /**
   * <code>optional string string = 14;</code>
   *
   * @return Whether the string field is set.
   */
  @java.lang.Override
  public boolean hasString() {
    return ((bitField0_ & 0x00002000) != 0);
  }
  /**
   * <code>optional string string = 14;</code>
   *
   * @return The string.
   */
  @java.lang.Override
  public java.lang.String getString() {
    java.lang.Object ref = string_;
    if (ref instanceof java.lang.String) {
      // Already decoded (or set as a String) — return the cached value.
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String only when the bytes are valid UTF-8; invalid
      // bytes keep the ByteString so the raw data is never silently replaced.
      if (bs.isValidUtf8()) {
        string_ = s;
      }
      return s;
    }
  }
  /**
   * <code>optional string string = 14;</code>
   *
   * @return The bytes for string.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getStringBytes() {
    java.lang.Object ref = string_;
    if (ref instanceof java.lang.String) {
      // Encode lazily and cache the ByteString form for subsequent calls.
      com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      string_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // NOTE(review): protoc-generated accessors for `optional bytes bytes = 15`.
  // Presence is tracked by bit 0x00004000 of bitField0_; the default is the
  // empty ByteString. Regenerate from the .proto instead of editing by hand.
  public static final int BYTES_FIELD_NUMBER = 15;
  private com.google.protobuf.ByteString bytes_ = com.google.protobuf.ByteString.EMPTY;
  /**
   * <code>optional bytes bytes = 15;</code>
   *
   * @return Whether the bytes field is set.
   */
  @java.lang.Override
  public boolean hasBytes() {
    return ((bitField0_ & 0x00004000) != 0);
  }
  /**
   * <code>optional bytes bytes = 15;</code>
   *
   * @return The bytes.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getBytes() {
    return bytes_;
  }
public static final int ENUM_FIELD_NUMBER = 16;
private int enum_ = 3;
/**
* <code>optional .org.apache.avro.protobuf.multiplefiles.A enum = 16 [default = Z];</code>
*
* @return Whether the
|
Foo
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/LifeCycle.java
|
{
"start": 938,
"end": 1105
}
|
interface ____ handling
* the life cycle context of an object is this one. An object first starts in the {@link State#INITIALIZED} state
* by default to indicate the
|
for
|
java
|
quarkusio__quarkus
|
integration-tests/smallrye-metrics/src/main/java/io/quarkus/it/metrics/MetricsOnClassResource.java
|
{
"start": 193,
"end": 283
}
|
class ____ {
@Path("/method")
public void method() {
}
}
|
MetricsOnClassResource
|
java
|
alibaba__nacos
|
server/src/main/java/com/alibaba/nacos/server/NacosNormalBeanTypeFilter.java
|
{
"start": 910,
"end": 1216
}
|
class ____ extends AbstractNacosWebBeanTypeFilter {
@Override
public boolean match(MetadataReader metadataReader, MetadataReaderFactory metadataReaderFactory)
throws IOException {
return !super.isWebBean(metadataReader, metadataReaderFactory);
}
}
|
NacosNormalBeanTypeFilter
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/util/ClassUtils.java
|
{
"start": 38548,
"end": 38873
}
|
class ____ case of a
* CGLIB-generated subclass.
* @param instance the instance to check
* @return the user-defined class
*/
public static Class<?> getUserClass(Object instance) {
Assert.notNull(instance, "Instance must not be null");
return getUserClass(instance.getClass());
}
/**
* Return the user-defined
|
in
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/cglib/proxy/DispatcherGenerator.java
|
{
"start": 1050,
"end": 2882
}
|
class ____ implements CallbackGenerator {
public static final DispatcherGenerator INSTANCE =
new DispatcherGenerator(false);
public static final DispatcherGenerator PROXY_REF_INSTANCE =
new DispatcherGenerator(true);
private static final Type DISPATCHER =
TypeUtils.parseType("org.springframework.cglib.proxy.Dispatcher");
private static final Type PROXY_REF_DISPATCHER =
TypeUtils.parseType("org.springframework.cglib.proxy.ProxyRefDispatcher");
private static final Signature LOAD_OBJECT =
TypeUtils.parseSignature("Object loadObject()");
private static final Signature PROXY_REF_LOAD_OBJECT =
TypeUtils.parseSignature("Object loadObject(Object)");
private boolean proxyRef;
private DispatcherGenerator(boolean proxyRef) {
this.proxyRef = proxyRef;
}
@Override
public void generate(ClassEmitter ce, Context context, List methods) {
for (Iterator it = methods.iterator(); it.hasNext();) {
MethodInfo method = (MethodInfo)it.next();
if (!TypeUtils.isProtected(method.getModifiers())) {
CodeEmitter e = context.beginMethod(ce, method);
context.emitCallback(e, context.getIndex(method));
if (proxyRef) {
e.load_this();
e.invoke_interface(PROXY_REF_DISPATCHER, PROXY_REF_LOAD_OBJECT);
} else {
e.invoke_interface(DISPATCHER, LOAD_OBJECT);
}
e.checkcast(method.getClassInfo().getType());
e.load_args();
e.invoke(method);
e.return_value();
e.end_method();
}
}
}
@Override
public void generateStatic(CodeEmitter e, Context context, List methods) { }
}
|
DispatcherGenerator
|
java
|
apache__flink
|
flink-kubernetes/src/test/java/org/apache/flink/kubernetes/artifact/DefaultKubernetesArtifactUploaderTest.java
|
{
"start": 1752,
"end": 9082
}
|
class ____ {
private final DefaultKubernetesArtifactUploader artifactUploader =
new DefaultKubernetesArtifactUploader();
@TempDir private Path tmpDir;
private Configuration config;
private DummyFs dummyFs;
@BeforeEach
void setup() throws IOException {
config = new Configuration();
config.set(KubernetesConfigOptions.LOCAL_UPLOAD_ENABLED, true);
config.set(KubernetesConfigOptions.LOCAL_UPLOAD_TARGET, getTargetDirUri());
dummyFs = (DummyFs) new org.apache.flink.core.fs.Path(getTargetDirUri()).getFileSystem();
dummyFs.resetCallCounters();
}
@Test
void testInvalidJobJar() {
String msg = "The 'pipeline.jars' config must contain one JAR.";
config.set(PipelineOptions.JARS, Collections.emptyList());
assertThatThrownBy(() -> artifactUploader.uploadAll(config))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(msg);
config.set(PipelineOptions.JARS, Arrays.asList("a", "b"));
assertThatThrownBy(() -> artifactUploader.uploadAll(config))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(msg);
}
@Test
void testUploadAllWithOneJobJar() throws Exception {
// flink-kubernetes depends on flink-annotations
// that means flink-annotations jar should be present before test execution
File jar = getFlinkAnnotationsJar();
String localUri = "local://" + jar.getAbsolutePath();
config.set(PipelineOptions.JARS, Collections.singletonList(localUri));
artifactUploader.uploadAll(config);
assertJobJarUri(jar.getName());
}
@Test
void testUploadAllWithAdditionalArtifacts() throws Exception {
// flink-kubernetes depends on flink-annotations
// that means flink-annotations jar should be present before test execution
File jobJar = getFlinkAnnotationsJar();
File addArtifact1 = TestingUtils.getClassFile(DefaultKubernetesArtifactUploader.class);
File addArtifact2 = TestingUtils.getClassFile(KubernetesUtils.class);
String localJobUri = "local://" + jobJar.getAbsolutePath();
String localAddArtUri = "local://" + addArtifact1.getAbsolutePath();
String nonLocalAddArtUri = "dummyfs://" + addArtifact2.getAbsolutePath();
config.set(PipelineOptions.JARS, Collections.singletonList(localJobUri));
config.set(
ArtifactFetchOptions.ARTIFACT_LIST,
Arrays.asList(nonLocalAddArtUri, localAddArtUri));
artifactUploader.uploadAll(config);
assertJobJarUri(jobJar.getName());
List<String> additionalArtifactsResult = config.get(ArtifactFetchOptions.ARTIFACT_LIST);
assertThat(additionalArtifactsResult).hasSize(2);
assertThat(additionalArtifactsResult)
.containsExactlyInAnyOrder(
nonLocalAddArtUri, "dummyfs:" + tmpDir.resolve(addArtifact1.getName()));
}
@Test
void testMissingTargetConf() {
config.removeConfig(KubernetesConfigOptions.LOCAL_UPLOAD_TARGET);
assertThatThrownBy(() -> artifactUploader.upload(config, "local:///tmp/my-artifact.jar"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(
"Setting 'kubernetes.artifacts.local-upload-target' to a valid remote path is required.");
}
@Test
void testRemoteUri() throws Exception {
config.removeConfig(KubernetesConfigOptions.LOCAL_UPLOAD_TARGET);
String remoteUri = "s3://my-bucket/my-artifact.jar";
String finalUri = artifactUploader.upload(config, remoteUri);
assertThat(finalUri).isEqualTo(remoteUri);
}
@Test
void testUpload() throws Exception {
// flink-kubernetes depends on flink-annotations
// that means flink-annotations jar should be present before test execution
File jar = getFlinkAnnotationsJar();
String localUri = "local://" + jar.getAbsolutePath();
String expectedUri = "dummyfs:" + tmpDir.resolve(jar.getName());
String resultUri = artifactUploader.upload(config, localUri);
assertThat(resultUri).isEqualTo(expectedUri);
}
@Test
void testUploadNoOverwrite() throws Exception {
// flink-kubernetes depends on flink-annotations
// that means flink-annotations jar should be present before test execution
File jar = getFlinkAnnotationsJar();
String localUri = "local://" + jar.getAbsolutePath();
Files.createFile(tmpDir.resolve(jar.getName()));
artifactUploader.upload(config, localUri);
assertThat(dummyFs.getExistsCallCounter()).isOne();
assertThat(dummyFs.getCreateCallCounter()).isZero();
}
@Test
void testUploadOverwrite() throws Exception {
// flink-kubernetes depends on flink-annotations
// that means flink-annotations jar should be present before test execution
File jar = getFlinkAnnotationsJar();
String localUri = "local://" + jar.getAbsolutePath();
Files.createFile(tmpDir.resolve(jar.getName()));
config.set(KubernetesConfigOptions.LOCAL_UPLOAD_OVERWRITE, true);
artifactUploader.upload(config, localUri);
assertThat(dummyFs.getExistsCallCounter()).isEqualTo(2);
assertThat(dummyFs.getCreateCallCounter()).isOne();
}
@Test
void testUpdateConfig() {
List<String> artifactList =
Arrays.asList("local:///tmp/artifact1.jar", "s3://my-bucket/artifact2.jar");
Configuration config = new Configuration();
config.set(ArtifactFetchOptions.ARTIFACT_LIST, artifactList);
List<String> uploadedArtifactList = new ArrayList<>(artifactList);
uploadedArtifactList.set(0, getTargetDirUri() + "/artifact1.jar");
artifactUploader.updateConfig(
config, ArtifactFetchOptions.ARTIFACT_LIST, uploadedArtifactList);
assertThat(config.get(ArtifactFetchOptions.ARTIFACT_LIST)).isEqualTo(uploadedArtifactList);
}
@Test
void testNoUpdateConfig() {
List<String> artifactList = Collections.singletonList("s3://my-bucket/my-artifact.jar");
Configuration config = new Configuration();
config.set(ArtifactFetchOptions.ARTIFACT_LIST, artifactList);
artifactUploader.updateConfig(config, ArtifactFetchOptions.ARTIFACT_LIST, artifactList);
assertThat(config.get(ArtifactFetchOptions.ARTIFACT_LIST)).isEqualTo(artifactList);
}
private String getTargetDirUri() {
return "dummyfs://" + tmpDir;
}
private File getFlinkAnnotationsJar() throws IOException {
return TestingUtils.getFileFromTargetDir(
FlinkVersion.class,
p ->
org.apache.flink.util.FileUtils.isJarFile(p)
&& p.toFile().getName().contains("flink-annotations"));
}
private void assertJobJarUri(String filename) {
String expectedUri = "dummyfs:" + tmpDir.resolve(filename);
List<String> result = config.get(PipelineOptions.JARS);
assertThat(result).hasSize(1);
assertThat(result.get(0)).isEqualTo(expectedUri);
}
}
|
DefaultKubernetesArtifactUploaderTest
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_BigDecimal_scale_Test.java
|
{
"start": 991,
"end": 1548
}
|
class ____ {
@Test
void should_run_test_when_assumption_using_file_size_succeeds() {
// WHEN
ThrowingCallable assumptionCode = () -> assumeThat(BigDecimal.ONE).scale().isZero();
// THEN
thenCode(assumptionCode).doesNotThrowAnyException();
}
@Test
void should_ignore_test_when_assumption_using_file_size_fails() {
// WHEN
ThrowingCallable assumptionCode = () -> assumeThat(BigDecimal.ONE).scale().isOne();
// THEN
expectAssumptionNotMetException(assumptionCode);
}
}
|
Assumptions_assumeThat_with_BigDecimal_scale_Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/embeddable/ElementCollectionLazyToOneTest.java
|
{
"start": 2124,
"end": 2555
}
|
class ____ {
@Column
private String content;
@ManyToOne(fetch = FetchType.LAZY)
private TheEntity entity;
public String getContent() {
return content;
}
public void setContent(String content) {
this.content = content;
}
public TheEntity getEntity() {
return entity;
}
public void setEntity(TheEntity entity) {
this.entity = entity;
}
}
@Entity(name = "TheEntity")
public static
|
TheEmbeddable
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.