language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__kafka | connect/api/src/main/java/org/apache/kafka/connect/util/ConnectorUtils.java | {
"start": 1028,
"end": 2688
} | class ____ {
/**
* Given a list of elements and a target number of groups, generates list of groups of
* elements to match the target number of groups, spreading them evenly among the groups.
* This generates groups with contiguous elements, which results in intuitive ordering if
* your elements are also ordered (e.g. alphabetical lists of table names if you sort
* table names alphabetically to generate the raw partitions) or can result in efficient
* partitioning if elements are sorted according to some criteria that affects performance
* (e.g. topic partitions with the same leader).
*
* @param elements list of elements to partition
* @param numGroups the number of output groups to generate.
*/
public static <T> List<List<T>> groupPartitions(List<T> elements, int numGroups) {
if (numGroups <= 0)
throw new IllegalArgumentException("Number of groups must be positive.");
List<List<T>> result = new ArrayList<>(numGroups);
// Each group has either n+1 or n raw partitions
int perGroup = elements.size() / numGroups;
int leftover = elements.size() - (numGroups * perGroup);
int assigned = 0;
for (int group = 0; group < numGroups; group++) {
int numThisGroup = group < leftover ? perGroup + 1 : perGroup;
List<T> groupList = new ArrayList<>(numThisGroup);
for (int i = 0; i < numThisGroup; i++) {
groupList.add(elements.get(assigned));
assigned++;
}
result.add(groupList);
}
return result;
}
}
| ConnectorUtils |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/TypeInferenceExtractorTest.java | {
"start": 96521,
"end": 97128
} | class ____ implements Procedure {
@ProcedureHint(
input = {@DataTypeHint("ARRAY<INT>"), @DataTypeHint(inputGroup = InputGroup.ANY)},
argumentNames = {"myInt", "myAny"},
output = @DataTypeHint("BOOLEAN"),
isVarArgs = true)
public Boolean[] call(Object procedureContext, Object... o) {
return null;
}
}
@ProcedureHint(input = @DataTypeHint("INT"), output = @DataTypeHint("INT"))
@ProcedureHint(input = @DataTypeHint("BIGINT"), output = @DataTypeHint("BIGINT"))
private static | ComplexProcedureHint |
java | google__dagger | javatests/dagger/internal/codegen/ScopingValidationTest.java | {
"start": 6403,
"end": 6724
} | interface ____ {",
" Class<?> value();",
"}");
Source typeFile =
CompilerTests.javaSource(
"test.ScopedType",
"package test;",
"",
"import javax.inject.Inject;",
"",
"@PerTest", // incompatible scope
" | Per |
java | apache__flink | flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/cli/CliChangelogResultView.java | {
"start": 11228,
"end": 11610
} | enum ____ {
QUIT, // leave view
REFRESH, // refresh
UP, // row selection up
DOWN, // row selection down
OPEN, // shows a full row
LEFT, // scroll left if row is large
RIGHT, // scroll right if row is large
INC_REFRESH, // increase refresh rate
DEC_REFRESH, // decrease refresh rate
}
}
| ResultChangelogOperation |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java | {
"start": 3377,
"end": 4315
} | class ____ {
public static final int EXECUTOR_THREAD_COUNT = 64;
private static final org.slf4j.Logger LOG =
LoggerFactory.getLogger(GenericTestUtils.class);
public static final String PREFIX = "file-";
private static final AtomicInteger sequence = new AtomicInteger();
/**
* system property for test data: {@value}
*/
public static final String SYSPROP_TEST_DATA_DIR = "test.build.data";
/**
* Default path for test data: {@value}
*/
public static final String DEFAULT_TEST_DATA_DIR =
"target" + File.separator + "test" + File.separator + "data";
/**
* The default path for using in Hadoop path references: {@value}
*/
public static final String DEFAULT_TEST_DATA_PATH = "target/test/data/";
/**
* Error string used in
* {@link GenericTestUtils#waitFor(Supplier, long, long)}.
*/
public static final String ERROR_MISSING_ARGUMENT =
"Input supplier | GenericTestUtils |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/handler/invocation/AbstractMethodMessageHandler.java | {
"start": 25410,
"end": 25746
} | class ____ implements Comparator<Match> {
private final Comparator<T> comparator;
public MatchComparator(Comparator<T> comparator) {
this.comparator = comparator;
}
@Override
public int compare(Match match1, Match match2) {
return this.comparator.compare(match1.mapping, match2.mapping);
}
}
private | MatchComparator |
java | spring-projects__spring-boot | module/spring-boot-micrometer-tracing-opentelemetry/src/main/java/org/springframework/boot/micrometer/tracing/opentelemetry/autoconfigure/otlp/OtlpHttpSpanExporterBuilderCustomizer.java | {
"start": 808,
"end": 1027
} | interface ____ can be implemented by beans wishing to customize the
* {@link OtlpHttpSpanExporterBuilder} to fine-tune its auto-configuration.
*
* @author Dmytro Nosan
* @since 4.0.0
*/
@FunctionalInterface
public | that |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/WildcardImportTest.java | {
"start": 1852,
"end": 2131
} | class ____ {
public static String MESSAGE = "Hello";
}
""")
.expectUnchanged()
.addInputLines(
"in/test/Test.java",
"""
package test;
import static a.One.*;
public | Two |
java | apache__dubbo | dubbo-plugin/dubbo-qos/src/test/java/org/apache/dubbo/qos/textui/TTableTest.java | {
"start": 975,
"end": 2270
} | class ____ {
@Test
void test1() throws Exception {
TTable table = new TTable(4);
table.addRow(1, "one", "uno", "un");
table.addRow(2, "two", "dos", "deux");
String result = table.rendering();
String expected = "+-+---+---+----+" + System.lineSeparator() + "|1|one|uno|un |"
+ System.lineSeparator() + "+-+---+---+----+"
+ System.lineSeparator() + "|2|two|dos|deux|"
+ System.lineSeparator() + "+-+---+---+----+"
+ System.lineSeparator();
assertThat(result, equalTo(expected));
}
@Test
void test2() throws Exception {
TTable table = new TTable(new TTable.ColumnDefine[] {
new TTable.ColumnDefine(5, true, TTable.Align.LEFT),
new TTable.ColumnDefine(10, false, TTable.Align.MIDDLE),
new TTable.ColumnDefine(10, false, TTable.Align.RIGHT)
});
table.addRow(1, "abcde", "ABCDE");
String result = table.rendering();
String expected = "+-+----------+----------+" + System.lineSeparator() + "|1| abcde | ABCDE|"
+ System.lineSeparator() + "+-+----------+----------+"
+ System.lineSeparator();
assertThat(result, equalTo(expected));
}
}
| TTableTest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobgraph/InputOutputFormatContainer.java | {
"start": 7094,
"end": 8761
} | class ____ implements Serializable {
private static final long serialVersionUID = 1L;
private final Map<OperatorID, UserCodeWrapper<? extends InputFormat<?, ?>>> inputFormats;
private final Map<OperatorID, UserCodeWrapper<? extends OutputFormat<?>>> outputFormats;
public FormatUserCodeTable() {
this.inputFormats = new HashMap<>();
this.outputFormats = new HashMap<>();
}
public void addInputFormat(
OperatorID operatorId, UserCodeWrapper<? extends InputFormat<?, ?>> wrapper) {
if (inputFormats.containsKey(checkNotNull(operatorId))) {
throw new IllegalStateException(
"The input format has been set for the operator: " + operatorId);
}
inputFormats.put(operatorId, checkNotNull(wrapper));
}
public void addOutputFormat(
OperatorID operatorId, UserCodeWrapper<? extends OutputFormat<?>> wrapper) {
if (outputFormats.containsKey(checkNotNull(operatorId))) {
throw new IllegalStateException(
"The output format has been set for the operator: " + operatorId);
}
outputFormats.put(operatorId, checkNotNull(wrapper));
}
public Map<OperatorID, UserCodeWrapper<? extends InputFormat<?, ?>>> getInputFormats() {
return Collections.unmodifiableMap(inputFormats);
}
public Map<OperatorID, UserCodeWrapper<? extends OutputFormat<?>>> getOutputFormats() {
return Collections.unmodifiableMap(outputFormats);
}
}
}
| FormatUserCodeTable |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/runtime/watermarkstatus/HeapPriorityQueue.java | {
"start": 8814,
"end": 9056
} | interface ____: When two objects are equal,
* they indicate the same priority, but indicating the same priority does not require that both
* objects are equal.
*
* @param <T> type of the compared objects.
*/
public | is |
java | micronaut-projects__micronaut-core | inject-java-test/src/test/groovy/io/micronaut/inject/visitor/beans/builder/TestBuildMe7.java | {
"start": 610,
"end": 1081
} | class ____ {
private String name;
private int age;
private Builder() {
}
public Builder name(String name) {
this.name = name;
return this;
}
public Builder age(int age) {
this.age = age;
return this;
}
public TestBuildMe7 build() {
return new TestBuildMe7(
name,
age
);
}
}
}
| Builder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cache/QueryCacheAndOneToManyTest.java | {
"start": 5850,
"end": 6060
} | class ____ {
@Id
@GeneratedValue
private Long id;
private String name;
public MyEntity2() {
}
public Long getId() {
return id;
}
public String getName() {
return name;
}
}
}
| MyEntity2 |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/ReturnMissingNullableTest.java | {
"start": 16678,
"end": 17208
} | class ____ {
Object foo(Object o) {
// BUG: Diagnostic contains: @Nullable
return (o != null ? "" : o);
}
}
""")
.doTest();
}
@Test
public void returnXInsideIfNull() {
createCompilationTestHelper()
.addSourceLines(
"com/google/errorprone/bugpatterns/nullness/LiteralNullReturnTest.java",
"""
package com.google.errorprone.bugpatterns.nullness;
abstract | LiteralNullReturnTest |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/xmlembeddable/EmbeddableConfiguredInXmlTest.java | {
"start": 741,
"end": 1085
} | class ____ {
@Test
@TestForIssue(jiraKey = "METAGEN-66")
@WithClasses(value = { Foo.class, BusinessEntity.class }, preCompile = BusinessId.class)
@WithMappingFiles("orm.xml")
@IgnoreCompilationErrors
void testAttributeForEmbeddableConfiguredInXmlExists() {
// need to work with the source file. BusinessEntity_. | EmbeddableConfiguredInXmlTest |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/support/quartz/DruidQuartzConnectionProvider.java | {
"start": 789,
"end": 1094
} | class ____ extends DruidDataSource implements ConnectionProvider {
private static final long serialVersionUID = 1L;
public void initialize() throws SQLException {
this.init();
}
public void shutdown() throws SQLException {
this.close();
}
}
| DruidQuartzConnectionProvider |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/converter/SmartHttpMessageConverter.java | {
"start": 3622,
"end": 4180
} | class ____ be written by this converter.
* <p>This method should perform the same checks as
* {@link HttpMessageConverter#canWrite(Class, MediaType)} with additional ones
* related to the generic type.
* @param targetType the (potentially generic) target type to test for writability
* (can be {@link ResolvableType#NONE} if not specified). The {@linkplain ResolvableType#getSource() type source}
* may be used for retrieving additional information (the related method signature for example) when relevant.
* @param valueClass the source object | can |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/tests/MockitoUtils.java | {
"start": 1028,
"end": 3236
} | class ____ {
/**
* Verify the same invocations have been applied to two mocks. This is generally not
* the preferred way test with mockito and should be avoided if possible.
* @param expected the mock containing expected invocations
* @param actual the mock containing actual invocations
* @param argumentAdapters adapters that can be used to change argument values before they are compared
*/
public static <T> void verifySameInvocations(T expected, T actual, InvocationArgumentsAdapter... argumentAdapters) {
List<Invocation> expectedInvocations =
((InvocationContainerImpl) MockUtil.getMockHandler(expected).getInvocationContainer()).getInvocations();
List<Invocation> actualInvocations =
((InvocationContainerImpl) MockUtil.getMockHandler(actual).getInvocationContainer()).getInvocations();
verifySameInvocations(expectedInvocations, actualInvocations, argumentAdapters);
}
private static void verifySameInvocations(List<Invocation> expectedInvocations, List<Invocation> actualInvocations,
InvocationArgumentsAdapter... argumentAdapters) {
assertThat(expectedInvocations).hasSameSizeAs(actualInvocations);
for (int i = 0; i < expectedInvocations.size(); i++) {
verifySameInvocation(expectedInvocations.get(i), actualInvocations.get(i), argumentAdapters);
}
}
private static void verifySameInvocation(Invocation expectedInvocation, Invocation actualInvocation,
InvocationArgumentsAdapter... argumentAdapters) {
assertThat(expectedInvocation.getMethod()).isEqualTo(actualInvocation.getMethod());
Object[] expectedArguments = getInvocationArguments(expectedInvocation, argumentAdapters);
Object[] actualArguments = getInvocationArguments(actualInvocation, argumentAdapters);
assertThat(expectedArguments).isEqualTo(actualArguments);
}
private static Object[] getInvocationArguments(Invocation invocation, InvocationArgumentsAdapter... argumentAdapters) {
Object[] arguments = invocation.getArguments();
for (InvocationArgumentsAdapter adapter : argumentAdapters) {
arguments = adapter.adaptArguments(arguments);
}
return arguments;
}
/**
* Adapter strategy that can be used to change invocation arguments.
*/
public | MockitoUtils |
java | apache__maven | api/maven-api-core/src/main/java/org/apache/maven/api/services/ArtifactResolverRequest.java | {
"start": 3804,
"end": 5683
} | class ____ extends BaseRequest<Session>
implements ArtifactResolverRequest {
@Nonnull
private final Collection<? extends ArtifactCoordinates> coordinates;
@Nullable
private final List<RemoteRepository> repositories;
DefaultArtifactResolverRequest(
@Nonnull Session session,
@Nullable RequestTrace trace,
@Nonnull Collection<? extends ArtifactCoordinates> coordinates,
@Nonnull List<RemoteRepository> repositories) {
super(session, trace);
this.coordinates = List.copyOf(requireNonNull(coordinates, "coordinates cannot be null"));
this.repositories = validate(repositories);
}
@Nonnull
@Override
public Collection<? extends ArtifactCoordinates> getCoordinates() {
return coordinates;
}
@Nullable
@Override
public List<RemoteRepository> getRepositories() {
return repositories;
}
@Override
public boolean equals(Object o) {
return o instanceof DefaultArtifactResolverRequest that
&& Objects.equals(coordinates, that.coordinates)
&& Objects.equals(repositories, that.repositories);
}
@Override
public int hashCode() {
return Objects.hash(coordinates, repositories);
}
@Override
@Nonnull
public String toString() {
return "ArtifactResolverRequest[" + "coordinates="
+ coordinates + ", repositories="
+ repositories + ']';
}
}
}
}
| DefaultArtifactResolverRequest |
java | elastic__elasticsearch | x-pack/plugin/repositories-metering-api/src/main/java/org/elasticsearch/xpack/repositories/metering/action/TransportClearRepositoriesStatsArchiveAction.java | {
"start": 3312,
"end": 3936
} | class ____ extends AbstractTransportRequest {
private final long maxVersionToClear;
ClearRepositoriesStatsArchiveNodeRequest(long maxVersionToClear) {
this.maxVersionToClear = maxVersionToClear;
}
ClearRepositoriesStatsArchiveNodeRequest(StreamInput in) throws IOException {
super(in);
this.maxVersionToClear = in.readLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeLong(maxVersionToClear);
}
}
}
| ClearRepositoriesStatsArchiveNodeRequest |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/StandardTestClassTests.java | {
"start": 6565,
"end": 6838
} | class ____ {
@Test
void succeedingTest1() {
assertTrue(true);
}
@Test
void succeedingTest2() {
assertTrue(true);
}
@Test
void succeedingTest3() {
assertTrue(true);
}
}
@SuppressWarnings("JUnitMalformedDeclaration")
static | SecondOfTwoTestCases |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/internal/matchers/text/ValuePrinterTest.java | {
"start": 2101,
"end": 2212
} | class ____ {
public String toString() {
return "ToString";
}
}
static | ToString |
java | spring-projects__spring-boot | module/spring-boot-webmvc/src/main/java/org/springframework/boot/webmvc/autoconfigure/actuate/web/CompositeHandlerExceptionResolver.java | {
"start": 1662,
"end": 3226
} | class ____ implements HandlerExceptionResolver {
@Autowired
private ListableBeanFactory beanFactory;
private volatile @Nullable List<HandlerExceptionResolver> resolvers;
@Override
public @Nullable ModelAndView resolveException(HttpServletRequest request, HttpServletResponse response,
@Nullable Object handler, Exception ex) {
for (HandlerExceptionResolver resolver : getResolvers()) {
ModelAndView resolved = resolver.resolveException(request, response, handler, ex);
if (resolved != null) {
return resolved;
}
}
return null;
}
private List<HandlerExceptionResolver> getResolvers() {
List<HandlerExceptionResolver> resolvers = this.resolvers;
if (resolvers == null) {
resolvers = new ArrayList<>();
collectResolverBeans(resolvers, this.beanFactory);
resolvers.remove(this);
AnnotationAwareOrderComparator.sort(resolvers);
if (resolvers.isEmpty()) {
resolvers.add(new DefaultErrorAttributes());
resolvers.add(new DefaultHandlerExceptionResolver());
}
this.resolvers = resolvers;
}
return resolvers;
}
private void collectResolverBeans(List<HandlerExceptionResolver> resolvers, @Nullable BeanFactory beanFactory) {
if (beanFactory instanceof ListableBeanFactory listableBeanFactory) {
resolvers.addAll(listableBeanFactory.getBeansOfType(HandlerExceptionResolver.class).values());
}
if (beanFactory instanceof HierarchicalBeanFactory hierarchicalBeanFactory) {
collectResolverBeans(resolvers, hierarchicalBeanFactory.getParentBeanFactory());
}
}
}
| CompositeHandlerExceptionResolver |
java | spring-projects__spring-boot | module/spring-boot-cache/src/main/java/org/springframework/boot/cache/autoconfigure/Cache2kBuilderCustomizer.java | {
"start": 986,
"end": 1170
} | interface ____ {
/**
* Customize the default cache settings.
* @param builder the builder to customize
*/
void customize(Cache2kBuilder<?, ?> builder);
}
| Cache2kBuilderCustomizer |
java | spring-projects__spring-boot | module/spring-boot-validation/src/main/java/org/springframework/boot/validation/autoconfigure/ValidatorAdapter.java | {
"start": 1962,
"end": 5802
} | class ____ implements SmartValidator, ApplicationContextAware, InitializingBean, DisposableBean {
private final SmartValidator target;
private final boolean existingBean;
ValidatorAdapter(SmartValidator target, boolean existingBean) {
this.target = target;
this.existingBean = existingBean;
}
public final Validator getTarget() {
return this.target;
}
@Override
public boolean supports(Class<?> type) {
return this.target.supports(type);
}
@Override
public void validate(Object target, Errors errors) {
this.target.validate(target, errors);
}
@Override
public void validate(Object target, Errors errors, Object... validationHints) {
this.target.validate(target, errors, validationHints);
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
if (!this.existingBean && this.target instanceof ApplicationContextAware contextAwareTarget) {
contextAwareTarget.setApplicationContext(applicationContext);
}
}
@Override
public void afterPropertiesSet() throws Exception {
if (!this.existingBean && this.target instanceof InitializingBean initializingBean) {
initializingBean.afterPropertiesSet();
}
}
@Override
public void destroy() throws Exception {
if (!this.existingBean && this.target instanceof DisposableBean disposableBean) {
disposableBean.destroy();
}
}
/**
* Return a {@link Validator} that only implements the {@link Validator} interface,
* wrapping it if necessary.
* <p>
* If the specified {@link Validator} is not {@code null}, it is wrapped. If not, a
* {@link jakarta.validation.Validator} is retrieved from the context and wrapped.
* Otherwise, a new default validator is created.
* @param applicationContext the application context
* @param validator an existing validator to use or {@code null}
* @return the validator to use
*/
public static Validator get(ApplicationContext applicationContext, @Nullable Validator validator) {
if (validator != null) {
return wrap(validator, false);
}
return getExistingOrCreate(applicationContext);
}
private static Validator getExistingOrCreate(ApplicationContext applicationContext) {
Validator existing = getExisting(applicationContext);
if (existing != null) {
return wrap(existing, true);
}
return create(applicationContext);
}
private static @Nullable Validator getExisting(ApplicationContext applicationContext) {
try {
jakarta.validation.Validator validatorBean = applicationContext.getBean(jakarta.validation.Validator.class);
if (validatorBean instanceof Validator validator) {
return validator;
}
return new SpringValidatorAdapter(validatorBean);
}
catch (NoSuchBeanDefinitionException ex) {
return null;
}
}
private static Validator create(MessageSource messageSource) {
OptionalValidatorFactoryBean validator = new OptionalValidatorFactoryBean();
try {
MessageInterpolatorFactory factory = new MessageInterpolatorFactory(messageSource);
validator.setMessageInterpolator(factory.getObject());
}
catch (ValidationException ex) {
// Ignore
}
return wrap(validator, false);
}
private static Validator wrap(Validator validator, boolean existingBean) {
if (validator instanceof jakarta.validation.Validator jakartaValidator) {
if (jakartaValidator instanceof SpringValidatorAdapter adapter) {
return new ValidatorAdapter(adapter, existingBean);
}
return new ValidatorAdapter(new SpringValidatorAdapter(jakartaValidator), existingBean);
}
return validator;
}
@Override
@SuppressWarnings("unchecked")
public <T> @Nullable T unwrap(@Nullable Class<T> type) {
Assert.state(type != null, "'type' must not be null");
if (type.isInstance(this.target)) {
return (T) this.target;
}
return this.target.unwrap(type);
}
}
| ValidatorAdapter |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/index/IndexAction.java | {
"start": 1143,
"end": 10078
} | class ____ implements Action {
public static final String TYPE = "index";
@Nullable
final String index;
@Nullable
final String docId;
@Nullable
final DocWriteRequest.OpType opType;
@Nullable
final String executionTimeField;
@Nullable
final TimeValue timeout;
@Nullable
final ZoneId dynamicNameTimeZone;
@Nullable
final RefreshPolicy refreshPolicy;
public IndexAction(
@Nullable String index,
@Nullable String docId,
@Nullable DocWriteRequest.OpType opType,
@Nullable String executionTimeField,
@Nullable TimeValue timeout,
@Nullable ZoneId dynamicNameTimeZone,
@Nullable RefreshPolicy refreshPolicy
) {
this.index = index;
this.docId = docId;
this.opType = opType;
this.executionTimeField = executionTimeField;
this.timeout = timeout;
this.dynamicNameTimeZone = dynamicNameTimeZone;
this.refreshPolicy = refreshPolicy;
}
@Override
public String type() {
return TYPE;
}
public String getIndex() {
return index;
}
public String getDocId() {
return docId;
}
public DocWriteRequest.OpType getOpType() {
return opType;
}
public String getExecutionTimeField() {
return executionTimeField;
}
public ZoneId getDynamicNameTimeZone() {
return dynamicNameTimeZone;
}
public RefreshPolicy getRefreshPolicy() {
return refreshPolicy;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
IndexAction that = (IndexAction) o;
return Objects.equals(index, that.index)
&& Objects.equals(docId, that.docId)
&& Objects.equals(opType, that.opType)
&& Objects.equals(executionTimeField, that.executionTimeField)
&& Objects.equals(timeout, that.timeout)
&& Objects.equals(dynamicNameTimeZone, that.dynamicNameTimeZone)
&& Objects.equals(refreshPolicy, that.refreshPolicy);
}
@Override
public int hashCode() {
return Objects.hash(index, docId, opType, executionTimeField, timeout, dynamicNameTimeZone, refreshPolicy);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (index != null) {
builder.field(Field.INDEX.getPreferredName(), index);
}
if (docId != null) {
builder.field(Field.DOC_ID.getPreferredName(), docId);
}
if (opType != null) {
builder.field(Field.OP_TYPE.getPreferredName(), opType);
}
if (executionTimeField != null) {
builder.field(Field.EXECUTION_TIME_FIELD.getPreferredName(), executionTimeField);
}
if (timeout != null) {
builder.humanReadableField(Field.TIMEOUT.getPreferredName(), Field.TIMEOUT_HUMAN.getPreferredName(), timeout);
}
if (dynamicNameTimeZone != null) {
builder.field(Field.DYNAMIC_NAME_TIMEZONE.getPreferredName(), dynamicNameTimeZone.toString());
}
if (refreshPolicy != null) {
builder.field(Field.REFRESH.getPreferredName(), refreshPolicy.getValue());
}
return builder.endObject();
}
public static IndexAction parse(String watchId, String actionId, XContentParser parser) throws IOException {
String index = null;
String docId = null;
DocWriteRequest.OpType opType = null;
String executionTimeField = null;
TimeValue timeout = null;
ZoneId dynamicNameTimeZone = null;
RefreshPolicy refreshPolicy = null;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (Field.INDEX.match(currentFieldName, parser.getDeprecationHandler())) {
try {
index = parser.text();
} catch (ElasticsearchParseException pe) {
throw new ElasticsearchParseException(
"could not parse [{}] action [{}/{}]. failed to parse index name value for " + "field [{}]",
pe,
TYPE,
watchId,
actionId,
currentFieldName
);
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if (Field.TIMEOUT.match(currentFieldName, parser.getDeprecationHandler())) {
timeout = timeValueMillis(parser.longValue());
} else {
throw new ElasticsearchParseException(
"could not parse [{}] action [{}/{}]. unexpected number field [{}]",
TYPE,
watchId,
actionId,
currentFieldName
);
}
} else if (token == XContentParser.Token.VALUE_STRING) {
if (Field.DOC_ID.match(currentFieldName, parser.getDeprecationHandler())) {
docId = parser.text();
} else if (Field.OP_TYPE.match(currentFieldName, parser.getDeprecationHandler())) {
try {
opType = DocWriteRequest.OpType.fromString(parser.text());
if (List.of(DocWriteRequest.OpType.CREATE, DocWriteRequest.OpType.INDEX).contains(opType) == false) {
throw new ElasticsearchParseException(
"could not parse [{}] action [{}/{}]. op_type value for field [{}] " + "must be [index] or [create]",
TYPE,
watchId,
actionId,
currentFieldName
);
}
} catch (IllegalArgumentException e) {
throw new ElasticsearchParseException(
"could not parse [{}] action [{}/{}]. failed to parse op_type value for " + "field [{}]",
TYPE,
watchId,
actionId,
currentFieldName
);
}
} else if (Field.EXECUTION_TIME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
executionTimeField = parser.text();
} else if (Field.TIMEOUT_HUMAN.match(currentFieldName, parser.getDeprecationHandler())) {
// Parser for human specified timeouts and 2.x compatibility
timeout = WatcherDateTimeUtils.parseTimeValue(parser, Field.TIMEOUT_HUMAN.toString());
} else if (Field.DYNAMIC_NAME_TIMEZONE.match(currentFieldName, parser.getDeprecationHandler())) {
if (token == XContentParser.Token.VALUE_STRING) {
dynamicNameTimeZone = DateUtils.of(parser.text());
} else {
throw new ElasticsearchParseException(
"could not parse [{}] action for watch [{}]. failed to parse [{}]. must be "
+ "a string value (e.g. 'UTC' or '+01:00').",
TYPE,
watchId,
currentFieldName
);
}
} else if (Field.REFRESH.match(currentFieldName, parser.getDeprecationHandler())) {
refreshPolicy = RefreshPolicy.parse(parser.text());
} else {
throw new ElasticsearchParseException(
"could not parse [{}] action [{}/{}]. unexpected string field [{}]",
TYPE,
watchId,
actionId,
currentFieldName
);
}
} else {
throw new ElasticsearchParseException(
"could not parse [{}] action [{}/{}]. unexpected token [{}]",
TYPE,
watchId,
actionId,
token
);
}
}
return new IndexAction(index, docId, opType, executionTimeField, timeout, dynamicNameTimeZone, refreshPolicy);
}
public static Builder builder(String index) {
return new Builder(index);
}
public static | IndexAction |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/exception/SQLGrammarException.java | {
"start": 515,
"end": 995
} | class ____ extends JDBCException {
/**
* Constructor for JDBCException.
*
* @param root The underlying exception.
*/
public SQLGrammarException(String message, SQLException root) {
super( message, root );
}
/**
* Constructor for JDBCException.
*
* @param message Optional message.
* @param root The underlying exception.
*/
public SQLGrammarException(String message, SQLException root, String sql) {
super( message, root, sql );
}
}
| SQLGrammarException |
java | elastic__elasticsearch | modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentProcessor.java | {
"start": 6919,
"end": 9401
} | class ____ implements Processor.Factory {
private final Map<String, UserAgentParser> userAgentParsers;
public Factory(Map<String, UserAgentParser> userAgentParsers) {
this.userAgentParsers = userAgentParsers;
}
@Override
public UserAgentProcessor create(
Map<String, Processor.Factory> factories,
String processorTag,
String description,
Map<String, Object> config,
ProjectId projectId
) {
String field = readStringProperty(TYPE, processorTag, config, "field");
String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "user_agent");
String regexFilename = readStringProperty(TYPE, processorTag, config, "regex_file", IngestUserAgentPlugin.DEFAULT_PARSER_NAME);
List<String> propertyNames = readOptionalList(TYPE, processorTag, config, "properties");
boolean extractDeviceType = readBooleanProperty(TYPE, processorTag, config, "extract_device_type", false);
boolean ignoreMissing = readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false);
UserAgentParser parser = userAgentParsers.get(regexFilename);
if (parser == null) {
throw newConfigurationException(
TYPE,
processorTag,
"regex_file",
"regex file [" + regexFilename + "] doesn't exist (has to exist at node startup)"
);
}
final Set<Property> properties;
if (propertyNames != null) {
properties = EnumSet.noneOf(Property.class);
for (String fieldName : propertyNames) {
try {
properties.add(Property.parseProperty(fieldName));
} catch (IllegalArgumentException e) {
throw newConfigurationException(TYPE, processorTag, "properties", e.getMessage());
}
}
} else {
properties = EnumSet.allOf(Property.class);
}
return new UserAgentProcessor(
processorTag,
description,
field,
targetField,
parser,
properties,
extractDeviceType,
ignoreMissing
);
}
}
| Factory |
java | apache__camel | components/camel-jetty/src/test/java/org/apache/camel/component/jetty/proxy/MyAppException.java | {
"start": 859,
"end": 1151
} | class ____ extends Exception {
private static final long serialVersionUID = 1L;
private final String name;
public MyAppException(String msg, String name) {
super(msg);
this.name = name;
}
public String getName() {
return name;
}
}
| MyAppException |
java | google__dagger | hilt-compiler/main/java/dagger/hilt/android/processor/internal/androidentrypoint/InjectorEntryPointGenerator.java | {
"start": 1259,
"end": 1621
} | class ____ {
private final XProcessingEnv env;
private final AndroidEntryPointMetadata metadata;
public InjectorEntryPointGenerator(XProcessingEnv env, AndroidEntryPointMetadata metadata) {
this.env = env;
this.metadata = metadata;
}
// @Generated("InjectorEntryPointGenerator")
// @InstallIn({$SCOPES})
// public | InjectorEntryPointGenerator |
java | alibaba__nacos | sys/src/test/java/com/alibaba/nacos/sys/filter/mock/MockNacosPackageExcludeFilter.java | {
"start": 755,
"end": 1152
} | class ____ implements NacosPackageExcludeFilter {
@Override
public String getResponsiblePackagePrefix() {
return "com.alibaba.nacos.sys.filter.mock";
}
@Override
public boolean isExcluded(String className, Set<String> annotationNames) {
return className.equals(MockNacosPackageExcludeFilter.class.getCanonicalName());
}
}
| MockNacosPackageExcludeFilter |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java | {
"start": 3766,
"end": 4701
} | class ____ or "drf" or "fair" or "fifo"
* @return a {@link SchedulingPolicy} instance parsed from given policy
* @throws AllocationConfigurationException for any errors.
*
*/
@SuppressWarnings("unchecked")
public static SchedulingPolicy parse(String policy)
throws AllocationConfigurationException {
@SuppressWarnings("rawtypes")
Class clazz;
String text = StringUtils.toLowerCase(policy);
if (text.equalsIgnoreCase(FairSharePolicy.NAME)) {
clazz = FairSharePolicy.class;
} else if (text.equalsIgnoreCase(FifoPolicy.NAME)) {
clazz = FifoPolicy.class;
} else if (text.equalsIgnoreCase(DominantResourceFairnessPolicy.NAME)) {
clazz = DominantResourceFairnessPolicy.class;
} else {
try {
clazz = Class.forName(policy);
} catch (ClassNotFoundException cnfe) {
throw new AllocationConfigurationException(policy
+ " SchedulingPolicy | name |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRMAppSubmissionData.java | {
"start": 5677,
"end": 11124
} | class ____ {
private List<ResourceRequest> amResourceRequests;
private String name;
private String user;
private Map<ApplicationAccessType, String> acls;
private boolean unmanaged;
private String queue;
private int maxAppAttempts;
private String appType;
private boolean waitForAccepted;
private boolean keepContainers;
private boolean isAppIdProvided;
private ApplicationId applicationId;
private long attemptFailuresValidityInterval;
private LogAggregationContext logAggregationContext;
private boolean cancelTokensWhenComplete;
private Priority priority;
private String amLabel;
private Map<ApplicationTimeoutType, Long> applicationTimeouts;
private ByteBuffer tokensConf;
private Set<String> applicationTags;
private String appNodeLabel;
private Credentials credentials;
private Resource resource;
private Builder() {
}
public static Builder create() {
return new Builder();
}
public static Builder createWithMemory(long memory, MockRM mockRM)
throws IOException {
Resource resource = Records.newRecord(Resource.class);
resource.setMemorySize(memory);
return createWithResource(resource, mockRM);
}
public static Builder createWithResource(Resource resource, MockRM mockRM)
throws IOException {
int maxAppAttempts =
mockRM.getConfig().getInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS,
YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS);
return MockRMAppSubmissionData.Builder.create()
.withResource(resource)
.withAppName("")
.withUser(UserGroupInformation
.getCurrentUser().getShortUserName())
.withAcls(null)
.withUnmanagedAM(false)
.withQueue(null)
.withMaxAppAttempts(maxAppAttempts)
.withCredentials(null)
.withAppType(null)
.withWaitForAppAcceptedState(true)
.withKeepContainers(false)
.withApplicationId(null)
.withAttemptFailuresValidityInterval(0L)
.withLogAggregationContext(null)
.withCancelTokensWhenComplete(true)
.withAppPriority(Priority.newInstance(0))
.withAmLabel("")
.withApplicationTimeouts(null)
.withTokensConf(null);
}
public Builder withAmResourceRequests(
List<ResourceRequest> amResourceRequests) {
this.amResourceRequests = amResourceRequests;
return this;
}
public Builder withAppName(String name) {
this.name = name;
return this;
}
public Builder withUser(String user) {
this.user = user;
return this;
}
public Builder withAcls(Map<ApplicationAccessType, String> acls) {
this.acls = acls;
return this;
}
public Builder withUnmanagedAM(boolean unmanaged) {
this.unmanaged = unmanaged;
return this;
}
public Builder withQueue(String queue) {
this.queue = queue;
return this;
}
public Builder withMaxAppAttempts(int maxAppAttempts) {
this.maxAppAttempts = maxAppAttempts;
return this;
}
public Builder withAppType(String appType) {
this.appType = appType;
return this;
}
public Builder withWaitForAppAcceptedState(boolean waitForAccepted) {
this.waitForAccepted = waitForAccepted;
return this;
}
public Builder withKeepContainers(boolean keepContainers) {
this.keepContainers = keepContainers;
return this;
}
public Builder withApplicationId(ApplicationId applicationId) {
this.applicationId = applicationId;
this.isAppIdProvided = applicationId != null;
return this;
}
public Builder withAttemptFailuresValidityInterval(
long attemptFailuresValidityInterval) {
this.attemptFailuresValidityInterval = attemptFailuresValidityInterval;
return this;
}
public Builder withLogAggregationContext(
LogAggregationContext logAggregationContext) {
this.logAggregationContext = logAggregationContext;
return this;
}
public Builder withCancelTokensWhenComplete(
boolean cancelTokensWhenComplete) {
this.cancelTokensWhenComplete = cancelTokensWhenComplete;
return this;
}
public Builder withAppPriority(Priority priority) {
this.priority = priority;
return this;
}
public Builder withAmLabel(String amLabel) {
this.amLabel = amLabel;
return this;
}
public Builder withApplicationTimeouts(
Map<ApplicationTimeoutType, Long> applicationTimeouts) {
this.applicationTimeouts = applicationTimeouts;
return this;
}
public Builder withTokensConf(ByteBuffer tokensConf) {
this.tokensConf = tokensConf;
return this;
}
public Builder withApplicationTags(Set<String> applicationTags) {
this.applicationTags = applicationTags;
return this;
}
public Builder withAppNodeLabel(String appNodeLabel) {
this.appNodeLabel = appNodeLabel;
return this;
}
public Builder withCredentials(Credentials cred) {
this.credentials = cred;
return this;
}
public Builder withResource(Resource resource) {
this.resource = resource;
return this;
}
public MockRMAppSubmissionData build() {
return new MockRMAppSubmissionData(this);
}
}
}
| Builder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/exceptionhandling/BaseJpaOrNativeBootstrapFunctionalTestCase.java | {
"start": 1459,
"end": 1576
} | class ____ all functional tests.
*/
@BaseUnitTest
@TestInstance( TestInstance.Lifecycle.PER_METHOD )
public abstract | for |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client-jaxb/runtime/src/main/java/io/quarkus/rest/client/reactive/jaxb/runtime/ClientMessageBodyWriter.java | {
"start": 722,
"end": 2411
} | class ____ implements MessageBodyWriter<Object> {
@Inject
Marshaller marshaller;
@Override
public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
return true;
}
@Override
public void writeTo(Object o, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws WebApplicationException {
setContentTypeIfNecessary(httpHeaders);
marshal(o, entityStream);
}
private void setContentTypeIfNecessary(MultivaluedMap<String, Object> httpHeaders) {
Object contentType = httpHeaders.getFirst(HttpHeaders.CONTENT_TYPE);
if (isNotXml(contentType)) {
httpHeaders.putSingle(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_XML);
}
}
protected void marshal(Object o, OutputStream outputStream) {
try {
Class<?> clazz = o.getClass();
Object jaxbObject = o;
if (!(o instanceof JAXBElement)) {
XmlRootElement jaxbElement = clazz.getAnnotation(XmlRootElement.class);
if (jaxbElement == null) {
jaxbObject = new JAXBElement(new QName(Introspector.decapitalize(clazz.getSimpleName())), clazz, o);
}
}
marshaller.marshal(jaxbObject, outputStream);
} catch (JAXBException e) {
throw new RuntimeException(e);
}
}
private boolean isNotXml(Object contentType) {
return contentType == null || !contentType.toString().contains("xml");
}
}
| ClientMessageBodyWriter |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_primitive_double.java | {
"start": 529,
"end": 740
} | class ____ {
private double value;
public double getValue() {
return value;
}
public void setValue(double value) {
this.value = value;
}
}
}
| VO |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/cid/idclass/PaymentId.java | {
"start": 188,
"end": 410
} | class ____ {
private OrderId order;
private String accountNumber;
public PaymentId() {
}
public PaymentId(OrderId order, String accountNumber) {
this.order = order;
this.accountNumber = accountNumber;
}
}
| PaymentId |
java | hibernate__hibernate-orm | local-build-plugins/src/main/java/org/hibernate/orm/toolchains/JdkVersionConfig.java | {
"start": 9245,
"end": 9375
} | interface ____ {
JavaLanguageVersion getCompiler();
JavaLanguageVersion getRelease();
boolean isExplicit();
}
}
| JdkVersionCombo |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/util/spring/KeyStoreParametersFactoryBeanTest.java | {
"start": 1348,
"end": 1839
} | class ____ {
@Resource
KeyStoreParameters ksp;
@Resource(name = "&ksp")
KeyStoreParametersFactoryBean kspfb;
@Test
public void testKeyStoreParameters() {
assertEquals("keystore.jks", ksp.getResource());
assertEquals("jks", ksp.getType());
assertEquals("provider", ksp.getProvider());
assertEquals("password", ksp.getPassword());
assertEquals("test", kspfb.getCamelContext().getName());
}
}
| KeyStoreParametersFactoryBeanTest |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/checkpointing/UnalignedCheckpointRescaleITCase.java | {
"start": 35690,
"end": 36072
} | class ____<T> implements SinkFunction<T> {
@Override
public void invoke(T value, Context ctx) throws Exception {
// TODO: maybe similarly to VerifyingSink, we should back pressure only until some point
// but currently it doesn't seem to be needed (test runs quickly enough)
Thread.sleep(1);
}
}
}
| BackPressureInducingSink |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-http12/src/main/java/org/apache/dubbo/remoting/http12/LimitedByteArrayOutputStream.java | {
"start": 996,
"end": 1959
} | class ____ extends ByteArrayOutputStream {
private final int capacity;
public LimitedByteArrayOutputStream(int capacity) {
super();
this.capacity = capacity == 0 ? Integer.MAX_VALUE : capacity;
}
public LimitedByteArrayOutputStream(int size, int capacity) {
super(size);
this.capacity = capacity == 0 ? Integer.MAX_VALUE : capacity;
}
@Override
public void write(int b) {
ensureCapacity(1);
super.write(b);
}
@Override
public void write(byte[] b) throws IOException {
ensureCapacity(b.length);
super.write(b);
}
@Override
public void write(byte[] b, int off, int len) {
ensureCapacity(len);
super.write(b, off, len);
}
private void ensureCapacity(int len) {
if (size() + len > capacity) {
throw new HttpOverPayloadException("Response Entity Too Large");
}
}
}
| LimitedByteArrayOutputStream |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cdi/events/nocdi/InvalidNoCdiSupportTest.java | {
"start": 836,
"end": 1228
} | class ____ {
@Test
public void testIt(DomainModelScope modelScope) {
// because there is no CDI available, building the SF should immediately
// try to build the ManagedBeans which should fail here
try (var sf = modelScope.getDomainModel().buildSessionFactory()) {
Assertions.fail( "Expecting failure" );
}
catch (InstantiationException expected) {
}
}
}
| InvalidNoCdiSupportTest |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/NumberUtils.java | {
"start": 2515,
"end": 4713
} | class ____ not be null");
if (targetClass.isInstance(number)) {
return (T) number;
}
else if (Byte.class == targetClass) {
long value = checkedLongValue(number, targetClass);
if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) {
raiseOverflowException(number, targetClass);
}
return (T) Byte.valueOf(number.byteValue());
}
else if (Short.class == targetClass) {
long value = checkedLongValue(number, targetClass);
if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) {
raiseOverflowException(number, targetClass);
}
return (T) Short.valueOf(number.shortValue());
}
else if (Integer.class == targetClass) {
long value = checkedLongValue(number, targetClass);
if (value < Integer.MIN_VALUE || value > Integer.MAX_VALUE) {
raiseOverflowException(number, targetClass);
}
return (T) Integer.valueOf(number.intValue());
}
else if (Long.class == targetClass) {
long value = checkedLongValue(number, targetClass);
return (T) Long.valueOf(value);
}
else if (BigInteger.class == targetClass) {
if (number instanceof BigDecimal bigDecimal) {
// do not lose precision - use BigDecimal's own conversion
return (T) bigDecimal.toBigInteger();
}
// original value is not a Big* number - use standard long conversion
return (T) BigInteger.valueOf(number.longValue());
}
else if (Float.class == targetClass) {
return (T) Float.valueOf(number.floatValue());
}
else if (Double.class == targetClass) {
return (T) Double.valueOf(number.doubleValue());
}
else if (BigDecimal.class == targetClass) {
// always use BigDecimal(String) here to avoid unpredictability of BigDecimal(double)
// (see BigDecimal javadoc for details)
return (T) new BigDecimal(number.toString());
}
else {
throw new IllegalArgumentException("Could not convert number [" + number + "] of type [" +
number.getClass().getName() + "] to unsupported target class [" + targetClass.getName() + "]");
}
}
/**
* Check for a {@code BigInteger}/{@code BigDecimal} long overflow
* before returning the given number as a long value.
* @param number the number to convert
* @param targetClass the target | must |
java | square__retrofit | retrofit/java-test/src/test/java/retrofit2/RequestFactoryTest.java | {
"start": 36262,
"end": 36611
} | class ____ {
@GET("/foo/bar/") //
Call<ResponseBody> method(@Query("ping") String ping) {
return null;
}
}
Request request = buildRequest(Example.class, new Object[] {null});
assertThat(request.url().toString()).isEqualTo("http://example.com/foo/bar/");
}
@Test
public void queryParamOptional() {
| Example |
java | elastic__elasticsearch | x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/AlreadyProcessedFollowingEngineException.java | {
"start": 740,
"end": 1377
} | class ____ extends VersionConflictEngineException {
private final long seqNo;
private final OptionalLong existingPrimaryTerm;
AlreadyProcessedFollowingEngineException(ShardId shardId, long seqNo, OptionalLong existingPrimaryTerm) {
super(shardId, "operation [{}] was processed before with term [{}]", null, seqNo, existingPrimaryTerm);
this.seqNo = seqNo;
this.existingPrimaryTerm = existingPrimaryTerm;
}
public long getSeqNo() {
return seqNo;
}
public OptionalLong getExistingPrimaryTerm() {
return existingPrimaryTerm;
}
}
| AlreadyProcessedFollowingEngineException |
java | spring-projects__spring-boot | loader/spring-boot-loader/src/test/java/org/springframework/boot/loader/launch/LauncherTests.java | {
"start": 6100,
"end": 6579
} | class ____ extends Launcher {
@Override
protected String getMainClass() throws Exception {
throw new IllegalStateException("Should not be called");
}
@Override
protected Archive getArchive() {
return null;
}
@Override
protected Set<URL> getClassPathUrls() throws Exception {
return Collections.emptySet();
}
@Override
protected void launch(String[] args) throws Exception {
super.launch(args);
}
}
}
}
| JarModeTestLauncher |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/issues/Issue5976.java | {
"start": 543,
"end": 5429
} | class ____ {
@Test
public void test_parse_alter() {
for (DbType dbType : new DbType[]{DbType.mysql}) {
for (String sql : new String[]{
"ALTER TABLE tbl_name\n"
+ " ALTER CHECK symbol ENFORCED;",
// "ALTER TABLE t1 \n"
// + " CHANGE COLUMN c1 c1 BLOB \n"
// + " COMMENT = 'NDB_COLUMN=BLOB_INLINE_SIZE=4096,MAX_BLOB_PART_SIZE';",
"ALTER TABLE tbl_name\n"
+ " DROP CHECK symbol;",
"alter table test.abc comment 'abc' engine MyISAM, rename abc_test",
"ALTER TABLE t2 DROP COLUMN c, DROP COLUMN d;",
"ALTER TABLE t1 ENGINE = InnoDB;",
"ALTER TABLE t1 ROW_FORMAT = COMPRESSED;",
"ALTER TABLE t1 AUTO_INCREMENT = 13;",
"ALTER TABLE t1 CHARACTER SET = utf8mb4;",
"ALTER TABLE t1 COMMENT = 'New table comment';",
"ALTER TABLE t1 COMMENT = \"NDB_TABLE=READ_BACKUP=0,PARTITION_BALANCE=FOR_RA_BY_NODE\";",
"ALTER TABLE t1 CHANGE a b BIGINT NOT NULL;",
"ALTER TABLE t1 CHANGE b b INT NOT NULL;",
"ALTER TABLE t1 CHANGE b a INT NOT NULL;",
"-- swap a and b\n"
+ "ALTER TABLE t1 RENAME COLUMN a TO b,\n"
+ " RENAME COLUMN b TO a;\n"
+ "-- \"rotate\" a, b, c through a cycle\n"
+ "ALTER TABLE t1 RENAME COLUMN a TO b,\n"
+ " RENAME COLUMN b TO c,\n"
+ " RENAME COLUMN c TO a;",
"ALTER TABLE t1 MODIFY col1 BIGINT;",
"ALTER TABLE t1 MODIFY col1 BIGINT UNSIGNED DEFAULT 1 COMMENT 'my column';",
"ALTER TABLE tbl_name DROP FOREIGN KEY fk_symbol;",
"ALTER TABLE tbl_name\n"
+ " ALTER CHECK symbol NOT ENFORCED;",
"ALTER TABLE tbl_name\n"
+ " DROP CONSTRAINT symbol;",
"ALTER TABLE tbl_name\n"
+ " ALTER CONSTRAINT symbol ENFORCED;",
"ALTER TABLE tbl_name\n"
+ " ALTER CONSTRAINT symbol NOT ENFORCED;",
"ALTER TABLE tbl_name CONVERT TO CHARACTER SET charset_name;",
"ALTER TABLE t MODIFY latin1_text_col TEXT CHARACTER SET utf8mb4;\n"
+ "ALTER TABLE t MODIFY latin1_varchar_col VARCHAR(M) CHARACTER SET utf8mb4;",
"ALTER TABLE t1 CHANGE c1 c1 BLOB;\n"
+ "ALTER TABLE t1 CHANGE c1 c1 TEXT CHARACTER SET utf8mb4;",
"ALTER TABLE tbl_name DEFAULT CHARACTER SET charset_name;",
}) {
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
List<SQLStatement> statementList = parser.parseStatementList();
System.out.println(statementList);
//assertEquals(1, statementList.size());
SQLParseAssertUtil.assertParseSql(sql, dbType);
}
}
}
@Test
public void test_parse_rename() {
for (DbType dbType : new DbType[]{DbType.mysql}) {
for (String sql : new String[]{
"alter table test.abc comment 'abc' engine MyISAM, rename abc_test",
"alter table test.abc comment 'abc' "
+ "engine MyISAM, rename abc_test RENAME COLUMN old_col_name TO new_col_name",
"RENAME TABLE old_table TO new_table;",
"ALTER TABLE old_table RENAME new_table;",
"RENAME TABLE old_table1 TO new_table1,\n"
+ " old_table2 TO new_table2,\n"
+ " old_table3 TO new_table3;",
"RENAME TABLE old_table TO tmp_table,\n"
+ " new_table TO old_table,\n"
+ " tmp_table TO new_table;",
"LOCK TABLE old_table1 WRITE;\n"
+ "RENAME TABLE old_table1 TO new_table1,\n"
+ " new_table1 TO new_table2;",
"LOCK TABLE old_table1 READ;\n"
+ "RENAME TABLE old_table1 TO new_table1,\n"
+ " new_table1 TO new_table2;",
"RENAME TABLE current_db.tbl_name TO other_db.tbl_name;",
}) {
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
List<SQLStatement> statementList = parser.parseStatementList();
System.out.println(statementList);
//assertEquals(1, statementList.size());
SQLParseAssertUtil.assertParseSql(sql, dbType);
}
}
}
}
| Issue5976 |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/InputChannelTestUtils.java | {
"start": 10836,
"end": 11442
} | class ____ implements MemorySegmentProvider {
private final int pageSize;
public UnpooledMemorySegmentProvider(int pageSize) {
this.pageSize = pageSize;
}
@Override
public Collection<MemorySegment> requestUnpooledMemorySegments(
int numberOfSegmentsToRequest) {
return Collections.singletonList(
MemorySegmentFactory.allocateUnpooledSegment(pageSize));
}
@Override
public void recycleUnpooledMemorySegments(Collection<MemorySegment> segments) {}
}
}
| UnpooledMemorySegmentProvider |
java | quarkusio__quarkus | extensions/smallrye-graphql-client/deployment/src/test/java/io/quarkus/smallrye/graphql/client/deployment/ssl/TypesafeGraphQLClientReloadKeystoreTest.java | {
"start": 1631,
"end": 3560
} | class ____ {
private static final int PORT = 63805;
private static final SSLTestingTools TOOLS = new SSLTestingTools();
private static final String EXPECTED_RESPONSE = "HelloWorld";
private static HttpServer server;
private static final File temp = new File("target/test-certificates-" + UUID.randomUUID());
private static final String CONFIGURATION = """
# No config - overridden in the test
""";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.add(new StringAsset(CONFIGURATION), "application.properties")
.addClasses(MyApi.class, SSLTestingTools.class))
.overrideRuntimeConfigKey("loc", temp.getAbsolutePath())
.overrideRuntimeConfigKey("quarkus.smallrye-graphql-client.my-client.tls-configuration-name", "my-tls-client")
.overrideRuntimeConfigKey("quarkus.tls.my-tls-client.key-store.p12.path", temp.getAbsolutePath() + "/tls.p12")
.overrideRuntimeConfigKey("quarkus.tls.my-tls-client.key-store.p12.password", "password")
.overrideRuntimeConfigKey("quarkus.smallrye-graphql-client.my-client.url", "https://127.0.0.1:" + PORT)
.overrideRuntimeConfigKey("quarkus.tls.my-tls-client.trust-all", "true")
.setBeforeAllCustomizer(() -> {
try {
temp.mkdirs();
Files.copy(new File("target/certs/wrong-test-reload-client-keystore.p12").toPath(),
new File(temp, "/tls.p12").toPath());
} catch (Exception e) {
throw new RuntimeException(e);
}
});
@GraphQLClientApi(configKey = "my-client")
@RequestScoped
private | TypesafeGraphQLClientReloadKeystoreTest |
java | apache__kafka | connect/api/src/main/java/org/apache/kafka/connect/rest/ConnectRestExtensionContext.java | {
"start": 1172,
"end": 1234
} | interface ____ provided
* by the Connect framework.
*/
public | is |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/util/DumpModelAsXmlRoutePredicateTest.java | {
"start": 1225,
"end": 3454
} | class ____ extends ContextTestSupport {
@Override
protected Registry createCamelRegistry() throws Exception {
Registry jndi = super.createCamelRegistry();
jndi.bind("myCoolBean", new MyBarSingleton());
return jndi;
}
@Test
public void testDumpModelAsXml() throws Exception {
String xml = PluginHelper.getModelToXMLDumper(context).dumpModelAsXml(context, context.getRouteDefinition("myRoute"));
assertNotNull(xml);
log.info(xml);
assertTrue(xml.contains("<simple>${body} > 10</simple>"));
}
@Test
public void testDumpModelAsXmlXPath() throws Exception {
String xml
= PluginHelper.getModelToXMLDumper(context).dumpModelAsXml(context, context.getRouteDefinition("myOtherRoute"));
assertNotNull(xml);
log.info(xml);
assertTrue(xml.contains("<xpath>/foo</xpath>"));
}
@Test
public void testDumpModelAsXmlHeader() throws Exception {
String xml
= PluginHelper.getModelToXMLDumper(context).dumpModelAsXml(context, context.getRouteDefinition("myFooRoute"));
assertNotNull(xml);
log.info(xml);
assertTrue(xml.contains("<header>bar</header>"));
}
@Test
public void testDumpModelAsXmlBean() throws Exception {
String xml
= PluginHelper.getModelToXMLDumper(context).dumpModelAsXml(context, context.getRouteDefinition("myBeanRoute"));
assertNotNull(xml);
log.info(xml);
assertTrue(xml.contains("<method ref=\"myCoolBean\"/>"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").routeId("myRoute").filter(simple("${body} > 10")).to("mock:result");
from("direct:other").routeId("myOtherRoute").filter(xpath("/foo")).to("mock:result");
from("direct:foo").routeId("myFooRoute").filter().header("bar").to("mock:result");
from("direct:bean").routeId("myBeanRoute").filter().method("myCoolBean").to("mock:result");
}
};
}
}
| DumpModelAsXmlRoutePredicateTest |
java | google__guava | android/guava-tests/test/com/google/common/util/concurrent/ForwardingListeningExecutorServiceTest.java | {
"start": 812,
"end": 1006
} | class ____ extends TestCase {
public void testForwarding() {
ForwardingObjectTester.testForwardingObject(ForwardingListeningExecutorService.class);
}
}
| ForwardingListeningExecutorServiceTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/VariableNameSameAsTypeTest.java | {
"start": 1611,
"end": 2010
} | class ____ {
void f() {
// BUG: Diagnostic contains: Variable named String has the type String
Predicate<String> p = (String) -> String.isEmpty();
}
}
""")
.doTest();
}
@Test
public void positiveInitialized() {
helper
.addSourceLines(
"Test.java",
"""
| Test |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/cluster/ClusterTopologyRefreshOptions.java | {
"start": 1193,
"end": 5509
} | class ____ {
/** Since Lettuce 7.0 all adaptive triggers are enabled by default */
public static final Set<RefreshTrigger> DEFAULT_ADAPTIVE_REFRESH_TRIGGERS = EnumSet.allOf(RefreshTrigger.class);
/** Since Lettuce 7.0 the default adaptive refresh timeout is 5 seconds */
public static final long DEFAULT_ADAPTIVE_REFRESH_TIMEOUT = 5;
@Deprecated
public static final TimeUnit DEFAULT_ADAPTIVE_REFRESH_TIMEOUT_UNIT = TimeUnit.SECONDS;
public static final Duration DEFAULT_ADAPTIVE_REFRESH_TIMEOUT_DURATION = Duration
.ofSeconds(DEFAULT_ADAPTIVE_REFRESH_TIMEOUT);
public static final boolean DEFAULT_CLOSE_STALE_CONNECTIONS = true;
public static final boolean DEFAULT_DYNAMIC_REFRESH_SOURCES = true;
public static final boolean DEFAULT_PERIODIC_REFRESH_ENABLED = false;
public static final long DEFAULT_REFRESH_PERIOD = 60;
@Deprecated
public static final TimeUnit DEFAULT_REFRESH_PERIOD_UNIT = TimeUnit.SECONDS;
public static final Duration DEFAULT_REFRESH_PERIOD_DURATION = Duration.ofSeconds(DEFAULT_REFRESH_PERIOD);
public static final int DEFAULT_REFRESH_TRIGGERS_RECONNECT_ATTEMPTS = 5;
private final Set<RefreshTrigger> adaptiveRefreshTriggers;
private final Duration adaptiveRefreshTimeout;
private final boolean closeStaleConnections;
private final boolean dynamicRefreshSources;
private final boolean periodicRefreshEnabled;
private final Duration refreshPeriod;
private final int refreshTriggersReconnectAttempts;
protected ClusterTopologyRefreshOptions(Builder builder) {
this.adaptiveRefreshTriggers = Collections.unmodifiableSet(new HashSet<>(builder.adaptiveRefreshTriggers));
this.adaptiveRefreshTimeout = builder.adaptiveRefreshTimeout;
this.closeStaleConnections = builder.closeStaleConnections;
this.dynamicRefreshSources = builder.dynamicRefreshSources;
this.periodicRefreshEnabled = builder.periodicRefreshEnabled;
this.refreshPeriod = builder.refreshPeriod;
this.refreshTriggersReconnectAttempts = builder.refreshTriggersReconnectAttempts;
}
protected ClusterTopologyRefreshOptions(ClusterTopologyRefreshOptions original) {
this.adaptiveRefreshTriggers = Collections.unmodifiableSet(new HashSet<>(original.adaptiveRefreshTriggers));
this.adaptiveRefreshTimeout = original.adaptiveRefreshTimeout;
this.closeStaleConnections = original.closeStaleConnections;
this.dynamicRefreshSources = original.dynamicRefreshSources;
this.periodicRefreshEnabled = original.periodicRefreshEnabled;
this.refreshPeriod = original.refreshPeriod;
this.refreshTriggersReconnectAttempts = original.refreshTriggersReconnectAttempts;
}
/**
* Create a copy of {@literal options}.
*
* @param options the original
* @return A new instance of {@link ClusterTopologyRefreshOptions} containing the values of {@literal options}
*/
public static ClusterTopologyRefreshOptions copyOf(ClusterTopologyRefreshOptions options) {
return new ClusterTopologyRefreshOptions(options);
}
/**
* Returns a new {@link ClusterTopologyRefreshOptions.Builder} to construct {@link ClusterTopologyRefreshOptions}.
*
* @return a new {@link ClusterTopologyRefreshOptions.Builder} to construct {@link ClusterTopologyRefreshOptions}.
*/
public static ClusterTopologyRefreshOptions.Builder builder() {
return new ClusterTopologyRefreshOptions.Builder();
}
/**
* Create a new {@link ClusterTopologyRefreshOptions} using default settings.
*
* @return a new instance of default cluster client options.
*/
public static ClusterTopologyRefreshOptions create() {
return builder().build();
}
/**
* Create a new {@link ClusterTopologyRefreshOptions} using default settings with enabled periodic and adaptive refresh.
*
* @return a new instance of default cluster client options.
*/
public static ClusterTopologyRefreshOptions enabled() {
return builder().enablePeriodicRefresh().enableAllAdaptiveRefreshTriggers().build();
}
/**
* Builder for {@link ClusterTopologyRefreshOptions}.
*/
public static | ClusterTopologyRefreshOptions |
java | apache__camel | dsl/camel-endpointdsl/src/test/java/org/apache/camel/builder/endpoint/HttpProxyTest.java | {
"start": 1035,
"end": 3222
} | class ____ extends BaseEndpointDslTest {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testGiven() throws Exception {
Properties props = new Properties();
props.put("prop.proxyHost", "myproxy");
props.put("prop.proxyPort", "3280");
context.getPropertiesComponent().setInitialProperties(props);
context.start();
context.addRoutes(new EndpointRouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:start")
.to(https("hello-world")
.proxyHost("{{prop.proxyHost}}")
.proxyPort("{{prop.proxyPort}}"));
}
});
HttpEndpoint he = (HttpEndpoint) context.getEndpoints().stream().filter(e -> e instanceof HttpEndpoint).findFirst()
.orElse(null);
assertEquals("myproxy", he.getProxyHost());
assertEquals(3280, he.getProxyPort());
assertEquals("https://hello-world?proxyHost=myproxy&proxyPort=3280", he.getEndpointUri());
context.stop();
}
@Test
public void testOptional() throws Exception {
Properties props = new Properties();
props.put("prop.proxyHost", "myproxy");
context.getPropertiesComponent().setInitialProperties(props);
context.start();
context.addRoutes(new EndpointRouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:start")
.to(https("hello-world")
.proxyHost("{{?prop.proxyHost}}")
.proxyPort("{{?prop.proxyPort}}"));
}
});
HttpEndpoint he = (HttpEndpoint) context.getEndpoints().stream().filter(e -> e instanceof HttpEndpoint).findFirst()
.orElse(null);
assertEquals("myproxy", he.getProxyHost());
assertEquals(0, he.getProxyPort());
assertEquals("https://hello-world?proxyHost=myproxy", he.getEndpointUri());
context.stop();
}
}
| HttpProxyTest |
java | quarkusio__quarkus | extensions/smallrye-graphql-client/runtime/src/main/java/io/quarkus/smallrye/graphql/client/runtime/GraphQLClientSupport.java | {
"start": 176,
"end": 256
} | class ____ {
/**
* Allows the optional usage of short | GraphQLClientSupport |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java | {
"start": 102076,
"end": 102298
} | class ____ {",
" public static String build() {",
" return null;",
" }",
" }",
"",
" @AutoValue.Builder",
" public | StringFactory |
java | qos-ch__slf4j | jcl-over-slf4j/src/main/java/org/apache/commons/logging/impl/SimpleLog.java | {
"start": 3050,
"end": 3412
} | class ____ resource named
* <code>"simplelog.properties"</code>, and includes any matching definitions
* from this resource (if it exists).
*
*
* @author <a href="mailto:sanders@apache.org">Scott Sanders</a>
* @author Rod Waldhoff
* @author Robert Burrell Donkin
*
* @version $Id: SimpleLog.java,v 1.21 2004/06/06 20:47:56 rdonkin Exp $
*/
public | loader |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/id/UUIDGenerator.java | {
"start": 1650,
"end": 2276
} | class ____ implements IdentifierGenerator {
public static final String UUID_GEN_STRATEGY = "uuid_gen_strategy";
public static final String UUID_GEN_STRATEGY_CLASS = "uuid_gen_strategy_class";
private UUIDGenerationStrategy strategy;
private UUIDJavaType.ValueTransformer valueTransformer;
@Override
public void configure(GeneratorCreationContext creationContext, Properties parameters) throws MappingException {
// check first for an explicit strategy instance
strategy = (UUIDGenerationStrategy) parameters.get( UUID_GEN_STRATEGY );
if ( strategy == null ) {
// next check for an explicit strategy | UUIDGenerator |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/convert/support/GenericConversionServiceTests.java | {
"start": 34484,
"end": 34573
} | class ____ extends GenericBaseClass {
}
@SuppressWarnings("rawtypes")
private static | ARaw |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/errorhandler/NewDeadLetterChannelTest.java | {
"start": 1044,
"end": 1824
} | class ____ extends ContextTestSupport {
@Test
public void testNewDeadLetterChannel() throws Exception {
getMockEndpoint("mock:dead").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
DeadLetterChannelBuilder dlc = new DeadLetterChannelBuilder();
dlc.setDeadLetterUri("mock:dead");
errorHandler(dlc);
from("direct:start")
.throwException(new IllegalArgumentException("Forced"));
}
};
}
}
| NewDeadLetterChannelTest |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/jobmanager/TestingExecutionPlanStoreWatcher.java | {
"start": 1046,
"end": 1861
} | class ____ implements ExecutionPlanStoreWatcher {
private ExecutionPlanStore.ExecutionPlanListener executionPlanListener;
@Override
public void start(ExecutionPlanStore.ExecutionPlanListener executionPlanListener) {
this.executionPlanListener = executionPlanListener;
}
@Override
public void stop() {
// noop
}
public void addExecutionPlan(JobID jobID) {
checkNotNull(executionPlanListener, "TestingExecutionPlanStoreWatcher is not started.");
executionPlanListener.onAddedExecutionPlan(jobID);
}
public void removeExecutionPlan(JobID jobID) {
checkNotNull(executionPlanListener, "TestingExecutionPlanStoreWatcher is not started.");
executionPlanListener.onRemovedExecutionPlan(jobID);
}
}
| TestingExecutionPlanStoreWatcher |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/parameters/converters/YearParamConverter.java | {
"start": 283,
"end": 1028
} | class ____ extends TemporalParamConverter<Year> {
// lifted from the JDK as PARSER is private...
private static final DateTimeFormatter PARSER = new DateTimeFormatterBuilder()
.appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
.toFormatter();
// this can be called by generated code
public YearParamConverter() {
super(PARSER);
}
public YearParamConverter(DateTimeFormatter formatter) {
super(formatter);
}
@Override
protected Year convert(String value) {
return Year.parse(value);
}
@Override
protected Year convert(String value, DateTimeFormatter formatter) {
return Year.parse(value, formatter);
}
public static | YearParamConverter |
java | apache__kafka | tools/src/test/java/org/apache/kafka/tools/consumer/ShareGroupMessageFormatterTest.java | {
"start": 2090,
"end": 11851
} | class ____ extends CoordinatorRecordMessageFormatterTest {
private static final ShareGroupMemberMetadataKey SHARE_GROUP_MEMBER_METADATA_KEY = new ShareGroupMemberMetadataKey()
.setGroupId("group-id")
.setMemberId("member-id");
private static final ShareGroupMemberMetadataValue SHARE_GROUP_MEMBER_METADATA_VALUE = new ShareGroupMemberMetadataValue()
.setRackId("rack-a")
.setClientId("client-id")
.setClientHost("1.2.3.4")
.setSubscribedTopicNames(List.of("topic"));
private static final ShareGroupMetadataKey SHARE_GROUP_METADATA_KEY = new ShareGroupMetadataKey()
.setGroupId("group-id");
private static final ShareGroupMetadataValue SHARE_GROUP_METADATA_VALUE = new ShareGroupMetadataValue()
.setEpoch(1)
.setMetadataHash(1);
private static final ShareGroupTargetAssignmentMetadataKey SHARE_GROUP_TARGET_ASSIGNMENT_METADATA_KEY = new ShareGroupTargetAssignmentMetadataKey()
.setGroupId("group-id");
private static final ShareGroupTargetAssignmentMetadataValue SHARE_GROUP_TARGET_ASSIGNMENT_METADATA_VALUE = new ShareGroupTargetAssignmentMetadataValue()
.setAssignmentEpoch(1);
private static final ShareGroupTargetAssignmentMemberKey SHARE_GROUP_TARGET_ASSIGNMENT_MEMBER_KEY = new ShareGroupTargetAssignmentMemberKey()
.setGroupId("group-id")
.setMemberId("member-id");
private static final ShareGroupTargetAssignmentMemberValue SHARE_GROUP_TARGET_ASSIGNMENT_MEMBER_VALUE = new ShareGroupTargetAssignmentMemberValue()
.setTopicPartitions(List.of(new ShareGroupTargetAssignmentMemberValue.TopicPartition()
.setTopicId(Uuid.ONE_UUID)
.setPartitions(List.of(0, 1)))
);
private static final ShareGroupCurrentMemberAssignmentKey SHARE_GROUP_CURRENT_MEMBER_ASSIGNMENT_KEY = new ShareGroupCurrentMemberAssignmentKey()
.setGroupId("group-id")
.setMemberId("member-id");
private static final ShareGroupCurrentMemberAssignmentValue SHARE_GROUP_CURRENT_MEMBER_ASSIGNMENT_VALUE = new ShareGroupCurrentMemberAssignmentValue()
.setMemberEpoch(1)
.setPreviousMemberEpoch(0)
.setState((byte) 0)
.setAssignedPartitions(List.of(new ShareGroupCurrentMemberAssignmentValue.TopicPartitions()
.setTopicId(Uuid.ONE_UUID)
.setPartitions(List.of(0, 1)))
);
private static final ShareGroupStatePartitionMetadataKey SHARE_GROUP_STATE_PARTITION_METADATA_KEY = new ShareGroupStatePartitionMetadataKey()
.setGroupId("group-id");
private static final ShareGroupStatePartitionMetadataValue SHARE_GROUP_STATE_PARTITION_METADATA_VALUE = new ShareGroupStatePartitionMetadataValue()
.setInitializingTopics(List.of(new ShareGroupStatePartitionMetadataValue.TopicPartitionsInfo()
.setTopicId(Uuid.ONE_UUID)
.setTopicName("topic")
.setPartitions(List.of(1)))
)
.setInitializedTopics(List.of(new ShareGroupStatePartitionMetadataValue.TopicPartitionsInfo()
.setTopicId(Uuid.ONE_UUID)
.setTopicName("topic")
.setPartitions(List.of(0)))
)
.setDeletingTopics(List.of(new ShareGroupStatePartitionMetadataValue.TopicInfo()
.setTopicId(Uuid.ONE_UUID)
.setTopicName("topic"))
);
@Override
protected CoordinatorRecordMessageFormatter formatter() {
return new ShareGroupMessageFormatter();
}
@Override
protected Stream<Arguments> parameters() {
return Stream.of(
Arguments.of(
MessageUtil.toVersionPrefixedByteBuffer((short) 10, SHARE_GROUP_MEMBER_METADATA_KEY).array(),
MessageUtil.toVersionPrefixedByteBuffer((short) 0, SHARE_GROUP_MEMBER_METADATA_VALUE).array(),
"""
{"key":{"type":10,"data":{"groupId":"group-id","memberId":"member-id"}},
"value":{"version":0,
"data":{"rackId":"rack-a",
"clientId":"client-id",
"clientHost":"1.2.3.4",
"subscribedTopicNames":["topic"]}}}
"""
),
Arguments.of(
MessageUtil.toVersionPrefixedByteBuffer((short) 10, SHARE_GROUP_MEMBER_METADATA_KEY).array(),
null,
"""
{"key":{"type":10,"data":{"groupId":"group-id","memberId":"member-id"}},"value":null}
"""
),
Arguments.of(
MessageUtil.toVersionPrefixedByteBuffer((short) 11, SHARE_GROUP_METADATA_KEY).array(),
MessageUtil.toVersionPrefixedByteBuffer((short) 0, SHARE_GROUP_METADATA_VALUE).array(),
"""
{"key":{"type":11,"data":{"groupId":"group-id"}},
"value":{"version":0,
"data":{"epoch":1,
"metadataHash":1}}}
"""
),
Arguments.of(
MessageUtil.toVersionPrefixedByteBuffer((short) 11, SHARE_GROUP_METADATA_KEY).array(),
null,
"""
{"key":{"type":11,"data":{"groupId":"group-id"}},"value":null}
"""
),
Arguments.of(
MessageUtil.toVersionPrefixedByteBuffer((short) 12, SHARE_GROUP_TARGET_ASSIGNMENT_METADATA_KEY).array(),
MessageUtil.toVersionPrefixedByteBuffer((short) 0, SHARE_GROUP_TARGET_ASSIGNMENT_METADATA_VALUE).array(),
"""
{"key":{"type":12,"data":{"groupId":"group-id"}},
"value":{"version":0,
"data":{"assignmentEpoch":1}}}
"""
),
Arguments.of(
MessageUtil.toVersionPrefixedByteBuffer((short) 12, SHARE_GROUP_TARGET_ASSIGNMENT_METADATA_KEY).array(),
null,
"""
{"key":{"type":12,"data":{"groupId":"group-id"}},"value":null}
"""
),
Arguments.of(
MessageUtil.toVersionPrefixedByteBuffer((short) 13, SHARE_GROUP_TARGET_ASSIGNMENT_MEMBER_KEY).array(),
MessageUtil.toVersionPrefixedByteBuffer((short) 0, SHARE_GROUP_TARGET_ASSIGNMENT_MEMBER_VALUE).array(),
"""
{"key":{"type":13,"data":{"groupId":"group-id","memberId":"member-id"}},
"value":{"version":0,
"data":{"topicPartitions":[{"topicId":"AAAAAAAAAAAAAAAAAAAAAQ",
"partitions":[0,1]}]}}}
"""
),
Arguments.of(
MessageUtil.toVersionPrefixedByteBuffer((short) 13, SHARE_GROUP_TARGET_ASSIGNMENT_MEMBER_KEY).array(),
null,
"""
{"key":{"type":13,"data":{"groupId":"group-id","memberId":"member-id"}},"value":null}
"""
),
Arguments.of(
MessageUtil.toVersionPrefixedByteBuffer((short) 14, SHARE_GROUP_CURRENT_MEMBER_ASSIGNMENT_KEY).array(),
MessageUtil.toVersionPrefixedByteBuffer((short) 0, SHARE_GROUP_CURRENT_MEMBER_ASSIGNMENT_VALUE).array(),
"""
{"key":{"type":14,"data":{"groupId":"group-id","memberId":"member-id"}},
"value":{"version":0,
"data":{"memberEpoch":1,
"previousMemberEpoch":0,
"state":0,
"assignedPartitions":[{"topicId":"AAAAAAAAAAAAAAAAAAAAAQ",
"partitions":[0,1]}]}}}
"""
),
Arguments.of(
MessageUtil.toVersionPrefixedByteBuffer((short) 14, SHARE_GROUP_CURRENT_MEMBER_ASSIGNMENT_KEY).array(),
null,
"""
{"key":{"type":14,"data":{"groupId":"group-id","memberId":"member-id"}},"value":null}
"""
),
Arguments.of(
MessageUtil.toVersionPrefixedByteBuffer((short) 15, SHARE_GROUP_STATE_PARTITION_METADATA_KEY).array(),
MessageUtil.toVersionPrefixedByteBuffer((short) 0, SHARE_GROUP_STATE_PARTITION_METADATA_VALUE).array(),
"""
{"key":{"type":15,"data":{"groupId":"group-id"}},
"value":{"version":0,
"data":{"initializingTopics":[{"topicId":"AAAAAAAAAAAAAAAAAAAAAQ",
"topicName":"topic",
"partitions":[1]}],
"initializedTopics":[{"topicId":"AAAAAAAAAAAAAAAAAAAAAQ",
"topicName":"topic",
"partitions":[0]}],
"deletingTopics":[{"topicId":"AAAAAAAAAAAAAAAAAAAAAQ",
"topicName":"topic"}]}}}
"""
),
Arguments.of(
MessageUtil.toVersionPrefixedByteBuffer((short) 15, SHARE_GROUP_STATE_PARTITION_METADATA_KEY).array(),
null,
"""
{"key":{"type":15,"data":{"groupId":"group-id"}},"value":null}
"""
)
);
}
}
| ShareGroupMessageFormatterTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/wall/WallDeleteTest.java | {
"start": 825,
"end": 1296
} | class ____ extends TestCase {
private String sql = "DELETE FROM T WHERE F1 = ?";
private WallConfig config = new WallConfig();
protected void setUp() throws Exception {
config.setDeleteAllow(false);
}
public void testMySql() throws Exception {
assertFalse(WallUtils.isValidateMySql(sql, config));
}
public void testORACLE() throws Exception {
assertFalse(WallUtils.isValidateOracle(sql, config));
}
}
| WallDeleteTest |
java | spring-projects__spring-framework | spring-jms/src/test/java/org/springframework/jms/annotation/AbstractJmsAnnotationDrivenTests.java | {
"start": 10021,
"end": 10148
} | class ____ {
@JmsListener(destination = "myQueue")
public void handleIt(String msg) {
}
}
@Component
static | DefaultBean |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveSchedulerClusterITCase.java | {
"start": 2979,
"end": 3139
} | class ____ integration tests for the adaptive scheduler which start a {@link
* org.apache.flink.runtime.minicluster.MiniCluster} per test case.
*/
public | contains |
java | apache__flink | flink-rpc/flink-rpc-core/src/main/java/org/apache/flink/runtime/rpc/RpcServiceUtils.java | {
"start": 1007,
"end": 2110
} | class ____ {
private static final AtomicLong nextNameOffset = new AtomicLong(0L);
/**
* Creates a random name of the form prefix_X, where X is an increasing number.
*
* @param prefix Prefix string to prepend to the monotonically increasing name offset number
* @return A random name of the form prefix_X where X is an increasing number
*/
public static String createRandomName(String prefix) {
Preconditions.checkNotNull(prefix, "Prefix must not be null.");
long nameOffset;
// obtain the next name offset by incrementing it atomically
do {
nameOffset = nextNameOffset.get();
} while (!nextNameOffset.compareAndSet(nameOffset, nameOffset + 1L));
return prefix + '_' + nameOffset;
}
/**
* Creates a wildcard name symmetric to {@link #createRandomName(String)}.
*
* @param prefix prefix of the wildcard name
* @return wildcard name starting with the prefix
*/
public static String createWildcardName(String prefix) {
return prefix + "_*";
}
}
| RpcServiceUtils |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/UnionResultSubpartitionView.java | {
"start": 9099,
"end": 11141
} | class ____.
throw new UnsupportedOperationException("Method should never be called.");
}
@Override
public Throwable getFailureCause() {
Throwable cause = null;
synchronized (lock) {
for (ResultSubpartitionView view : allViews.values()) {
if (view.getFailureCause() != null) {
cause = view.getFailureCause();
LOG.error(cause.toString());
}
}
}
return cause;
}
@Override
public AvailabilityWithBacklog getAvailabilityAndBacklog(boolean isCreditAvailable) {
synchronized (lock) {
try {
cacheBuffer();
} catch (IOException e) {
throw new RuntimeException(e);
}
if (cachedBuffers.isEmpty()) {
return new AvailabilityWithBacklog(false, 0);
}
return new AvailabilityWithBacklog(
isCreditAvailable || cachedBuffers.peek().f0.buffer().getDataType().isEvent(),
(int)
cachedBuffers.stream()
.filter(x -> x.f0.buffer().getDataType().isBuffer())
.count());
}
}
@Override
public void notifyRequiredSegmentId(int subpartitionId, int segmentId) {
synchronized (lock) {
allViews.get(subpartitionId).notifyRequiredSegmentId(subpartitionId, segmentId);
}
}
@Override
public int unsynchronizedGetNumberOfQueuedBuffers() {
return cachedBuffers.size();
}
@Override
public int getNumberOfQueuedBuffers() {
synchronized (lock) {
return cachedBuffers.size();
}
}
@Override
public void notifyNewBufferSize(int newBufferSize) {
synchronized (lock) {
for (ResultSubpartitionView view : allViews.values()) {
view.notifyNewBufferSize(newBufferSize);
}
}
}
}
| yet |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/support/DefaultLifecycleProcessor.java | {
"start": 3584,
"end": 20817
} | class ____ implements LifecycleProcessor, BeanFactoryAware {
/**
* Property name for a common context checkpoint: {@value}.
* @since 6.1
* @see #ON_REFRESH_VALUE
* @see org.crac.Core#checkpointRestore()
*/
public static final String CHECKPOINT_PROPERTY_NAME = "spring.context.checkpoint";
/**
* Property name for terminating the JVM when the context reaches a specific phase: {@value}.
* @since 6.1
* @see #ON_REFRESH_VALUE
*/
public static final String EXIT_PROPERTY_NAME = "spring.context.exit";
/**
* Recognized value for the context checkpoint and exit properties: {@value}.
* @since 6.1
* @see #CHECKPOINT_PROPERTY_NAME
* @see #EXIT_PROPERTY_NAME
*/
public static final String ON_REFRESH_VALUE = "onRefresh";
private static boolean checkpointOnRefresh =
ON_REFRESH_VALUE.equalsIgnoreCase(SpringProperties.getProperty(CHECKPOINT_PROPERTY_NAME));
private static final boolean exitOnRefresh =
ON_REFRESH_VALUE.equalsIgnoreCase(SpringProperties.getProperty(EXIT_PROPERTY_NAME));
private final Log logger = LogFactory.getLog(getClass());
private final Map<Integer, Long> concurrentStartupForPhases = new ConcurrentHashMap<>();
private final Map<Integer, Long> timeoutsForShutdownPhases = new ConcurrentHashMap<>();
private volatile long timeoutPerShutdownPhase = 10000;
private volatile boolean running;
private volatile @Nullable ConfigurableListableBeanFactory beanFactory;
private volatile @Nullable Set<String> stoppedBeans;
// Just for keeping a strong reference to the registered CRaC Resource, if any
private @Nullable Object cracResource;
public DefaultLifecycleProcessor() {
if (!NativeDetector.inNativeImage() && ClassUtils.isPresent("org.crac.Core", getClass().getClassLoader())) {
this.cracResource = new CracDelegate().registerResource();
}
else if (checkpointOnRefresh) {
throw new IllegalStateException(
"Checkpoint on refresh requires a CRaC-enabled JVM and 'org.crac:crac' on the classpath");
}
}
/**
* Switch to concurrent startup for each given phase (group of {@link SmartLifecycle}
* beans with the same 'phase' value) with corresponding timeouts.
* <p><b>Note: By default, the startup for every phase will be sequential without
* a timeout. Calling this setter with timeouts for the given phases switches to a
* mode where the beans in these phases will be started concurrently, cancelling
* the startup if the corresponding timeout is not met for any of these phases.</b>
* <p>For an actual concurrent startup, a bootstrap {@code Executor} needs to be
* set for the application context, typically through a "bootstrapExecutor" bean.
* @param phasesWithTimeouts a map of phase values (matching
* {@link SmartLifecycle#getPhase()}) and corresponding timeout values
* (in milliseconds)
* @since 6.2.6
* @see SmartLifecycle#getPhase()
* @see org.springframework.beans.factory.config.ConfigurableBeanFactory#getBootstrapExecutor()
*/
public void setConcurrentStartupForPhases(Map<Integer, Long> phasesWithTimeouts) {
this.concurrentStartupForPhases.putAll(phasesWithTimeouts);
}
/**
* Switch to concurrent startup for a specific phase (group of {@link SmartLifecycle}
* beans with the same 'phase' value) with a corresponding timeout.
* <p><b>Note: By default, the startup for every phase will be sequential without
* a timeout. Calling this setter with a timeout for the given phase switches to a
* mode where the beans in this phase will be started concurrently, cancelling
* the startup if the corresponding timeout is not met for this phase.</b>
* <p>For an actual concurrent startup, a bootstrap {@code Executor} needs to be
* set for the application context, typically through a "bootstrapExecutor" bean.
* @param phase the phase value (matching {@link SmartLifecycle#getPhase()})
* @param timeout the corresponding timeout value (in milliseconds)
* @since 6.2.6
* @see SmartLifecycle#getPhase()
* @see org.springframework.beans.factory.config.ConfigurableBeanFactory#getBootstrapExecutor()
*/
public void setConcurrentStartupForPhase(int phase, long timeout) {
this.concurrentStartupForPhases.put(phase, timeout);
}
/**
* Specify the maximum time allotted for the shutdown of each given phase
* (group of {@link SmartLifecycle} beans with the same 'phase' value).
* <p>In case of no specific timeout configured, the default timeout per
* shutdown phase will apply: 10000 milliseconds (10 seconds) as of 6.2.
* @param phasesWithTimeouts a map of phase values (matching
* {@link SmartLifecycle#getPhase()}) and corresponding timeout values
* (in milliseconds)
* @since 6.2
* @see SmartLifecycle#getPhase()
* @see #setTimeoutPerShutdownPhase
*/
public void setTimeoutsForShutdownPhases(Map<Integer, Long> phasesWithTimeouts) {
this.timeoutsForShutdownPhases.putAll(phasesWithTimeouts);
}
/**
* Specify the maximum time allotted for the shutdown of a specific phase
* (group of {@link SmartLifecycle} beans with the same 'phase' value).
* <p>In case of no specific timeout configured, the default timeout per
* shutdown phase will apply: 10000 milliseconds (10 seconds) as of 6.2.
* @param phase the phase value (matching {@link SmartLifecycle#getPhase()})
* @param timeout the corresponding timeout value (in milliseconds)
* @since 6.2
* @see SmartLifecycle#getPhase()
* @see #setTimeoutPerShutdownPhase
*/
public void setTimeoutForShutdownPhase(int phase, long timeout) {
this.timeoutsForShutdownPhases.put(phase, timeout);
}
/**
* Specify the maximum time allotted in milliseconds for the shutdown of any
* phase (group of {@link SmartLifecycle} beans with the same 'phase' value).
* <p>The default value is 10000 milliseconds (10 seconds) as of 6.2.
* @see SmartLifecycle#getPhase()
*/
public void setTimeoutPerShutdownPhase(long timeoutPerShutdownPhase) {
this.timeoutPerShutdownPhase = timeoutPerShutdownPhase;
}
@Override
public void setBeanFactory(BeanFactory beanFactory) {
if (!(beanFactory instanceof ConfigurableListableBeanFactory clbf)) {
throw new IllegalArgumentException(
"DefaultLifecycleProcessor requires a ConfigurableListableBeanFactory: " + beanFactory);
}
if (!this.concurrentStartupForPhases.isEmpty() && clbf.getBootstrapExecutor() == null) {
throw new IllegalStateException("'bootstrapExecutor' needs to be configured for concurrent startup");
}
this.beanFactory = clbf;
}
private ConfigurableListableBeanFactory getBeanFactory() {
ConfigurableListableBeanFactory beanFactory = this.beanFactory;
Assert.state(beanFactory != null, "No BeanFactory available");
return beanFactory;
}
private Executor getBootstrapExecutor() {
Executor executor = getBeanFactory().getBootstrapExecutor();
Assert.state(executor != null, "No 'bootstrapExecutor' available");
return executor;
}
private @Nullable Long determineConcurrentStartup(int phase) {
return this.concurrentStartupForPhases.get(phase);
}
private long determineShutdownTimeout(int phase) {
Long timeout = this.timeoutsForShutdownPhases.get(phase);
return (timeout != null ? timeout : this.timeoutPerShutdownPhase);
}
// Lifecycle implementation
/**
* Start all registered beans that implement {@link Lifecycle} and are <i>not</i>
* already running. Any bean that implements {@link SmartLifecycle} will be
* started within its 'phase', and all phases will be ordered from lowest to
* highest value. All beans that do not implement {@link SmartLifecycle} will be
* started in the default phase 0. A bean declared as a dependency of another bean
* will be started before the dependent bean regardless of the declared phase.
*/
@Override
public void start() {
this.stoppedBeans = null;
startBeans(false);
// If any bean failed to explicitly start, the exception propagates here.
// The caller may choose to subsequently call stop() if appropriate.
this.running = true;
}
/**
* Stop all registered beans that implement {@link Lifecycle} and <i>are</i>
* currently running. Any bean that implements {@link SmartLifecycle} will be
* stopped within its 'phase', and all phases will be ordered from highest to
* lowest value. All beans that do not implement {@link SmartLifecycle} will be
* stopped in the default phase 0. A bean declared as dependent on another bean
* will be stopped before the dependency bean regardless of the declared phase.
*/
@Override
public void stop() {
stopBeans(false);
this.running = false;
}
@Override
public void onRefresh() {
if (checkpointOnRefresh) {
checkpointOnRefresh = false;
new CracDelegate().checkpointRestore();
}
if (exitOnRefresh) {
Runtime.getRuntime().halt(0);
}
this.stoppedBeans = null;
try {
startBeans(true);
}
catch (ApplicationContextException ex) {
// Some bean failed to auto-start within context refresh:
// stop already started beans on context refresh failure.
stopBeans(false);
throw ex;
}
this.running = true;
}
@Override
public void onRestart() {
this.stoppedBeans = null;
if (this.running) {
stopBeans(true);
}
startBeans(true);
this.running = true;
}
@Override
public void onPause() {
if (this.running) {
stopBeans(true);
this.running = false;
}
}
@Override
public void onClose() {
stopBeans(false);
this.running = false;
}
@Override
public boolean isRunning() {
return this.running;
}
// Internal helpers
void stopForRestart() {
if (this.running) {
this.stoppedBeans = ConcurrentHashMap.newKeySet();
stopBeans(false);
this.running = false;
}
}
void restartAfterStop() {
if (this.stoppedBeans != null) {
startBeans(true);
this.stoppedBeans = null;
this.running = true;
}
}
private void startBeans(boolean autoStartupOnly) {
Map<String, Lifecycle> lifecycleBeans = getLifecycleBeans();
Map<Integer, LifecycleGroup> phases = new TreeMap<>();
lifecycleBeans.forEach((beanName, bean) -> {
if (!autoStartupOnly || isAutoStartupCandidate(beanName, bean)) {
int startupPhase = getPhase(bean);
phases.computeIfAbsent(
startupPhase, phase -> new LifecycleGroup(phase, lifecycleBeans, autoStartupOnly, false))
.add(beanName, bean);
}
});
if (!phases.isEmpty()) {
phases.values().forEach(LifecycleGroup::start);
}
}
private boolean isAutoStartupCandidate(String beanName, Lifecycle bean) {
Set<String> stoppedBeans = this.stoppedBeans;
return (stoppedBeans != null ? stoppedBeans.contains(beanName) :
(bean instanceof SmartLifecycle smartLifecycle && smartLifecycle.isAutoStartup()));
}
/**
* Start the specified bean as part of the given set of Lifecycle beans,
* making sure that any beans that it depends on are started first.
* @param lifecycleBeans a Map with bean name as key and Lifecycle instance as value
* @param beanName the name of the bean to start
*/
private void doStart(Map<String, ? extends Lifecycle> lifecycleBeans, String beanName,
boolean autoStartupOnly, @Nullable List<CompletableFuture<?>> futures) {
Lifecycle bean = lifecycleBeans.remove(beanName);
if (bean != null && bean != this) {
String[] dependenciesForBean = getBeanFactory().getDependenciesForBean(beanName);
for (String dependency : dependenciesForBean) {
doStart(lifecycleBeans, dependency, autoStartupOnly, futures);
}
if (!bean.isRunning() && (!autoStartupOnly || toBeStarted(beanName, bean))) {
if (futures != null) {
futures.add(CompletableFuture.runAsync(() -> doStart(beanName, bean), getBootstrapExecutor()));
}
else {
doStart(beanName, bean);
}
}
}
}
private void doStart(String beanName, Lifecycle bean) {
if (logger.isTraceEnabled()) {
logger.trace("Starting bean '" + beanName + "' of type [" + bean.getClass().getName() + "]");
}
try {
bean.start();
}
catch (Throwable ex) {
throw new ApplicationContextException("Failed to start bean '" + beanName + "'", ex);
}
if (logger.isDebugEnabled()) {
logger.debug("Successfully started bean '" + beanName + "'");
}
}
private boolean toBeStarted(String beanName, Lifecycle bean) {
Set<String> stoppedBeans = this.stoppedBeans;
return (stoppedBeans != null ? stoppedBeans.contains(beanName) :
(!(bean instanceof SmartLifecycle smartLifecycle) || smartLifecycle.isAutoStartup()));
}
private void stopBeans(boolean pauseableOnly) {
Map<String, Lifecycle> lifecycleBeans = getLifecycleBeans();
Map<Integer, LifecycleGroup> phases = new TreeMap<>(Comparator.reverseOrder());
lifecycleBeans.forEach((beanName, bean) -> {
int shutdownPhase = getPhase(bean);
phases.computeIfAbsent(
shutdownPhase, phase -> new LifecycleGroup(phase, lifecycleBeans, false, pauseableOnly))
.add(beanName, bean);
});
if (!phases.isEmpty()) {
phases.values().forEach(LifecycleGroup::stop);
}
}
/**
* Stop the specified bean as part of the given set of Lifecycle beans,
* making sure that any beans that depends on it are stopped first.
* @param lifecycleBeans a Map with bean name as key and Lifecycle instance as value
* @param beanName the name of the bean to stop
*/
private void doStop(Map<String, ? extends Lifecycle> lifecycleBeans, final String beanName,
boolean pauseableOnly, final CountDownLatch latch, final Set<String> countDownBeanNames) {
Lifecycle bean = lifecycleBeans.remove(beanName);
if (bean != null) {
String[] dependentBeans = getBeanFactory().getDependentBeans(beanName);
for (String dependentBean : dependentBeans) {
doStop(lifecycleBeans, dependentBean, pauseableOnly, latch, countDownBeanNames);
}
try {
if (bean.isRunning()) {
Set<String> stoppedBeans = this.stoppedBeans;
if (stoppedBeans != null) {
stoppedBeans.add(beanName);
}
if (bean instanceof SmartLifecycle smartLifecycle) {
if (!pauseableOnly || smartLifecycle.isPauseable()) {
if (logger.isTraceEnabled()) {
logger.trace("Asking bean '" + beanName + "' of type [" +
bean.getClass().getName() + "] to stop");
}
countDownBeanNames.add(beanName);
smartLifecycle.stop(() -> {
latch.countDown();
countDownBeanNames.remove(beanName);
if (logger.isDebugEnabled()) {
logger.debug("Bean '" + beanName + "' completed its stop procedure");
}
});
}
else {
// Don't wait for beans that aren't pauseable...
latch.countDown();
}
}
else if (!pauseableOnly) {
if (logger.isTraceEnabled()) {
logger.trace("Stopping bean '" + beanName + "' of type [" +
bean.getClass().getName() + "]");
}
bean.stop();
if (logger.isDebugEnabled()) {
logger.debug("Successfully stopped bean '" + beanName + "'");
}
}
}
else if (bean instanceof SmartLifecycle) {
// Don't wait for beans that aren't running...
latch.countDown();
}
}
catch (Throwable ex) {
if (logger.isWarnEnabled()) {
logger.warn("Failed to stop bean '" + beanName + "'", ex);
}
if (bean instanceof SmartLifecycle) {
latch.countDown();
}
}
}
}
// Overridable hooks
/**
* Retrieve all applicable Lifecycle beans: all singletons that have already been created,
* as well as all SmartLifecycle beans (even if they are marked as lazy-init).
* @return the Map of applicable beans, with bean names as keys and bean instances as values
*/
protected Map<String, Lifecycle> getLifecycleBeans() {
ConfigurableListableBeanFactory beanFactory = getBeanFactory();
Map<String, Lifecycle> beans = new LinkedHashMap<>();
String[] beanNames = beanFactory.getBeanNamesForType(Lifecycle.class, false, false);
for (String beanName : beanNames) {
String beanNameToRegister = BeanFactoryUtils.transformedBeanName(beanName);
boolean isFactoryBean = beanFactory.isFactoryBean(beanNameToRegister);
String beanNameToCheck = (isFactoryBean ? BeanFactory.FACTORY_BEAN_PREFIX + beanName : beanName);
if ((beanFactory.containsSingleton(beanNameToRegister) &&
(!isFactoryBean || matchesBeanType(Lifecycle.class, beanNameToCheck, beanFactory))) ||
matchesBeanType(SmartLifecycle.class, beanNameToCheck, beanFactory)) {
Object bean = beanFactory.getBean(beanNameToCheck);
if (bean != this && bean instanceof Lifecycle lifecycle) {
beans.put(beanNameToRegister, lifecycle);
}
}
}
return beans;
}
private boolean matchesBeanType(Class<?> targetType, String beanName, BeanFactory beanFactory) {
Class<?> beanType = beanFactory.getType(beanName);
return (beanType != null && targetType.isAssignableFrom(beanType));
}
/**
* Determine the lifecycle phase of the given bean.
* <p>The default implementation checks for the {@link Phased} interface, using
* a default of 0 otherwise. Can be overridden to apply other/further policies.
* @param bean the bean to introspect
* @return the phase (an integer value)
* @see Phased#getPhase()
* @see SmartLifecycle
*/
protected int getPhase(Lifecycle bean) {
return (bean instanceof Phased phased ? phased.getPhase() : 0);
}
/**
* Helper | DefaultLifecycleProcessor |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/indices/rollover/ConditionTests.java | {
"start": 916,
"end": 16783
} | class ____ extends ESTestCase {
public void testMaxAge() {
final MaxAgeCondition maxAgeCondition = new MaxAgeCondition(TimeValue.timeValueHours(1));
long indexCreatedMatch = System.currentTimeMillis() - TimeValue.timeValueMinutes(61).getMillis();
Condition.Result evaluate = maxAgeCondition.evaluate(
new Condition.Stats(0, indexCreatedMatch, randomByteSize(), randomByteSize(), randomNonNegativeLong())
);
assertThat(evaluate.condition(), equalTo(maxAgeCondition));
assertThat(evaluate.matched(), equalTo(true));
long indexCreatedNotMatch = System.currentTimeMillis() - TimeValue.timeValueMinutes(59).getMillis();
evaluate = maxAgeCondition.evaluate(
new Condition.Stats(0, indexCreatedNotMatch, randomByteSize(), randomByteSize(), randomNonNegativeLong())
);
assertThat(evaluate.condition(), equalTo(maxAgeCondition));
assertThat(evaluate.matched(), equalTo(false));
}
// Verifies MaxDocsCondition: matches once the document count reaches the threshold.
public void testMaxDocs() {
    final MaxDocsCondition condition = new MaxDocsCondition(100L);

    // At or above the 100-doc threshold the condition must match.
    long docsAboveThreshold = randomIntBetween(100, 1000);
    Condition.Result result = condition.evaluate(
        new Condition.Stats(docsAboveThreshold, 0, randomByteSize(), randomByteSize(), randomNonNegativeLong())
    );
    assertThat(result.condition(), equalTo(condition));
    assertThat(result.matched(), equalTo(true));

    // Below the threshold the condition must not match.
    long docsBelowThreshold = randomIntBetween(0, 99);
    result = condition.evaluate(
        new Condition.Stats(docsBelowThreshold, 0, randomByteSize(), randomByteSize(), randomNonNegativeLong())
    );
    assertThat(result.condition(), equalTo(condition));
    assertThat(result.matched(), equalTo(false));
}
public void testMaxSize() {
MaxSizeCondition maxSizeCondition = new MaxSizeCondition(ByteSizeValue.ofMb(randomIntBetween(10, 20)));
Condition.Result result = maxSizeCondition.evaluate(
new Condition.Stats(
randomNonNegativeLong(),
randomNonNegativeLong(),
ByteSizeValue.ofMb(0),
randomByteSize(),
randomNonNegativeLong()
)
);
assertThat(result.matched(), equalTo(false));
result = maxSizeCondition.evaluate(
new Condition.Stats(
randomNonNegativeLong(),
randomNonNegativeLong(),
ByteSizeValue.ofMb(randomIntBetween(0, 9)),
randomByteSize(),
randomNonNegativeLong()
)
);
assertThat(result.matched(), equalTo(false));
result = maxSizeCondition.evaluate(
new Condition.Stats(
randomNonNegativeLong(),
randomNonNegativeLong(),
ByteSizeValue.ofMb(randomIntBetween(20, 1000)),
randomByteSize(),
randomNonNegativeLong()
)
);
assertThat(result.matched(), equalTo(true));
}
public void testMaxPrimaryShardSize() {
MaxPrimaryShardSizeCondition maxPrimaryShardSizeCondition = new MaxPrimaryShardSizeCondition(
ByteSizeValue.ofMb(randomIntBetween(10, 20))
);
Condition.Result result = maxPrimaryShardSizeCondition.evaluate(
new Condition.Stats(
randomNonNegativeLong(),
randomNonNegativeLong(),
randomByteSize(),
ByteSizeValue.ofMb(0),
randomNonNegativeLong()
)
);
assertThat(result.matched(), equalTo(false));
result = maxPrimaryShardSizeCondition.evaluate(
new Condition.Stats(
randomNonNegativeLong(),
randomNonNegativeLong(),
randomByteSize(),
ByteSizeValue.ofMb(randomIntBetween(0, 9)),
randomNonNegativeLong()
)
);
assertThat(result.matched(), equalTo(false));
result = maxPrimaryShardSizeCondition.evaluate(
new Condition.Stats(
randomNonNegativeLong(),
randomNonNegativeLong(),
randomByteSize(),
ByteSizeValue.ofMb(randomIntBetween(20, 1000)),
randomNonNegativeLong()
)
);
assertThat(result.matched(), equalTo(true));
}
public void testMaxPrimaryShardDocs() {
final MaxPrimaryShardDocsCondition maxPrimaryShardDocsCondition = new MaxPrimaryShardDocsCondition(100L);
long maxPrimaryShardDocsMatch = randomIntBetween(100, 1000);
Condition.Result evaluate = maxPrimaryShardDocsCondition.evaluate(
new Condition.Stats(randomNonNegativeLong(), 0, randomByteSize(), randomByteSize(), maxPrimaryShardDocsMatch)
);
assertThat(evaluate.condition(), equalTo(maxPrimaryShardDocsCondition));
assertThat(evaluate.matched(), equalTo(true));
long maxPrimaryShardDocsNotMatch = randomIntBetween(0, 99);
evaluate = maxPrimaryShardDocsCondition.evaluate(
new Condition.Stats(randomNonNegativeLong(), 0, randomByteSize(), randomByteSize(), maxPrimaryShardDocsNotMatch)
);
assertThat(evaluate.condition(), equalTo(maxPrimaryShardDocsCondition));
assertThat(evaluate.matched(), equalTo(false));
}
public void testMinAge() {
final MinAgeCondition minAgeCondition = new MinAgeCondition(TimeValue.timeValueHours(1));
long indexCreatedMatch = System.currentTimeMillis() - TimeValue.timeValueMinutes(61).getMillis();
Condition.Result evaluate = minAgeCondition.evaluate(
new Condition.Stats(0, indexCreatedMatch, randomByteSize(), randomByteSize(), randomNonNegativeLong())
);
assertThat(evaluate.condition(), equalTo(minAgeCondition));
assertThat(evaluate.matched(), equalTo(true));
long indexCreatedNotMatch = System.currentTimeMillis() - TimeValue.timeValueMinutes(59).getMillis();
evaluate = minAgeCondition.evaluate(
new Condition.Stats(0, indexCreatedNotMatch, randomByteSize(), randomByteSize(), randomNonNegativeLong())
);
assertThat(evaluate.condition(), equalTo(minAgeCondition));
assertThat(evaluate.matched(), equalTo(false));
}
public void testMinDocs() {
final MinDocsCondition minDocsCondition = new MinDocsCondition(100L);
long minDocsMatch = randomIntBetween(100, 1000);
Condition.Result evaluate = minDocsCondition.evaluate(
new Condition.Stats(minDocsMatch, 0, randomByteSize(), randomByteSize(), randomNonNegativeLong())
);
assertThat(evaluate.condition(), equalTo(minDocsCondition));
assertThat(evaluate.matched(), equalTo(true));
long minDocsNotMatch = randomIntBetween(0, 99);
evaluate = minDocsCondition.evaluate(
new Condition.Stats(minDocsNotMatch, 0, randomByteSize(), randomByteSize(), randomNonNegativeLong())
);
assertThat(evaluate.condition(), equalTo(minDocsCondition));
assertThat(evaluate.matched(), equalTo(false));
}
public void testMinSize() {
MinSizeCondition minSizeCondition = new MinSizeCondition(ByteSizeValue.ofMb(randomIntBetween(10, 20)));
Condition.Result result = minSizeCondition.evaluate(
new Condition.Stats(
randomNonNegativeLong(),
randomNonNegativeLong(),
ByteSizeValue.ofMb(0),
randomByteSize(),
randomNonNegativeLong()
)
);
assertThat(result.matched(), equalTo(false));
result = minSizeCondition.evaluate(
new Condition.Stats(
randomNonNegativeLong(),
randomNonNegativeLong(),
ByteSizeValue.ofMb(randomIntBetween(0, 9)),
randomByteSize(),
randomNonNegativeLong()
)
);
assertThat(result.matched(), equalTo(false));
result = minSizeCondition.evaluate(
new Condition.Stats(
randomNonNegativeLong(),
randomNonNegativeLong(),
ByteSizeValue.ofMb(randomIntBetween(20, 1000)),
randomByteSize(),
randomNonNegativeLong()
)
);
assertThat(result.matched(), equalTo(true));
}
public void testMinPrimaryShardSize() {
MinPrimaryShardSizeCondition minPrimaryShardSizeCondition = new MinPrimaryShardSizeCondition(
ByteSizeValue.ofMb(randomIntBetween(10, 20))
);
Condition.Result result = minPrimaryShardSizeCondition.evaluate(
new Condition.Stats(
randomNonNegativeLong(),
randomNonNegativeLong(),
randomByteSize(),
ByteSizeValue.ofMb(0),
randomNonNegativeLong()
)
);
assertThat(result.matched(), equalTo(false));
result = minPrimaryShardSizeCondition.evaluate(
new Condition.Stats(
randomNonNegativeLong(),
randomNonNegativeLong(),
randomByteSize(),
ByteSizeValue.ofMb(randomIntBetween(0, 9)),
randomNonNegativeLong()
)
);
assertThat(result.matched(), equalTo(false));
result = minPrimaryShardSizeCondition.evaluate(
new Condition.Stats(
randomNonNegativeLong(),
randomNonNegativeLong(),
randomByteSize(),
ByteSizeValue.ofMb(randomIntBetween(20, 1000)),
randomNonNegativeLong()
)
);
assertThat(result.matched(), equalTo(true));
}
public void testMinPrimaryShardDocs() {
final MinPrimaryShardDocsCondition minPrimaryShardDocsCondition = new MinPrimaryShardDocsCondition(100L);
long minPrimaryShardDocsMatch = randomIntBetween(100, 1000);
Condition.Result evaluate = minPrimaryShardDocsCondition.evaluate(
new Condition.Stats(randomNonNegativeLong(), 0, randomByteSize(), randomByteSize(), minPrimaryShardDocsMatch)
);
assertThat(evaluate.condition(), equalTo(minPrimaryShardDocsCondition));
assertThat(evaluate.matched(), equalTo(true));
long minPrimaryShardDocsNotMatch = randomIntBetween(0, 99);
evaluate = minPrimaryShardDocsCondition.evaluate(
new Condition.Stats(randomNonNegativeLong(), 0, randomByteSize(), randomByteSize(), minPrimaryShardDocsNotMatch)
);
assertThat(evaluate.condition(), equalTo(minPrimaryShardDocsCondition));
assertThat(evaluate.matched(), equalTo(false));
}
public void testEqualsAndHashCode() {
MaxAgeCondition maxAgeCondition = new MaxAgeCondition(new TimeValue(randomNonNegativeLong()));
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
maxAgeCondition,
condition -> new MaxAgeCondition(condition.value),
condition -> new MaxAgeCondition(new TimeValue(randomNonNegativeLong()))
);
MaxDocsCondition maxDocsCondition = new MaxDocsCondition(randomLong());
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
maxDocsCondition,
condition -> new MaxDocsCondition(condition.value),
condition -> new MaxDocsCondition(randomLong())
);
MaxSizeCondition maxSizeCondition = new MaxSizeCondition(randomByteSize());
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
maxSizeCondition,
condition -> new MaxSizeCondition(condition.value),
condition -> new MaxSizeCondition(randomByteSize())
);
MaxPrimaryShardSizeCondition maxPrimaryShardSizeCondition = new MaxPrimaryShardSizeCondition(randomByteSize());
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
maxPrimaryShardSizeCondition,
condition -> new MaxPrimaryShardSizeCondition(condition.value),
condition -> new MaxPrimaryShardSizeCondition(randomByteSize())
);
MaxPrimaryShardDocsCondition maxPrimaryShardDocsCondition = new MaxPrimaryShardDocsCondition(randomNonNegativeLong());
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
maxPrimaryShardDocsCondition,
condition -> new MaxPrimaryShardDocsCondition(condition.value),
condition -> new MaxPrimaryShardDocsCondition(randomNonNegativeLong())
);
MinAgeCondition minAgeCondition = new MinAgeCondition(new TimeValue(randomNonNegativeLong()));
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
minAgeCondition,
condition -> new MinAgeCondition(condition.value),
condition -> new MinAgeCondition(new TimeValue(randomNonNegativeLong()))
);
MinDocsCondition minDocsCondition = new MinDocsCondition(randomLong());
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
minDocsCondition,
condition -> new MinDocsCondition(condition.value),
condition -> new MinDocsCondition(randomLong())
);
MinSizeCondition minSizeCondition = new MinSizeCondition(randomByteSize());
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
minSizeCondition,
condition -> new MinSizeCondition(condition.value),
condition -> new MinSizeCondition(randomByteSize())
);
MinPrimaryShardSizeCondition minPrimaryShardSizeCondition = new MinPrimaryShardSizeCondition(randomByteSize());
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
minPrimaryShardSizeCondition,
condition -> new MinPrimaryShardSizeCondition(condition.value),
condition -> new MinPrimaryShardSizeCondition(randomByteSize())
);
MinPrimaryShardDocsCondition minPrimaryShardDocsCondition = new MinPrimaryShardDocsCondition(randomNonNegativeLong());
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
minPrimaryShardDocsCondition,
condition -> new MinPrimaryShardDocsCondition(condition.value),
condition -> new MinPrimaryShardDocsCondition(randomNonNegativeLong())
);
OptimalShardCountCondition optimalShardCountCondition = new OptimalShardCountCondition(3);
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
optimalShardCountCondition,
condition -> new OptimalShardCountCondition(3),
condition -> new OptimalShardCountCondition(2)
);
}
// An OptimalShardCountCondition always reports a match when evaluated.
public void testAutoShardCondition() {
    OptimalShardCountCondition condition = new OptimalShardCountCondition(randomNonNegativeInt());
    Condition.Stats stats = new Condition.Stats(
        1,
        randomNonNegativeLong(),
        randomByteSizeValue(),
        randomByteSizeValue(),
        1
    );
    assertThat(condition.evaluate(stats).matched(), is(true));
}
// Round-trips an optimal_shard_count condition through RolloverInfo XContent
// parsing and checks the parsed value equals the programmatically-built instance.
public void testParseAutoShardConditionFromRolloverInfo() throws IOException {
    long time = System.currentTimeMillis();
    RolloverInfo info = new RolloverInfo("logs-nginx", List.of(new OptimalShardCountCondition(3)), time);
    RolloverInfo parsedInfo = RolloverInfo.parse(
        createParser(
            JsonXContent.jsonXContent,
            // Hand-built JSON equivalent of the RolloverInfo constructed above.
            "{\n" + " \"met_conditions\": {\n" + " \"optimal_shard_count\": 3" + "\n},\n" + " \"time\": " + time + "\n" + " }"
        ),
        "logs-nginx"
    );
    assertThat(parsedInfo, is(info));
}
// Produces an arbitrary non-negative byte-size value for stats fields that
// are irrelevant to the condition under test.
private static ByteSizeValue randomByteSize() {
    long bytes = randomNonNegativeLong();
    return ByteSizeValue.ofBytes(bytes);
}
}
| ConditionTests |
java | google__guice | extensions/assistedinject/test/com/google/inject/assistedinject/ManyConstructorsTest.java | {
"start": 8465,
"end": 8680
} | interface ____ {
Farm popsFarm(String pop);
Farm momsFarm(@Assisted("mom") String mom);
Farm momAndPopsFarm(@Assisted("mom") String mom, @Assisted("pop") String pop);
}
public static | FamilyFarmFactory |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/expression/CachedExpressionEvaluator.java | {
"start": 1278,
"end": 3328
} | class ____ {
private final SpelExpressionParser parser;
private final ParameterNameDiscoverer parameterNameDiscoverer = new DefaultParameterNameDiscoverer();
/**
* Create a new instance with the default {@link SpelExpressionParser}.
*/
protected CachedExpressionEvaluator() {
this(new SpelExpressionParser());
}
/**
* Create a new instance with the specified {@link SpelExpressionParser}.
*/
protected CachedExpressionEvaluator(SpelExpressionParser parser) {
Assert.notNull(parser, "SpelExpressionParser must not be null");
this.parser = parser;
}
/**
* Return the {@link SpelExpressionParser} to use.
*/
protected SpelExpressionParser getParser() {
return this.parser;
}
/**
* Return a shared parameter name discoverer which caches data internally.
* @since 4.3
*/
protected ParameterNameDiscoverer getParameterNameDiscoverer() {
return this.parameterNameDiscoverer;
}
/**
* Return the parsed {@link Expression} for the specified SpEL expression.
* <p>{@linkplain #parseExpression(String) Parses} the expression if it hasn't
* already been parsed and cached.
* @param cache the cache to use
* @param elementKey the {@code AnnotatedElementKey} containing the element
* on which the expression is defined
* @param expression the expression to parse
*/
/**
 * Return the parsed {@link Expression} for the given SpEL string, parsing and
 * caching it under a key derived from the annotated element when absent.
 * @param cache the cache to use
 * @param elementKey the annotated element the expression is defined on
 * @param expression the raw SpEL expression string
 */
protected Expression getExpression(Map<ExpressionKey, Expression> cache,
        AnnotatedElementKey elementKey, String expression) {

    ExpressionKey cacheKey = createKey(elementKey, expression);
    return cache.computeIfAbsent(cacheKey, unused -> parseExpression(expression));
}
/**
* Parse the specified {@code expression}.
* @param expression the expression to parse
* @since 5.3.13
*/
/**
 * Parse the given SpEL expression string with the configured parser.
 * @param expression the expression to parse
 */
protected Expression parseExpression(String expression) {
    SpelExpressionParser spelParser = getParser();
    return spelParser.parseExpression(expression);
}
// Builds the composite cache key (annotated element + raw expression string).
private ExpressionKey createKey(AnnotatedElementKey elementKey, String expression) {
    return new ExpressionKey(elementKey, expression);
}
/**
* An expression key.
*/
protected static | CachedExpressionEvaluator |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/handler/AbstractHandlerMethodExceptionResolver.java | {
"start": 944,
"end": 1237
} | class ____
* {@link org.springframework.web.servlet.HandlerExceptionResolver HandlerExceptionResolver}
* implementations that support handling exceptions from handlers of type {@link HandlerMethod}.
*
* @author Rossen Stoyanchev
* @author Juergen Hoeller
* @since 3.1
*/
public abstract | for |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestReferenceCountMap.java | {
"start": 3698,
"end": 4137
} | class ____ extends SubjectInheritingThread {
private ReferenceCountMap<AclFeature> referenceCountMap;
RemoveThread(ReferenceCountMap<AclFeature> referenceCountMap) {
this.referenceCountMap = referenceCountMap;
}
@Override
public void work() {
for (int i = 0; i < LOOP_COUNTER; i++) {
referenceCountMap.remove(aclFeature1);
referenceCountMap.remove(aclFeature2);
}
}
};
}
| RemoveThread |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/server/reactive/HttpHandler.java | {
"start": 1680,
"end": 1975
} | interface ____ {
/**
* Handle the given request and write to the response.
* @param request current request
* @param response current response
* @return indicates completion of request handling
*/
Mono<Void> handle(ServerHttpRequest request, ServerHttpResponse response);
}
| HttpHandler |
java | apache__flink | flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlAlterFunction.java | {
"start": 1420,
"end": 3800
} | class ____ extends SqlAlterObject {
public static final SqlSpecialOperator OPERATOR =
new SqlSpecialOperator("ALTER FUNCTION", SqlKind.OTHER_DDL);
private final SqlCharStringLiteral functionClassName;
private final String functionLanguage;
private final boolean ifExists;
private final boolean isSystemFunction;
private final boolean isTemporary;
public SqlAlterFunction(
SqlParserPos pos,
SqlIdentifier functionIdentifier,
SqlCharStringLiteral functionClassName,
String functionLanguage,
boolean ifExists,
boolean isTemporary,
boolean isSystemFunction) {
super(OPERATOR, pos, functionIdentifier);
this.functionClassName =
requireNonNull(functionClassName, "functionClassName should not be null");
this.isSystemFunction = isSystemFunction;
this.isTemporary = isTemporary;
this.functionLanguage = functionLanguage;
this.ifExists = ifExists;
}
@Override
public void unparseAlterOperation(SqlWriter writer, int leftPrec, int rightPrec) {
unparseAlterIfExists(writer, leftPrec, rightPrec);
writer.keyword("AS");
functionClassName.unparse(writer, leftPrec, rightPrec);
SqlUnparseUtils.unparseLanguage(functionLanguage, writer);
}
@Nonnull
@Override
public List<SqlNode> getOperandList() {
return ImmutableNullableList.of(name, functionClassName);
}
public String getFunctionLanguage() {
return functionLanguage;
}
public SqlCharStringLiteral getFunctionClassName() {
return this.functionClassName;
}
public boolean isTemporary() {
return isTemporary;
}
public boolean isSystemFunction() {
return isSystemFunction;
}
public boolean isIfExists() {
return this.ifExists;
}
private void unparseAlterIfExists(SqlWriter writer, int leftPrec, int rightPrec) {
writer.keyword("ALTER");
if (isTemporary) {
writer.keyword("TEMPORARY");
}
if (isSystemFunction) {
writer.keyword("SYSTEM");
}
writer.keyword("FUNCTION");
if (ifExists) {
writer.keyword("IF EXISTS");
}
name.unparse(writer, leftPrec, rightPrec);
}
}
| SqlAlterFunction |
java | junit-team__junit5 | junit-platform-engine/src/main/java/org/junit/platform/engine/support/descriptor/MethodSource.java | {
"start": 5235,
"end": 5866
} | class ____ of this source.
*/
public String getClassName() {
return this.className;
}
/**
* Get the method name of this source.
*/
public String getMethodName() {
return this.methodName;
}
/**
* Get the method parameter types of this source.
*/
public @Nullable String getMethodParameterTypes() {
return this.methodParameterTypes;
}
/**
* Get the {@linkplain Class Java class} of this source.
*
* <p>If the {@link Class} was not provided, but only the name, this method
* attempts to lazily load the {@link Class} based on its name and throws a
* {@link PreconditionViolationException} if the | name |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/DialectChecks.java | {
"start": 7985,
"end": 8155
} | class ____ implements DialectCheck {
public boolean isMatch(Dialect dialect) {
return dialect.supportsSubqueryInSelect();
}
}
public static | SupportsSubqueryInSelect |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/embeddable/NestedEmbeddedWitnNotOptionalManyToOneTest.java | {
"start": 2505,
"end": 2826
} | class ____ {
@Id
private Integer id;
private String name;
public ChildEntity() {
}
public ChildEntity(Integer id, String name) {
this.id = id;
this.name = name;
}
public Integer getId() {
return id;
}
public String getName() {
return name;
}
}
@Embeddable
public static | ChildEntity |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JobManagerRunner.java | {
"start": 1318,
"end": 3773
} | interface ____ extends AutoCloseableAsync {
/**
* Start the execution of the {@link JobMaster}.
*
* @throws Exception if the JobMaster cannot be started
*/
void start() throws Exception;
/**
* Get the {@link JobMasterGateway} of the {@link JobMaster}. The future is only completed if
* the JobMaster becomes leader.
*
* @return Future with the JobMasterGateway once the underlying JobMaster becomes leader
*/
CompletableFuture<JobMasterGateway> getJobMasterGateway();
/**
* Get the result future of this runner. The future is completed once the executed job reaches a
* globally terminal state or if the initialization of the {@link JobMaster} fails. If the
* result future is completed exceptionally via {@link JobNotFinishedException}, then this
* signals that the job has not been completed successfully. All other exceptional completions
* denote an unexpected exception which leads to a process restart.
*
* @return Future which is completed with the job result
*/
CompletableFuture<JobManagerRunnerResult> getResultFuture();
/**
* Get the job id of the executed job.
*
* @return job id of the executed job
*/
JobID getJobID();
/**
* Cancels the currently executed job.
*
* @param timeout of this operation
* @return Future acknowledge of the operation
*/
CompletableFuture<Acknowledge> cancel(Duration timeout);
/**
* Requests the current job status.
*
* @param timeout for the rpc call
* @return Future containing the current job status
*/
CompletableFuture<JobStatus> requestJobStatus(Duration timeout);
/**
* Request the details of the executed job.
*
* @param timeout for the rpc call
* @return Future details of the executed job
*/
CompletableFuture<JobDetails> requestJobDetails(Duration timeout);
/**
* Requests the {@link ExecutionGraphInfo} of the executed job.
*
* @param timeout for the rpc call
* @return Future which is completed with the {@link ExecutionGraphInfo} of the executed job
*/
CompletableFuture<ExecutionGraphInfo> requestJob(Duration timeout);
/**
* Flag indicating if the JobManagerRunner has been initialized.
*
* @return true if the JobManagerRunner has been initialized.
*/
boolean isInitialized();
}
| JobManagerRunner |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/security/LocalizerTokenSecretManager.java | {
"start": 975,
"end": 1682
} | class ____ extends
SecretManager<LocalizerTokenIdentifier> {
private final SecretKey secretKey;
public LocalizerTokenSecretManager() {
this.secretKey = generateSecret();
}
@Override
protected byte[] createPassword(LocalizerTokenIdentifier identifier) {
return createPassword(identifier.getBytes(), secretKey);
}
@Override
public byte[] retrievePassword(LocalizerTokenIdentifier identifier)
throws org.apache.hadoop.security.token.SecretManager.InvalidToken {
return createPassword(identifier.getBytes(), secretKey);
}
@Override
public LocalizerTokenIdentifier createIdentifier() {
return new LocalizerTokenIdentifier();
}
}
| LocalizerTokenSecretManager |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/presentation/NumberGrouping_Test.java | {
"start": 787,
"end": 1556
} | class ____ {
@Test
void should_group_words_in_byte_hex_value() {
String hexLiteral = NumberGrouping.toHexLiteral("CA");
assertThat(hexLiteral).isEqualTo("CA");
}
@Test
void should_group_words_in_hex_value() {
String hexLiteral = NumberGrouping.toHexLiteral("01234567");
assertThat(hexLiteral).isEqualTo("0123_4567");
}
@Test
void should_group_bytes_in_integer() {
String literals = NumberGrouping.toBinaryLiteral("00000000000000000000000000000011");
assertThat(literals).isEqualTo("00000000_00000000_00000000_00000011");
}
@Test
void should_group_bytes_in_short() {
String literals = NumberGrouping.toBinaryLiteral("1000000000000011");
assertThat(literals).isEqualTo("10000000_00000011");
}
}
| NumberGrouping_Test |
java | quarkusio__quarkus | independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/maven/MavenLocalRepositoryManager.java | {
"start": 894,
"end": 5598
} | class ____ implements LocalRepositoryManager {
private final LocalRepositoryManager delegate;
private final Path secondaryRepo;
private final Path originalRepo;
public MavenLocalRepositoryManager(LocalRepositoryManager delegate, Path secondaryRepo) {
this.delegate = delegate;
this.secondaryRepo = secondaryRepo;
this.originalRepo = delegate.getRepository().getBasedir().toPath();
}
@Override
public LocalRepository getRepository() {
return delegate.getRepository();
}
@Override
public String getPathForLocalArtifact(Artifact artifact) {
return delegate.getPathForLocalArtifact(artifact);
}
@Override
public String getPathForRemoteArtifact(Artifact artifact, RemoteRepository repository, String context) {
return delegate.getPathForRemoteArtifact(artifact, repository, context);
}
@Override
public String getPathForLocalMetadata(Metadata metadata) {
return delegate.getPathForLocalMetadata(metadata);
}
@Override
public String getPathForRemoteMetadata(Metadata metadata, RemoteRepository repository, String context) {
return delegate.getPathForRemoteMetadata(metadata, repository, context);
}
public void relink(String groupId, String artifactId, String classifier, String type, String version, Path p) {
final Path creatorRepoPath = getLocalPath(originalRepo, groupId, artifactId, classifier, type, version);
try {
IoUtils.copy(p, creatorRepoPath);
} catch (IOException e) {
throw new IllegalStateException("Failed to copy " + p + " to a staging repo", e);
}
}
@Override
public LocalArtifactResult find(RepositorySystemSession session, LocalArtifactRequest request) {
final LocalArtifactResult result = delegate.find(session, request);
if (result.isAvailable()) {
return result;
}
final Artifact artifact = request.getArtifact();
final Path secondaryLocation = getLocalPath(secondaryRepo, artifact.getGroupId(), artifact.getArtifactId(),
artifact.getClassifier(), artifact.getExtension(), artifact.getVersion());
if (!Files.exists(secondaryLocation)) {
return result;
}
result.setFile(secondaryLocation.toFile());
artifact.setFile(result.getFile());
result.setAvailable(true);
return result;
}
@Override
public void add(RepositorySystemSession session, LocalArtifactRegistration request) {
delegate.add(session, request);
}
@Override
public LocalMetadataResult find(RepositorySystemSession session, LocalMetadataRequest request) {
final LocalMetadataResult result = delegate.find(session, request);
if (result.getFile() != null && result.getFile().exists()) {
return result;
}
final Metadata metadata = request.getMetadata();
final Path userRepoPath = getMetadataPath(secondaryRepo, metadata.getGroupId(), metadata.getArtifactId(),
metadata.getType(), metadata.getVersion());
if (!Files.exists(userRepoPath)) {
return result;
}
result.setFile(userRepoPath.toFile());
metadata.setFile(result.getFile());
return result;
}
@Override
public void add(RepositorySystemSession session, LocalMetadataRegistration request) {
delegate.add(session, request);
}
private Path getMetadataPath(Path repoHome, String groupId, String artifactId, String type, String version) {
Path p = repoHome;
final String[] groupParts = groupId.split("\\.");
for (String part : groupParts) {
p = p.resolve(part);
}
if (artifactId != null) {
p = p.resolve(artifactId);
}
if (version != null) {
p = p.resolve(version);
}
return p.resolve("maven-metadata-local.xml");
}
private Path getLocalPath(Path repoHome, String groupId, String artifactId, String classifier, String type,
String version) {
Path p = repoHome;
final String[] groupParts = groupId.split("\\.");
for (String part : groupParts) {
p = p.resolve(part);
}
final StringBuilder fileName = new StringBuilder();
fileName.append(artifactId).append('-').append(version);
if (classifier != null && !classifier.isEmpty()) {
fileName.append('-').append(classifier);
}
fileName.append('.').append(type);
return p.resolve(artifactId).resolve(version).resolve(fileName.toString());
}
}
| MavenLocalRepositoryManager |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/common/CommonExecCalc.java | {
"start": 2368,
"end": 5090
} | class ____ extends ExecNodeBase<RowData>
implements SingleTransformationTranslator<RowData> {
public static final String CALC_TRANSFORMATION = "calc";
public static final String FIELD_NAME_PROJECTION = "projection";
public static final String FIELD_NAME_CONDITION = "condition";
@JsonProperty(FIELD_NAME_PROJECTION)
protected final List<RexNode> projection;
@JsonProperty(FIELD_NAME_CONDITION)
protected final @Nullable RexNode condition;
private final Class<?> operatorBaseClass;
private final boolean retainHeader;
protected CommonExecCalc(
int id,
ExecNodeContext context,
ReadableConfig persistedConfig,
List<RexNode> projection,
@Nullable RexNode condition,
Class<?> operatorBaseClass,
boolean retainHeader,
List<InputProperty> inputProperties,
RowType outputType,
String description) {
super(id, context, persistedConfig, inputProperties, outputType, description);
checkArgument(inputProperties.size() == 1);
this.projection = checkNotNull(projection);
this.condition = condition;
this.operatorBaseClass = checkNotNull(operatorBaseClass);
this.retainHeader = retainHeader;
}
@SuppressWarnings("unchecked")
@Override
protected Transformation<RowData> translateToPlanInternal(
PlannerBase planner, ExecNodeConfig config) {
final ExecEdge inputEdge = getInputEdges().get(0);
final Transformation<RowData> inputTransform =
(Transformation<RowData>) inputEdge.translateToPlan(planner);
final CodeGeneratorContext ctx =
new CodeGeneratorContext(config, planner.getFlinkContext().getClassLoader())
.setOperatorBaseClass(operatorBaseClass);
final CodeGenOperatorFactory<RowData> substituteStreamOperator =
CalcCodeGenerator.generateCalcOperator(
ctx,
inputTransform,
(RowType) getOutputType(),
JavaScalaConversionUtil.toScala(projection),
JavaScalaConversionUtil.toScala(Optional.ofNullable(this.condition)),
retainHeader,
getClass().getSimpleName());
return ExecNodeUtil.createOneInputTransformation(
inputTransform,
createTransformationMeta(CALC_TRANSFORMATION, config),
substituteStreamOperator,
InternalTypeInfo.of(getOutputType()),
inputTransform.getParallelism(),
false);
}
}
| CommonExecCalc |
java | apache__camel | dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java | {
"start": 779775,
"end": 784414
} | class ____ extends YamlDeserializerBase<PropertyExpressionDefinition> {
public PropertyExpressionDefinitionDeserializer() {
super(PropertyExpressionDefinition.class);
}
@Override
protected PropertyExpressionDefinition newInstance() {
return new PropertyExpressionDefinition();
}
@Override
protected boolean setProperty(PropertyExpressionDefinition target, String propertyKey,
String propertyName, Node node) {
propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey);
switch(propertyKey) {
case "expression": {
org.apache.camel.model.language.ExpressionDefinition val = asType(node, org.apache.camel.model.language.ExpressionDefinition.class);
target.setExpression(val);
break;
}
case "key": {
String val = asText(node);
target.setKey(val);
break;
}
default: {
ExpressionDefinition ed = target.getExpressionType();
if (ed != null) {
throw new org.apache.camel.dsl.yaml.common.exception.DuplicateFieldException(node, propertyName, "as an expression");
}
ed = ExpressionDeserializers.constructExpressionType(propertyKey, node);
if (ed != null) {
target.setExpressionType(ed);
} else {
return false;
}
}
}
return true;
}
}
@YamlType(
nodes = "protobuf",
inline = true,
types = org.apache.camel.model.dataformat.ProtobufDataFormat.class,
order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1,
displayName = "Protobuf",
description = "Serialize and deserialize Java objects using Google's Protocol buffers.",
deprecated = false,
properties = {
@YamlProperty(name = "allowJmsType", type = "boolean", defaultValue = "false", description = "Used for JMS users to allow the JMSType header from the JMS spec to specify a FQN classname to use to unmarshal to.", displayName = "Allow Jms Type"),
@YamlProperty(name = "allowUnmarshallType", type = "boolean", defaultValue = "false", description = "If enabled then Jackson is allowed to attempt to use the CamelJacksonUnmarshalType header during the unmarshalling. This should only be enabled when desired to be used.", displayName = "Allow Unmarshall Type"),
@YamlProperty(name = "autoDiscoverObjectMapper", type = "boolean", defaultValue = "false", description = "If set to true then Jackson will lookup for an objectMapper into the registry", displayName = "Auto Discover Object Mapper"),
@YamlProperty(name = "autoDiscoverSchemaResolver", type = "boolean", defaultValue = "true", description = "When not disabled, the SchemaResolver will be looked up into the registry", displayName = "Auto Discover Schema Resolver"),
@YamlProperty(name = "collectionType", type = "string", description = "Refers to a custom collection type to lookup in the registry to use. This option should rarely be used, but allows to use different collection types than java.util.Collection based as default.", displayName = "Collection Type"),
@YamlProperty(name = "contentTypeFormat", type = "enum:native,json", defaultValue = "native", description = "Defines a content type format in which protobuf message will be serialized/deserialized from(to) the Java been. The format can either be native or json for either native protobuf or json fields representation. The default value is native.", displayName = "Content Type Format"),
@YamlProperty(name = "contentTypeHeader", type = "boolean", defaultValue = "true", description = "Whether the data format should set the Content-Type header with the type from the data format. For example application/xml for data formats marshalling to XML, or application/json for data formats marshalling to JSON", displayName = "Content Type Header"),
@YamlProperty(name = "disableFeatures", type = "string", description = "Set of features to disable on the Jackson com.fasterxml.jackson.databind.ObjectMapper. The features should be a name that matches a | PropertyExpressionDefinitionDeserializer |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/LocalizationProtocolPB.java | {
"start": 1126,
"end": 1218
} | interface ____ extends LocalizationProtocolService.BlockingInterface {
}
| LocalizationProtocolPB |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-jackson/deployment/src/main/java/io/quarkus/resteasy/reactive/jackson/deployment/processor/JacksonFeatureBuildItem.java | {
"start": 467,
"end": 570
} | enum ____ {
JSON_VIEW,
CUSTOM_SERIALIZATION,
CUSTOM_DESERIALIZATION
}
}
| Feature |
java | reactor__reactor-core | reactor-core-micrometer/src/main/java/reactor/core/observability/micrometer/MicrometerObservationListener.java | {
"start": 2548,
"end": 9941
} | class ____ for io.micrometer.context.ThreadLocalAccessor not found}
* in reactor-netty while compiling a similar arrangement. A unit test in MicrometerTest acts as a smoke test in case
* micrometer-observation's {@code ObservationThreadLocalAccessor.KEY} changes to something else.
*/
static final String CONTEXT_KEY_OBSERVATION = "micrometer.observation";
final MicrometerObservationListenerConfiguration configuration;
final ContextView originalContext;
final Observation tapObservation;
@Nullable Context contextWithObservation;
boolean valued;
volatile int status = STATUS_INCOMPLETE;
@SuppressWarnings("rawtypes")
static final AtomicIntegerFieldUpdater<MicrometerObservationListener> STATUS_UPDATER =
AtomicIntegerFieldUpdater.newUpdater(MicrometerObservationListener.class, "status");
static final int STATUS_INCOMPLETE = 0;
static final int STATUS_CANCELLED = 1;
static final int STATUS_COMPLETED_IN_ON_NEXT = 2;
static final int STATUS_COMPLETED = 3;
static final int STATUS_ERROR = 4;
MicrometerObservationListener(ContextView subscriberContext, MicrometerObservationListenerConfiguration configuration) {
this(subscriberContext, configuration, null);
}
MicrometerObservationListener(ContextView subscriberContext,
MicrometerObservationListenerConfiguration configuration,
@Nullable Function<ObservationRegistry, Observation> observationSupplier) {
this.configuration = configuration;
this.originalContext = subscriberContext;
//creation of the listener matches subscription (Publisher.subscribe(Subscriber) / doFirst)
//while doOnSubscription matches the moment where the Publisher acknowledges said subscription
//NOTE: we don't use the `DocumentedObservation` features to create the Observation, even for the ANONYMOUS case,
//because the discovered tags could be more than the documented defaults
tapObservation = supplyOrCreateObservation(configuration, observationSupplier)
.lowCardinalityKeyValues(configuration.commonKeyValues);
}
private static Observation supplyOrCreateObservation(
MicrometerObservationListenerConfiguration configuration,
@Nullable Function<ObservationRegistry, Observation> observationSupplier) {
if (observationSupplier != null) {
final Observation observation = observationSupplier.apply(configuration.registry);
if (observation != null) {
if (observation.getContext().getContextualName() != null) {
return observation;
}
else {
return observation.contextualName(configuration.sequenceName);
}
}
}
return Observation.createNotStarted(configuration.sequenceName, configuration.registry)
.contextualName(configuration.sequenceName);
}
@Override
public void doFirst() {
/* Implementation note on using parentObservation vs openScope:
Opening a Scope is never necessary in this tap listener, because the Observation we create is stored in
the Context the tap operator will expose to upstream, rather than via ThreadLocal population.
We also make a best-effort attempt to discover such an Observation in the context here in doFirst, so that this
can explicitly be used as the parentObservation. At this point, if none is found we take also the opportunity
of checking if the registry has a currentObservation.
As a consequence, fanout (eg. with a `flatMap`) upstream of the tap should be able to see the current Observation
in the context and the inner publishers should inherit it as their parent observation if they also use `tap(Micrometer.observation())`.
Note that Reactor's threading model doesn't generally guarantee that doFirst and doOnNext/doOnComplete/doOnError run
in the same thread, and that's the main reason why Scopes are avoided here (as their sole purpose is to set up
Thread Local variables).
*/
Observation o;
Observation p;
if (this.originalContext.hasKey(CONTEXT_KEY_OBSERVATION)) {
p = this.originalContext.get(CONTEXT_KEY_OBSERVATION);
}
else {
p = this.configuration.registry.getCurrentObservation();
}
if (p != null) {
o = this.tapObservation
.parentObservation(p)
.start();
}
else {
o = this.tapObservation.start();
}
this.contextWithObservation = Context.of(this.originalContext)
.put(CONTEXT_KEY_OBSERVATION, o);
}
@Override
public Context addToContext(Context originalContext) {
if (this.originalContext != originalContext) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("addToContext call on Observation {} with unexpected originalContext {}",
this.tapObservation, originalContext);
}
return originalContext;
}
if (this.contextWithObservation == null) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("addToContext call on Observation {} before contextWithScope is set",
this.tapObservation);
}
return originalContext;
}
return contextWithObservation;
}
@Override
public void doOnCancel() {
if (STATUS_UPDATER.compareAndSet(this, STATUS_INCOMPLETE, STATUS_CANCELLED)) {
Observation observation =
tapObservation.lowCardinalityKeyValue(STATUS.asString(),
TAG_STATUS_CANCELLED);
observation.stop();
}
}
@Override
public void doOnComplete() {
// The comparison can fail if the Publisher was terminated by error,
// cancellation, or recording with OnComplete tag was done directly in onNext for
// the Mono(valued) case
if (STATUS_UPDATER.compareAndSet(this, STATUS_INCOMPLETE, STATUS_COMPLETED)) {
// We differentiate between empty completion and value completion via tags.
String status = valued ? TAG_STATUS_COMPLETED : TAG_STATUS_COMPLETED_EMPTY;
Observation completeObservation = tapObservation
.lowCardinalityKeyValue(STATUS.asString(), status);
completeObservation.stop();
}
}
@Override
public void doOnError(Throwable e) {
if (STATUS_UPDATER.compareAndSet(this, STATUS_INCOMPLETE, STATUS_ERROR)) {
Observation errorObservation =
tapObservation.lowCardinalityKeyValue(STATUS.asString(), TAG_STATUS_ERROR)
.error(e);
errorObservation.stop();
}
}
@Override
public void doOnNext(T t) {
valued = true;
if (configuration.isMono
&& STATUS_UPDATER.compareAndSet(this, STATUS_INCOMPLETE, STATUS_COMPLETED_IN_ON_NEXT)) {
//record valued completion directly
Observation completeObservation = tapObservation
.lowCardinalityKeyValue(STATUS.asString(), TAG_STATUS_COMPLETED);
completeObservation.stop();
}
}
@Override
public void handleListenerError(Throwable listenerError) {
LOGGER.error("unhandled listener error", listenerError);
}
//unused hooks
@Override
public void doOnSubscription() {
// NO-OP. We rather initialize everything in `doFirst`, as it is closer to actual Publisher.subscriber call
// and gives us a chance to store the Scope in the SignalListener's context.
}
@Override
public void doOnMalformedOnComplete() {
//NO-OP
}
@Override
public void doOnMalformedOnError(Throwable e) {
// NO-OP
}
@Override
public void doOnMalformedOnNext(T value) {
// NO-OP
}
@Override
public void doOnRequest(long l) {
// NO-OP
}
@Override
public void doOnFusion(int negotiatedFusion) {
// NO-OP
}
@Override
public void doFinally(SignalType terminationType) {
// NO-OP
}
@Override
public void doAfterComplete() {
// NO-OP
}
@Override
public void doAfterError(Throwable error) {
// NO-OP
}
}
| file |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/graph/LoadAndFetchGraphAssociationNotExplicitlySpecifiedTest.java | {
"start": 2213,
"end": 10616
} | class ____ {
@BeforeEach
void init(SessionFactoryScope scope) {
scope.inTransaction( session -> {
for ( long i = 0; i < 3; ++i ) {
RootEntity root = new RootEntity( i * 100 );
long j = i * 100;
root.setLazyOneToOneOwned( new ContainedEntity( ++j ) );
root.setLazyManyToOneOwned( new ContainedEntity( ++j ) );
root.setEagerOneToOneOwned( new ContainedEntity( ++j ) );
root.setEagerManyToOneOwned( new ContainedEntity( ++j ) );
session.persist( root );
ContainedEntity contained;
contained = new ContainedEntity( ++j );
root.setLazyOneToOneUnowned( contained );
contained.setInverseSideOfLazyOneToOneUnowned( root );
session.persist( contained );
contained = new ContainedEntity( ++j );
root.setEagerOneToOneUnowned( contained );
contained.setInverseSideOfEagerOneToOneUnowned( root );
session.persist( contained );
}
} );
}
@AfterEach
void cleanUp(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
// Arguments for the parameterized test below
List<Arguments> queryWithEntityGraph() {
List<Arguments> args = new ArrayList<>();
for ( GraphSemantic graphSemantic : GraphSemantic.values() ) {
for ( String propertyName : RootEntity.LAZY_PROPERTY_NAMES ) {
args.add( Arguments.of( graphSemantic, propertyName ) );
}
for ( String propertyName : RootEntity.EAGER_PROPERTY_NAMES ) {
args.add( Arguments.of( graphSemantic, propertyName ) );
}
}
// Also test without a graph, for reference
args.add( Arguments.of( null, null ) );
return args;
}
@Test
public void testWithFetchGraph(SessionFactoryScope scope) {
String propertySpecifiedInGraph = "eagerOneToOneOwned";
scope.inTransaction( session -> {
var sqlStatementInspector = scope.getCollectingStatementInspector();
sqlStatementInspector.clear();
var query = session.createQuery( "select e from RootEntity e where id in (:ids)", RootEntity.class )
.setFetchSize( 100 )
// Selecting multiple entities to make sure we don't have side effects (e.g. some context shared across entity instances)
.setParameter( "ids", List.of( 0L, 100L, 200L ) );
var graph = session.createEntityGraph( RootEntity.class );
graph.addAttributeNode( propertySpecifiedInGraph );
query.applyGraph( graph, FETCH );
var resultList = query.list();
assertThat( resultList ).isNotEmpty();
for ( String propertyName : RootEntity.LAZY_PROPERTY_NAMES ) {
var expectInitialized = propertyName.equals( propertySpecifiedInGraph );
assertAssociationInitialized( resultList, propertyName, expectInitialized, sqlStatementInspector );
}
for ( String propertyName : RootEntity.EAGER_PROPERTY_NAMES ) {
var expectInitialized = propertyName.equals( propertySpecifiedInGraph );
assertAssociationInitialized( resultList, propertyName, expectInitialized, sqlStatementInspector );
}
} );
}
@ParameterizedTest
@MethodSource
public void queryWithEntityGraph(GraphSemantic graphSemantic, String propertySpecifiedInGraph, SessionFactoryScope scope) {
scope.inTransaction( session -> {
var sqlStatementInspector = scope.getCollectingStatementInspector();
sqlStatementInspector.clear();
var query = session.createQuery( "select e from RootEntity e where id in (:ids)", RootEntity.class )
.setFetchSize( 100 )
// Selecting multiple entities to make sure we don't have side effects (e.g. some context shared across entity instances)
.setParameter( "ids", List.of( 0L, 100L, 200L ) );
if ( graphSemantic != null ) {
var graph = session.createEntityGraph( RootEntity.class );
graph.addAttributeNode( propertySpecifiedInGraph );
query.applyGraph( graph, graphSemantic );
} // else just run the query without a graph
var resultList = query.list();
assertThat( resultList ).isNotEmpty();
for ( String propertyName : RootEntity.LAZY_PROPERTY_NAMES ) {
var expectInitialized = propertyName.equals( propertySpecifiedInGraph );
assertAssociationInitialized( resultList, propertyName, expectInitialized, sqlStatementInspector );
}
for ( String propertyName : RootEntity.EAGER_PROPERTY_NAMES ) {
var expectInitialized = propertyName.equals( propertySpecifiedInGraph )
// Under LOAD semantics, or when not using graphs,
// eager properties also get loaded (even if not specified in the graph).
|| GraphSemantic.LOAD.equals( graphSemantic ) || graphSemantic == null;
assertAssociationInitialized( resultList, propertyName, expectInitialized, sqlStatementInspector );
}
} );
}
private void assertAssociationInitialized(
List<RootEntity> resultList,
String propertyName,
boolean expectInitialized,
SQLStatementInspector sqlStatementInspector) {
for ( var rootEntity : resultList ) {
sqlStatementInspector.clear();
if ( propertyName.endsWith( "Unowned" ) ) {
final Supplier<ContainedEntity> supplier;
switch ( propertyName ) {
case ( "lazyOneToOneUnowned" ):
supplier = () -> rootEntity.getLazyOneToOneUnowned();
break;
case ( "eagerOneToOneUnowned" ):
supplier = () -> rootEntity.getEagerOneToOneUnowned();
break;
default:
supplier = null;
fail( "unknown association property name : " + propertyName );
}
assertUnownedAssociationLazyness(
supplier,
rootEntity,
propertyName,
expectInitialized,
sqlStatementInspector
);
}
else {
final Supplier<ContainedEntity> supplier;
switch ( propertyName ) {
case "lazyOneToOneOwned":
supplier = () -> rootEntity.getLazyOneToOneOwned();
break;
case "lazyManyToOneOwned":
supplier = () -> rootEntity.getLazyManyToOneOwned();
break;
case "eagerOneToOneOwned":
supplier = () -> rootEntity.getEagerOneToOneOwned();
break;
case "eagerManyToOneOwned":
supplier = () -> rootEntity.getEagerManyToOneOwned();
break;
default:
supplier = null;
fail( "unknown association property name : " + propertyName );
}
assertOwnedAssociationLazyness(
supplier,
propertyName,
expectInitialized,
sqlStatementInspector
);
}
}
}
private static void assertUnownedAssociationLazyness(
Supplier<ContainedEntity> associationSupplier,
RootEntity rootEntity,
String associationName,
boolean expectInitialized,
SQLStatementInspector sqlStatementInspector) {
// for an unowned lazy association the value is null and accessing the association triggers its initialization
assertThat( Hibernate.isPropertyInitialized( rootEntity, associationName ) )
.as( associationName + " association expected to be initialized ? expected is :" + expectInitialized + " but it's not " )
.isEqualTo( expectInitialized );
if ( !expectInitialized ) {
var containedEntity = associationSupplier.get();
sqlStatementInspector.assertExecutedCount( 1 );
assertThat( Hibernate.isInitialized( containedEntity ) );
sqlStatementInspector.clear();
assertThat( containedEntity ).isNotNull();
associationSupplier.get().getName();
sqlStatementInspector.assertExecutedCount( 0 );
}
}
private static void assertOwnedAssociationLazyness(
Supplier<ContainedEntity> associationSupplier,
String associationName,
boolean expectInitialized,
SQLStatementInspector sqlStatementInspector) {
// for an owned lazy association the value is an enhanced proxy, Hibernate.isPropertyInitialized( rootEntity, "lazyManyToOneOwned" ) returns true.
// accessing the association does not trigger its initialization
assertThat( Hibernate.isInitialized( associationSupplier.get() ) )
.as( associationName + " association expected to be initialized ? expected is :" + expectInitialized + " but it's not " )
.isEqualTo( expectInitialized );
if ( !expectInitialized ) {
var containedEntity = associationSupplier.get();
sqlStatementInspector.assertExecutedCount( 0 );
containedEntity.getName();
sqlStatementInspector.assertExecutedCount( 1 );
assertThat( Hibernate.isInitialized( containedEntity ) ).isTrue();
sqlStatementInspector.clear();
assertThat( containedEntity ).isNotNull();
associationSupplier.get().getName();
sqlStatementInspector.assertExecutedCount( 0 );
}
}
@Entity(name = "RootEntity")
static | LoadAndFetchGraphAssociationNotExplicitlySpecifiedTest |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/util/ScoreSystem.java | {
"start": 2449,
"end": 4657
} | class ____ {
public final String message;
public final int score;
public Diagnostic(String message, int percentageScore) {
this.message = message;
this.score = percentageScore;
}
@Override
public String toString() {
return message + ": " + score + "/100";
}
public static Diagnostic ExecutionNonBlocking = new Diagnostic("Dispatched on the IO thread", 100);
public static Diagnostic ExecutionBlocking = new Diagnostic("Relies on a blocking worker thread", 0);
public static Diagnostic ExecutionVirtualThread = new Diagnostic("Relies on a virtual thread", 66);
public static Diagnostic ResourceSingleton = new Diagnostic("Single resource instance for all requests", 100);
public static Diagnostic ResourcePerRequest = new Diagnostic("New resource instance for every request", 0);
public static Diagnostic WriterBuildTime(MessageBodyWriter<?> buildTimeWriter) {
return new Diagnostic("Single writer set at build time: " + buildTimeWriter.getClass().getName(), 90);
}
public static Diagnostic WriterBuildTimeDirect(MessageBodyWriter<?> buildTimeWriter) {
return new Diagnostic("Single direct writer set at build time: " + buildTimeWriter.getClass().getName(), 100);
}
public static Diagnostic WriterBuildTimeMultiple(List<MessageBodyWriter<?>> buildTimeWriters) {
return new Diagnostic("Multiple writers set at build time: [" + getClassNames(buildTimeWriters) + "]", 50);
}
private static String getClassNames(List<MessageBodyWriter<?>> buildTimeWriters) {
List<String> classNames = new ArrayList<>(buildTimeWriters.size());
for (MessageBodyWriter<?> buildTimeWriter : buildTimeWriters) {
classNames.add(buildTimeWriter.getClass().getName());
}
return String.join(", ", classNames);
}
public static Diagnostic WriterRunTime = new Diagnostic("Run time writers required", 0);
public static Diagnostic WriterNotRequired = new Diagnostic("No writers required", 100);
}
public | Diagnostic |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsClientConfigKeys.java | {
"start": 21605,
"end": 22150
} | interface ____ {
String PREFIX = BlockWrite.PREFIX + "replace-datanode-on-failure.";
String ENABLE_KEY = PREFIX + "enable";
boolean ENABLE_DEFAULT = true;
String POLICY_KEY = PREFIX + "policy";
String POLICY_DEFAULT = "DEFAULT";
String BEST_EFFORT_KEY = PREFIX + "best-effort";
boolean BEST_EFFORT_DEFAULT = false;
String MIN_REPLICATION = PREFIX + "min-replication";
short MIN_REPLICATION_DEFAULT = 0;
}
}
/** dfs.client.read configuration properties */
| ReplaceDatanodeOnFailure |
java | playframework__playframework | persistence/play-java-jpa/src/main/java/play/db/jpa/JPAApi.java | {
"start": 316,
"end": 2578
} | interface ____ {
/**
* Initialise JPA entity manager factories.
*
* @return JPAApi instance
*/
JPAApi start();
/**
* Get a newly created EntityManager for the specified persistence unit name.
*
* @param name The persistence unit name
* @return EntityManager for the specified persistence unit name
*/
EntityManager em(String name);
/**
* Run a block of code with a newly created EntityManager for the default Persistence Unit.
*
* @param block Block of code to execute
* @param <T> type of result
* @return code execution result
*/
<T> T withTransaction(Function<EntityManager, T> block);
/**
* Run a block of code with a newly created EntityManager for the default Persistence Unit.
*
* @param block Block of code to execute
*/
void withTransaction(Consumer<EntityManager> block);
/**
* Run a block of code with a newly created EntityManager for the named Persistence Unit.
*
* @param name The persistence unit name
* @param block Block of code to execute
* @param <T> type of result
* @return code execution result
*/
<T> T withTransaction(String name, Function<EntityManager, T> block);
/**
* Run a block of code with a newly created EntityManager for the named Persistence Unit.
*
* @param name The persistence unit name
* @param block Block of code to execute
*/
void withTransaction(String name, Consumer<EntityManager> block);
/**
* Run a block of code with a newly created EntityManager for the named Persistence Unit.
*
* @param name The persistence unit name
* @param readOnly Is the transaction read-only?
* @param block Block of code to execute
* @param <T> type of result
* @return code execution result
*/
<T> T withTransaction(String name, boolean readOnly, Function<EntityManager, T> block);
/**
* Run a block of code with a newly created EntityManager for the named Persistence Unit.
*
* @param name The persistence unit name
* @param readOnly Is the transaction read-only?
* @param block Block of code to execute
*/
void withTransaction(String name, boolean readOnly, Consumer<EntityManager> block);
/** Close all entity manager factories. */
void shutdown();
}
| JPAApi |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/lang/TestRunnableCallable.java | {
"start": 1084,
"end": 1215
} | class ____ implements Runnable {
boolean RUN;
@Override
public void run() {
RUN = true;
}
}
public static | R |
java | apache__dubbo | dubbo-plugin/dubbo-native/src/main/java/org/apache/dubbo/aot/api/ExecutableMode.java | {
"start": 778,
"end": 1372
} | enum ____ {
/**
* Only retrieving the {@link Executable} and its metadata is required.
*/
INTROSPECT,
/**
* Full reflection support is required, including the ability to invoke
* the {@link Executable}.
*/
INVOKE;
/**
* Specify if this mode already includes the specified {@code other} mode.
* @param other the other mode to check
* @return {@code true} if this mode includes the other mode
*/
boolean includes(ExecutableMode other) {
return (other == null || this.ordinal() >= other.ordinal());
}
}
| ExecutableMode |
java | apache__camel | components/camel-debezium/camel-debezium-common/camel-debezium-common-component/src/test/java/org/apache/camel/component/debezium/configuration/FileConnectorEmbeddedDebeziumConfiguration.java | {
"start": 1004,
"end": 2428
} | class ____ extends EmbeddedDebeziumConfiguration {
private Path testFilePath;
private String topicConfig;
@Override
protected Configuration createConnectorConfiguration() {
return Configuration.create()
.with(FileStreamSourceConnector.FILE_CONFIG, testFilePath)
.with(FileStreamSourceConnector.TOPIC_CONFIG, topicConfig)
.build();
}
@Override
protected ConfigurationValidation validateConnectorConfiguration() {
if (isFieldValueNotSet(testFilePath)) {
return ConfigurationValidation.notValid("testFilePath is not set");
}
if (isFieldValueNotSet(topicConfig)) {
return ConfigurationValidation.notValid("topicConfig is not set");
}
return ConfigurationValidation.valid();
}
@Override
public String getConnectorDatabaseType() {
return "file";
}
@Override
protected Class<?> configureConnectorClass() {
return FileStreamSourceConnector.class;
}
public Path getTestFilePath() {
return testFilePath;
}
public void setTestFilePath(Path testFilePath) {
this.testFilePath = testFilePath;
}
public String getTopicConfig() {
return topicConfig;
}
public void setTopicConfig(String topicConfig) {
this.topicConfig = topicConfig;
}
}
| FileConnectorEmbeddedDebeziumConfiguration |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java | {
"start": 1454,
"end": 3472
} | class ____ extends TransportMasterNodeAction<CreateSnapshotRequest, CreateSnapshotResponse> {
public static final ActionType<CreateSnapshotResponse> TYPE = new ActionType<>("cluster:admin/snapshot/create");
private final SnapshotsService snapshotsService;
private final ProjectResolver projectResolver;
@Inject
public TransportCreateSnapshotAction(
TransportService transportService,
ClusterService clusterService,
ThreadPool threadPool,
SnapshotsService snapshotsService,
ActionFilters actionFilters,
ProjectResolver projectResolver
) {
super(
TYPE.name(),
transportService,
clusterService,
threadPool,
actionFilters,
CreateSnapshotRequest::new,
CreateSnapshotResponse::new,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
this.snapshotsService = snapshotsService;
this.projectResolver = projectResolver;
}
@Override
protected ClusterBlockException checkBlock(CreateSnapshotRequest request, ClusterState state) {
// We only check metadata block, as we want to snapshot closed indices (which have a read block)
return state.blocks().globalBlockedException(projectResolver.getProjectId(), ClusterBlockLevel.METADATA_READ);
}
@Override
protected void masterOperation(
Task task,
final CreateSnapshotRequest request,
ClusterState state,
final ActionListener<CreateSnapshotResponse> listener
) {
if (request.waitForCompletion()) {
snapshotsService.executeSnapshot(projectResolver.getProjectId(), request, listener.map(CreateSnapshotResponse::new));
} else {
snapshotsService.createSnapshot(
projectResolver.getProjectId(),
request,
listener.map(snapshot -> new CreateSnapshotResponse((SnapshotInfo) null))
);
}
}
}
| TransportCreateSnapshotAction |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/MultiVariableDeclarationTest.java | {
"start": 3295,
"end": 3780
} | class ____ {
{
int a = 1;
}
int x = 1;
int y = 2;
{
int a = 1;
}
}
""")
.doTest(TEXT_MATCH);
}
@Test
public void positiveCinit() {
BugCheckerRefactoringTestHelper.newInstance(MultiVariableDeclaration.class, getClass())
.addInputLines(
"in/A.java",
"""
package a;
public | A |
java | quarkusio__quarkus | devtools/cli-common/src/main/java/io/quarkus/cli/common/BuildToolContext.java | {
"start": 432,
"end": 1816
} | class ____ {
private final Path projectRoot;
private final RunModeOption runModeOption;
private final BuildOptions buildOptions;
private final PropertiesOptions propertiesOptions;
private final List<String> forcedExtensions;
private final List<String> params;
public BuildToolContext(Path projectRoot, RunModeOption runModeOption, BuildOptions buildOptions,
PropertiesOptions propertiesOptions, List<String> forcedExtensions, List<String> params) {
this.projectRoot = projectRoot;
this.runModeOption = runModeOption;
this.buildOptions = buildOptions;
this.propertiesOptions = propertiesOptions;
this.forcedExtensions = forcedExtensions;
this.params = params;
}
public Path getProjectRoot() {
return projectRoot;
}
public RunModeOption getRunModeOption() {
return runModeOption;
}
public BuildOptions getBuildOptions() {
return buildOptions;
}
public PropertiesOptions getPropertiesOptions() {
return propertiesOptions;
}
public List<String> getForcedExtensions() {
return forcedExtensions;
}
public List<String> getParams() {
return params;
}
public BuildTool getBuildTool() {
return QuarkusProjectHelper.detectExistingBuildTool(projectRoot); // nullable
}
}
| BuildToolContext |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/insertordering/InsertOrderingSelfReferenceTest.java | {
"start": 3448,
"end": 3848
} | class ____ extends Parameter {
@ManyToOne(fetch = FetchType.LAZY)
InputParameter parent;
@OneToMany(cascade = CascadeType.ALL, orphanRemoval = true, mappedBy = "parent")
@SortNatural
@SQLRestriction("TYPE = 'INPUT'")
@Fetch(FetchMode.SUBSELECT)
List<InputParameter> children = new ArrayList<>();
}
@Entity(name = "OutputParameter")
@DiscriminatorValue("OUTPUT")
static | InputParameter |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.