language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/request/completion/GoogleAiStudioCompletionRequestEntityTests.java | {
"start": 848,
"end": 1888
} | class ____ extends ESTestCase {
public void testToXContent_WritesSingleMessage() throws IOException {
var entity = new GoogleAiStudioCompletionRequestEntity(List.of("input"));
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
entity.toXContent(builder, null);
String xContentResult = Strings.toString(builder);
assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString("""
{
"contents": [
{
"parts": [
{
"text":"input"
}
],
"role": "user"
}
],
"generationConfig": {
"candidateCount": 1
}
}"""));
}
}
| GoogleAiStudioCompletionRequestEntityTests |
java | quarkusio__quarkus | extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/MultiplePersistenceProviderResolver.java | {
"start": 251,
"end": 974
} | class ____ implements PersistenceProviderResolver {
private final List<PersistenceProvider> persistenceProviders = new ArrayList<>();
public MultiplePersistenceProviderResolver(PersistenceProvider... persistenceProviders) {
this.persistenceProviders.addAll(List.of(persistenceProviders));
}
@Override
public List<PersistenceProvider> getPersistenceProviders() {
return Collections.unmodifiableList(persistenceProviders);
}
public void addPersistenceProvider(PersistenceProvider persistenceProvider) {
persistenceProviders.add(persistenceProvider);
}
@Override
public void clearCachedProviders() {
// done!
}
}
| MultiplePersistenceProviderResolver |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsResponse.java | {
"start": 1227,
"end": 4269
} | class ____ implements Writeable {
private final String index;
private final String id;
private final Exception cause;
public Failure(String index, String id, Exception cause) {
this.index = index;
this.id = id;
this.cause = cause;
}
public Failure(StreamInput in) throws IOException {
index = in.readString();
if (in.getTransportVersion().before(TransportVersions.V_8_0_0)) {
// types no longer relevant so ignore
String type = in.readOptionalString();
if (type != null) {
throw new IllegalStateException("types are no longer supported but found [" + type + "]");
}
}
id = in.readString();
cause = in.readException();
}
/**
* The index name of the action.
*/
public String getIndex() {
return this.index;
}
/**
* The id of the action.
*/
public String getId() {
return id;
}
/**
* The failure cause.
*/
public Exception getCause() {
return this.cause;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(index);
if (out.getTransportVersion().before(TransportVersions.V_8_0_0)) {
// types not supported so send an empty array to previous versions
out.writeOptionalString(null);
}
out.writeString(id);
out.writeException(cause);
}
}
private final MultiTermVectorsItemResponse[] responses;
public MultiTermVectorsResponse(MultiTermVectorsItemResponse[] responses) {
this.responses = responses;
}
public MultiTermVectorsItemResponse[] getResponses() {
return this.responses;
}
@Override
public Iterator<MultiTermVectorsItemResponse> iterator() {
return Iterators.forArray(responses);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.startArray(Fields.DOCS);
for (MultiTermVectorsItemResponse response : responses) {
if (response.isFailed()) {
builder.startObject();
Failure failure = response.getFailure();
builder.field(Fields._INDEX, failure.getIndex());
builder.field(Fields._ID, failure.getId());
ElasticsearchException.generateFailureXContent(builder, params, failure.getCause(), true);
builder.endObject();
} else {
TermVectorsResponse getResponse = response.getResponse();
getResponse.toXContent(builder, params);
}
}
builder.endArray();
builder.endObject();
return builder;
}
static final | Failure |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java | {
"start": 27404,
"end": 27777
} | class ____ extends Scorable {
float score;
@Override
public float score() {
return score;
}
}
/**
* Query that allows to artificially simulate a timeout error thrown at different stages during the execution of the query.
* Used in combination with {@link MatchAllWeight}.
*/
private abstract static | Score |
java | elastic__elasticsearch | build-conventions/src/main/java/org/elasticsearch/gradle/internal/checkstyle/MissingJavadocTypeCheck.java | {
"start": 1868,
"end": 5499
} | class ____ extends AbstractCheck {
/**
* A key is pointing to the warning message text in "messages.properties"
* file.
*/
public static final String MSG_JAVADOC_MISSING = "javadoc.missing";
/** Specify the visibility scope where Javadoc comments are checked. */
private Scope scope = Scope.PUBLIC;
/** Specify the visibility scope where Javadoc comments are not checked. */
private Scope excludeScope;
/** Specify pattern for types to ignore. */
private Pattern ignorePattern = Pattern.compile("^$");
/**
* Specify the set of annotations that allow missed documentation.
* Only short names are allowed, e.g. {@code Generated}.
*/
private Set<String> skipAnnotations = Set.of("Generated");
/**
* Setter to specify the visibility scope where Javadoc comments are checked.
*
* @param scope a scope.
*/
public void setScope(Scope scope) {
this.scope = scope;
}
/**
* Setter to specify the visibility scope where Javadoc comments are not checked.
*
* @param excludeScope a scope.
*/
public void setExcludeScope(Scope excludeScope) {
this.excludeScope = excludeScope;
}
/**
* Setter to specify the list of annotations that allow missed documentation.
* Only short names are allowed, e.g. {@code Generated}.
*
* @param userAnnotations user's value.
*/
public void setSkipAnnotations(String... userAnnotations) {
skipAnnotations = Arrays.stream(userAnnotations).collect(Collectors.toSet());
}
/**
* Setter to specify pattern for types to ignore.
*
* @param pattern a pattern.
*/
public final void setIgnorePattern(Pattern pattern) {
ignorePattern = pattern;
}
@Override
public int[] getDefaultTokens() {
return getAcceptableTokens();
}
@Override
public int[] getAcceptableTokens() {
return new int[] {
TokenTypes.INTERFACE_DEF,
TokenTypes.CLASS_DEF,
TokenTypes.ENUM_DEF,
TokenTypes.ANNOTATION_DEF,
TokenTypes.RECORD_DEF };
}
@Override
public int[] getRequiredTokens() {
return CommonUtil.EMPTY_INT_ARRAY;
}
// suppress deprecation until https://github.com/checkstyle/checkstyle/issues/11166
@SuppressWarnings("deprecation")
@Override
public void visitToken(DetailAST ast) {
if (shouldCheck(ast)) {
final FileContents contents = getFileContents();
final int lineNo = ast.getLineNo();
final TextBlock textBlock = contents.getJavadocBefore(lineNo);
if (textBlock == null) {
log(ast, MSG_JAVADOC_MISSING);
}
}
}
/**
* Whether we should check this node.
*
* @param ast a given node.
* @return whether we should check a given node.
*/
private boolean shouldCheck(final DetailAST ast) {
final Scope customScope = ScopeUtil.getScope(ast);
final Scope surroundingScope = ScopeUtil.getSurroundingScope(ast);
final String outerTypeName = ast.findFirstToken(TokenTypes.IDENT).getText();
return customScope.isIn(scope)
&& (surroundingScope == null || surroundingScope.isIn(scope))
&& (excludeScope == null || !customScope.isIn(excludeScope) || surroundingScope != null && !surroundingScope.isIn(excludeScope))
&& !AnnotationUtil.containsAnnotation(ast, skipAnnotations)
&& ignorePattern.matcher(outerTypeName).find() == false;
}
}
| MissingJavadocTypeCheck |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/api/AssertNotEqualsAssertionsTests.java | {
"start": 19893,
"end": 20027
} | class ____ {
@Override
public boolean equals(Object obj) {
throw new NumberFormatException();
}
}
}
| EqualsThrowsExceptionClass |
java | quarkusio__quarkus | extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/health/KafkaHealthCheck.java | {
"start": 457,
"end": 1365
} | class ____ implements HealthCheck {
KafkaAdminClient kafkaAdminClient;
public KafkaHealthCheck(KafkaAdminClient kafkaAdminClient) {
this.kafkaAdminClient = kafkaAdminClient;
}
@Override
public HealthCheckResponse call() {
HealthCheckResponseBuilder builder = HealthCheckResponse.named("Kafka connection health check").up();
try {
StringBuilder nodes = new StringBuilder();
for (Node node : kafkaAdminClient.getCluster().nodes().get()) {
if (nodes.length() > 0) {
nodes.append(',');
}
nodes.append(node.host()).append(':').append(node.port());
}
return builder.withData("nodes", nodes.toString()).build();
} catch (Exception e) {
return builder.down().withData("reason", e.getMessage()).build();
}
}
}
| KafkaHealthCheck |
java | square__retrofit | samples/src/main/java/com/example/retrofit/AnnotatedConverters.java | {
"start": 3505,
"end": 3557
} | interface ____ {}
@Retention(RUNTIME)
public @ | Moshi |
java | apache__camel | components/camel-braintree/src/test/java/org/apache/camel/component/braintree/PaymentMethodGatewayIT.java | {
"start": 1680,
"end": 8208
} | class ____ extends AbstractBraintreeTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(PaymentMethodGatewayIT.class);
private static final String PATH_PREFIX = getApiNameAsString(PaymentMethodGatewayApiMethod.class);
private BraintreeGateway gateway;
private Customer customer;
private final List<String> paymentMethodsTokens;
// *************************************************************************
//
// *************************************************************************
public PaymentMethodGatewayIT() {
this.customer = null;
this.gateway = null;
this.paymentMethodsTokens = new LinkedList<>();
}
@Override
protected void doPostSetup() {
this.gateway = getGateway();
this.customer = gateway.customer().create(
new CustomerRequest()
.firstName("user")
.lastName(UUID.randomUUID().toString()))
.getTarget();
if (customer != null) {
LOG.info("Customer created - id={}", this.customer.getId());
}
}
@Override
public void doPostTearDown() {
if (this.gateway != null) {
for (String token : this.paymentMethodsTokens) {
if (this.gateway.paymentMethod().delete(token).isSuccess()) {
LOG.info("PaymentMethod deleted - token={}", token);
} else {
LOG.warn("Unable to delete PaymentMethod - token={}", token);
}
}
this.paymentMethodsTokens.clear();
if (this.gateway.customer().delete(this.customer.getId()).isSuccess()) {
LOG.info("Customer deleted - id={}", this.customer.getId());
} else {
LOG.warn("Unable to delete customer - id={}", this.customer.getId());
}
}
}
private PaymentMethod createPaymentMethod() {
Result<? extends PaymentMethod> result = this.gateway.paymentMethod().create(
new PaymentMethodRequest()
.customerId(this.customer.getId())
.paymentMethodNonce("fake-valid-payroll-nonce"));
assertNotNull(result, "create result");
assertTrue(result.isSuccess());
LOG.info("PaymentMethod created - token={}", result.getTarget().getToken());
return result.getTarget();
}
// *************************************************************************
//
// *************************************************************************
@Test
public void testCreate() {
assertNotNull(this.gateway, "BraintreeGateway can't be null");
assertNotNull(this.customer, "Customer can't be null");
final Result<PaymentMethod> result = requestBody("direct://CREATE",
new PaymentMethodRequest()
.customerId(this.customer.getId())
.paymentMethodNonce("fake-valid-payroll-nonce"),
Result.class);
assertNotNull(result, "create result");
assertTrue(result.isSuccess());
LOG.info("PaymentMethod created - token={}", result.getTarget().getToken());
this.paymentMethodsTokens.add(result.getTarget().getToken());
}
@Test
public void testDelete() {
assertNotNull(this.gateway, "BraintreeGateway can't be null");
assertNotNull(this.customer, "Customer can't be null");
final PaymentMethod paymentMethod = createPaymentMethod();
final Result<PaymentMethod> deleteResult = requestBody(
"direct://DELETE", paymentMethod.getToken(), Result.class);
assertNotNull(deleteResult, "create result");
assertTrue(deleteResult.isSuccess());
LOG.info("PaymentMethod deleted - token={}", paymentMethod.getToken());
}
@Test
public void testFind() {
assertNotNull(this.gateway, "BraintreeGateway can't be null");
assertNotNull(this.customer, "Customer can't be null");
final PaymentMethod paymentMethod = createPaymentMethod();
this.paymentMethodsTokens.add(paymentMethod.getToken());
final PaymentMethod method = requestBody(
"direct://FIND", paymentMethod.getToken(), PaymentMethod.class);
assertNotNull(method, "find result");
LOG.info("PaymentMethod found - token={}", method.getToken());
}
@Test
public void testUpdate() {
assertNotNull(this.gateway, "BraintreeGateway can't be null");
assertNotNull(this.customer, "Customer can't be null");
final PaymentMethod paymentMethod = createPaymentMethod();
this.paymentMethodsTokens.add(paymentMethod.getToken());
final Result<PaymentMethod> result = requestBodyAndHeaders(
"direct://UPDATE", null,
new BraintreeHeaderBuilder()
.add("token", paymentMethod.getToken())
.add("request", new PaymentMethodRequest()
.billingAddress()
.company("Apache")
.streetAddress("100 Maple Lane")
.done())
.build(),
Result.class);
assertNotNull(result, "update result");
assertTrue(result.isSuccess());
LOG.info("PaymentMethod updated - token={}", result.getTarget().getToken());
}
// *************************************************************************
// ROUTES
// *************************************************************************
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// test route for create
from("direct://CREATE")
.to("braintree://" + PATH_PREFIX + "/create?inBody=request");
// test route for delete
from("direct://DELETE")
.to("braintree://" + PATH_PREFIX + "/delete?inBody=token");
// test route for find
from("direct://FIND")
.to("braintree://" + PATH_PREFIX + "/find?inBody=token");
// test route for update
from("direct://UPDATE")
.to("braintree://" + PATH_PREFIX + "/update");
}
};
}
}
| PaymentMethodGatewayIT |
java | quarkusio__quarkus | extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SyntheticBeanBuildItem.java | {
"start": 1821,
"end": 2337
} | class ____ automatically registered as a resulting bean type.
*
* @param implClazz
* @return a new configurator instance
* @see ExtendedBeanConfigurator#done()
*/
public static ExtendedBeanConfigurator configure(DotName implClazz) {
return new ExtendedBeanConfigurator(implClazz).addType(implClazz);
}
/**
* Returns a configurator object allowing for further customization of the synthetic bean.
* <p>
* Unlike {@link #configure(Class)}, the implementation | is |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/client/BufferingClientHttpRequestWrapper.java | {
"start": 946,
"end": 1562
} | class ____ extends AbstractBufferingClientHttpRequest {
private final ClientHttpRequest request;
BufferingClientHttpRequestWrapper(ClientHttpRequest request) {
this.request = request;
}
@Override
public HttpMethod getMethod() {
return this.request.getMethod();
}
@Override
public URI getURI() {
return this.request.getURI();
}
@Override
protected ClientHttpResponse executeInternal(HttpHeaders headers, byte[] bufferedOutput) throws IOException {
this.request.getHeaders().putAll(headers);
return executeWithRequest(this.request, bufferedOutput, true);
}
}
| BufferingClientHttpRequestWrapper |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/enums/EnumAsMapKeySerializationTest.java | {
"start": 2184,
"end": 2480
} | class ____ extends ValueSerializer<Foo661> {
@Override
public void serialize(Foo661 value, JsonGenerator g, SerializationContext provider)
{
g.writeName("X-"+value.name());
}
}
}
// [databind#2129]
public | Serializer |
java | apache__dubbo | dubbo-spring-boot-project/dubbo-spring-boot-autoconfigure/src/main/java/org/apache/dubbo/spring/boot/autoconfigure/observability/DubboMicrometerTracingAutoConfiguration.java | {
"start": 2140,
"end": 2740
} | class ____ available starting from Boot 3.0. It's not available if you're using Boot < 3.0
*/
@ConditionalOnProperty(prefix = DUBBO_PREFIX, name = "enabled", matchIfMissing = true)
@ConditionalOnDubboTracingEnable
@ConditionalOnClass(
name = {
"io.micrometer.observation.Observation",
"io.micrometer.tracing.Tracer",
"io.micrometer.tracing.propagation.Propagator"
})
@AutoConfigureAfter(
name = "org.springframework.boot.actuate.autoconfigure.tracing.MicrometerTracingAutoConfiguration",
value = DubboAutoConfiguration.class)
public | is |
java | apache__flink | flink-kubernetes/src/test/java/org/apache/flink/kubernetes/kubeclient/TestingFlinkKubeClient.java | {
"start": 9034,
"end": 15813
} | class ____ {
private Function<KubernetesPod, CompletableFuture<Void>> createTaskManagerPodFunction =
(ignore) -> FutureUtils.completedVoidFuture();
private Function<String, CompletableFuture<Void>> stopPodFunction =
(ignore) -> FutureUtils.completedVoidFuture();
private Consumer<String> stopAndCleanupClusterConsumer = (ignore) -> {};
private Function<Map<String, String>, List<KubernetesPod>> getPodsWithLabelsFunction =
(ignore) -> Collections.emptyList();
private BiFunction<
Map<String, String>,
WatchCallbackHandler<KubernetesPod>,
CompletableFuture<KubernetesWatch>>
watchPodsAndDoCallbackFunction =
(ignore1, ignore2) ->
CompletableFuture.supplyAsync(MockKubernetesWatch::new);
private Function<KubernetesConfigMap, CompletableFuture<Void>> createConfigMapFunction =
(ignore) -> FutureUtils.completedVoidFuture();
private Function<String, Optional<KubernetesConfigMap>> getConfigMapFunction =
(ignore) -> Optional.empty();
private BiFunction<
String,
Function<KubernetesConfigMap, Optional<KubernetesConfigMap>>,
CompletableFuture<Boolean>>
checkAndUpdateConfigMapFunction =
(ignore1, ignore2) -> CompletableFuture.completedFuture(true);
private Function<String, CompletableFuture<Void>> deleteConfigMapFunction =
(ignore) -> FutureUtils.completedVoidFuture();
private Consumer<Void> closeConsumer = (ignore) -> {};
private BiFunction<
KubernetesLeaderElectionConfiguration,
KubernetesLeaderElector.LeaderCallbackHandler,
KubernetesLeaderElector>
createLeaderElectorFunction = TestingKubernetesLeaderElector::new;
private Function<String, KubernetesConfigMapSharedWatcher>
createConfigMapSharedWatcherFunction = TestingKubernetesConfigMapSharedWatcher::new;
private Builder() {}
public Builder setCreateTaskManagerPodFunction(
Function<KubernetesPod, CompletableFuture<Void>> createTaskManagerPodFunction) {
this.createTaskManagerPodFunction =
Preconditions.checkNotNull(createTaskManagerPodFunction);
return this;
}
public Builder setStopPodFunction(
Function<String, CompletableFuture<Void>> stopPodFunction) {
this.stopPodFunction = Preconditions.checkNotNull(stopPodFunction);
return this;
}
public Builder setStopAndCleanupClusterConsumer(
Consumer<String> stopAndCleanupClusterConsumer) {
this.stopAndCleanupClusterConsumer =
Preconditions.checkNotNull(stopAndCleanupClusterConsumer);
return this;
}
public Builder setGetPodsWithLabelsFunction(
Function<Map<String, String>, List<KubernetesPod>> getPodsWithLabelsFunction) {
this.getPodsWithLabelsFunction = Preconditions.checkNotNull(getPodsWithLabelsFunction);
return this;
}
public Builder setWatchPodsAndDoCallbackFunction(
BiFunction<
Map<String, String>,
WatchCallbackHandler<KubernetesPod>,
CompletableFuture<KubernetesWatch>>
watchPodsAndDoCallbackFunction) {
this.watchPodsAndDoCallbackFunction =
Preconditions.checkNotNull(watchPodsAndDoCallbackFunction);
return this;
}
public Builder setCreateConfigMapFunction(
Function<KubernetesConfigMap, CompletableFuture<Void>> createConfigMapFunction) {
this.createConfigMapFunction = createConfigMapFunction;
return this;
}
public Builder setGetConfigMapFunction(
Function<String, Optional<KubernetesConfigMap>> getConfigMapFunction) {
this.getConfigMapFunction = getConfigMapFunction;
return this;
}
public Builder setCheckAndUpdateConfigMapFunction(
BiFunction<
String,
Function<KubernetesConfigMap, Optional<KubernetesConfigMap>>,
CompletableFuture<Boolean>>
checkAndUpdateConfigMapFunction) {
this.checkAndUpdateConfigMapFunction = checkAndUpdateConfigMapFunction;
return this;
}
public Builder setDeleteConfigMapFunction(
Function<String, CompletableFuture<Void>> deleteConfigMapFunction) {
this.deleteConfigMapFunction = deleteConfigMapFunction;
return this;
}
public Builder setCloseConsumer(Consumer<Void> closeConsumer) {
this.closeConsumer = closeConsumer;
return this;
}
public Builder setCreateLeaderElectorFunction(
BiFunction<
KubernetesLeaderElectionConfiguration,
KubernetesLeaderElector.LeaderCallbackHandler,
KubernetesLeaderElector>
createLeaderElectorFunction) {
this.createLeaderElectorFunction = createLeaderElectorFunction;
return this;
}
public Builder setCreateConfigMapSharedWatcherFunction(
Function<String, KubernetesConfigMapSharedWatcher>
createConfigMapSharedWatcherFunction) {
this.createConfigMapSharedWatcherFunction = createConfigMapSharedWatcherFunction;
return this;
}
public TestingFlinkKubeClient build() {
return new TestingFlinkKubeClient(
createTaskManagerPodFunction,
stopPodFunction,
stopAndCleanupClusterConsumer,
getPodsWithLabelsFunction,
watchPodsAndDoCallbackFunction,
createConfigMapFunction,
getConfigMapFunction,
checkAndUpdateConfigMapFunction,
deleteConfigMapFunction,
closeConsumer,
createLeaderElectorFunction,
createConfigMapSharedWatcherFunction);
}
}
/** Testing implementation of {@link KubernetesWatch} and {@link Watch}. */
public static | Builder |
java | quarkusio__quarkus | integration-tests/spring-data-jpa/src/main/java/io/quarkus/it/spring/data/jpa/PersonRepositoryImpl.java | {
"start": 161,
"end": 614
} | class ____ implements PersonFragment, PersonFragment2 {
@PersistenceContext
EntityManager entityManager;
@Override
public List<Person> findAll() {
return entityManager.createQuery("SELECT p FROM Person p").getResultList();
}
@Override
public void makeNameUpperCase(Person person) {
person.setName(person.getName().toUpperCase());
}
@Override
public void doNothing() {
}
}
| PersonRepositoryImpl |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java | {
"start": 6436,
"end": 8484
} | class ____ {
private LeafCollector currentLeafCollector;
private TopDocsCollector<? extends ScoreDoc> tdc;
private long parentBucket;
private int matchedDocs;
PerParentBucketSamples(long parentBucket, Scorable scorer, AggregationExecutionContext aggCtx) {
try {
this.parentBucket = parentBucket;
// Add to CB based on the size and the implementations per-doc overhead
circuitBreakerConsumer.accept((long) shardSize * getPriorityQueueSlotSize());
tdc = createTopDocsCollector(shardSize);
currentLeafCollector = tdc.getLeafCollector(aggCtx.getLeafReaderContext());
setScorer(scorer);
} catch (IOException e) {
throw new ElasticsearchException("IO error creating collector", e);
}
}
public void getMatches(List<ScoreDoc> allDocs) {
TopDocs topDocs = tdc.topDocs();
ScoreDoc[] sd = topDocs.scoreDocs;
matchedDocs = sd.length;
for (ScoreDoc scoreDoc : sd) {
// A bit of a hack to (ab)use shardIndex property here to
// hold a bucket ID but avoids allocating extra data structures
// and users should have bigger concerns if bucket IDs
// exceed int capacity..
scoreDoc.shardIndex = (int) parentBucket;
}
allDocs.addAll(Arrays.asList(sd));
}
public void collect(int doc) throws IOException {
currentLeafCollector.collect(doc);
}
public void setScorer(Scorable scorer) throws IOException {
currentLeafCollector.setScorer(scorer);
}
public void changeSegment(AggregationExecutionContext aggCtx) throws IOException {
currentLeafCollector = tdc.getLeafCollector(aggCtx.getLeafReaderContext());
}
public int getDocCount() {
return matchedDocs;
}
}
| PerParentBucketSamples |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/aot/BeanRegistrationAotContributionTests.java | {
"start": 1186,
"end": 2720
} | class ____ {
@Test
void concatWithBothNullReturnsNull() {
assertThat(BeanRegistrationAotContribution.concat(null, null)).isNull();
}
@Test
void concatWithFirstNullReturnsSecondAsIs() {
BeanRegistrationAotContribution contribution = mock(BeanRegistrationAotContribution.class);
assertThat(BeanRegistrationAotContribution.concat(null, contribution)).isSameAs(contribution);
verifyNoInteractions(contribution);
}
@Test
void concatWithSecondNullReturnsFirstAsIs() {
BeanRegistrationAotContribution contribution = mock(BeanRegistrationAotContribution.class);
assertThat(BeanRegistrationAotContribution.concat(contribution, null)).isSameAs(contribution);
verifyNoInteractions(contribution);
}
@Test
void concatApplyContributionsInOrder() {
BeanRegistrationAotContribution first = mock(BeanRegistrationAotContribution.class);
BeanRegistrationAotContribution second = mock(BeanRegistrationAotContribution.class);
BeanRegistrationAotContribution combined = BeanRegistrationAotContribution.concat(first, second);
assertThat(combined).isNotNull();
TestGenerationContext generationContext = new TestGenerationContext();
BeanRegistrationCode beanRegistrationCode = new MockBeanRegistrationCode(generationContext);
combined.applyTo(generationContext, beanRegistrationCode);
InOrder ordered = inOrder(first, second);
ordered.verify(first).applyTo(generationContext, beanRegistrationCode);
ordered.verify(second).applyTo(generationContext, beanRegistrationCode);
}
}
| BeanRegistrationAotContributionTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/onetoone/hhh4851/Device.java | {
"start": 396,
"end": 889
} | class ____ extends Hardware {
private ManagedDevice managedDevice;
private String tag;
public Device() {
}
@OneToOne(fetch = FetchType.LAZY, mappedBy = "device")
public ManagedDevice getManagedDevice() {
return managedDevice;
}
@Column(unique = true, nullable = true)
public String getTag() {
return tag;
}
public void setManagedDevice(ManagedDevice logicalterminal) {
this.managedDevice = logicalterminal;
}
public void setTag(String tag) {
this.tag = tag;
}
}
| Device |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/single/SingleResumeNext.java | {
"start": 1069,
"end": 1683
} | class ____<T> extends Single<T> {
final SingleSource<? extends T> source;
final Function<? super Throwable, ? extends SingleSource<? extends T>> nextFunction;
public SingleResumeNext(SingleSource<? extends T> source,
Function<? super Throwable, ? extends SingleSource<? extends T>> nextFunction) {
this.source = source;
this.nextFunction = nextFunction;
}
@Override
protected void subscribeActual(final SingleObserver<? super T> observer) {
source.subscribe(new ResumeMainSingleObserver<>(observer, nextFunction));
}
static final | SingleResumeNext |
java | elastic__elasticsearch | libs/exponential-histogram/src/main/java/org/elasticsearch/exponentialhistogram/CompressedHistogramData.java | {
"start": 6828,
"end": 10947
} | class ____ {
private long currentIndex;
/**
* The count for the bucket this iterator is currently pointing at.
* A value of {@code -1} is used to represent that the end has been reached.
*/
private long currentCount;
private final AccessibleByteArrayStreamInput bucketsStreamInput;
private BucketsDecoder(int encodedBucketsStartOffset, int length) {
if (length > 0) {
bucketsStreamInput = new AccessibleByteArrayStreamInput(encodedData, encodedBucketsStartOffset, length);
currentIndex = bucketsStreamInput.readZLong() - 1;
currentCount = 0;
advance();
} else {
bucketsStreamInput = null;
// no data means we are iterating over an empty set of buckets
markEndReached();
}
}
private BucketsDecoder(BucketsDecoder toCopy) {
if (toCopy.bucketsStreamInput != null) {
int position = toCopy.bucketsStreamInput.getPosition();
bucketsStreamInput = new AccessibleByteArrayStreamInput(
encodedData,
position,
toCopy.bucketsStreamInput.getLimit() - position
);
} else {
bucketsStreamInput = null;
}
currentCount = toCopy.currentCount;
currentIndex = toCopy.currentIndex;
}
BucketsDecoder copy() {
return new BucketsDecoder(this);
}
private void markEndReached() {
currentCount = -1;
}
boolean hasNext() {
return currentCount != -1;
}
long peekCount() {
assert currentCount != -1 : "End has already been reached";
return currentCount;
}
long peekIndex() {
assert currentCount != -1 : "End has already been reached";
return currentIndex;
}
void advance() {
assert currentCount != -1 : "End has already been reached";
if (bucketsStreamInput.available() > 0) {
currentIndex++;
long countOrNumEmptyBuckets = bucketsStreamInput.readZLong();
if (countOrNumEmptyBuckets < 0) {
// we have encountered a negative value, this means we "skip"
// the given amount of empty buckets
long numEmptyBuckets = -countOrNumEmptyBuckets;
currentIndex += numEmptyBuckets;
// after we have skipped empty buckets, we know that the next value is a non-empty bucket
currentCount = bucketsStreamInput.readZLong();
} else {
currentCount = countOrNumEmptyBuckets;
}
assert currentCount > 0;
} else {
markEndReached();
}
}
private static void serializeBuckets(OutputStream out, BucketIterator buckets) throws IOException {
if (buckets.hasNext() == false) {
return; // no buckets, therefore nothing to write
}
long firstIndex = buckets.peekIndex();
writeZLong(firstIndex, out);
writeZLong(buckets.peekCount(), out);
buckets.advance();
long prevIndex = firstIndex;
while (buckets.hasNext()) {
long index = buckets.peekIndex();
long count = buckets.peekCount();
long indexDelta = index - prevIndex;
assert indexDelta > 0; // values must be sorted and unique
assert count > 0;
long numEmptyBucketsInBetween = indexDelta - 1;
if (numEmptyBucketsInBetween > 0) {
writeZLong(-numEmptyBucketsInBetween, out);
}
writeZLong(count, out);
buckets.advance();
prevIndex = index;
}
}
}
private static | BucketsDecoder |
java | spring-projects__spring-boot | cli/spring-boot-cli/src/test/java/org/springframework/boot/cli/command/OptionParsingCommandTests.java | {
"start": 1202,
"end": 1399
} | class ____ extends OptionParsingCommand {
TestOptionParsingCommand(String name, String description, OptionHandler handler) {
super(name, description, handler);
}
}
}
| TestOptionParsingCommand |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/HazelcastInstanceComponentBuilderFactory.java | {
"start": 5769,
"end": 6912
} | class ____
extends AbstractComponentBuilder<HazelcastInstanceComponent>
implements HazelcastInstanceComponentBuilder {
@Override
protected HazelcastInstanceComponent buildConcreteComponent() {
return new HazelcastInstanceComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "bridgeErrorHandler": ((HazelcastInstanceComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "autowiredEnabled": ((HazelcastInstanceComponent) component).setAutowiredEnabled((boolean) value); return true;
case "hazelcastInstance": ((HazelcastInstanceComponent) component).setHazelcastInstance((com.hazelcast.core.HazelcastInstance) value); return true;
case "hazelcastMode": ((HazelcastInstanceComponent) component).setHazelcastMode((java.lang.String) value); return true;
default: return false;
}
}
}
} | HazelcastInstanceComponentBuilderImpl |
java | quarkusio__quarkus | extensions/hibernate-search-orm-outbox-polling/runtime/src/main/java/io/quarkus/hibernate/search/orm/outboxpolling/runtime/HibernateSearchOutboxPollingRecorder.java | {
"start": 1845,
"end": 5748
} | class ____
implements HibernateOrmIntegrationStaticInitListener {
private final HibernateSearchOutboxPollingBuildTimeConfigPersistenceUnit buildTimeConfig;
private StaticInitListener(HibernateSearchOutboxPollingBuildTimeConfigPersistenceUnit buildTimeConfig) {
this.buildTimeConfig = buildTimeConfig;
}
@Override
public void onMetadataInitialized(Metadata metadata, BootstrapContext bootstrapContext,
BiConsumer<String, Object> propertyCollector) {
// Nothing to do
}
@Override
public void contributeBootProperties(BiConsumer<String, Object> propertyCollector) {
if (buildTimeConfig == null) {
return;
}
contributeCoordinationBuildTimeProperties(propertyCollector, buildTimeConfig.coordination());
}
private void contributeCoordinationBuildTimeProperties(BiConsumer<String, Object> propertyCollector,
HibernateSearchOutboxPollingBuildTimeConfigPersistenceUnit.CoordinationConfig config) {
HibernateSearchOutboxPollingConfigUtil.addCoordinationConfig(propertyCollector,
HibernateOrmMapperOutboxPollingSettings.CoordinationRadicals.ENTITY_MAPPING_AGENT_CATALOG,
config.entityMapping().agent().catalog());
HibernateSearchOutboxPollingConfigUtil.addCoordinationConfig(propertyCollector,
HibernateOrmMapperOutboxPollingSettings.CoordinationRadicals.ENTITY_MAPPING_AGENT_SCHEMA,
config.entityMapping().agent().schema());
HibernateSearchOutboxPollingConfigUtil.addCoordinationConfig(propertyCollector,
HibernateOrmMapperOutboxPollingSettings.CoordinationRadicals.ENTITY_MAPPING_AGENT_TABLE,
config.entityMapping().agent().table());
HibernateSearchOutboxPollingConfigUtil.addCoordinationConfig(propertyCollector,
HibernateOrmMapperOutboxPollingSettings.CoordinationRadicals.ENTITY_MAPPING_AGENT_UUID_GEN_STRATEGY,
config.entityMapping().agent().uuidGenStrategy());
HibernateSearchOutboxPollingConfigUtil.addCoordinationConfig(propertyCollector,
HibernateOrmMapperOutboxPollingSettings.CoordinationRadicals.ENTITY_MAPPING_AGENT_UUID_TYPE,
config.entityMapping().agent().uuidType());
HibernateSearchOutboxPollingConfigUtil.addCoordinationConfig(propertyCollector,
HibernateOrmMapperOutboxPollingSettings.CoordinationRadicals.ENTITY_MAPPING_OUTBOXEVENT_CATALOG,
config.entityMapping().outboxEvent().catalog());
HibernateSearchOutboxPollingConfigUtil.addCoordinationConfig(propertyCollector,
HibernateOrmMapperOutboxPollingSettings.CoordinationRadicals.ENTITY_MAPPING_OUTBOXEVENT_SCHEMA,
config.entityMapping().outboxEvent().schema());
HibernateSearchOutboxPollingConfigUtil.addCoordinationConfig(propertyCollector,
HibernateOrmMapperOutboxPollingSettings.CoordinationRadicals.ENTITY_MAPPING_OUTBOXEVENT_TABLE,
config.entityMapping().outboxEvent().table());
HibernateSearchOutboxPollingConfigUtil.addCoordinationConfig(propertyCollector,
HibernateOrmMapperOutboxPollingSettings.CoordinationRadicals.ENTITY_MAPPING_OUTBOXEVENT_UUID_GEN_STRATEGY,
config.entityMapping().outboxEvent().uuidGenStrategy());
HibernateSearchOutboxPollingConfigUtil.addCoordinationConfig(propertyCollector,
HibernateOrmMapperOutboxPollingSettings.CoordinationRadicals.ENTITY_MAPPING_OUTBOXEVENT_UUID_TYPE,
config.entityMapping().outboxEvent().uuidType());
}
}
private static final | StaticInitListener |
java | apache__maven | impl/maven-cli/src/main/java/org/apache/maven/cling/invoker/BaseParser.java | {
"start": 2861,
"end": 24060
} | class ____ {
public final ParserRequest parserRequest;
public final Map<String, String> systemPropertiesOverrides;
public LocalContext(ParserRequest parserRequest) {
this.parserRequest = parserRequest;
this.systemPropertiesOverrides = new HashMap<>();
}
public boolean parsingFailed = false;
public Path cwd;
public Path installationDirectory;
public Path userHomeDirectory;
public Map<String, String> systemProperties;
public Map<String, String> userProperties;
public Path topDirectory;
@Nullable
public Path rootDirectory;
@Nullable
public List<CoreExtensions> extensions;
@Nullable
public CIInfo ciInfo;
@Nullable
public Options options;
public Map<String, String> extraInterpolationSource() {
Map<String, String> extra = new HashMap<>();
extra.put("session.topDirectory", topDirectory.toString());
if (rootDirectory != null) {
extra.put("session.rootDirectory", rootDirectory.toString());
}
return extra;
}
}
@Override
public InvokerRequest parseInvocation(ParserRequest parserRequest) {
requireNonNull(parserRequest);
LocalContext context = new LocalContext(parserRequest);
// the basics
try {
context.cwd = getCwd(context);
} catch (Exception e) {
context.parsingFailed = true;
context.cwd = getCanonicalPath(Paths.get("."));
parserRequest.logger().error("Error determining working directory", e);
}
try {
context.installationDirectory = getInstallationDirectory(context);
} catch (Exception e) {
context.parsingFailed = true;
context.installationDirectory = context.cwd;
parserRequest.logger().error("Error determining installation directory", e);
}
try {
context.userHomeDirectory = getUserHomeDirectory(context);
} catch (Exception e) {
context.parsingFailed = true;
context.userHomeDirectory = context.cwd;
parserRequest.logger().error("Error determining user home directory", e);
}
// top/root
try {
context.topDirectory = getTopDirectory(context);
} catch (Exception e) {
context.parsingFailed = true;
context.topDirectory = context.cwd;
parserRequest.logger().error("Error determining top directory", e);
}
try {
context.rootDirectory = getRootDirectory(context);
} catch (Exception e) {
context.parsingFailed = true;
context.rootDirectory = context.cwd;
parserRequest.logger().error("Error determining root directory", e);
}
// options
try {
context.options = parseCliOptions(context);
} catch (Exception e) {
context.parsingFailed = true;
context.options = null;
parserRequest.logger().error("Error parsing program arguments", e);
}
// system and user properties
try {
context.systemProperties = populateSystemProperties(context);
} catch (Exception e) {
context.parsingFailed = true;
context.systemProperties = new HashMap<>();
parserRequest.logger().error("Error populating system properties", e);
}
try {
context.userProperties = populateUserProperties(context);
} catch (Exception e) {
context.parsingFailed = true;
context.userProperties = new HashMap<>();
parserRequest.logger().error("Error populating user properties", e);
}
// options: interpolate
if (context.options != null) {
context.options = context.options.interpolate(Interpolator.chain(
context.extraInterpolationSource()::get,
context.userProperties::get,
context.systemProperties::get));
}
// below we use effective properties as both system + user are present
// core extensions
try {
context.extensions = readCoreExtensionsDescriptor(context);
} catch (Exception e) {
context.parsingFailed = true;
parserRequest.logger().error("Error reading core extensions descriptor", e);
}
// CI detection
context.ciInfo = detectCI(context);
// only if not failed so far; otherwise we may have no options to validate
if (!context.parsingFailed) {
validate(context);
}
return getInvokerRequest(context);
}
protected void validate(LocalContext context) {
Options options = context.options;
options.failOnSeverity().ifPresent(severity -> {
String c = severity.toLowerCase(Locale.ENGLISH);
if (!Arrays.asList("warn", "warning", "error").contains(c)) {
context.parsingFailed = true;
context.parserRequest
.logger()
.error("Invalid fail on severity threshold '" + c
+ "'. Supported values are 'WARN', 'WARNING' and 'ERROR'.");
}
});
options.altUserSettings()
.ifPresent(userSettings ->
failIfFileNotExists(context, userSettings, "The specified user settings file does not exist"));
options.altProjectSettings()
.ifPresent(projectSettings -> failIfFileNotExists(
context, projectSettings, "The specified project settings file does not exist"));
options.altInstallationSettings()
.ifPresent(installationSettings -> failIfFileNotExists(
context, installationSettings, "The specified installation settings file does not exist"));
options.altUserToolchains()
.ifPresent(userToolchains -> failIfFileNotExists(
context, userToolchains, "The specified user toolchains file does not exist"));
options.altInstallationToolchains()
.ifPresent(installationToolchains -> failIfFileNotExists(
context, installationToolchains, "The specified installation toolchains file does not exist"));
options.color().ifPresent(color -> {
String c = color.toLowerCase(Locale.ENGLISH);
if (!Arrays.asList("always", "yes", "force", "never", "no", "none", "auto", "tty", "if-tty")
.contains(c)) {
context.parsingFailed = true;
context.parserRequest
.logger()
.error("Invalid color configuration value '" + c
+ "'. Supported values are 'auto', 'always', 'never'.");
}
});
}
protected void failIfFileNotExists(LocalContext context, String fileName, String message) {
Path path = context.cwd.resolve(fileName);
if (!Files.isRegularFile(path)) {
context.parsingFailed = true;
context.parserRequest.logger().error(message + ": " + path);
}
}
protected InvokerRequest getInvokerRequest(LocalContext context) {
return new BaseInvokerRequest(
context.parserRequest,
context.parsingFailed,
context.cwd,
context.installationDirectory,
context.userHomeDirectory,
context.userProperties,
context.systemProperties,
context.topDirectory,
context.rootDirectory,
context.extensions,
context.ciInfo,
context.options);
}
protected Path getCwd(LocalContext context) {
if (context.parserRequest.cwd() != null) {
Path result = getCanonicalPath(context.parserRequest.cwd());
context.systemPropertiesOverrides.put("user.dir", result.toString());
return result;
} else {
Path result = getCanonicalPath(Paths.get(System.getProperty("user.dir")));
mayOverrideDirectorySystemProperty(context, "user.dir", result);
return result;
}
}
protected Path getInstallationDirectory(LocalContext context) {
if (context.parserRequest.mavenHome() != null) {
Path result = getCanonicalPath(context.parserRequest.mavenHome());
context.systemPropertiesOverrides.put(Constants.MAVEN_HOME, result.toString());
return result;
} else {
String mavenHome = System.getProperty(Constants.MAVEN_HOME);
if (mavenHome == null) {
throw new IllegalStateException(
"local mode requires " + Constants.MAVEN_HOME + " Java System Property set");
}
Path result = getCanonicalPath(Paths.get(mavenHome));
mayOverrideDirectorySystemProperty(context, Constants.MAVEN_HOME, result);
return result;
}
}
protected Path getUserHomeDirectory(LocalContext context) {
if (context.parserRequest.userHome() != null) {
Path result = getCanonicalPath(context.parserRequest.userHome());
context.systemPropertiesOverrides.put("user.home", result.toString());
return result;
} else {
Path result = getCanonicalPath(Paths.get(System.getProperty("user.home")));
mayOverrideDirectorySystemProperty(context, "user.home", result);
return result;
}
}
/**
* This method is needed to "align" values used later on for interpolations and path calculations.
* We enforce "canonical" paths, so IF key and canonical path value disagree, let override it.
*/
protected void mayOverrideDirectorySystemProperty(LocalContext context, String javaSystemPropertyKey, Path value) {
String valueString = value.toString();
if (!Objects.equals(System.getProperty(javaSystemPropertyKey), valueString)) {
context.systemPropertiesOverrides.put(javaSystemPropertyKey, valueString);
}
}
protected Path getTopDirectory(LocalContext context) {
// We need to locate the top level project which may be pointed at using
// the -f/--file option.
Path topDirectory = requireNonNull(context.cwd);
boolean isAltFile = false;
for (String arg : context.parserRequest.args()) {
if (isAltFile) {
// this is the argument following -f/--file
Path path = topDirectory.resolve(stripLeadingAndTrailingQuotes(arg));
if (Files.isDirectory(path)) {
topDirectory = path;
} else if (Files.isRegularFile(path)) {
topDirectory = path.getParent();
if (!Files.isDirectory(topDirectory)) {
throw new IllegalArgumentException("Directory " + topDirectory
+ " extracted from the -f/--file command-line argument " + arg + " does not exist");
}
} else {
throw new IllegalArgumentException(
"POM file " + arg + " specified with the -f/--file command line argument does not exist");
}
break;
} else {
// Check if this is the -f/--file option
isAltFile = arg.equals("-f") || arg.equals("--file");
}
}
return getCanonicalPath(topDirectory);
}
@Nullable
protected Path getRootDirectory(LocalContext context) {
return CliUtils.findRoot(context.topDirectory);
}
protected Map<String, String> populateSystemProperties(LocalContext context) {
Properties systemProperties = new Properties();
// ----------------------------------------------------------------------
// Load environment and system properties
// ----------------------------------------------------------------------
EnvironmentUtils.addEnvVars(systemProperties);
SystemProperties.addSystemProperties(systemProperties);
systemProperties.putAll(context.systemPropertiesOverrides);
// ----------------------------------------------------------------------
// Properties containing info about the currently running version of Maven
// These override any corresponding properties set on the command line
// ----------------------------------------------------------------------
Properties buildProperties = CLIReportingUtils.getBuildProperties();
String mavenVersion = buildProperties.getProperty(CLIReportingUtils.BUILD_VERSION_PROPERTY);
systemProperties.setProperty(Constants.MAVEN_VERSION, mavenVersion);
boolean snapshot = mavenVersion.endsWith("SNAPSHOT");
if (snapshot) {
mavenVersion = mavenVersion.substring(0, mavenVersion.length() - "SNAPSHOT".length());
if (mavenVersion.endsWith("-")) {
mavenVersion = mavenVersion.substring(0, mavenVersion.length() - 1);
}
}
String[] versionElements = mavenVersion.split("\\.");
if (versionElements.length != 3) {
throw new IllegalStateException("Maven version is expected to have 3 segments: '" + mavenVersion + "'");
}
systemProperties.setProperty(Constants.MAVEN_VERSION_MAJOR, versionElements[0]);
systemProperties.setProperty(Constants.MAVEN_VERSION_MINOR, versionElements[1]);
systemProperties.setProperty(Constants.MAVEN_VERSION_PATCH, versionElements[2]);
systemProperties.setProperty(Constants.MAVEN_VERSION_SNAPSHOT, Boolean.toString(snapshot));
String mavenBuildVersion = CLIReportingUtils.createMavenVersionString(buildProperties);
systemProperties.setProperty(Constants.MAVEN_BUILD_VERSION, mavenBuildVersion);
Path mavenConf;
if (systemProperties.getProperty(Constants.MAVEN_INSTALLATION_CONF) != null) {
mavenConf = context.installationDirectory.resolve(
systemProperties.getProperty(Constants.MAVEN_INSTALLATION_CONF));
} else if (systemProperties.getProperty("maven.conf") != null) {
mavenConf = context.installationDirectory.resolve(systemProperties.getProperty("maven.conf"));
} else if (systemProperties.getProperty(Constants.MAVEN_HOME) != null) {
mavenConf = context.installationDirectory
.resolve(systemProperties.getProperty(Constants.MAVEN_HOME))
.resolve("conf");
} else {
mavenConf = context.installationDirectory.resolve("");
}
UnaryOperator<String> callback = or(
context.extraInterpolationSource()::get,
context.systemPropertiesOverrides::get,
systemProperties::getProperty);
Path propertiesFile = mavenConf.resolve("maven-system.properties");
try {
MavenPropertiesLoader.loadProperties(systemProperties, propertiesFile, callback, false);
} catch (IOException e) {
throw new IllegalStateException("Error loading properties from " + propertiesFile, e);
}
Map<String, String> result = toMap(systemProperties);
result.putAll(context.systemPropertiesOverrides);
return result;
}
protected Map<String, String> populateUserProperties(LocalContext context) {
Properties userProperties = new Properties();
Map<String, String> paths = context.extraInterpolationSource();
// ----------------------------------------------------------------------
// Options that are set on the command line become system properties
// and therefore are set in the session properties. System properties
// are most dominant.
// ----------------------------------------------------------------------
Map<String, String> userSpecifiedProperties = context.options != null
? new HashMap<>(context.options.userProperties().orElse(new HashMap<>()))
: new HashMap<>();
createInterpolator().interpolate(userSpecifiedProperties, paths::get);
// ----------------------------------------------------------------------
// Load config files
// ----------------------------------------------------------------------
UnaryOperator<String> callback =
or(paths::get, prefix("cli.", userSpecifiedProperties::get), context.systemProperties::get);
Path mavenConf;
if (context.systemProperties.get(Constants.MAVEN_INSTALLATION_CONF) != null) {
mavenConf = context.installationDirectory.resolve(
context.systemProperties.get(Constants.MAVEN_INSTALLATION_CONF));
} else if (context.systemProperties.get("maven.conf") != null) {
mavenConf = context.installationDirectory.resolve(context.systemProperties.get("maven.conf"));
} else if (context.systemProperties.get(Constants.MAVEN_HOME) != null) {
mavenConf = context.installationDirectory
.resolve(context.systemProperties.get(Constants.MAVEN_HOME))
.resolve("conf");
} else {
mavenConf = context.installationDirectory.resolve("");
}
Path propertiesFile = mavenConf.resolve("maven-user.properties");
try {
MavenPropertiesLoader.loadProperties(userProperties, propertiesFile, callback, false);
} catch (IOException e) {
throw new IllegalStateException("Error loading properties from " + propertiesFile, e);
}
// CLI specified properties are most dominant
userProperties.putAll(userSpecifiedProperties);
return toMap(userProperties);
}
protected abstract Options parseCliOptions(LocalContext context);
/**
* Important: This method must return list of {@link CoreExtensions} in precedence order.
*/
protected List<CoreExtensions> readCoreExtensionsDescriptor(LocalContext context) {
ArrayList<CoreExtensions> result = new ArrayList<>();
Path file;
List<CoreExtension> loaded;
Map<String, String> eff = new HashMap<>(context.systemProperties);
eff.putAll(context.userProperties);
// project
file = context.cwd.resolve(eff.get(Constants.MAVEN_PROJECT_EXTENSIONS));
loaded = readCoreExtensionsDescriptorFromFile(file, false);
if (!loaded.isEmpty()) {
result.add(new CoreExtensions(file, loaded));
}
// user
file = context.userHomeDirectory.resolve(eff.get(Constants.MAVEN_USER_EXTENSIONS));
loaded = readCoreExtensionsDescriptorFromFile(file, true);
if (!loaded.isEmpty()) {
result.add(new CoreExtensions(file, loaded));
}
// installation
file = context.installationDirectory.resolve(eff.get(Constants.MAVEN_INSTALLATION_EXTENSIONS));
loaded = readCoreExtensionsDescriptorFromFile(file, true);
if (!loaded.isEmpty()) {
result.add(new CoreExtensions(file, loaded));
}
return result.isEmpty() ? null : List.copyOf(result);
}
protected List<CoreExtension> readCoreExtensionsDescriptorFromFile(Path extensionsFile, boolean allowMetaVersions) {
try {
if (extensionsFile != null && Files.exists(extensionsFile)) {
try (InputStream is = Files.newInputStream(extensionsFile)) {
return validateCoreExtensionsDescriptorFromFile(
extensionsFile,
List.copyOf(new CoreExtensionsStaxReader()
.read(is, true, InputSource.of(extensionsFile.toString()))
.getExtensions()),
allowMetaVersions);
}
}
return List.of();
} catch (XMLStreamException | IOException e) {
throw new IllegalArgumentException("Failed to parse extensions file: " + extensionsFile, e);
}
}
protected List<CoreExtension> validateCoreExtensionsDescriptorFromFile(
Path extensionFile, List<CoreExtension> coreExtensions, boolean allowMetaVersions) {
Map<String, List<InputLocation>> gasLocations = new HashMap<>();
Map<String, List<InputLocation>> metaVersionLocations = new HashMap<>();
for (CoreExtension coreExtension : coreExtensions) {
String ga = coreExtension.getGroupId() + ":" + coreExtension.getArtifactId();
InputLocation location = coreExtension.getLocation("");
gasLocations.computeIfAbsent(ga, k -> new ArrayList<>()).add(location);
// TODO: metaversions could be extensible | LocalContext |
java | google__error-prone | core/src/main/java/com/google/errorprone/refaster/URepeated.java | {
"start": 2235,
"end": 2349
} | class ____ extends Bindings.Key<List<JCExpression>> {
public Key(String name) {
super(name);
}
}
}
| Key |
java | apache__camel | core/camel-main/src/main/java/org/apache/camel/main/IBMSecretsManagerVaultConfigurationProperties.java | {
"start": 1100,
"end": 2218
} | class ____ extends IBMSecretsManagerVaultConfiguration
implements BootstrapCloseable {
private MainConfigurationProperties parent;
public IBMSecretsManagerVaultConfigurationProperties(MainConfigurationProperties parent) {
this.parent = parent;
}
public MainConfigurationProperties end() {
return parent;
}
@Override
public void close() {
parent = null;
}
// getter and setters
// --------------------------------------------------------------
// these are inherited from the parent class
// fluent builders
// --------------------------------------------------------------
/**
* The IBM Secrets Manager Vault token
*/
public IBMSecretsManagerVaultConfigurationProperties withToken(String token) {
setToken(token);
return this;
}
/**
* The IBM Secrets Manager service url
*/
public IBMSecretsManagerVaultConfigurationProperties withServiceUrl(String serviceUrl) {
setServiceUrl(serviceUrl);
return this;
}
}
| IBMSecretsManagerVaultConfigurationProperties |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/AuditManagerS3A.java | {
"start": 1831,
"end": 3538
} | interface ____ extends Service,
AuditSpanSource<AuditSpanS3A>,
AWSAuditEventCallbacks,
ActiveThreadSpanSource<AuditSpanS3A> {
/**
* Get the auditor; valid once initialized.
* @return the auditor.
*/
OperationAuditor getAuditor();
/**
* Create the execution interceptor(s) for this audit service.
* The list returned is mutable; new interceptors may be added.
* @return list of interceptors for the SDK.
* @throws IOException failure.
*/
List<ExecutionInterceptor> createExecutionInterceptors() throws IOException;
/**
* Return a transfer callback which
* fixes the active span context to be that in which
* the transfer listener was created.
* This can be used to audit the creation of the multipart
* upload initiation request which the transfer manager
* makes when a file to be copied is split up.
* This must be invoked/used within the active span.
* @return a transfer listener.
*/
TransferListener createTransferListener();
/**
* Check for permission to access a path.
* The path is fully qualified and the status is the
* status of the path.
* This is called from the {@code FileSystem.access()} command
* and is a soft permission check used by Hive.
* @param path path to check
* @param status status of the path.
* @param mode access mode.
* @return true if access is allowed.
* @throws IOException failure
*/
boolean checkAccess(Path path, S3AFileStatus status, FsAction mode)
throws IOException;
/**
* Update audit flags, especially the out of span rejection option.
* @param flags audit flags.
*/
void setAuditFlags(EnumSet<AuditorFlags> flags);
}
| AuditManagerS3A |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/alterTable/MySqlAlterTableTest16.java | {
"start": 986,
"end": 2021
} | class ____ extends TestCase {
public void test_alter_first() throws Exception {
String sql = "alter table st_jklsxxb enable constraint FK_ST_xxx_REFERENCE_ST_xxx";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
stmt.accept(visitor);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
String output = SQLUtils.toMySqlString(stmt);
assertEquals("ALTER TABLE st_jklsxxb" +
"\n\tENABLE CONSTRAINT FK_ST_xxx_REFERENCE_ST_xxx", output);
assertEquals(1, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
}
}
| MySqlAlterTableTest16 |
java | quarkusio__quarkus | extensions/smallrye-metrics/deployment/src/main/java/io/quarkus/smallrye/metrics/deployment/jandex/JandexBeanInfoAdapter.java | {
"start": 731,
"end": 3731
} | class ____ implements BeanInfoAdapter<ClassInfo> {
private static final DotName OBJECT = DotName.createSimple(Object.class.getName());
private final IndexView indexView;
private final TransformedAnnotationsBuildItem transformedAnnotations;
public JandexBeanInfoAdapter(IndexView indexView, TransformedAnnotationsBuildItem transformedAnnotations) {
this.indexView = indexView;
this.transformedAnnotations = transformedAnnotations;
}
@Override
public BeanInfo convert(ClassInfo input) {
BeanInfo superClassInfo = null;
DotName superName = input.superName();
if (superName != null && indexView.getClassByName(superName) != null && !superName.equals(OBJECT)) {
superClassInfo = this.convert(indexView.getClassByName(superName));
}
JandexAnnotationInfoAdapter annotationInfoAdapter = new JandexAnnotationInfoAdapter(indexView);
// add all class-level annotations, including inherited - SmallRye expects them here
List<AnnotationInfo> annotations = new ArrayList<>();
ClassInfo clazz = input;
while (clazz != null && clazz.superName() != null) {
List<AnnotationInfo> annotationsSuper = transformedAnnotations.getAnnotations(clazz)
.stream()
.filter(SmallRyeMetricsDotNames::isMetricAnnotation)
.map(annotationInfoAdapter::convert)
.collect(Collectors.toList());
annotations.addAll(annotationsSuper);
// a metric annotation can also be added through a CDI stereotype, so look into stereotypes
List<AnnotationInfo> annotationsThroughStereotypes = transformedAnnotations.getAnnotations(clazz)
.stream()
.flatMap(a -> getMetricAnnotationsThroughStereotype(a, indexView))
.collect(Collectors.toList());
annotations.addAll(annotationsThroughStereotypes);
clazz = indexView.getClassByName(clazz.superName());
}
return new RawBeanInfo(input.simpleName(),
input.name().prefix() == null ? "" : input.name().prefix().toString(),
annotations,
superClassInfo);
}
private Stream<AnnotationInfo> getMetricAnnotationsThroughStereotype(AnnotationInstance stereotypeInstance,
IndexView indexView) {
ClassInfo annotationType = indexView.getClassByName(stereotypeInstance.name());
if (annotationType != null && annotationType.declaredAnnotation(DotNames.STEREOTYPE) != null) {
JandexAnnotationInfoAdapter adapter = new JandexAnnotationInfoAdapter(indexView);
return transformedAnnotations.getAnnotations(annotationType)
.stream()
.filter(SmallRyeMetricsDotNames::isMetricAnnotation)
.map(adapter::convert);
} else {
return Stream.empty();
}
}
}
| JandexBeanInfoAdapter |
java | apache__hadoop | hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java | {
"start": 3219,
"end": 6679
} | class ____ implements Tool {
public static final int VERSION = 3;
private static final Logger LOG = LoggerFactory.getLogger(HadoopArchives.class);
private static final String NAME = "har";
private static final String ARCHIVE_NAME = "archiveName";
private static final String REPLICATION = "r";
private static final String PARENT_PATH = "p";
private static final String HELP = "help";
static final String SRC_LIST_LABEL = NAME + ".src.list";
static final String DST_DIR_LABEL = NAME + ".dest.path";
static final String TMP_DIR_LABEL = NAME + ".tmp.dir";
static final String JOB_DIR_LABEL = NAME + ".job.dir";
static final String SRC_COUNT_LABEL = NAME + ".src.count";
static final String TOTAL_SIZE_LABEL = NAME + ".total.size";
static final String DST_HAR_LABEL = NAME + ".archive.name";
static final String SRC_PARENT_LABEL = NAME + ".parent.path";
/** the size of the blocks that will be created when archiving **/
static final String HAR_BLOCKSIZE_LABEL = NAME + ".block.size";
/** the replication factor for the file in archiving. **/
static final String HAR_REPLICATION_LABEL = NAME + ".replication.factor";
/** the size of the part files that will be created when archiving **/
static final String HAR_PARTSIZE_LABEL = NAME + ".partfile.size";
/** size of each part file size **/
long partSize = 2 * 1024 * 1024 * 1024l;
/** size of blocks in hadoop archives **/
long blockSize = 512 * 1024 * 1024l;
/** the desired replication degree; default is 3 **/
short repl = 3;
private static final String usage = "archive"
+ " <-archiveName <NAME>.har> <-p <parent path>> [-r <replication factor>]" +
" <src>* <dest>" +
"\n";
private JobConf conf;
public void setConf(Configuration conf) {
if (conf instanceof JobConf) {
this.conf = (JobConf) conf;
} else {
this.conf = new JobConf(conf, HadoopArchives.class);
}
// This is for test purposes since MR2, different from Streaming
// here it is not possible to add a JAR to the classpath the tool
// will when running the mapreduce job.
String testJar = System.getProperty(TEST_HADOOP_ARCHIVES_JAR_PATH, null);
if (testJar != null) {
this.conf.setJar(testJar);
}
}
public Configuration getConf() {
return this.conf;
}
public HadoopArchives(Configuration conf) {
setConf(conf);
}
// check the src paths
private static void checkPaths(Configuration conf, List<Path> paths) throws
IOException {
for (Path p : paths) {
FileSystem fs = p.getFileSystem(conf);
fs.getFileStatus(p);
}
}
/**
* this assumes that there are two types of files file/dir
* @param fs the input filesystem
* @param fdir the filestatusdir of the path
* @param out the list of paths output of recursive ls
* @throws IOException
*/
private void recursivels(FileSystem fs, FileStatusDir fdir, List<FileStatusDir> out)
throws IOException {
if (fdir.getFileStatus().isFile()) {
out.add(fdir);
return;
}
else {
out.add(fdir);
FileStatus[] listStatus = fs.listStatus(fdir.getFileStatus().getPath());
fdir.setChildren(listStatus);
for (FileStatus stat: listStatus) {
FileStatusDir fstatDir = new FileStatusDir(stat, null);
recursivels(fs, fstatDir, out);
}
}
}
/** HarEntry is used in the {@link HArchivesMapper} as the input value. */
private static | HadoopArchives |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet2/HamletSpec.java | {
"start": 3902,
"end": 4313
} | enum ____ {
/**
*
*/
text,
/**
*
*/
password,
/**
*
*/
checkbox,
/**
*
*/
radio,
/**
*
*/
submit,
/**
*
*/
reset,
/**
*
*/
file,
/**
*
*/
hidden,
/**
*
*/
image,
/**
*
*/
button
};
/** Values for button types */
public | InputType |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/flogger/FloggerRequiredModifiersTest.java | {
"start": 894,
"end": 1298
} | class ____ {
private BugCheckerRefactoringTestHelper refactoringHelper() {
return BugCheckerRefactoringTestHelper.newInstance(FloggerRequiredModifiers.class, getClass());
}
@Test
public void negative() {
refactoringHelper()
.addInputLines(
"Holder.java",
"""
import com.google.common.flogger.FluentLogger;
| FloggerRequiredModifiersTest |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/annotation/ProcedureHint.java | {
"start": 4172,
"end": 4595
} | class ____ implements Procedure {
* @ProcedureHint(
* input = [@DataTypeHint("ROW<f BOOLEAN>")],
* isVarArgs = true
* )
* Integer[] call(Row... r) { ... }
*
* Integer[] call(boolean... b) { ... }
* }
* }</pre>
*
* @see DataTypeHint
* @see ArgumentHint
*/
@PublicEvolving
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.METHOD})
@Repeatable(ProcedureHints.class)
public @ | X |
java | apache__camel | components/camel-undertow/src/test/java/org/apache/camel/component/undertow/rest/RestUndertowHttpContextPathMatchGetTest.java | {
"start": 1066,
"end": 2596
} | class ____ extends BaseUndertowTest {
@Test
public void testProducerGet() {
String out = template.requestBody("undertow:http://localhost:{{port}}/users/123", null, String.class);
assertEquals("123;Donald Duck", out);
out = template.requestBody("undertow:http://localhost:{{port}}/users/list", null, String.class);
assertEquals("123;Donald Duck\n456;John Doe", out);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// configure to use undertow on localhost with the given port
restConfiguration().component("undertow").host("localhost").port(getPort());
// use the rest DSL to define the rest services
rest("/users/")
.get("{id}").to("direct:id")
.get("list").to("direct:list");
from("direct:id")
.to("mock:input")
.process(exchange -> {
String id = exchange.getIn().getHeader("id", String.class);
exchange.getMessage().setBody(id + ";Donald Duck");
});
from("direct:list")
.to("mock:input")
.process(exchange -> exchange.getMessage().setBody("123;Donald Duck\n456;John Doe"));
}
};
}
}
| RestUndertowHttpContextPathMatchGetTest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/memory/OpaqueMemoryResource.java | {
"start": 1353,
"end": 2582
} | class ____<T> implements AutoCloseable {
private final T resourceHandle;
private final long size;
private final ThrowingRunnable<Exception> disposer;
private final AtomicBoolean closed = new AtomicBoolean();
public OpaqueMemoryResource(T resourceHandle, long size, ThrowingRunnable<Exception> disposer) {
checkArgument(size >= 0, "size must be >= 0");
this.resourceHandle = checkNotNull(resourceHandle, "resourceHandle");
this.disposer = checkNotNull(disposer, "disposer");
this.size = size;
}
/** Gets the handle to the resource. */
public T getResourceHandle() {
return resourceHandle;
}
/** Gets the size, in bytes. */
public long getSize() {
return size;
}
/** Releases this resource. This method is idempotent. */
@Override
public void close() throws Exception {
if (closed.compareAndSet(false, true)) {
disposer.run();
}
}
@Override
public String toString() {
return "OpaqueMemoryResource ("
+ size
+ " bytes) @ "
+ resourceHandle
+ (closed.get() ? " (disposed)" : "");
}
}
| OpaqueMemoryResource |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/MonoPublishMulticastTest.java | {
"start": 1251,
"end": 6340
} | class ____ {
@Test
void normal() {
AtomicInteger i = new AtomicInteger();
Mono<Integer> m = Mono.fromCallable(i::incrementAndGet)
.publish(o -> o.flatMap(s -> Mono.just(2)));
StepVerifier.create(m)
.expectNoFusionSupport()
.expectNext(2)
.verifyComplete();
StepVerifier.create(m)
.expectNoFusionSupport()
.expectNext(2)
.verifyComplete();
}
@Test
void cancelComposes() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
TestPublisher<Integer> testPublisher = TestPublisher.create();
testPublisher.mono()
.publish(o -> Mono.<Integer>never())
.subscribe(ts);
testPublisher.assertNotCancelled()
.assertSubscribers();
ts.cancel();
testPublisher.assertNoSubscribers()
.assertCancelled();
}
@Test
void cancelComposes2() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
TestPublisher<Integer> testPublisher = TestPublisher.create();
testPublisher.mono()
.publish(o -> Mono.<Integer>empty())
.subscribe(ts);
testPublisher.assertCancelled()
.assertNoSubscribers();
}
@Test
void nullFunction() {
assertThatNullPointerException()
.isThrownBy(() -> Mono.just("Foo")
.publish(null))
.withMessage("transform");
}
@Test
void npeFunction() {
StepVerifier.create(Mono.just("Foo")
.publish(m -> null))
.expectErrorSatisfies(e -> assertThat(e)
.isInstanceOf(NullPointerException.class)
.hasMessage("The transform returned a null Mono"))
.verify();
}
@Test
void failingFunction() {
RuntimeException expected = new IllegalStateException("boom");
StepVerifier.create(Mono.just("Foo")
.publish(m -> {
throw expected;
}))
.expectErrorSatisfies(e -> assertThat(e).isSameAs(expected))
.verify();
}
@Test
void syncCancelBeforeComplete() {
assertThat(Mono.just(Mono.just(1).publish(v -> v)).flatMapMany(v -> v).blockLast()).isEqualTo(1);
}
@Test
void normalCancelBeforeComplete() {
assertThat(Mono.just(Mono.just(1).hide().publish(v -> v)).flatMapMany(v -> v).blockLast()).isEqualTo(1);
}
//see https://github.com/reactor/reactor-core/issues/2600
@Test
void errorPropagated() {
final String errorMessage = "Error in Mono";
final Mono<Object> source = Mono.error(new RuntimeException(errorMessage));
final Mono<Object> published = source.publish(coordinator -> coordinator.flatMap(Mono::just));
StepVerifier.create(published)
.expectNoFusionSupport()
.expectErrorMessage(errorMessage)
.verify();
StepVerifier.create(published, StepVerifierOptions.create().scenarioName("second shared invocation"))
.expectNoFusionSupport()
.expectErrorMessage(errorMessage)
.verify();
}
@Test
void scanMulticaster() {
MonoPublishMulticast.MonoPublishMulticaster<Integer> test =
new MonoPublishMulticast.MonoPublishMulticaster<>(Context.empty());
Subscription parent = Operators.emptySubscription();
test.onSubscribe(parent);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(parent);
assertThat(test.scan(Scannable.Attr.PREFETCH)).isEqualTo(1);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
assertThat(test.scan(Scannable.Attr.BUFFERED)).isEqualTo(0);
test.value = 1;
assertThat(test.scan(Scannable.Attr.BUFFERED)).isEqualTo(1);
assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
assertThat(test.scan(Scannable.Attr.ERROR)).isNull();
test.error = new IllegalArgumentException("boom");
assertThat(test.scan(Scannable.Attr.ERROR)).isSameAs(test.error);
test.onComplete();
assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
test.terminate();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue();
}
@Test
void scanMulticastInner() {
CoreSubscriber<Integer> actual = new LambdaSubscriber<>(null, e -> {}, null, null);
MonoPublishMulticast.MonoPublishMulticaster<Integer> parent =
new MonoPublishMulticast.MonoPublishMulticaster<>(Context.empty());
MonoPublishMulticast.PublishMulticastInner<Integer> test =
new MonoPublishMulticast.PublishMulticastInner<>(parent, actual);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(parent);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
test.request(789);
//does not track request in the Mono version
assertThat(test.scan(Scannable.Attr.REQUESTED_FROM_DOWNSTREAM)).isEqualTo(0);
assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
test.cancel();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue();
}
}
| MonoPublishMulticastTest |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/AnnotationUtilsTests.java | {
"start": 53180,
"end": 53456
} | interface ____ {
@MyRepeatable("A")
@MyRepeatableContainer({@MyRepeatable("B"), @MyRepeatable("C")})
@MyRepeatableMeta1
void foo();
}
@MyRepeatable("A")
@MyRepeatableContainer({@MyRepeatable("B"), @MyRepeatable("C")})
@MyRepeatableMeta1
static | InterfaceWithRepeated |
java | quarkusio__quarkus | integration-tests/gradle/src/test/java/io/quarkus/gradle/CustomConfigSourcesTest.java | {
"start": 130,
"end": 667
} | class ____ extends QuarkusGradleWrapperTestBase {
@Test
public void testCustomConfigSources() throws Exception {
var projectDir = getProjectDir("custom-config-sources");
// The test is successful, if the build works, see https://github.com/quarkusio/quarkus/issues/36716
runGradleWrapper(projectDir, "clean", "build", "--no-build-cache");
var p = projectDir.toPath().resolve("build").resolve("quarkus-app").resolve("quarkus-run.jar");
assertThat(p).exists();
}
}
| CustomConfigSourcesTest |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java | {
"start": 22884,
"end": 26156
} | class ____ {
int start;
long timestamp;
final int end;
final Buffer buffer;
Slice(Buffer buffer, int start, int end) {
this.buffer = buffer;
this.start = start;
this.end = end;
this.timestamp = buffer.timestamps.get(start);
}
boolean exhausted() {
return start >= end;
}
int next() {
int index = start++;
if (start < end) {
timestamp = buffer.timestamps.get(start);
}
return index;
}
}
@Override
public void evaluateFinal(Block[] blocks, int offset, IntVector selected, GroupingAggregatorEvaluationContext evalContext) {
BlockFactory blockFactory = driverContext.blockFactory();
int positionCount = selected.getPositionCount();
try (var rates = blockFactory.newDoubleBlockBuilder(positionCount)) {
for (int p = 0; p < positionCount; p++) {
int group = selected.getInt(p);
var state = flushAndCombineState(group);
if (state == null || state.samples < 2) {
rates.appendNull();
continue;
}
// combine intervals for the final evaluation
Interval[] intervals = state.intervals;
ArrayUtil.timSort(intervals);
for (int i = 1; i < intervals.length; i++) {
Interval next = intervals[i - 1]; // reversed
Interval prev = intervals[i];
if (prev.v1 > next.v2) {
state.resets += prev.v1;
}
}
final double rate;
if (evalContext instanceof TimeSeriesGroupingAggregatorEvaluationContext tsContext) {
rate = extrapolateRate(state, tsContext.rangeStartInMillis(group), tsContext.rangeEndInMillis(group), isRateOverTime);
} else {
rate = computeRateWithoutExtrapolate(state, isRateOverTime);
}
rates.appendDouble(rate);
}
blocks[offset] = rates.build();
}
}
ReducedState flushAndCombineState(int groupId) {
ReducedState state = groupId < reducedStates.size() ? reducedStates.getAndSet(groupId, null) : null;
Buffer buffer = groupId < buffers.size() ? buffers.getAndSet(groupId, null) : null;
if (buffer != null) {
try (buffer) {
if (state == null) {
state = new ReducedState();
}
buffer.flush(state);
}
}
return state;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName()).append("[");
sb.append("channels=").append(channels);
sb.append("]");
return sb.toString();
}
record Interval(long t1, double v1, long t2, double v2) implements Comparable<Interval> {
@Override
public int compareTo(Interval other) {
return Long.compare(other.t1, t1); // want most recent first
}
}
static final | Slice |
java | elastic__elasticsearch | modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/AcceptChannelHandler.java | {
"start": 900,
"end": 1866
} | class ____ extends AbstractRemoteAddressFilter<InetSocketAddress> {
private final BiPredicate<String, InetSocketAddress> predicate;
private final String profile;
private final ThreadContext threadContext;
public AcceptChannelHandler(
final BiPredicate<String, InetSocketAddress> predicate,
final String profile,
final ThreadContext threadContext
) {
this.predicate = predicate;
this.profile = profile;
this.threadContext = threadContext;
}
@Override
protected boolean accept(final ChannelHandlerContext ctx, final InetSocketAddress remoteAddress) throws Exception {
// this prevents thread-context changes to propagate beyond the channel accept test, as netty worker threads are reused
try (ThreadContext.StoredContext ignore = threadContext.newStoredContext()) {
return predicate.test(profile, remoteAddress);
}
}
public | AcceptChannelHandler |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/state/ttl/TtlMergingStateTestContext.java | {
"start": 3413,
"end": 4090
} | class ____<
S extends InternalMergingState<?, String, ?, ?, GV>, UV extends Number, GV>
extends TtlMergingStateTestContext<S, UV, GV> {
@Override
UV generateRandomUpdate() {
return (UV) (Integer) RANDOM.nextInt(1000);
}
int getIntegerMergeResult(
List<Tuple2<String, UV>> unexpiredUpdatesToMerge,
List<Tuple2<String, UV>> finalUpdatesToMerge) {
return unexpiredUpdatesToMerge.stream().mapToInt(t -> (Integer) t.f1).sum()
+ finalUpdatesToMerge.stream().mapToInt(t -> (Integer) t.f1).sum();
}
}
}
| TtlIntegerMergingStateTestContext |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/RowData.java | {
"start": 1604,
"end": 2287
} | interface ____ an internal data structure representing data of {@link RowType} and other
* (possibly nested) structured types such as {@link StructuredType} in the table ecosystem.
*
* <p>All top-level records that are travelling through Table API or SQL pipelines during runtime
* are instances of this interface. Each {@link RowData} contains a {@link RowKind} which represents
* the kind of change that a row describes in a changelog. The {@link RowKind} is just metadata
* information of row and thus not part of the table's schema, i.e., not a dedicated field.
*
* <p>Note: All fields of this data structure must be internal data structures.
*
* <p>The {@link RowData} | for |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RLocalCachedMapCacheReactive.java | {
"start": 962,
"end": 1075
} | interface ____<K, V> extends RMapCacheReactive<K, V>, RLocalCachedMapReactive<K, V> {
}
| RLocalCachedMapCacheReactive |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/PolicyConfig.java | {
"start": 1740,
"end": 1903
} | class ____ declare exactly one constructor
* that accepts permission name (`String`) or permission name and actions (`String`, `String[]`).
* Permission | must |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/metrics/TestRBFMetrics.java | {
"start": 2251,
"end": 17081
} | class ____ extends TestMetricsBase {
public static final String FEDERATION_BEAN =
"Hadoop:service=Router,name=FederationState";
public static final String ROUTER_BEAN =
"Hadoop:service=Router,name=Router";
@Test
public void testClusterStatsJMX()
throws MalformedObjectNameException, IOException {
FederationMBean federationBean = getBean(FEDERATION_BEAN,
FederationMBean.class);
validateClusterStatsFederationBean(federationBean);
testCapacity(federationBean);
RouterMBean routerBean = getBean(ROUTER_BEAN, RouterMBean.class);
validateClusterStatsRouterBean(routerBean);
}
@Test
public void testClusterStatsDataSource() throws IOException {
RBFMetrics metrics = getRouter().getMetrics();
validateClusterStatsFederationBean(metrics);
validateClusterStatsRouterBean(metrics);
}
@Test
public void testMountTableStatsDataSource()
throws IOException, JSONException {
RBFMetrics metrics = getRouter().getMetrics();
String jsonString = metrics.getMountTable();
JSONArray jsonArray = new JSONArray(jsonString);
assertEquals(jsonArray.length(), getMockMountTable().size());
int match = 0;
for (int i = 0; i < jsonArray.length(); i++) {
JSONObject json = jsonArray.getJSONObject(i);
String src = json.getString("sourcePath");
for (MountTable entry : getMockMountTable()) {
if (entry.getSourcePath().equals(src)) {
assertEquals(entry.getDefaultLocation().getNameserviceId(),
json.getString("nameserviceId"));
assertEquals(entry.getDefaultLocation().getDest(),
json.getString("path"));
assertEquals(entry.getOwnerName(), json.getString("ownerName"));
assertEquals(entry.getGroupName(), json.getString("groupName"));
assertEquals(entry.getMode().toString(), json.getString("mode"));
assertEquals(entry.getQuota().toString(), json.getString("quota"));
assertNotNullAndNotEmpty(json.getString("dateCreated"));
assertNotNullAndNotEmpty(json.getString("dateModified"));
match++;
}
}
}
assertEquals(match, getMockMountTable().size());
}
private MembershipState findMockNamenode(String nsId, String nnId) {
@SuppressWarnings("unchecked")
List<MembershipState> namenodes =
ListUtils.union(getActiveMemberships(), getStandbyMemberships());
for (MembershipState nn : namenodes) {
if (nn.getNamenodeId().equals(nnId)
&& nn.getNameserviceId().equals(nsId)) {
return nn;
}
}
return null;
}
@Test
public void testNamenodeStatsDataSource() throws IOException, JSONException {
RBFMetrics metrics = getRouter().getMetrics();
String jsonString = metrics.getNamenodes();
JSONObject jsonObject = new JSONObject(jsonString);
Iterator<?> keys = jsonObject.keys();
int nnsFound = 0;
while (keys.hasNext()) {
// Validate each entry against our mocks
JSONObject json = jsonObject.getJSONObject((String) keys.next());
String nameserviceId = json.getString("nameserviceId");
String namenodeId = json.getString("namenodeId");
MembershipState mockEntry =
this.findMockNamenode(nameserviceId, namenodeId);
assertNotNull(mockEntry);
assertEquals(json.getString("state"), mockEntry.getState().toString());
MembershipStats stats = mockEntry.getStats();
assertEquals(json.getLong("numOfActiveDatanodes"),
stats.getNumOfActiveDatanodes());
assertEquals(json.getLong("numOfDeadDatanodes"),
stats.getNumOfDeadDatanodes());
assertEquals(json.getLong("numOfStaleDatanodes"),
stats.getNumOfStaleDatanodes());
assertEquals(json.getLong("numOfDecommissioningDatanodes"),
stats.getNumOfDecommissioningDatanodes());
assertEquals(json.getLong("numOfDecomActiveDatanodes"),
stats.getNumOfDecomActiveDatanodes());
assertEquals(json.getLong("numOfDecomDeadDatanodes"),
stats.getNumOfDecomDeadDatanodes());
assertEquals(json.getLong("numOfInMaintenanceLiveDataNodes"),
stats.getNumOfInMaintenanceLiveDataNodes());
assertEquals(json.getLong("numOfInMaintenanceDeadDataNodes"),
stats.getNumOfInMaintenanceDeadDataNodes());
assertEquals(json.getLong("numOfEnteringMaintenanceDataNodes"),
stats.getNumOfEnteringMaintenanceDataNodes());
assertEquals(json.getLong("numOfBlocks"), stats.getNumOfBlocks());
assertEquals(json.getString("rpcAddress"), mockEntry.getRpcAddress());
assertEquals(json.getString("webScheme"), mockEntry.getWebScheme());
assertEquals(json.getString("webAddress"), mockEntry.getWebAddress());
nnsFound++;
}
// Validate all memberships are present
assertEquals(getActiveMemberships().size() + getStandbyMemberships().size(),
nnsFound);
}
@Test
public void testNameserviceStatsDataSource()
throws IOException, JSONException {
RBFMetrics metrics = getRouter().getMetrics();
String jsonString = metrics.getNameservices();
JSONObject jsonObject = new JSONObject(jsonString);
Iterator<?> keys = jsonObject.keys();
int nameservicesFound = 0;
while (keys.hasNext()) {
JSONObject json = jsonObject.getJSONObject((String) keys.next());
String nameserviceId = json.getString("nameserviceId");
String namenodeId = json.getString("namenodeId");
MembershipState mockEntry =
this.findMockNamenode(nameserviceId, namenodeId);
assertNotNull(mockEntry);
// NS should report the active NN
assertEquals(mockEntry.getState().toString(), json.getString("state"));
assertEquals("ACTIVE", json.getString("state"));
// Stats in the NS should reflect the stats for the most active NN
MembershipStats stats = mockEntry.getStats();
assertEquals(stats.getNumOfFiles(), json.getLong("numOfFiles"));
assertEquals(stats.getTotalSpace(), json.getLong("totalSpace"));
assertEquals(stats.getAvailableSpace(),
json.getLong("availableSpace"));
assertEquals(stats.getNumOfBlocksMissing(),
json.getLong("numOfBlocksMissing"));
assertEquals(stats.getNumOfActiveDatanodes(),
json.getLong("numOfActiveDatanodes"));
assertEquals(stats.getNumOfDeadDatanodes(),
json.getLong("numOfDeadDatanodes"));
assertEquals(stats.getNumOfStaleDatanodes(),
json.getLong("numOfStaleDatanodes"));
assertEquals(stats.getNumOfDecommissioningDatanodes(),
json.getLong("numOfDecommissioningDatanodes"));
assertEquals(stats.getNumOfDecomActiveDatanodes(),
json.getLong("numOfDecomActiveDatanodes"));
assertEquals(stats.getNumOfDecomDeadDatanodes(),
json.getLong("numOfDecomDeadDatanodes"));
assertEquals(stats.getNumOfInMaintenanceLiveDataNodes(),
json.getLong("numOfInMaintenanceLiveDataNodes"));
assertEquals(stats.getNumOfInMaintenanceDeadDataNodes(),
json.getLong("numOfInMaintenanceDeadDataNodes"));
assertEquals(stats.getNumOfStaleDatanodes(),
json.getLong("numOfEnteringMaintenanceDataNodes"));
assertEquals(stats.getProvidedSpace(),
json.getLong("providedSpace"));
assertEquals(stats.getPendingSPSPaths(),
json.getInt("pendingSPSPaths"));
nameservicesFound++;
}
assertEquals(getNameservices().size(), nameservicesFound);
}
@Test
public void testRouterStatsDataSource() throws IOException, JSONException {
RBFMetrics metrics = getRouter().getMetrics();
String jsonString = metrics.getRouters();
JSONObject jsonObject = new JSONObject(jsonString);
Iterator<?> keys = jsonObject.keys();
int routersFound = 0;
while (keys.hasNext()) {
JSONObject json = jsonObject.getJSONObject((String) keys.next());
String address = json.getString("address");
assertNotNullAndNotEmpty(address);
RouterState router = findMockRouter(address);
assertNotNull(router);
assertEquals(router.getStatus().toString(), json.getString("status"));
assertEquals(router.getCompileInfo(), json.getString("compileInfo"));
assertEquals(router.getVersion(), json.getString("version"));
assertEquals(router.getDateStarted(), json.getLong("dateStarted"));
assertEquals(router.getDateCreated(), json.getLong("dateCreated"));
assertEquals(router.getDateModified(), json.getLong("dateModified"));
StateStoreVersion version = router.getStateStoreVersion();
assertEquals(
RBFMetrics.getDateString(version.getMembershipVersion()),
json.get("lastMembershipUpdate"));
assertEquals(
RBFMetrics.getDateString(version.getMountTableVersion()),
json.get("lastMountTableUpdate"));
assertEquals(version.getMembershipVersion(),
json.get("membershipVersion"));
assertEquals(version.getMountTableVersion(),
json.get("mountTableVersion"));
routersFound++;
}
assertEquals(getMockRouters().size(), routersFound);
}
private void assertNotNullAndNotEmpty(String field) {
assertNotNull(field);
assertTrue(field.length() > 0);
}
private RouterState findMockRouter(String routerId) {
for (RouterState router : getMockRouters()) {
if (router.getAddress().equals(routerId)) {
return router;
}
}
return null;
}
private void validateClusterStatsFederationBean(FederationMBean bean) {
// Determine aggregates
long numBlocks = 0;
long numLive = 0;
long numDead = 0;
long numStale = 0;
long numDecom = 0;
long numDecomLive = 0;
long numDecomDead = 0;
long numInMaintenanceLive = 0;
long numInMaintenanceDead = 0;
long numEnteringMaintenance = 0;
int numCorruptsFilesCount = 0;
long scheduledReplicationBlocks = 0;
long numberOfMissingBlocksWithReplicationFactorOne = 0;
long numberOfBadlyDistributedBlocks = 0;
long highestPriorityLowRedundancyReplicatedBlocks = 0;
long highestPriorityLowRedundancyECBlocks = 0;
long numFiles = 0;
int pendingSPSPaths = 0;
for (MembershipState mock : getActiveMemberships()) {
MembershipStats stats = mock.getStats();
numBlocks += stats.getNumOfBlocks();
numLive += stats.getNumOfActiveDatanodes();
numDead += stats.getNumOfDeadDatanodes();
numStale += stats.getNumOfStaleDatanodes();
numDecom += stats.getNumOfDecommissioningDatanodes();
numDecomLive += stats.getNumOfDecomActiveDatanodes();
numDecomDead += stats.getNumOfDecomDeadDatanodes();
numInMaintenanceLive += stats.getNumOfInMaintenanceLiveDataNodes();
numInMaintenanceDead += stats.getNumOfInMaintenanceLiveDataNodes();
numEnteringMaintenance += stats.getNumOfEnteringMaintenanceDataNodes();
numCorruptsFilesCount += stats.getCorruptFilesCount();
scheduledReplicationBlocks += stats.getScheduledReplicationBlocks();
numberOfMissingBlocksWithReplicationFactorOne +=
stats.getNumberOfMissingBlocksWithReplicationFactorOne();
numberOfBadlyDistributedBlocks += stats.getNumberOfBadlyDistributedBlocks();
highestPriorityLowRedundancyReplicatedBlocks +=
stats.getHighestPriorityLowRedundancyReplicatedBlocks();
highestPriorityLowRedundancyECBlocks +=
stats.getHighestPriorityLowRedundancyECBlocks();
pendingSPSPaths += stats.getPendingSPSPaths();
}
assertEquals(numBlocks, bean.getNumBlocks());
assertEquals(numLive, bean.getNumLiveNodes());
assertEquals(numDead, bean.getNumDeadNodes());
assertEquals(numStale, bean.getNumStaleNodes());
assertEquals(numDecom, bean.getNumDecommissioningNodes());
assertEquals(numDecomLive, bean.getNumDecomLiveNodes());
assertEquals(numDecomDead, bean.getNumDecomDeadNodes());
assertEquals(numInMaintenanceLive, bean.getNumInMaintenanceLiveDataNodes());
assertEquals(numInMaintenanceDead, bean.getNumInMaintenanceDeadDataNodes());
assertEquals(numEnteringMaintenance,
bean.getNumEnteringMaintenanceDataNodes());
assertEquals(numFiles, bean.getNumFiles());
assertEquals(getActiveMemberships().size() + getStandbyMemberships().size(),
bean.getNumNamenodes());
assertEquals(getNameservices().size(), bean.getNumNameservices());
assertEquals(numCorruptsFilesCount, bean.getCorruptFilesCount());
assertEquals(scheduledReplicationBlocks,
bean.getScheduledReplicationBlocks());
assertEquals(numberOfMissingBlocksWithReplicationFactorOne,
bean.getNumberOfMissingBlocksWithReplicationFactorOne());
assertEquals(numberOfBadlyDistributedBlocks,
bean.getNumberOfBadlyDistributedBlocks());
assertEquals(highestPriorityLowRedundancyReplicatedBlocks,
bean.getHighestPriorityLowRedundancyReplicatedBlocks());
assertEquals(highestPriorityLowRedundancyECBlocks,
bean.getHighestPriorityLowRedundancyECBlocks());
assertEquals(pendingSPSPaths, bean.getPendingSPSPaths());
}
private void validateClusterStatsRouterBean(RouterMBean bean) {
assertTrue(bean.getVersion().length() > 0);
assertTrue(bean.getCompiledDate().length() > 0);
assertTrue(bean.getCompileInfo().length() > 0);
assertTrue(bean.getRouterStarted().length() > 0);
assertTrue(bean.getHostAndPort().length() > 0);
assertFalse(bean.isSecurityEnabled());
}
private void testCapacity(FederationMBean bean) throws IOException {
List<MembershipState> memberships = getActiveMemberships();
assertTrue(memberships.size() > 1);
BigInteger availableCapacity = BigInteger.valueOf(0);
BigInteger totalCapacity = BigInteger.valueOf(0);
BigInteger unitCapacity = BigInteger.valueOf(Long.MAX_VALUE);
for (MembershipState mock : memberships) {
MembershipStats stats = mock.getStats();
stats.setTotalSpace(Long.MAX_VALUE);
stats.setAvailableSpace(Long.MAX_VALUE);
// reset stats to make the new value persistent
mock.setStats(stats);
// write back the new namenode information to state store
assertTrue(refreshNamenodeRegistration(
NamenodeHeartbeatRequest.newInstance(mock)));
totalCapacity = totalCapacity.add(unitCapacity);
availableCapacity = availableCapacity.add(unitCapacity);
}
// for local cache update
assertEquals(totalCapacity, bean.getTotalCapacityBigInt());
// not equal since overflow happened.
assertNotEquals(totalCapacity, BigInteger.valueOf(bean.getTotalCapacity()));
assertEquals(availableCapacity, bean.getRemainingCapacityBigInt());
// not equal since overflow happened.
assertNotEquals(availableCapacity,
BigInteger.valueOf(bean.getRemainingCapacity()));
}
}
| TestRBFMetrics |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/proxy/FetchGraphTest.java | {
"start": 27397,
"end": 27738
} | class ____ extends BaseEntity {
private Integer b1;
private String b2;
public Integer getB1() {
return b1;
}
public void setB1(Integer b1) {
this.b1 = b1;
}
public String getB2() {
return b2;
}
public void setB2(String b2) {
this.b2 = b2;
}
}
@Entity(name = "C")
@Table(name = "C")
public static | BEntity |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/kstream/internals/KTableAggregateTest.java | {
"start": 19041,
"end": 19265
} | class ____ {
private final String x;
public NoEqualsImpl(final String x) {
this.x = x;
}
public String getX() {
return x;
}
}
private static | NoEqualsImpl |
java | grpc__grpc-java | gcp-observability/src/main/java/io/grpc/gcp/observability/interceptors/LogHelper.java | {
"start": 1942,
"end": 11518
} | class ____ {
private static final Logger logger = Logger.getLogger(LogHelper.class.getName());
// TODO(DNVindhya): Define it in one places(TBD) to make it easily accessible from everywhere
static final Metadata.Key<byte[]> STATUS_DETAILS_KEY =
Metadata.Key.of(
"grpc-status-details-bin",
Metadata.BINARY_BYTE_MARSHALLER);
private final Sink sink;
/**
* Creates a LogHelper instance.
* @param sink sink
*
*/
public LogHelper(Sink sink) {
this.sink = sink;
}
/**
* Logs the request header. Binary logging equivalent of logClientHeader.
*/
void logClientHeader(
long seqId,
String serviceName,
String methodName,
String authority,
@Nullable Duration timeout,
Metadata metadata,
int maxHeaderBytes,
GrpcLogRecord.EventLogger eventLogger,
String callId,
// null on client side
@Nullable SocketAddress peerAddress,
SpanContext spanContext) {
checkNotNull(serviceName, "serviceName");
checkNotNull(methodName, "methodName");
checkNotNull(authority, "authority");
checkNotNull(callId, "callId");
checkArgument(
peerAddress == null || eventLogger == GrpcLogRecord.EventLogger.SERVER,
"peerAddress can only be specified by server");
PayloadBuilderHelper<Payload.Builder> pair =
createMetadataProto(metadata, maxHeaderBytes);
if (timeout != null) {
pair.payloadBuilder.setTimeout(timeout);
}
GrpcLogRecord.Builder logEntryBuilder = GrpcLogRecord.newBuilder()
.setSequenceId(seqId)
.setServiceName(serviceName)
.setMethodName(methodName)
.setAuthority(authority)
.setType(EventType.CLIENT_HEADER)
.setLogger(eventLogger)
.setPayload(pair.payloadBuilder)
.setPayloadTruncated(pair.truncated)
.setCallId(callId);
if (peerAddress != null) {
logEntryBuilder.setPeer(socketAddressToProto(peerAddress));
}
sink.write(logEntryBuilder.build(), spanContext);
}
/**
* Logs the response header. Binary logging equivalent of logServerHeader.
*/
void logServerHeader(
long seqId,
String serviceName,
String methodName,
String authority,
Metadata metadata,
int maxHeaderBytes,
GrpcLogRecord.EventLogger eventLogger,
String callId,
@Nullable SocketAddress peerAddress,
SpanContext spanContext) {
checkNotNull(serviceName, "serviceName");
checkNotNull(methodName, "methodName");
checkNotNull(authority, "authority");
checkNotNull(callId, "callId");
// Logging peer address only on the first incoming event. On server side, peer address will
// of logging request header
checkArgument(
peerAddress == null || eventLogger == GrpcLogRecord.EventLogger.CLIENT,
"peerAddress can only be specified for client");
PayloadBuilderHelper<Payload.Builder> pair =
createMetadataProto(metadata, maxHeaderBytes);
GrpcLogRecord.Builder logEntryBuilder = GrpcLogRecord.newBuilder()
.setSequenceId(seqId)
.setServiceName(serviceName)
.setMethodName(methodName)
.setAuthority(authority)
.setType(EventType.SERVER_HEADER)
.setLogger(eventLogger)
.setPayload(pair.payloadBuilder)
.setPayloadTruncated(pair.truncated)
.setCallId(callId);
if (peerAddress != null) {
logEntryBuilder.setPeer(socketAddressToProto(peerAddress));
}
sink.write(logEntryBuilder.build(), spanContext);
}
/**
* Logs the server trailer.
*/
void logTrailer(
long seqId,
String serviceName,
String methodName,
String authority,
Status status,
Metadata metadata,
int maxHeaderBytes,
GrpcLogRecord.EventLogger eventLogger,
String callId,
@Nullable SocketAddress peerAddress,
SpanContext spanContext) {
checkNotNull(serviceName, "serviceName");
checkNotNull(methodName, "methodName");
checkNotNull(authority, "authority");
checkNotNull(status, "status");
checkNotNull(callId, "callId");
checkArgument(
peerAddress == null || eventLogger == GrpcLogRecord.EventLogger.CLIENT,
"peerAddress can only be specified for client");
PayloadBuilderHelper<Payload.Builder> pair =
createMetadataProto(metadata, maxHeaderBytes);
pair.payloadBuilder.setStatusCode(Code.forNumber(status.getCode().value()));
String statusDescription = status.getDescription();
if (statusDescription != null) {
pair.payloadBuilder.setStatusMessage(statusDescription);
}
byte[] statusDetailBytes = metadata.get(STATUS_DETAILS_KEY);
if (statusDetailBytes != null) {
pair.payloadBuilder.setStatusDetails(ByteString.copyFrom(statusDetailBytes));
}
GrpcLogRecord.Builder logEntryBuilder = GrpcLogRecord.newBuilder()
.setSequenceId(seqId)
.setServiceName(serviceName)
.setMethodName(methodName)
.setAuthority(authority)
.setType(EventType.SERVER_TRAILER)
.setLogger(eventLogger)
.setPayload(pair.payloadBuilder)
.setPayloadTruncated(pair.truncated)
.setCallId(callId);
if (peerAddress != null) {
logEntryBuilder.setPeer(socketAddressToProto(peerAddress));
}
sink.write(logEntryBuilder.build(), spanContext);
}
/**
* Logs the RPC message.
*/
<T> void logRpcMessage(
long seqId,
String serviceName,
String methodName,
String authority,
EventType eventType,
T message,
int maxMessageBytes,
EventLogger eventLogger,
String callId,
SpanContext spanContext) {
checkNotNull(serviceName, "serviceName");
checkNotNull(methodName, "methodName");
checkNotNull(authority, "authority");
checkNotNull(callId, "callId");
checkArgument(
eventType == EventType.CLIENT_MESSAGE
|| eventType == EventType.SERVER_MESSAGE,
"event type must correspond to client message or server message");
checkNotNull(message, "message");
// TODO(DNVindhya): Implement conversion of generics to ByteString
// Following is a temporary workaround to log if message is of following types :
// 1. com.google.protobuf.Message
// 2. byte[]
byte[] messageBytesArray = null;
if (message instanceof com.google.protobuf.Message) {
messageBytesArray = ((com.google.protobuf.Message) message).toByteArray();
} else if (message instanceof byte[]) {
messageBytesArray = (byte[]) message;
} else {
logger.log(Level.WARNING, "message is of UNKNOWN type, message and payload_size fields "
+ "of GrpcLogRecord proto will not be logged");
}
PayloadBuilderHelper<Payload.Builder> pair = null;
if (messageBytesArray != null) {
pair = createMessageProto(messageBytesArray, maxMessageBytes);
}
GrpcLogRecord.Builder logEntryBuilder = GrpcLogRecord.newBuilder()
.setSequenceId(seqId)
.setServiceName(serviceName)
.setMethodName(methodName)
.setAuthority(authority)
.setType(eventType)
.setLogger(eventLogger)
.setCallId(callId);
if (pair != null) {
logEntryBuilder.setPayload(pair.payloadBuilder)
.setPayloadTruncated(pair.truncated);
}
sink.write(logEntryBuilder.build(), spanContext);
}
/**
* Logs half close.
*/
void logHalfClose(
long seqId,
String serviceName,
String methodName,
String authority,
GrpcLogRecord.EventLogger eventLogger,
String callId,
SpanContext spanContext) {
checkNotNull(serviceName, "serviceName");
checkNotNull(methodName, "methodName");
checkNotNull(authority, "authority");
checkNotNull(callId, "callId");
GrpcLogRecord.Builder logEntryBuilder = GrpcLogRecord.newBuilder()
.setSequenceId(seqId)
.setServiceName(serviceName)
.setMethodName(methodName)
.setAuthority(authority)
.setType(EventType.CLIENT_HALF_CLOSE)
.setLogger(eventLogger)
.setCallId(callId);
sink.write(logEntryBuilder.build(), spanContext);
}
/**
* Logs cancellation.
*/
void logCancel(
long seqId,
String serviceName,
String methodName,
String authority,
GrpcLogRecord.EventLogger eventLogger,
String callId,
SpanContext spanContext) {
checkNotNull(serviceName, "serviceName");
checkNotNull(methodName, "methodName");
checkNotNull(authority, "authority");
checkNotNull(callId, "callId");
GrpcLogRecord.Builder logEntryBuilder = GrpcLogRecord.newBuilder()
.setSequenceId(seqId)
.setServiceName(serviceName)
.setMethodName(methodName)
.setAuthority(authority)
.setType(EventType.CANCEL)
.setLogger(eventLogger)
.setCallId(callId);
sink.write(logEntryBuilder.build(), spanContext);
}
  // Metadata keys filtered during header/trailer serialization.
  // TODO(DNVindhya): Evaluate if we need following clause for metadata logging in GcpObservability
  // Leaving the implementation for now as is to have same behavior across Java and Go
  private static final Set<String> NEVER_INCLUDED_METADATA = new HashSet<>(
      Collections.singletonList(
          // grpc-status-details-bin is already logged in `status_details` field of the
          // observabilitylog proto
          STATUS_DETAILS_KEY.name()));
  // Keys logged even when they would otherwise be filtered out (trace propagation header).
  private static final Set<String> ALWAYS_INCLUDED_METADATA = new HashSet<>(
      Collections.singletonList(
          "grpc-trace-bin"));
static final | LogHelper |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/impl/engine/CamelPostProcessorHelperTest.java | {
"start": 33489,
"end": 33689
} | class ____ {
@BeanInject
public FooBar foo;
public String doSomething(String body) {
return foo.hello(body);
}
}
public static | MyBeanInjectByTypeBean |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java | {
"start": 316,
"end": 1820
} | class ____ {
static final String INFERENCE_ID = "inference_id";
static final String TASK_TYPE_OR_INFERENCE_ID = "task_type_or_id";
static final String TASK_TYPE = "task_type";
public static final String INFERENCE_CCM_PATH = "_inference/_ccm";
static final String INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}";
static final String TASK_TYPE_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/{" + INFERENCE_ID + "}";
static final String INFERENCE_DIAGNOSTICS_PATH = "_inference/.diagnostics";
static final String TASK_TYPE_INFERENCE_ID_UPDATE_PATH = "_inference/{"
+ TASK_TYPE_OR_INFERENCE_ID
+ "}/{"
+ INFERENCE_ID
+ "}/_update";
static final String INFERENCE_ID_UPDATE_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/_update";
static final String INFERENCE_SERVICES_PATH = "_inference/_services";
static final String TASK_TYPE_INFERENCE_SERVICES_PATH = "_inference/_services/{" + TASK_TYPE + "}";
public static final String STREAM_SUFFIX = "_stream";
static final String STREAM_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/" + STREAM_SUFFIX;
static final String STREAM_TASK_TYPE_INFERENCE_ID_PATH = "_inference/{"
+ TASK_TYPE_OR_INFERENCE_ID
+ "}/{"
+ INFERENCE_ID
+ "}/"
+ STREAM_SUFFIX;
public static final String RETURN_MINIMAL_CONFIG = "return_minimal_config";
private Paths() {
}
}
| Paths |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/xslt/InvalidXsltFileTest.java | {
"start": 1242,
"end": 1973
} | class ____ extends TestSupport {
@Test
public void testInvalidStylesheet() throws Exception {
RouteBuilder builder = createRouteBuilder();
CamelContext context = new DefaultCamelContext();
context.addRoutes(builder);
RuntimeCamelException exception = assertThrows(RuntimeCamelException.class, context::start);
assertIsInstanceOf(TransformerConfigurationException.class, exception.getCause().getCause().getCause());
}
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("seda:a").to("xslt:org/apache/camel/component/xslt/invalid.xsl");
}
};
}
}
| InvalidXsltFileTest |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/server/authentication/session/InMemoryReactiveSessionRegistryTests.java | {
"start": 1251,
"end": 4802
} | class ____ {
InMemoryReactiveSessionRegistry sessionRegistry = new InMemoryReactiveSessionRegistry();
Instant now = LocalDate.of(2023, 11, 21).atStartOfDay().toInstant(ZoneOffset.UTC);
@Test
void saveWhenPrincipalThenRegisterPrincipalSession() {
Authentication authentication = TestAuthentication.authenticatedUser();
ReactiveSessionInformation sessionInformation = new ReactiveSessionInformation(authentication.getPrincipal(),
"1234", this.now);
this.sessionRegistry.saveSessionInformation(sessionInformation).block();
List<ReactiveSessionInformation> principalSessions = this.sessionRegistry
.getAllSessions(authentication.getPrincipal())
.collectList()
.block();
assertThat(principalSessions).hasSize(1);
assertThat(this.sessionRegistry.getSessionInformation("1234").block()).isNotNull();
}
@Test
void getAllSessionsWhenMultipleSessionsThenReturnAll() {
Authentication authentication = TestAuthentication.authenticatedUser();
ReactiveSessionInformation sessionInformation1 = new ReactiveSessionInformation(authentication.getPrincipal(),
"1234", this.now);
ReactiveSessionInformation sessionInformation2 = new ReactiveSessionInformation(authentication.getPrincipal(),
"4321", this.now);
ReactiveSessionInformation sessionInformation3 = new ReactiveSessionInformation(authentication.getPrincipal(),
"9876", this.now);
this.sessionRegistry.saveSessionInformation(sessionInformation1).block();
this.sessionRegistry.saveSessionInformation(sessionInformation2).block();
this.sessionRegistry.saveSessionInformation(sessionInformation3).block();
List<ReactiveSessionInformation> sessions = this.sessionRegistry.getAllSessions(authentication.getPrincipal())
.collectList()
.block();
assertThat(sessions).hasSize(3);
assertThat(this.sessionRegistry.getSessionInformation("1234").block()).isNotNull();
assertThat(this.sessionRegistry.getSessionInformation("4321").block()).isNotNull();
assertThat(this.sessionRegistry.getSessionInformation("9876").block()).isNotNull();
}
@Test
void removeSessionInformationThenSessionIsRemoved() {
Authentication authentication = TestAuthentication.authenticatedUser();
ReactiveSessionInformation sessionInformation = new ReactiveSessionInformation(authentication.getPrincipal(),
"1234", this.now);
this.sessionRegistry.saveSessionInformation(sessionInformation).block();
this.sessionRegistry.removeSessionInformation("1234").block();
List<ReactiveSessionInformation> sessions = this.sessionRegistry.getAllSessions(authentication.getName())
.collectList()
.block();
assertThat(this.sessionRegistry.getSessionInformation("1234").block()).isNull();
assertThat(sessions).isEmpty();
}
@Test
void updateLastAccessTimeThenUpdated() {
Authentication authentication = TestAuthentication.authenticatedUser();
ReactiveSessionInformation sessionInformation = new ReactiveSessionInformation(authentication.getPrincipal(),
"1234", this.now);
this.sessionRegistry.saveSessionInformation(sessionInformation).block();
ReactiveSessionInformation saved = this.sessionRegistry.getSessionInformation("1234").block();
assertThat(saved.getLastAccessTime()).isNotNull();
Instant lastAccessTimeBefore = saved.getLastAccessTime();
this.sessionRegistry.updateLastAccessTime("1234").block();
saved = this.sessionRegistry.getSessionInformation("1234").block();
assertThat(saved.getLastAccessTime()).isNotNull();
assertThat(saved.getLastAccessTime()).isAfter(lastAccessTimeBefore);
}
}
| InMemoryReactiveSessionRegistryTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/service/MasterService.java | {
"start": 70304,
"end": 74598
} | class ____<T extends ClusterStateTaskListener> implements MasterServiceTaskQueue<T> {
private final ConcurrentLinkedQueue<Entry<T>> queue = new ConcurrentLinkedQueue<>();
private final ConcurrentLinkedQueue<Entry<T>> executing = new ConcurrentLinkedQueue<>(); // executing tasks are also shown in APIs
private final AtomicInteger queueSize = new AtomicInteger();
private final String name;
private final BatchConsumer<T> batchConsumer;
private final LongSupplier insertionIndexSupplier;
private final PerPriorityQueue perPriorityQueue;
private final ClusterStateTaskExecutor<T> executor;
private final ThreadPool threadPool;
private final Batch processor = new Processor();
BatchingTaskQueue(
String name,
BatchConsumer<T> batchConsumer,
LongSupplier insertionIndexSupplier,
PerPriorityQueue perPriorityQueue,
ClusterStateTaskExecutor<T> executor,
ThreadPool threadPool
) {
this.name = name;
this.batchConsumer = batchConsumer;
this.insertionIndexSupplier = insertionIndexSupplier;
this.perPriorityQueue = perPriorityQueue;
this.executor = executor;
this.threadPool = threadPool;
}
@Override
public void submitTask(String source, T task, @Nullable TimeValue timeout) {
final var taskHolder = new AtomicReference<>(task);
final Scheduler.Cancellable timeoutCancellable;
if (timeout != null && timeout.millis() > 0) {
try {
timeoutCancellable = threadPool.schedule(
new TaskTimeoutHandler<>(timeout, source, taskHolder),
timeout,
threadPool.generic()
);
} catch (Exception e) {
assert e instanceof EsRejectedExecutionException esre && esre.isExecutorShutdown() : e;
task.onFailure(
new NotMasterException("could not schedule timeout handler for [%s][%s] on queue [%s]", e, source, task, name)
);
return;
}
} else {
timeoutCancellable = null;
}
queue.add(
new Entry<>(
source,
taskHolder,
insertionIndexSupplier.getAsLong(),
threadPool.relativeTimeInMillis(),
threadPool.getThreadContext().newRestorableContext(true),
timeoutCancellable
)
);
if (queueSize.getAndIncrement() == 0) {
perPriorityQueue.execute(processor);
}
}
@Override
public String toString() {
return "BatchingTaskQueue[" + name + "]";
}
private record Entry<T extends ClusterStateTaskListener>(
String source,
AtomicReference<T> taskHolder,
long insertionIndex,
long insertionTimeMillis,
Supplier<ThreadContext.StoredContext> storedContextSupplier,
@Nullable Scheduler.Cancellable timeoutCancellable
) {
T acquireForExecution() {
final var task = taskHolder.getAndSet(null);
if (task != null && timeoutCancellable != null) {
timeoutCancellable.cancel();
}
return task;
}
void onRejection(NotMasterException e) {
final var task = acquireForExecution();
if (task != null) {
try (var ignored = storedContextSupplier.get()) {
task.onFailure(e);
} catch (Exception e2) {
e2.addSuppressed(e);
logger.error(() -> format("exception failing task [%s] on rejection", task), e2);
assert false : e2;
}
}
}
boolean isPending() {
return taskHolder().get() != null;
}
}
private | BatchingTaskQueue |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/savedrequest/HttpSessionRequestCacheTests.java | {
"start": 1440,
"end": 7641
} | class ____ {
@Test
public void originalGetRequestDoesntMatchIncomingPost() {
HttpSessionRequestCache cache = new HttpSessionRequestCache();
MockHttpServletRequest request = new MockHttpServletRequest("GET", "/destination");
MockHttpServletResponse response = new MockHttpServletResponse();
cache.saveRequest(request, response);
assertThat(request.getSession().getAttribute(HttpSessionRequestCache.SAVED_REQUEST)).isNotNull();
assertThat(cache.getRequest(request, response)).isNotNull();
MockHttpServletRequest newRequest = new MockHttpServletRequest("POST", "/destination");
newRequest.setSession(request.getSession());
assertThat(cache.getMatchingRequest(newRequest, response)).isNull();
}
@Test
public void requestMatcherDefinesCorrectSubsetOfCachedRequests() {
HttpSessionRequestCache cache = new HttpSessionRequestCache();
cache.setRequestMatcher((request) -> request.getMethod().equals("GET"));
MockHttpServletRequest request = new MockHttpServletRequest("POST", "/destination");
MockHttpServletResponse response = new MockHttpServletResponse();
cache.saveRequest(request, response);
assertThat(cache.getRequest(request, response)).isNull();
assertThat(cache.getRequest(new MockHttpServletRequest(), new MockHttpServletResponse())).isNull();
assertThat(cache.getMatchingRequest(request, response)).isNull();
}
// SEC-2246
@Test
public void getRequestCustomNoClassCastException() {
MockHttpServletRequest request = new MockHttpServletRequest("POST", "/destination");
MockHttpServletResponse response = new MockHttpServletResponse();
HttpSessionRequestCache cache = new HttpSessionRequestCache() {
@Override
public void saveRequest(HttpServletRequest request, HttpServletResponse response) {
request.getSession()
.setAttribute(SAVED_REQUEST, new CustomSavedRequest(new DefaultSavedRequest(request)));
}
};
cache.saveRequest(request, response);
cache.saveRequest(request, response);
assertThat(cache.getRequest(request, response)).isInstanceOf(CustomSavedRequest.class);
}
@Test
public void testCustomSessionAttrName() {
HttpSessionRequestCache cache = new HttpSessionRequestCache();
cache.setSessionAttrName("CUSTOM_SAVED_REQUEST");
MockHttpServletRequest request = new MockHttpServletRequest("GET", "/destination");
MockHttpServletResponse response = new MockHttpServletResponse();
cache.saveRequest(request, response);
assertThat(request.getSession().getAttribute(HttpSessionRequestCache.SAVED_REQUEST)).isNull();
assertThat(request.getSession().getAttribute("CUSTOM_SAVED_REQUEST")).isNotNull();
}
@Test
public void getMatchingRequestWhenMatchingRequestParameterNameSetThenSessionNotAccessed() {
HttpSessionRequestCache cache = new HttpSessionRequestCache();
cache.setMatchingRequestParameterName("success");
HttpServletRequest request = spy(new MockHttpServletRequest());
HttpServletRequest matchingRequest = cache.getMatchingRequest(request, new MockHttpServletResponse());
assertThat(matchingRequest).isNull();
verify(request, never()).getSession();
verify(request, never()).getSession(anyBoolean());
}
@Test
public void getMatchingRequestWhenMatchingRequestParameterNameSetAndParameterExistThenLookedUp() {
MockHttpServletRequest request = new MockHttpServletRequest();
HttpSessionRequestCache cache = new HttpSessionRequestCache();
cache.setMatchingRequestParameterName("success");
cache.saveRequest(request, new MockHttpServletResponse());
MockHttpServletRequest requestToMatch = new MockHttpServletRequest();
requestToMatch.setQueryString("success"); // gh-12665
requestToMatch.setSession(request.getSession());
HttpServletRequest matchingRequest = cache.getMatchingRequest(requestToMatch, new MockHttpServletResponse());
assertThat(matchingRequest).isNotNull();
}
// gh-12665
@Test
public void getMatchingRequestWhenMatchingRequestParameterNameSetAndParameterExistAndQueryThenLookedUp() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.setQueryString("param=true");
HttpSessionRequestCache cache = new HttpSessionRequestCache();
cache.setMatchingRequestParameterName("success");
cache.saveRequest(request, new MockHttpServletResponse());
MockHttpServletRequest requestToMatch = new MockHttpServletRequest();
requestToMatch.setQueryString("param=true&success");
requestToMatch.setSession(request.getSession());
HttpServletRequest matchingRequest = cache.getMatchingRequest(requestToMatch, new MockHttpServletResponse());
assertThat(matchingRequest).isNotNull();
}
@Test
public void getMatchingRequestWhenMatchesThenRemoved() {
MockHttpServletRequest request = new MockHttpServletRequest();
HttpSessionRequestCache cache = new HttpSessionRequestCache();
cache.setMatchingRequestParameterName("success");
cache.saveRequest(request, new MockHttpServletResponse());
assertThat(request.getSession().getAttribute(HttpSessionRequestCache.SAVED_REQUEST)).isNotNull();
MockHttpServletRequest requestToMatch = new MockHttpServletRequest();
requestToMatch.setQueryString("success");
requestToMatch.setSession(request.getSession());
HttpServletRequest matchingRequest = cache.getMatchingRequest(requestToMatch, new MockHttpServletResponse());
assertThat(matchingRequest).isNotNull();
assertThat(request.getSession().getAttribute(HttpSessionRequestCache.SAVED_REQUEST)).isNull();
}
// gh-13731
@Test
public void getMatchingRequestWhenMatchingRequestParameterNameSetThenDoesNotInvokeGetParameterMethods() {
HttpSessionRequestCache cache = new HttpSessionRequestCache();
cache.setMatchingRequestParameterName("success");
MockHttpServletRequest mockRequest = new MockHttpServletRequest();
mockRequest.setQueryString("success");
HttpServletRequest request = spy(mockRequest);
HttpServletRequest matchingRequest = cache.getMatchingRequest(request, new MockHttpServletResponse());
assertThat(matchingRequest).isNull();
verify(request, never()).getParameter(anyString());
verify(request, never()).getParameterValues(anyString());
verify(request, never()).getParameterNames();
verify(request, never()).getParameterMap();
}
private static final | HttpSessionRequestCacheTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/ProtoDurationGetSecondsGetNanoTest.java | {
"start": 7423,
"end": 7908
} | class ____ {
// BUG: Diagnostic contains: ProtoDurationGetSecondsGetNano
private final int nanos = Duration.getDefaultInstance().getNanos();
}
""")
.doTest();
}
@Test
public void getNanoInInnerClassGetSecondsInMethod() {
compilationHelper
.addSourceLines(
"test/TestCase.java",
"""
package test;
import com.google.protobuf.Duration;
public | TestCase |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigTests.java | {
"start": 590,
"end": 1872
} | class ____ extends AbstractBWCSerializationTestCase<RegressionConfig> {
private boolean lenient;
public static RegressionConfig randomRegressionConfig() {
return new RegressionConfig(randomBoolean() ? null : randomAlphaOfLength(10));
}
@Before
public void chooseStrictOrLenient() {
lenient = randomBoolean();
}
@Override
protected RegressionConfig createTestInstance() {
return randomRegressionConfig();
}
@Override
protected RegressionConfig mutateInstance(RegressionConfig instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected Writeable.Reader<RegressionConfig> instanceReader() {
return RegressionConfig::new;
}
@Override
protected RegressionConfig doParseInstance(XContentParser parser) throws IOException {
return lenient ? RegressionConfig.fromXContentLenient(parser) : RegressionConfig.fromXContentStrict(parser);
}
@Override
protected boolean supportsUnknownFields() {
return lenient;
}
@Override
protected RegressionConfig mutateInstanceForVersion(RegressionConfig instance, TransportVersion version) {
return instance;
}
}
| RegressionConfigTests |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSPreemptionThread.java | {
"start": 11828,
"end": 13452
} | class ____ {
Map<ApplicationId, List<RMContainer>> containersByApp;
int numAMContainers;
int maxAMContainers;
PreemptableContainers(int maxAMContainers) {
numAMContainers = 0;
this.maxAMContainers = maxAMContainers;
this.containersByApp = new HashMap<>();
}
/**
* Add a container if the number of AM containers is less than
* maxAMContainers.
*
* @param container the container to add
* @return true if success; false otherwise
*/
private boolean addContainer(RMContainer container, ApplicationId appId) {
if (container.isAMContainer()) {
numAMContainers++;
if (numAMContainers >= maxAMContainers) {
return false;
}
}
if (!containersByApp.containsKey(appId)) {
containersByApp.put(appId, new ArrayList<>());
}
containersByApp.get(appId).add(container);
return true;
}
private List<RMContainer> getAllContainers() {
List<RMContainer> allContainers = new ArrayList<>();
for (List<RMContainer> containersForApp : containersByApp.values()) {
allContainers.addAll(containersForApp);
}
return allContainers;
}
private Resource getResourcesToPreemptForApp(ApplicationId appId) {
Resource resourcesToPreempt = Resources.createResource(0, 0);
if (containersByApp.containsKey(appId)) {
for (RMContainer container : containersByApp.get(appId)) {
Resources.addTo(resourcesToPreempt, container.getAllocatedResource());
}
}
return resourcesToPreempt;
}
}
}
| PreemptableContainers |
java | quarkusio__quarkus | integration-tests/test-extension/extension/deployment/src/test/java/io/quarkus/commandmode/launch/MixWithAndWithoutArgsMainCommandModeTestCase.java | {
"start": 917,
"end": 1126
} | class ____ {
static void main(String[] args) {
System.out.println("Hello World");
}
void main() {
System.out.println("Hi World");
}
}
}
| HelloWorldMain |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/common/convert/StringToCharacterConverter.java | {
"start": 1003,
"end": 1498
} | class ____ implements StringConverter<Character> {
@Override
public Character convert(String source) {
int length = length(source);
if (length == 0) {
return null;
}
if (length > 1) {
throw new IllegalArgumentException("The source String is more than one character!");
}
return source.charAt(0);
}
@Override
public int getPriority() {
return NORMAL_PRIORITY + 8;
}
}
| StringToCharacterConverter |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/PatternMatchingInstanceofTest.java | {
"start": 15218,
"end": 15752
} | class ____ {
private final int x = 0;
private final int y = 1;
@Override
public boolean equals(Object o) {
if (!(o instanceof Test other) || other.x != this.x) {
return false;
}
return other.y == this.y;
}
}
""")
.doTest();
}
@Test
public void conditionalExpression() {
helper
.addInputLines(
"Test.java",
"""
| Test |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/hybrid/tiered/shuffle/TierFactoryInitializerTest.java | {
"start": 1952,
"end": 4947
} | class ____ {
private static Path tmpDir;
@BeforeAll
public static void before(@TempDir Path path) throws Exception {
tmpDir = TempDirUtils.newFolder(path, UUID.randomUUID().toString()).toPath();
}
@AfterAll
public static void after() throws IOException {
FileUtils.deleteDirectory(tmpDir.toFile());
}
@Test
void testInitEphemeralTiers() {
Configuration configuration = new Configuration();
List<TierFactory> tierFactories =
TierFactoryInitializer.initializeTierFactories(configuration);
assertThat(tierFactories).hasSize(2);
assertThat(tierFactories.get(0)).isInstanceOf(MemoryTierFactory.class);
assertThat(tierFactories.get(1)).isInstanceOf(DiskTierFactory.class);
}
@Test
void testInitEphemeralTiersWithRemoteTier() {
Configuration configuration = new Configuration();
configuration.set(
NettyShuffleEnvironmentOptions.NETWORK_HYBRID_SHUFFLE_REMOTE_STORAGE_BASE_PATH,
tmpDir.toString());
List<TierFactory> tierFactories =
TierFactoryInitializer.initializeTierFactories(configuration);
assertThat(tierFactories).hasSize(3);
assertThat(tierFactories.get(0)).isInstanceOf(MemoryTierFactory.class);
assertThat(tierFactories.get(1)).isInstanceOf(DiskTierFactory.class);
assertThat(tierFactories.get(2)).isInstanceOf(RemoteTierFactory.class);
}
@Test
void testInitDurableTiersWithExternalRemoteTier() {
Configuration configuration = new Configuration();
configuration.set(
NettyShuffleEnvironmentOptions
.NETWORK_HYBRID_SHUFFLE_EXTERNAL_REMOTE_TIER_FACTORY_CLASS_NAME,
ExternalRemoteTierFactory.class.getName());
List<TierFactory> tierFactories =
TierFactoryInitializer.initializeTierFactories(configuration);
assertThat(tierFactories).hasSize(1);
assertThat(tierFactories.get(0)).isInstanceOf(ExternalRemoteTierFactory.class);
}
@Test
void testInitDurableExternalRemoteTierWithHigherPriority() {
Configuration configuration = new Configuration();
configuration.set(
NettyShuffleEnvironmentOptions.NETWORK_HYBRID_SHUFFLE_REMOTE_STORAGE_BASE_PATH,
tmpDir.toString());
configuration.set(
NettyShuffleEnvironmentOptions
.NETWORK_HYBRID_SHUFFLE_EXTERNAL_REMOTE_TIER_FACTORY_CLASS_NAME,
ExternalRemoteTierFactory.class.getName());
List<TierFactory> tierFactories =
TierFactoryInitializer.initializeTierFactories(configuration);
assertThat(tierFactories).hasSize(1);
assertThat(tierFactories.get(0)).isInstanceOf(ExternalRemoteTierFactory.class);
}
/** Testing implementation for {@link TierFactory} to init an external remote tier. */
public static | TierFactoryInitializerTest |
java | elastic__elasticsearch | modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java | {
"start": 6583,
"end": 9222
} | class ____ {
final PercolateQuery percolateQuery;
final boolean singlePercolateQuery;
final int[] rootDocsBySlot;
PercolateContext(PercolateQuery pq, boolean singlePercolateQuery, IndexVersion indexVersionCreated) throws IOException {
this.percolateQuery = pq;
this.singlePercolateQuery = singlePercolateQuery;
IndexSearcher percolatorIndexSearcher = percolateQuery.getPercolatorIndexSearcher();
Query nonNestedFilter = percolatorIndexSearcher.rewrite(Queries.newNonNestedFilter(indexVersionCreated));
Weight weight = percolatorIndexSearcher.createWeight(nonNestedFilter, ScoreMode.COMPLETE_NO_SCORES, 1f);
Scorer s = weight.scorer(percolatorIndexSearcher.getIndexReader().leaves().get(0));
int memoryIndexMaxDoc = percolatorIndexSearcher.getIndexReader().maxDoc();
BitSet rootDocs = BitSet.of(s.iterator(), memoryIndexMaxDoc);
boolean hasNestedDocs = rootDocs.cardinality() != percolatorIndexSearcher.getIndexReader().numDocs();
if (hasNestedDocs) {
this.rootDocsBySlot = buildRootDocsSlots(rootDocs);
} else {
this.rootDocsBySlot = null;
}
}
String fieldName() {
return singlePercolateQuery ? FIELD_NAME_PREFIX : FIELD_NAME_PREFIX + "_" + percolateQuery.getName();
}
Query filterNestedDocs(Query in, IndexVersion indexVersionCreated) {
if (rootDocsBySlot != null) {
// Ensures that we filter out nested documents
return new BooleanQuery.Builder().add(in, BooleanClause.Occur.MUST)
.add(Queries.newNonNestedFilter(indexVersionCreated), BooleanClause.Occur.FILTER)
.build();
}
return in;
}
}
static IntStream convertTopDocsToSlots(TopDocs topDocs, int[] rootDocsBySlot) {
IntStream stream = Arrays.stream(topDocs.scoreDocs).mapToInt(scoreDoc -> scoreDoc.doc);
if (rootDocsBySlot != null) {
stream = stream.map(docId -> Arrays.binarySearch(rootDocsBySlot, docId));
}
return stream;
}
static int[] buildRootDocsSlots(BitSet rootDocs) {
int slot = 0;
int[] rootDocsBySlot = new int[rootDocs.cardinality()];
BitSetIterator iterator = new BitSetIterator(rootDocs, 0);
for (int rootDocId = iterator.nextDoc(); rootDocId != NO_MORE_DOCS; rootDocId = iterator.nextDoc()) {
rootDocsBySlot[slot++] = rootDocId;
}
return rootDocsBySlot;
}
}
| PercolateContext |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/jdk/JavaUtilCollectionsDeserializers.java | {
"start": 7752,
"end": 10866
} | class ____ implements Converter<Object,Object>
{
private final JavaType _inputType;
private final int _kind;
JavaUtilCollectionsConverter(int kind, JavaType inputType) {
_inputType = inputType;
_kind = kind;
}
@Override
public Object convert(DeserializationContext ctxt, Object value) {
return _convert(value);
}
@Override
public Object convert(SerializationContext ctxt, Object value) {
return _convert(value);
}
protected Object _convert(Object value) {
if (value == null) { // is this legal to get?
return null;
}
switch (_kind) {
case TYPE_SINGLETON_SET:
{
Set<?> set = (Set<?>) value;
_checkSingleton(set.size());
return Collections.singleton(set.iterator().next());
}
case TYPE_SINGLETON_LIST:
{
List<?> list = (List<?>) value;
_checkSingleton(list.size());
return Collections.singletonList(list.get(0));
}
case TYPE_SINGLETON_MAP:
{
Map<?,?> map = (Map<?,?>) value;
_checkSingleton(map.size());
Map.Entry<?,?> entry = map.entrySet().iterator().next();
return Collections.singletonMap(entry.getKey(), entry.getValue());
}
case TYPE_UNMODIFIABLE_SET:
return Collections.unmodifiableSet((Set<?>) value);
case TYPE_UNMODIFIABLE_LIST:
return Collections.unmodifiableList((List<?>) value);
case TYPE_UNMODIFIABLE_MAP:
return Collections.unmodifiableMap((Map<?,?>) value);
case TYPE_SYNC_SET:
return Collections.synchronizedSet((Set<?>) value);
case TYPE_SYNC_LIST:
return Collections.synchronizedList((List<?>) value);
case TYPE_SYNC_COLLECTION:
return Collections.synchronizedCollection((Collection<?>) value);
case TYPE_SYNC_MAP:
return Collections.synchronizedMap((Map<?,?>) value);
case TYPE_AS_LIST:
default:
// Here we do not actually care about impl type, just return List as-is:
return value;
}
}
@Override
public JavaType getInputType(TypeFactory typeFactory) {
return _inputType;
}
@Override
public JavaType getOutputType(TypeFactory typeFactory) {
// we don't actually care, so:
return _inputType;
}
private void _checkSingleton(int size) {
if (size != 1) {
// not the best error ever but... has to do
throw new IllegalArgumentException("Cannot deserialize Singleton container from "+size+" entries");
}
}
}
}
| JavaUtilCollectionsConverter |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/util/Loader.java | {
"start": 16561,
"end": 16673
} | class ____.
* @return the Class for the given name.
* @throws ClassNotFoundException if the specified | name |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/retry/Retryable.java | {
"start": 1076,
"end": 1507
} | interface ____<R extends @Nullable Object> {
/**
* Method to execute and retry if needed.
* @return the result of the operation
* @throws Throwable if an error occurs during the execution of the operation
*/
R execute() throws Throwable;
/**
* A unique, logical name for this retryable operation, used to distinguish
* between retries for different business operations.
* <p>Defaults to the fully-qualified | Retryable |
java | spring-projects__spring-boot | module/spring-boot-webclient/src/main/java/org/springframework/boot/webclient/autoconfigure/service/PropertiesWebClientHttpServiceGroupConfigurer.java | {
"start": 2117,
"end": 4241
} | class ____ implements WebClientHttpServiceGroupConfigurer {
private final HttpServiceClientProperties properties;
private final HttpClientSettingsPropertyMapper clientSettingsPropertyMapper;
private final ClientHttpConnectorBuilder<?> clientConnectorBuilder;
PropertiesWebClientHttpServiceGroupConfigurer(@Nullable ClassLoader classLoader,
HttpServiceClientProperties properties, @Nullable SslBundles sslBundles,
ObjectProvider<ClientHttpConnectorBuilder<?>> clientConnectorBuilder,
@Nullable HttpClientSettings httpClientSettings) {
this.properties = properties;
this.clientSettingsPropertyMapper = new HttpClientSettingsPropertyMapper(sslBundles, httpClientSettings);
this.clientConnectorBuilder = clientConnectorBuilder
.getIfAvailable(() -> ClientHttpConnectorBuilder.detect(classLoader));
}
@Override
public int getOrder() {
return Ordered.HIGHEST_PRECEDENCE;
}
@Override
public void configureGroups(Groups<WebClient.Builder> groups) {
groups.forEachClient(this::configureClient);
}
private void configureClient(HttpServiceGroup group, WebClient.Builder builder) {
HttpClientProperties clientProperties = this.properties.get(group.name());
HttpClientSettings clientSettings = this.clientSettingsPropertyMapper.map(clientProperties);
builder.clientConnector(this.clientConnectorBuilder.build(clientSettings));
if (clientProperties != null) {
PropertyMapper map = PropertyMapper.get();
map.from(clientProperties::getBaseUrl).whenHasText().to(builder::baseUrl);
map.from(clientProperties::getDefaultHeader).as(this::putAllHeaders).to(builder::defaultHeaders);
map.from(clientProperties::getApiversion)
.as(ApiversionProperties::getDefaultVersion)
.to(builder::defaultApiVersion);
map.from(clientProperties::getApiversion)
.as(ApiversionProperties::getInsert)
.as(PropertiesApiVersionInserter::get)
.to(builder::apiVersionInserter);
}
}
private Consumer<HttpHeaders> putAllHeaders(Map<String, List<String>> defaultHeaders) {
return (httpHeaders) -> httpHeaders.putAll(defaultHeaders);
}
}
| PropertiesWebClientHttpServiceGroupConfigurer |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/metamodel/spi/EntityRepresentationStrategy.java | {
"start": 500,
"end": 1201
} | interface ____ extends ManagedTypeRepresentationStrategy {
/**
* Create a delegate capable of instantiating instances of the represented type.
*/
EntityInstantiator getInstantiator();
/**
* Create the delegate capable of producing proxies for the given entity
*/
ProxyFactory getProxyFactory();
default boolean isBytecodeEnhanced() {
return false;
}
JavaType<?> getProxyJavaType();
/**
* The Java type descriptor for the type returned when the entity is loaded
*/
default JavaType<?> getLoadJavaType() {
return getMappedJavaType();
}
default void visitEntityNameResolvers(Consumer<EntityNameResolver> consumer) {
// by default do nothing
}
}
| EntityRepresentationStrategy |
java | alibaba__fastjson | src/test/java/com/alibaba/json/test/benchmark/encode/EishayEncodeOutputStream.java | {
"start": 540,
"end": 689
} | class ____ extends java.io.OutputStream {
@Override
public void write(int b) throws IOException {
}
}
}
| DummyOutputStream |
java | apache__flink | flink-table/flink-table-code-splitter/src/test/resources/declaration/code/TestRewriteInnerClass.java | {
"start": 183,
"end": 366
} | class ____ {
public void myFun() {
int local1;
local1 = 5;
long local2;
final String local3 = "BBBBB";
}
}
}
| InnerClass |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/parser/EqlBaseParser.java | {
"start": 16269,
"end": 18592
} | class ____ extends ParserRuleContext {
public TerminalNode WITH() {
return getToken(EqlBaseParser.WITH, 0);
}
public TerminalNode MAXSPAN() {
return getToken(EqlBaseParser.MAXSPAN, 0);
}
public TerminalNode ASGN() {
return getToken(EqlBaseParser.ASGN, 0);
}
public TimeUnitContext timeUnit() {
return getRuleContext(TimeUnitContext.class, 0);
}
public SequenceParamsContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override
public int getRuleIndex() {
return RULE_sequenceParams;
}
@Override
public void enterRule(ParseTreeListener listener) {
if (listener instanceof EqlBaseListener) ((EqlBaseListener) listener).enterSequenceParams(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if (listener instanceof EqlBaseListener) ((EqlBaseListener) listener).exitSequenceParams(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof EqlBaseVisitor) return ((EqlBaseVisitor<? extends T>) visitor).visitSequenceParams(this);
else return visitor.visitChildren(this);
}
}
public final SequenceParamsContext sequenceParams() throws RecognitionException {
SequenceParamsContext _localctx = new SequenceParamsContext(_ctx, getState());
enterRule(_localctx, 8, RULE_sequenceParams);
try {
enterOuterAlt(_localctx, 1);
{
setState(83);
match(WITH);
{
setState(84);
match(MAXSPAN);
setState(85);
match(ASGN);
setState(86);
timeUnit();
}
}
} catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
} finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static | SequenceParamsContext |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/bloom/BloomFilterCommonTester.java | {
"start": 1493,
"end": 3909
} | class ____<T extends Filter> {
private static final double LN2 = Math.log(2);
private static final double LN2_SQUARED = LN2 * LN2;
private final int hashType;
private final int numInsertions;
private final ImmutableList.Builder<T> builder = ImmutableList.builder();
private ImmutableSet<BloomFilterTestStrategy> filterTestStrateges;
private final PreAssertionHelper preAssertionHelper;
static int optimalNumOfBits(int n, double p) {
return (int) (-n * Math.log(p) / LN2_SQUARED);
}
public static <T extends Filter> BloomFilterCommonTester<T> of(int hashId,
int numInsertions) {
return new BloomFilterCommonTester<T>(hashId, numInsertions);
}
public BloomFilterCommonTester<T> withFilterInstance(T filter) {
builder.add(filter);
return this;
}
private BloomFilterCommonTester(int hashId, int numInsertions) {
this.hashType = hashId;
this.numInsertions = numInsertions;
this.preAssertionHelper = new PreAssertionHelper() {
@Override
public ImmutableSet<Integer> falsePositives(int hashId) {
switch (hashId) {
case Hash.JENKINS_HASH: {
// // false pos for odd and event under 1000
return ImmutableSet.of(99, 963);
}
case Hash.MURMUR_HASH: {
// false pos for odd and event under 1000
return ImmutableSet.of(769, 772, 810, 874);
}
default: {
// fail fast with unknown hash error !!!
assertFalse(true, "unknown hash error");
return ImmutableSet.of();
}
}
}
};
}
public BloomFilterCommonTester<T> withTestCases(
ImmutableSet<BloomFilterTestStrategy> filterTestStrateges) {
this.filterTestStrateges = ImmutableSet.copyOf(filterTestStrateges);
return this;
}
@SuppressWarnings("unchecked")
public void test() {
final ImmutableList<T> filtersList = builder.build();
final ImmutableSet<Integer> falsePositives = preAssertionHelper
.falsePositives(hashType);
for (T filter : filtersList) {
for (BloomFilterTestStrategy strategy : filterTestStrateges) {
strategy.getStrategy().assertWhat(filter, numInsertions, hashType, falsePositives);
// create fresh instance for next test iteration
filter = (T) getSymmetricFilter(filter.getClass(), numInsertions, hashType);
}
}
}
| BloomFilterCommonTester |
java | apache__maven | its/core-it-support/core-it-javaagent/src/main/java/org/apache/maven/coreits/javaagent/mng5669/Premain.java | {
"start": 1338,
"end": 2389
} | class ____ {
public static void premain(String agentArgs, Instrumentation inst) {
inst.addTransformer(new ClassFileTransformer() {
@Override
public byte[] transform(
ClassLoader loader,
String className,
Class<?> classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer)
throws IllegalClassFormatException {
if ("org/apache/maven/model/io/DefaultModelReader".equals(className)) {
ClassReader r = new ClassReader(classfileBuffer);
final ClassWriter w = new ClassWriter(Opcodes.ASM6);
ClassVisitor v = new DefaultModelReaderVisitor(Opcodes.ASM6, w);
r.accept(v, ClassReader.EXPAND_FRAMES);
return w.toByteArray();
} else {
return classfileBuffer;
}
}
});
}
private static | Premain |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenActionTests.java | {
"start": 661,
"end": 2783
} | class ____ extends ESTestCase {
public void testParserForUserAndRealm() throws Exception {
final String request = """
{"username": "user1","realm_name": "realm1"}""";
try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, request)) {
InvalidateTokenRequest invalidateTokenRequest = RestInvalidateTokenAction.PARSER.parse(parser, null);
assertEquals("user1", invalidateTokenRequest.getUserName());
assertEquals("realm1", invalidateTokenRequest.getRealmName());
assertNull(invalidateTokenRequest.getTokenString());
assertNull(invalidateTokenRequest.getTokenType());
}
}
public void testParserForToken() throws Exception {
final String request = """
{"refresh_token": "refresh_token_string"}""";
try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, request)) {
InvalidateTokenRequest invalidateTokenRequest = RestInvalidateTokenAction.PARSER.parse(parser, null);
assertEquals("refresh_token_string", invalidateTokenRequest.getTokenString());
assertEquals("refresh_token", invalidateTokenRequest.getTokenType().getValue());
assertNull(invalidateTokenRequest.getRealmName());
assertNull(invalidateTokenRequest.getUserName());
}
}
public void testParserForIncorrectInput() throws Exception {
final String request = """
{"refresh_token": "refresh_token_string","token": "access_token_string"}""";
try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, request)) {
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> RestInvalidateTokenAction.PARSER.parse(parser, null)
);
assertThat(e.getCause().getMessage(), containsString("only one of [token, refresh_token] may be sent per request"));
}
}
}
| RestInvalidateTokenActionTests |
java | quarkusio__quarkus | integration-tests/maven/src/test/resources-filtered/projects/test-selection/src/test/java/com/example/NotEnabledHardDisabled.java | {
"start": 212,
"end": 761
} | class ____ {
@Inject
MyBean myBean;
@Test
public void test() {
assertEquals("hello", myBean.hello());
}
@Test
public void executeAnyway() {
assertEquals("hello", myBean.hello());
}
@Test
public void executeAnywayAgain() {
assertEquals("hello", myBean.hello());
}
@Test
public void alwaysExecute() {
assertEquals("hello", myBean.hello());
}
@Test
public void neverExecute() {
assertEquals("hello", myBean.hello());
}
}
| NotEnabledHardDisabled |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/validation/DefaultMessageCodesResolver.java | {
"start": 3366,
"end": 5029
} | class ____ implements MessageCodesResolver, Serializable {
/**
* The separator that this implementation uses when resolving message codes.
*/
public static final String CODE_SEPARATOR = ".";
private static final MessageCodeFormatter DEFAULT_FORMATTER = Format.PREFIX_ERROR_CODE;
private String prefix = "";
private MessageCodeFormatter formatter = DEFAULT_FORMATTER;
/**
* Specify a prefix to be applied to any code built by this resolver.
* <p>Default is none. Specify, for example, "validation." to get
* error codes like "validation.typeMismatch.name".
*/
public void setPrefix(@Nullable String prefix) {
this.prefix = (prefix != null ? prefix : "");
}
/**
* Return the prefix to be applied to any code built by this resolver.
* <p>Returns an empty String in case of no prefix.
*/
protected String getPrefix() {
return this.prefix;
}
/**
* Specify the format for message codes built by this resolver.
* <p>The default is {@link Format#PREFIX_ERROR_CODE}.
* @since 3.2
* @see Format
*/
public void setMessageCodeFormatter(@Nullable MessageCodeFormatter formatter) {
this.formatter = (formatter != null ? formatter : DEFAULT_FORMATTER);
}
@Override
public String[] resolveMessageCodes(String errorCode, String objectName) {
return resolveMessageCodes(errorCode, objectName, "", null);
}
/**
* Build the code list for the given code and field: an
* object/field-specific code, a field-specific code, a plain error code.
* <p>Arrays, Lists and Maps are resolved both for specific elements and
* the whole collection.
* <p>See the {@link DefaultMessageCodesResolver | DefaultMessageCodesResolver |
java | apache__camel | components/camel-aws/camel-aws2-ecs/src/test/java/org/apache/camel/component/aws2/ecs/ECS2ProducerHealthCheckProfileCredsTest.java | {
"start": 1509,
"end": 4129
} | class ____ extends CamelTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(ECS2ProducerHealthCheckProfileCredsTest.class);
CamelContext context;
@Override
protected CamelContext createCamelContext() throws Exception {
context = super.createCamelContext();
context.getPropertiesComponent().setLocation("ref:prop");
// install health check manually (yes a bit cumbersome)
HealthCheckRegistry registry = new DefaultHealthCheckRegistry();
registry.setCamelContext(context);
Object hc = registry.resolveById("context");
registry.register(hc);
hc = registry.resolveById("routes");
registry.register(hc);
hc = registry.resolveById("consumers");
registry.register(hc);
HealthCheckRepository hcr = (HealthCheckRepository) registry.resolveById("producers");
hcr.setEnabled(true);
registry.register(hcr);
context.getCamelContextExtension().addContextPlugin(HealthCheckRegistry.class, registry);
return context;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:listClusters")
.to("aws2-ecs://test?operation=listClusters®ion=l&useDefaultCredentialsProvider=true");
}
};
}
@Test
public void testConnectivity() {
Collection<HealthCheck.Result> res = HealthCheckHelper.invokeLiveness(context);
boolean up = res.stream().allMatch(r -> r.getState().equals(HealthCheck.State.UP));
Assertions.assertTrue(up, "liveness check");
// health-check readiness should be down
await().atMost(20, TimeUnit.SECONDS).untilAsserted(() -> {
Collection<HealthCheck.Result> res2 = HealthCheckHelper.invokeReadiness(context);
boolean down = res2.stream().allMatch(r -> r.getState().equals(HealthCheck.State.DOWN));
boolean containsAws2EcsHealthCheck = res2.stream()
.anyMatch(result -> result.getCheck().getId().startsWith("producer:aws2-ecs"));
boolean hasRegionMessage = res2.stream()
.anyMatch(r -> r.getMessage().stream().anyMatch(msg -> msg.contains("region")));
Assertions.assertTrue(down, "liveness check");
Assertions.assertTrue(containsAws2EcsHealthCheck, "aws2-ecs check");
Assertions.assertTrue(hasRegionMessage, "aws2-ecs check error message");
});
}
}
| ECS2ProducerHealthCheckProfileCredsTest |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToInt3Evaluator.java | {
"start": 1085,
"end": 4127
} | class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(RoundToInt3Evaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator field;
private final int p0;
private final int p1;
private final int p2;
private final DriverContext driverContext;
private Warnings warnings;
public RoundToInt3Evaluator(Source source, EvalOperator.ExpressionEvaluator field, int p0, int p1,
int p2, DriverContext driverContext) {
this.source = source;
this.field = field;
this.p0 = p0;
this.p1 = p1;
this.p2 = p2;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (IntBlock fieldBlock = (IntBlock) field.eval(page)) {
IntVector fieldVector = fieldBlock.asVector();
if (fieldVector == null) {
return eval(page.getPositionCount(), fieldBlock);
}
return eval(page.getPositionCount(), fieldVector).asBlock();
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += field.baseRamBytesUsed();
return baseRamBytesUsed;
}
public IntBlock eval(int positionCount, IntBlock fieldBlock) {
try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
switch (fieldBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
int field = fieldBlock.getInt(fieldBlock.getFirstValueIndex(p));
result.appendInt(RoundToInt.process(field, this.p0, this.p1, this.p2));
}
return result.build();
}
}
public IntVector eval(int positionCount, IntVector fieldVector) {
try(IntVector.FixedBuilder result = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
int field = fieldVector.getInt(p);
result.appendInt(p, RoundToInt.process(field, this.p0, this.p1, this.p2));
}
return result.build();
}
}
@Override
public String toString() {
return "RoundToInt3Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(field);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static | RoundToInt3Evaluator |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/cglib/beans/BeanCopier.java | {
"start": 1577,
"end": 2188
} | class ____
{
private static final BeanCopierKey KEY_FACTORY =
(BeanCopierKey)KeyFactory.create(BeanCopierKey.class);
private static final Type CONVERTER =
TypeUtils.parseType("org.springframework.cglib.core.Converter");
private static final Type BEAN_COPIER =
TypeUtils.parseType("org.springframework.cglib.beans.BeanCopier");
private static final Signature COPY =
new Signature("copy", Type.VOID_TYPE, new Type[]{ Constants.TYPE_OBJECT, Constants.TYPE_OBJECT, CONVERTER });
private static final Signature CONVERT =
TypeUtils.parseSignature("Object convert(Object, Class, Object)");
| BeanCopier |
java | spring-projects__spring-boot | module/spring-boot-reactor-netty/src/test/java/org/springframework/boot/reactor/netty/autoconfigure/NettyServerPropertiesTests.java | {
"start": 1296,
"end": 2771
} | class ____ {
private final NettyServerProperties properties = new NettyServerProperties();
@Test
void testCustomizeNettyIdleTimeout() {
bind("server.netty.idle-timeout", "10s");
assertThat(this.properties.getIdleTimeout()).isEqualTo(Duration.ofSeconds(10));
}
@Test
void testCustomizeNettyMaxKeepAliveRequests() {
bind("server.netty.max-keep-alive-requests", "100");
assertThat(this.properties.getMaxKeepAliveRequests()).isEqualTo(100);
}
@Test
void nettyMaxInitialLineLengthMatchesHttpDecoderSpecDefault() {
assertThat(this.properties.getMaxInitialLineLength().toBytes())
.isEqualTo(HttpDecoderSpec.DEFAULT_MAX_INITIAL_LINE_LENGTH);
}
@Test
void nettyValidateHeadersMatchesHttpDecoderSpecDefault() {
assertThat(this.properties.isValidateHeaders()).isTrue();
}
@Test
void nettyH2cMaxContentLengthMatchesHttpDecoderSpecDefault() {
assertThat(this.properties.getH2cMaxContentLength().toBytes()).isZero();
}
@Test
void nettyInitialBufferSizeMatchesHttpDecoderSpecDefault() {
assertThat(this.properties.getInitialBufferSize().toBytes())
.isEqualTo(HttpDecoderSpec.DEFAULT_INITIAL_BUFFER_SIZE);
}
private void bind(String name, String value) {
bind(Collections.singletonMap(name, value));
}
private void bind(Map<String, String> map) {
ConfigurationPropertySource source = new MapConfigurationPropertySource(map);
new Binder(source).bind("server.netty", Bindable.ofInstance(this.properties));
}
}
| NettyServerPropertiesTests |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/ImportSelector.java | {
"start": 2737,
"end": 3091
} | class ____, or an empty array if none
*/
String[] selectImports(AnnotationMetadata importingClassMetadata);
/**
* Return a predicate for excluding classes from the import candidates, to be
* transitively applied to all classes found through this selector's imports.
* <p>If this predicate returns {@code true} for a given fully-qualified
* | names |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/identifier/composite/CompositeInheritanceWorkingTest.java | {
"start": 1066,
"end": 1536
} | class ____ called FooEntity and this works for some reason
CompositeInheritanceWorkingTest.Test2Entity.class,
}
)
@ServiceRegistry(
settings = {
// For your own convenience to see generated queries:
@Setting(name = AvailableSettings.SHOW_SQL, value = "true"),
@Setting(name = AvailableSettings.FORMAT_SQL, value = "true"),
// @Setting( name = AvailableSettings.GENERATE_STATISTICS, value = "true" ),
}
)
@SessionFactory
@Jira("HHH-19076")
public | is |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/runtime/BatchShuffleITCaseBase.java | {
"start": 6033,
"end": 6620
} | class ____ implements ParallelSourceFunction<String> {
private volatile boolean isRunning = true;
private int numRecordsToSend;
StringSource(int numRecordsToSend) {
this.numRecordsToSend = numRecordsToSend;
}
@Override
public void run(SourceContext<String> ctx) throws Exception {
while (isRunning && numRecordsToSend-- > 0) {
ctx.collect(RECORD);
}
}
@Override
public void cancel() {
isRunning = false;
}
}
private static | StringSource |
java | elastic__elasticsearch | client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java | {
"start": 4558,
"end": 5128
} | class ____ implements HttpHandler {
private final CountDownLatch cancelHandlerLatch = new CountDownLatch(1);
void cancelDone() {
cancelHandlerLatch.countDown();
}
@Override
public void handle(HttpExchange exchange) throws IOException {
try {
cancelHandlerLatch.await();
} catch (InterruptedException ignore) {} finally {
exchange.sendResponseHeaders(200, 0);
exchange.close();
}
}
}
private static | WaitForCancelHandler |
java | square__moshi | moshi/src/test/java/com/squareup/moshi/internal/ClassJsonAdapterTest.java | {
"start": 3942,
"end": 4429
} | class ____ {
static int a = 11;
int b;
}
@Test
public void staticFieldsOmitted() throws Exception {
StaticFields value = new StaticFields();
value.b = 12;
String toJson = toJson(StaticFields.class, value);
assertThat(toJson).isEqualTo("{\"b\":12}");
StaticFields fromJson = fromJson(StaticFields.class, "{\"a\":13,\"b\":12}");
assertThat(StaticFields.a).isEqualTo(11); // Unchanged.
assertThat(fromJson.b).isEqualTo(12);
}
static | StaticFields |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/framework/autoproxy/AutoProxyCreatorTests.java | {
"start": 19984,
"end": 20384
} | interface
____ BeanFactoryAware, ApplicationContextAware, InitializingBean, DisposableBean, Serializable {
@Override
public void setBeanFactory(BeanFactory beanFactory) {
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) {
}
@Override
public void afterPropertiesSet() {
}
@Override
public void destroy() {
}
}
public static | implements |
java | quarkusio__quarkus | extensions/infinispan-client/deployment/src/test/java/io/quarkus/infinispan/test/RunDevServiceWithoutClientBeanCreationTest.java | {
"start": 415,
"end": 1115
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withConfigurationResource("disable-default-client-devservices.properties");
@Test
public void remoteCacheManagerDefaultBeansAccessible() {
assertThat(Arc.container().instance(RemoteCacheManager.class, Default.Literal.INSTANCE).get()).isNull();
assertThat(Arc.container().instance(CounterManager.class, Default.Literal.INSTANCE).get()).isNull();
assertThat(Arc.container().listAll(RemoteCacheManager.class).size()).isZero();
assertThat(Arc.container().listAll(CounterManager.class).size()).isZero();
}
}
| RunDevServiceWithoutClientBeanCreationTest |
java | elastic__elasticsearch | test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/ContainsAssertionTests.java | {
"start": 622,
"end": 1042
} | class ____ extends ESTestCase {
public void testStringContains() {
XContentLocation location = new XContentLocation(0, 0);
ContainsAssertion containsAssertion = new ContainsAssertion(location, "field", "part");
containsAssertion.doAssert("partial match", "l m");
expectThrows(AssertionError.class, () -> containsAssertion.doAssert("partial match", "foo"));
}
}
| ContainsAssertionTests |
java | grpc__grpc-java | servlet/src/main/java/io/grpc/servlet/ServletServerBuilder.java | {
"start": 2593,
"end": 7336
} | class ____ extends ForwardingServerBuilder<ServletServerBuilder> {
List<? extends ServerStreamTracer.Factory> streamTracerFactories;
private Function<HttpServletRequest, String> methodNameResolver =
ServletAdapter.DEFAULT_METHOD_NAME_RESOLVER;
int maxInboundMessageSize = DEFAULT_MAX_MESSAGE_SIZE;
private final ServerImplBuilder serverImplBuilder;
private ScheduledExecutorService scheduler;
private boolean internalCaller;
private boolean usingCustomScheduler;
private InternalServerImpl internalServer;
public ServletServerBuilder() {
serverImplBuilder = new ServerImplBuilder(this::buildTransportServers);
}
/**
* Builds a gRPC server that can run as a servlet.
*
* <p>The returned server will not be started or bound to a port.
*
* <p>Users should not call this method directly. Instead users should call
* {@link #buildServletAdapter()} which internally will call {@code build()} and {@code start()}
* appropriately.
*
* @throws IllegalStateException if this method is called by users directly
*/
@Override
public Server build() {
checkState(internalCaller, "build() method should not be called directly by an application");
return super.build();
}
/**
* Creates a {@link ServletAdapter}.
*/
public ServletAdapter buildServletAdapter() {
return new ServletAdapter(buildAndStart(), streamTracerFactories, methodNameResolver,
maxInboundMessageSize);
}
/**
* Creates a {@link GrpcServlet}.
*/
public GrpcServlet buildServlet() {
return new GrpcServlet(buildServletAdapter());
}
private ServerTransportListener buildAndStart() {
Server server;
try {
internalCaller = true;
server = build().start();
} catch (IOException e) {
// actually this should never happen
throw new RuntimeException(e);
} finally {
internalCaller = false;
}
if (!usingCustomScheduler) {
scheduler = SharedResourceHolder.get(GrpcUtil.TIMER_SERVICE);
}
// Create only one "transport" for all requests because it has no knowledge of which request is
// associated with which client socket. This "transport" does not do socket connection, the
// container does.
ServerTransportImpl serverTransport = new ServerTransportImpl(scheduler);
ServerTransportListener delegate =
internalServer.serverListener.transportCreated(serverTransport);
return new ServerTransportListener() {
@Override
public void streamCreated(ServerStream stream, String method, Metadata headers) {
delegate.streamCreated(stream, method, headers);
}
@Override
public Attributes transportReady(Attributes attributes) {
return delegate.transportReady(attributes);
}
@Override
public void transportTerminated() {
server.shutdown();
delegate.transportTerminated();
if (!usingCustomScheduler) {
SharedResourceHolder.release(GrpcUtil.TIMER_SERVICE, scheduler);
}
}
};
}
@VisibleForTesting
InternalServer buildTransportServers(
List<? extends ServerStreamTracer.Factory> streamTracerFactories) {
checkNotNull(streamTracerFactories, "streamTracerFactories");
this.streamTracerFactories = streamTracerFactories;
internalServer = new InternalServerImpl();
return internalServer;
}
@Internal
@Override
protected ServerBuilder<?> delegate() {
return serverImplBuilder;
}
/**
* Throws {@code UnsupportedOperationException}. TLS should be configured by the servlet
* container.
*/
@Override
public ServletServerBuilder useTransportSecurity(File certChain, File privateKey) {
throw new UnsupportedOperationException("TLS should be configured by the servlet container");
}
/**
* Specifies how to determine gRPC method name from servlet request.
*
* <p>The default strategy is using {@link HttpServletRequest#getRequestURI()} without the leading
* slash.</p>
*/
public ServletServerBuilder methodNameResolver(
Function<HttpServletRequest, String> methodResolver) {
this.methodNameResolver = checkNotNull(methodResolver);
return this;
}
@Override
public ServletServerBuilder maxInboundMessageSize(int bytes) {
checkArgument(bytes >= 0, "bytes must be >= 0");
maxInboundMessageSize = bytes;
return this;
}
/**
* Provides a custom scheduled executor service to the server builder.
*
* @return this
*/
public ServletServerBuilder scheduledExecutorService(ScheduledExecutorService scheduler) {
this.scheduler = checkNotNull(scheduler, "scheduler");
usingCustomScheduler = true;
return this;
}
private static final | ServletServerBuilder |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/Contract.java | {
"start": 2203,
"end": 4082
} | class ____ represents output data type
*/
public void setOutputType(Class<?> clazz) {
this.outputType = new DataType(clazz);
this.contractString = null;
}
public boolean isValidateInput() {
return validateInput;
}
/**
* Whether to validate the input
*/
public void setValidateInput(boolean validate) {
this.validateInput = validate;
}
public boolean isValidateOutput() {
return validateOutput;
}
/**
* Whether to validate the output
*/
public void setValidateOutput(boolean validate) {
this.validateOutput = validate;
}
@Override
public String toString() {
if (contractString == null) {
this.contractString = "DataType[input=" + this.inputType + ", output=" + this.outputType + "]";
}
return contractString;
}
public boolean isEmpty() {
return inputType == null && outputType == null;
}
@Override
public boolean equals(Object target) {
if (!(target instanceof Contract)) {
return false;
}
Contract targetContract = (Contract) target;
if (getInputType() != null || targetContract.getInputType() != null) {
if (getInputType() == null || targetContract.getInputType() == null
|| !getInputType().equals(targetContract.getInputType())) {
return false;
}
}
if (getOutputType() != null || targetContract.getOutputType() != null) {
if (getOutputType() == null || targetContract.getOutputType() == null
|| !getOutputType().equals(targetContract.getOutputType())) {
return false;
}
}
return true;
}
@Override
public int hashCode() {
return toString().hashCode();
}
}
| which |
java | quarkusio__quarkus | extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/enhancer/HibernateEntityEnhancerMissingEmbeddableAnnotationTest.java | {
"start": 4503,
"end": 4831
} | class ____ extends EmbeddableWithAnnotation {
private Integer integer;
public Integer getInteger() {
return integer;
}
public void setInteger(Integer integer) {
this.integer = integer;
}
}
}
}
| EmbeddableWithAnnotationExtended |
java | quarkusio__quarkus | extensions/panache/mongodb-panache/deployment/src/test/java/io/quarkus/mongodb/panache/bug10812/Bug10812BookNotAnnotatedReactiveEntity.java | {
"start": 180,
"end": 512
} | class ____ extends ReactivePanacheMongoEntity {
@BsonProperty("bookTitle")
private String title;
public String getTitle() {
return title;
}
public Bug10812BookNotAnnotatedReactiveEntity setTitle(String title) {
this.title = title;
return this;
}
}
| Bug10812BookNotAnnotatedReactiveEntity |
java | spring-projects__spring-security | access/src/main/java/org/springframework/security/access/expression/method/PostInvocationExpressionAttribute.java | {
"start": 1133,
"end": 2040
} | class ____ extends AbstractExpressionBasedMethodConfigAttribute
implements PostInvocationAttribute {
PostInvocationExpressionAttribute(String filterExpression, String authorizeExpression) throws ParseException {
super(filterExpression, authorizeExpression);
}
PostInvocationExpressionAttribute(@Nullable Expression filterExpression, @Nullable Expression authorizeExpression)
throws ParseException {
super(filterExpression, authorizeExpression);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
Expression authorize = getAuthorizeExpression();
Expression filter = getFilterExpression();
sb.append("[authorize: '").append((authorize != null) ? authorize.getExpressionString() : "null");
sb.append("', filter: '").append((filter != null) ? filter.getExpressionString() : "null").append("']");
return sb.toString();
}
}
| PostInvocationExpressionAttribute |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/dev/RuntimeUpdatesProcessor.java | {
"start": 40966,
"end": 62846
} | class
____(classFilePath, timestampSet);
classScanResult.addDeletedClass(moduleClassesPath, classFilePath);
}
}
} else if (classFileWasAdded(classFilePath, isInitialRun, timestampSet)) {
classScanResult.addAddedClass(moduleClassesPath, classFilePath);
} else if (classFileWasRecentModified(classFilePath, isInitialRun, timestampSet)) {
classScanResult.addChangedClass(moduleClassesPath, classFilePath);
}
}
}
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private Path retrieveSourceFilePathForClassFile(Path classFilePath, List<Path> moduleChangedSourceFiles,
DevModeContext.ModuleInfo module, Function<DevModeContext.ModuleInfo, DevModeContext.CompilationUnit> cuf,
TimestampSet timestampSet, boolean forceRefresh) {
Path sourceFilePath = timestampSet.classFilePathToSourceFilePath.get(classFilePath);
if (sourceFilePath == null || moduleChangedSourceFiles.contains(sourceFilePath) || forceRefresh) {
sourceFilePath = compiler.findSourcePath(classFilePath, cuf.apply(module).getSourcePaths(),
cuf.apply(module).getClassesPath());
}
return sourceFilePath;
}
private void cleanUpClassFile(Path classFilePath, TimestampSet timestampSet) throws IOException {
Files.deleteIfExists(classFilePath);
timestampSet.classFileChangeTimeStamps.remove(classFilePath);
timestampSet.classFilePathToSourceFilePath.remove(classFilePath);
}
private Optional<String> matchingHandledExtension(Path p) {
return compiler.allHandledExtensions().stream().filter(e -> p.toString().endsWith(e)).findFirst();
}
private String getFileExtension(File file) {
String name = file.getName();
int lastIndexOf = name.lastIndexOf('.');
if (lastIndexOf == -1) {
return ""; // empty extension
}
return name.substring(lastIndexOf);
}
Set<String> checkForFileChange() {
return checkForFileChange(DevModeContext.ModuleInfo::getMain, main);
}
/**
* Returns the set of modified files.
* <p>
* The returned set may contain:
* <ul>
* <li>an OS-specific absolute path for a HotDeploymentWatchedFileBuildItem that matches an absolute path; e.g.
* {@code /some/complex/unix/path/to/file}</li>
* <li>an OS-agnostic relative path for a HotDeploymentWatchedFileBuildItem that matches a relative path; e.g.
* {@code templates/foo.html}</li>
* <li>an OS-agnostic relative path for a HotDeploymentWatchedFileBuildItem that matches a glob pattern,</li>
* <li>an OS-agnostic relative path for a new file added to a resource root.</li>
* </ul>
*
* @param cuf
* @param timestampSet
* @return the set of modified files
*/
Set<String> checkForFileChange(Function<DevModeContext.ModuleInfo, DevModeContext.CompilationUnit> cuf,
TimestampSet timestampSet) {
Set<String> ret = new HashSet<>();
for (DevModeContext.ModuleInfo module : context.getAllModules()) {
DevModeContext.CompilationUnit compilationUnit = cuf.apply(module);
if (compilationUnit == null) {
continue;
}
final Set<Path> moduleResources = correspondingResources.computeIfAbsent(compilationUnit,
m -> Collections.newSetFromMap(new ConcurrentHashMap<>()));
boolean doCopy = true;
PathCollection rootPaths = compilationUnit.getResourcePaths();
String outputPath = compilationUnit.getResourcesOutputPath();
if (rootPaths.isEmpty()) {
String rootPath = compilationUnit.getClassesPath();
if (rootPath != null) {
rootPaths = PathList.of(Paths.get(rootPath));
}
outputPath = rootPath;
doCopy = false;
}
if (rootPaths.isEmpty() || outputPath == null) {
continue;
}
Path outputDir = Paths.get(outputPath);
final List<Path> roots = rootPaths.stream()
.filter(Files::exists)
.filter(Files::isReadable)
.collect(Collectors.toList());
//copy all modified non-hot deployment files over
if (doCopy) {
final Set<Path> seen = new HashSet<>(moduleResources);
try {
for (Path root : roots) {
try (final Stream<Path> walk = Files.walk(root)) {
walk.forEach(path -> {
try {
Path relative = root.relativize(path);
Path target = outputDir.resolve(relative);
seen.remove(target);
if (!timestampSet.watchedPaths.containsKey(path)) {
moduleResources.add(target);
long current = Files.getLastModifiedTime(path).toMillis();
if (!Files.exists(target) || Files.getLastModifiedTime(target).toMillis() < current) {
if (Files.isDirectory(path)) {
Files.createDirectories(target);
} else {
Files.createDirectories(target.getParent());
// A new file is added to a resource root
// We need to use the OS-agnostic path to match the HotDeploymentWatchedFileBuildItem
ret.add(toOSAgnosticPathStr(relative.toString()));
Files.copy(path, target, StandardCopyOption.REPLACE_EXISTING,
StandardCopyOption.COPY_ATTRIBUTES);
if (copyResourceNotification != null) {
copyResourceNotification.accept(module, relative.toString());
}
}
}
}
} catch (Exception e) {
log.error("Failed to copy resources", e);
}
});
}
}
for (Path i : seen) {
moduleResources.remove(i);
if (!Files.isDirectory(i)) {
try {
Files.delete(i);
} catch (IOException e) {
log.error("Failed to delete resources", e);
}
}
}
} catch (IOException e) {
log.error("Unable to walk through the directory", e);
}
}
for (WatchedPath watchedPath : timestampSet.watchedPaths.values()) {
boolean isAbsolute = watchedPath.isAbsolute();
if (!isAbsolute && roots.stream().noneMatch(watchedPath.filePath::startsWith)) {
// The watched path does not come from the current compilation unit
continue;
}
boolean pathCurrentlyExisting = false;
boolean pathPreviouslyExisting = false;
if (Files.exists(watchedPath.filePath)) {
pathCurrentlyExisting = true;
try {
long current = Files.getLastModifiedTime(watchedPath.filePath).toMillis();
long last = watchedPath.lastModified;
if (current > last) {
// Use either the absolute path or the OS-agnostic path to match the HotDeploymentWatchedFileBuildItem
ret.add(isAbsolute ? watchedPath.filePath.toString() : watchedPath.getOSAgnosticMatchPath());
//a write can be a 'truncate' + 'write'
//if the file is empty we may be seeing the middle of a write
if (Files.size(watchedPath.filePath) == 0) {
try {
Thread.sleep(200);
} catch (InterruptedException e) {
//ignore
}
}
//re-read, as we may have read the original TS if the middle of
//a truncate+write, even if the write had completed by the time
//we read the size
current = Files.getLastModifiedTime(watchedPath.filePath).toMillis();
log.infof("File change detected: %s", watchedPath.filePath);
if (!isAbsolute && doCopy && !Files.isDirectory(watchedPath.filePath)) {
Path target = outputDir.resolve(watchedPath.matchPath);
byte[] data = Files.readAllBytes(watchedPath.filePath);
try (FileOutputStream out = new FileOutputStream(target.toFile())) {
out.write(data);
}
}
watchedPath.lastModified = current;
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
} else {
long prevValue = watchedPath.lastModified;
watchedPath.lastModified = 0L;
pathPreviouslyExisting = prevValue > 0;
}
if (!pathCurrentlyExisting) {
if (pathPreviouslyExisting) {
// Use either the absolute path or the OS-agnostic path to match the HotDeploymentWatchedFileBuildItem
ret.add(isAbsolute ? watchedPath.filePath.toString() : watchedPath.getOSAgnosticMatchPath());
}
if (!isAbsolute) {
Path target = outputDir.resolve(watchedPath.matchPath);
try {
FileUtil.deleteIfExists(target);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
}
}
return ret;
}
private boolean sourceFileWasRecentModified(final Path sourcePath, boolean ignoreFirstScanChanges, boolean firstScan) {
return checkIfFileModified(sourcePath, sourceFileTimestamps, ignoreFirstScanChanges, firstScan);
}
private boolean classFileWasRecentModified(final Path classFilePath, boolean ignoreFirstScanChanges,
TimestampSet timestampSet) {
return checkIfFileModified(classFilePath, timestampSet.classFileChangeTimeStamps, ignoreFirstScanChanges, true);
}
private boolean classFileWasAdded(final Path classFilePath, boolean ignoreFirstScanChanges, TimestampSet timestampSet) {
final Long lastRecordedChange = timestampSet.classFileChangeTimeStamps.get(classFilePath);
if (lastRecordedChange == null) {
try {
timestampSet.classFileChangeTimeStamps.put(classFilePath, Files.getLastModifiedTime(classFilePath).toMillis());
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
return lastRecordedChange == null && !ignoreFirstScanChanges;
}
private boolean checkIfFileModified(Path path, Map<Path, Long> pathModificationTimes, boolean ignoreFirstScanChanges,
boolean updateTimestamp) {
try {
final long lastModificationTime = Files.getLastModifiedTime(path).toMillis();
final Long lastRecordedChange = pathModificationTimes.get(path);
if (lastRecordedChange == null) {
if (updateTimestamp) {
pathModificationTimes.put(path, lastModificationTime);
}
return !ignoreFirstScanChanges;
}
if (lastRecordedChange != lastModificationTime) {
if (updateTimestamp) {
pathModificationTimes.put(path, lastModificationTime);
}
return true;
}
return false;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public RuntimeUpdatesProcessor setDisableInstrumentationForClassPredicate(
Predicate<ClassInfo> disableInstrumentationForClassPredicate) {
this.disableInstrumentationForClassPredicate = disableInstrumentationForClassPredicate;
return this;
}
public RuntimeUpdatesProcessor setDisableInstrumentationForIndexPredicate(
Predicate<Index> disableInstrumentationForIndexPredicate) {
this.disableInstrumentationForIndexPredicate = disableInstrumentationForIndexPredicate;
return this;
}
public RuntimeUpdatesProcessor setWatchedFilePaths(Map<String, Boolean> watchedFilePaths,
List<Entry<Predicate<String>, Boolean>> watchedFilePredicates, boolean isTest) {
if (isTest) {
setWatchedFilePathsInternal(watchedFilePaths, test,
s -> s.getTest().isPresent() ? asList(s.getTest().get(), s.getMain()) : singletonList(s.getMain()),
watchedFilePredicates);
} else {
setWatchedFilePathsInternal(watchedFilePaths, main, s -> singletonList(s.getMain()), watchedFilePredicates);
}
return this;
}
private RuntimeUpdatesProcessor setWatchedFilePathsInternal(Map<String, Boolean> watchedFilePaths,
TimestampSet timestamps, Function<DevModeContext.ModuleInfo, List<DevModeContext.CompilationUnit>> cuf,
List<Entry<Predicate<String>, Boolean>> watchedFilePredicates) {
timestamps.watchedFilePaths = watchedFilePaths;
timestamps.watchedFilePredicates = watchedFilePredicates;
for (DevModeContext.ModuleInfo module : context.getAllModules()) {
List<DevModeContext.CompilationUnit> compilationUnits = cuf.apply(module);
for (DevModeContext.CompilationUnit unit : compilationUnits) {
PathCollection rootPaths = unit.getResourcePaths();
if (rootPaths.isEmpty()) {
String rootPath = unit.getClassesPath();
if (rootPath == null) {
continue;
}
rootPaths = PathList.of(Path.of(rootPath));
}
final List<Path> roots = rootPaths.stream()
.filter(Files::exists)
.filter(Files::isReadable)
.collect(Collectors.toList());
for (Path root : roots) {
Set<String> watchedRootPaths = new HashSet<>();
// First find all matching paths from all roots
try (final Stream<Path> walk = Files.walk(root)) {
walk.forEach(path -> {
if (path.equals(root)
// Never watch directories
|| Files.isDirectory(path)) {
return;
}
// Use the relative path to match the watched file
// For example /some/more/complex/path/src/main/resources/foo/bar.txt -> foo/bar.txt
Path relativePath = root.relativize(path);
// We need to match the OS-agnostic path
String relativePathStr = toOSAgnosticPathStr(relativePath.toString());
Boolean restart = watchedFilePaths.get(relativePathStr);
if (restart == null) {
restart = watchedFilePredicates.stream().filter(p -> p.getKey().test(relativePathStr))
.map(Entry::getValue).findFirst().orElse(null);
}
if (restart != null) {
log.debugf("Watch %s from: %s", relativePath, root);
watchedRootPaths.add(relativePathStr);
putLastModifiedTime(path, relativePath, restart, timestamps);
}
});
} catch (IOException e) {
throw new UncheckedIOException(e);
}
// Then process glob patterns
for (Entry<String, Boolean> e : watchedFilePaths.entrySet()) {
String watchedFilePath = e.getKey();
Path path = Paths.get(sanitizedPattern(watchedFilePath));
if (!path.isAbsolute() && !watchedRootPaths.contains(e.getKey())
&& maybeGlobPattern(watchedFilePath)) {
try {
final PathMatcher matcher = FileSystems.getDefault().getPathMatcher("glob:" + watchedFilePath);
Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
Path relativePath = root.relativize(file);
if (matcher.matches(relativePath)) {
log.debugf("Glob pattern [%s] matched %s from %s", watchedFilePath, relativePath,
root);
WatchedPath extra = new WatchedPath(file, relativePath, e.getValue(),
attrs.lastModifiedTime().toMillis());
timestamps.watchedPaths.put(extra.filePath, extra);
}
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult visitFileFailed(Path file, IOException exc) {
return FileVisitResult.CONTINUE;
}
});
} catch (IOException ex) {
throw new UncheckedIOException(ex);
}
}
}
}
}
}
// Finally process watched absolute paths
for (Entry<String, Boolean> e : watchedFilePaths.entrySet()) {
String watchedFilePath = e.getKey();
Path path = Paths.get(sanitizedPattern(watchedFilePath));
if (path.isAbsolute()) {
path = Paths.get(watchedFilePath);
log.debugf("Watch %s", path);
if (Files.exists(path)) {
putLastModifiedTime(path, path, e.getValue(), timestamps);
} else {
// The watched file does not exist yet but we still need to keep track of this path
timestamps.watchedPaths.put(path, new WatchedPath(path, path, e.getValue(), -1));
}
}
}
log.debugf("Watched paths: %s", timestamps.watchedPaths.values());
return this;
}
private String sanitizedPattern(String pattern) {
return pattern.replaceAll("[*?]", "");
}
private boolean maybeGlobPattern(String path) {
return path.contains("*") || path.contains("?");
}
private void putLastModifiedTime(Path path, Path relativePath, boolean restart, TimestampSet timestamps) {
try {
FileTime lastModifiedTime = Files.getLastModifiedTime(path);
timestamps.watchedPaths.put(path, new WatchedPath(path, relativePath, restart, lastModifiedTime.toMillis()));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public void addHotReplacementSetup(HotReplacementSetup service) {
hotReplacementSetup.add(service);
}
public void addDeploymentFailedStartHandler(Runnable service) {
deploymentFailedStartHandlers.add(service);
}
public void startupFailed() {
for (HotReplacementSetup i : hotReplacementSetup) {
i.handleFailedInitialStart();
}
for (Runnable i : deploymentFailedStartHandlers) {
i.run();
}
//if startup failed we always do a | cleanUpClassFile |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/support/DirtiesContextBeforeModesTestExecutionListener.java | {
"start": 4498,
"end": 5106
} | class ____} is set to {@link
* ClassMode#BEFORE_EACH_TEST_METHOD BEFORE_EACH_TEST_METHOD}, the
* {@linkplain ApplicationContext application context} of the test context
* will be {@linkplain TestContext#markApplicationContextDirty marked as dirty} and the
* {@link DependencyInjectionTestExecutionListener#REINJECT_DEPENDENCIES_ATTRIBUTE
* REINJECT_DEPENDENCIES_ATTRIBUTE} in the test context will be set to {@code true}.
*/
@Override
public void beforeTestMethod(TestContext testContext) throws Exception {
beforeOrAfterTestMethod(testContext, BEFORE_METHOD, BEFORE_EACH_TEST_METHOD);
}
}
| mode |
java | spring-projects__spring-framework | spring-r2dbc/src/main/java/org/springframework/r2dbc/core/NamedParameterUtils.java | {
"start": 13082,
"end": 14042
} | class ____ {
private final String parameterName;
private final int startIndex;
private final int endIndex;
ParameterHolder(String parameterName, int startIndex, int endIndex) {
Assert.notNull(parameterName, "Parameter name must not be null");
this.parameterName = parameterName;
this.startIndex = startIndex;
this.endIndex = endIndex;
}
String getParameterName() {
return this.parameterName;
}
int getStartIndex() {
return this.startIndex;
}
int getEndIndex() {
return this.endIndex;
}
@Override
public boolean equals(@Nullable Object other) {
return (this == other || (other instanceof ParameterHolder that &&
this.startIndex == that.startIndex && this.endIndex == that.endIndex &&
this.parameterName.equals(that.parameterName)));
}
@Override
public int hashCode() {
return this.parameterName.hashCode();
}
}
/**
* Holder for bind markers progress.
*/
static | ParameterHolder |
java | elastic__elasticsearch | x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/allocation/CcrPrimaryFollowerAllocationDecider.java | {
"start": 1221,
"end": 3029
} | class ____ extends AllocationDecider {
static final String NAME = "ccr_primary_follower";
@Override
public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
final IndexMetadata indexMetadata = allocation.metadata().indexMetadata(shardRouting.index());
if (CcrSettings.CCR_FOLLOWING_INDEX_SETTING.get(indexMetadata.getSettings()) == false) {
return allocation.decision(Decision.YES, NAME, "shard is not a follower and is not under the purview of this decider");
}
if (shardRouting.primary() == false) {
return allocation.decision(Decision.YES, NAME, "shard is a replica follower and is not under the purview of this decider");
}
final RecoverySource recoverySource = shardRouting.recoverySource();
if (recoverySource == null || recoverySource.getType() != RecoverySource.Type.SNAPSHOT) {
return allocation.decision(
Decision.YES,
NAME,
"shard is a primary follower but was bootstrapped already; hence is not under the purview of this decider"
);
}
if (node.node().isRemoteClusterClient() == false) {
return allocation.decision(
Decision.NO,
NAME,
"shard is a primary follower and being bootstrapped, but node does not have the "
+ DiscoveryNodeRole.REMOTE_CLUSTER_CLIENT_ROLE.roleName()
+ " role"
);
}
return allocation.decision(
Decision.YES,
NAME,
"shard is a primary follower and node has the " + DiscoveryNodeRole.REMOTE_CLUSTER_CLIENT_ROLE.roleName() + " role"
);
}
}
| CcrPrimaryFollowerAllocationDecider |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractAppendTest.java | {
"start": 1486,
"end": 6126
} | class ____ extends AbstractFSContractTestBase {
private static final Logger LOG =
LoggerFactory.getLogger(AbstractContractAppendTest.class);
private Path testPath;
private Path target;
@BeforeEach
@Override
public void setup() throws Exception {
super.setup();
skipIfUnsupported(SUPPORTS_APPEND);
//delete the test directory
testPath = path("test");
target = new Path(testPath, "target");
}
@Test
public void testAppendToEmptyFile() throws Throwable {
touch(getFileSystem(), target);
byte[] dataset = dataset(256, 'a', 'z');
try (FSDataOutputStream outputStream = getFileSystem().append(target)) {
outputStream.write(dataset);
}
byte[] bytes = ContractTestUtils.readDataset(getFileSystem(), target,
dataset.length);
ContractTestUtils.compareByteArrays(dataset, bytes, dataset.length);
}
@Test
public void testBuilderAppendToEmptyFile() throws Throwable {
touch(getFileSystem(), target);
byte[] dataset = dataset(256, 'a', 'z');
try (FSDataOutputStream outputStream =
getFileSystem().appendFile(target).build()) {
outputStream.write(dataset);
}
byte[] bytes = ContractTestUtils.readDataset(getFileSystem(), target,
dataset.length);
ContractTestUtils.compareByteArrays(dataset, bytes, dataset.length);
}
@Test
public void testAppendNonexistentFile() throws Throwable {
try {
FSDataOutputStream out = getFileSystem().append(target);
//got here: trouble
out.close();
fail("expected a failure");
} catch (Exception e) {
//expected
handleExpectedException(e);
}
}
@Test
public void testAppendToExistingFile() throws Throwable {
byte[] original = dataset(8192, 'A', 'Z');
byte[] appended = dataset(8192, '0', '9');
createFile(getFileSystem(), target, false, original);
try (FSDataOutputStream out = getFileSystem().append(target)) {
out.write(appended);
}
byte[] bytes = ContractTestUtils.readDataset(getFileSystem(), target,
original.length + appended.length);
ContractTestUtils.validateFileContent(bytes,
new byte[] [] { original, appended });
}
@Test
public void testBuilderAppendToExistingFile() throws Throwable {
byte[] original = dataset(8192, 'A', 'Z');
byte[] appended = dataset(8192, '0', '9');
createFile(getFileSystem(), target, false, original);
try (FSDataOutputStream out = getFileSystem().appendFile(target).build()) {
out.write(appended);
}
byte[] bytes = ContractTestUtils.readDataset(getFileSystem(), target,
original.length + appended.length);
ContractTestUtils.validateFileContent(bytes,
new byte[][]{original, appended});
}
@Test
public void testAppendMissingTarget() throws Throwable {
try {
FSDataOutputStream out = getFileSystem().append(target);
//got here: trouble
out.close();
fail("expected a failure");
} catch (Exception e) {
//expected
handleExpectedException(e);
}
}
@Test
public void testRenameFileBeingAppended() throws Throwable {
touch(getFileSystem(), target);
assertPathExists("original file does not exist", target);
byte[] dataset = dataset(256, 'a', 'z');
FSDataOutputStream outputStream = getFileSystem().append(target);
if (isSupported(CREATE_VISIBILITY_DELAYED)) {
// Some filesystems like WebHDFS doesn't assure sequential consistency.
// In such a case, delay is needed. Given that we can not check the lease
// because here is closed in client side package, simply add a sleep.
Thread.sleep(100);
}
outputStream.write(dataset);
Path renamed = new Path(testPath, "renamed");
rename(target, renamed);
outputStream.close();
String listing = ls(testPath);
//expected: the stream goes to the file that was being renamed, not
//the original path
assertPathExists("renamed destination file does not exist", renamed);
assertPathDoesNotExist("Source file found after rename during append:\n" +
listing, target);
byte[] bytes = ContractTestUtils.readDataset(getFileSystem(), renamed,
dataset.length);
ContractTestUtils.compareByteArrays(dataset, bytes, dataset.length);
}
@Test
public void testFileSystemDeclaresCapability() throws Throwable {
assertHasPathCapabilities(getFileSystem(), target,
CommonPathCapabilities.FS_APPEND);
}
}
| AbstractContractAppendTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/grant/MySqlGrantTest_4.java | {
"start": 969,
"end": 2367
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "GRANT SELECT, INSERT ON mydb.* TO 'someuser'@'somehost'";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
// print(statementList);
assertEquals(1, statementList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
stmt.accept(visitor);
String output = SQLUtils.toMySqlString(stmt);
assertEquals("GRANT SELECT, INSERT ON mydb.* TO 'someuser'@'somehost'", //
output);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
// assertTrue(visitor.getTables().containsKey(new TableStat.Name("City")));
// assertTrue(visitor.getTables().containsKey(new TableStat.Name("t2")));
// assertTrue(visitor.getColumns().contains(new Column("t2", "id")));
}
}
| MySqlGrantTest_4 |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/AnySetterTest.java | {
"start": 1318,
"end": 1657
} | class ____
{
HashMap<String,int[]> _map;
public MapImitatorWithValue() {
_map = new HashMap<String,int[]>();
}
@JsonAnySetter
void addEntry(String key, int[] value)
{
_map.put(key, value);
}
}
// Bad; 2 "any setters"
static | MapImitatorWithValue |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/generated/GeneratedAnnotationTests.java | {
"start": 1168,
"end": 3480
} | class ____ {
private static final String TRIGGER = "begin NEW.lastUpdatedAt = current_timestamp; return NEW; end;";
@BeforeEach
public void prepare(SessionFactoryScope scope) {
scope.inTransaction(
s -> {
s.createNativeMutationQuery( "create function update_ts_func() returns trigger language plpgsql as $$ " + TRIGGER + " $$" )
.executeUpdate();
s.createNativeMutationQuery( "create trigger update_ts before update on gen_ann_baseline for each row execute procedure update_ts_func()" )
.executeUpdate();
}
);
}
@AfterEach
public void cleanup(SessionFactoryScope scope) {
scope.inTransaction(
s -> {
s.createNativeMutationQuery( "drop trigger if exists update_ts on gen_ann_baseline" )
.executeUpdate();
s.createNativeMutationQuery( "drop function if exists update_ts_func()" )
.executeUpdate();
}
);
}
@Test
public void test(SessionFactoryScope scope) {
final AuditedEntity created = scope.fromTransaction( (session) -> {
final AuditedEntity entity = new AuditedEntity( 1, "tsifr" );
session.persist( entity );
return entity;
} );
assertThat( created.createdAt ).isNotNull();
assertThat( created.lastUpdatedAt ).isNotNull();
assertThat( created.lastUpdatedAt ).isEqualTo(created.createdAt );
created.name = "changed";
//We need to wait a little to make sure the timestamps produced are different
waitALittle();
// then changing
final AuditedEntity merged = scope.fromTransaction( (session) -> {
return (AuditedEntity) session.merge( created );
} );
assertThat( merged ).isNotNull();
assertThat( merged.createdAt ).isNotNull();
assertThat( merged.lastUpdatedAt ).isNotNull();
assertThat( merged.lastUpdatedAt ).isNotEqualTo( merged.createdAt );
//We need to wait a little to make sure the timestamps produced are different
waitALittle();
// lastly, make sure we can load it..
final AuditedEntity loaded = scope.fromTransaction( (session) -> {
return session.get( AuditedEntity.class, 1 );
} );
assertThat( loaded ).isNotNull();
assertThat( loaded.createdAt ).isEqualTo( merged.createdAt );
assertThat( loaded.lastUpdatedAt ).isEqualTo( merged.lastUpdatedAt );
}
@Entity( name = "gen_ann_baseline" )
@Table( name = "" )
public static | GeneratedAnnotationTests |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.