language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotRequestSerializationTests.java
|
{
"start": 949,
"end": 3251
}
|
class ____ extends AbstractWireSerializingTestCase<GetShardSnapshotRequest> {
@Override
protected Writeable.Reader<GetShardSnapshotRequest> instanceReader() {
return GetShardSnapshotRequest::new;
}
@Override
protected GetShardSnapshotRequest createTestInstance() {
ShardId shardId = randomShardId();
if (randomBoolean()) {
return GetShardSnapshotRequest.latestSnapshotInAllRepositories(TEST_REQUEST_TIMEOUT, shardId);
} else {
List<String> repositories = randomList(1, randomIntBetween(1, 100), () -> randomAlphaOfLength(randomIntBetween(1, 100)));
return GetShardSnapshotRequest.latestSnapshotInRepositories(TEST_REQUEST_TIMEOUT, shardId, repositories);
}
}
@Override
protected GetShardSnapshotRequest mutateInstance(GetShardSnapshotRequest instance) {
ShardId shardId = randomShardId();
if (instance.getFromAllRepositories()) {
return GetShardSnapshotRequest.latestSnapshotInAllRepositories(TEST_REQUEST_TIMEOUT, shardId);
} else {
return GetShardSnapshotRequest.latestSnapshotInRepositories(TEST_REQUEST_TIMEOUT, shardId, instance.getRepositories());
}
}
private ShardId randomShardId() {
return new ShardId(randomAlphaOfLength(10), UUIDs.randomBase64UUID(), randomIntBetween(0, 100));
}
public void testGetDescription() {
final GetShardSnapshotRequest request = new GetShardSnapshotRequest(
TEST_REQUEST_TIMEOUT,
Arrays.asList("repo1", "repo2"),
new ShardId("idx", "uuid", 0)
);
assertThat(request.getDescription(), equalTo("shard[idx][0], repositories[repo1,repo2]"));
final GetShardSnapshotRequest randomRequest = createTestInstance();
final String description = randomRequest.getDescription();
assertThat(description, containsString(randomRequest.getShardId().toString()));
assertThat(
description,
description.length(),
lessThanOrEqualTo(
("shard" + randomRequest.getShardId() + ", repositories[").length() + 1024 + 100 + ",... (999 in total, 999 omitted)"
.length()
)
);
}
}
|
GetShardSnapshotRequestSerializationTests
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/inference/ChunkingSettings.java
|
{
"start": 657,
"end": 964
}
|
interface ____ extends ToXContentObject, VersionedNamedWriteable {
ChunkingStrategy getChunkingStrategy();
Map<String, Object> asMap();
/**
* @return The max chunk size specified, or null if not specified
*/
Integer maxChunkSize();
default void validate() {}
}
|
ChunkingSettings
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/monitor/fs/FsInfo.java
|
{
"start": 23575,
"end": 23829
}
|
class ____ {
static final String FS = "fs";
static final String TIMESTAMP = "timestamp";
static final String DATA = "data";
static final String TOTAL = "total";
static final String IO_STATS = "io_stats";
}
}
|
Fields
|
java
|
grpc__grpc-java
|
binder/src/main/java/io/grpc/binder/internal/Inbound.java
|
{
"start": 18346,
"end": 21296
}
|
class ____ extends Inbound<ServerStreamListener> {
private final BinderServerTransport serverTransport;
ServerInbound(BinderServerTransport transport, Attributes attributes, int callId) {
super(transport, attributes, callId);
this.serverTransport = transport;
}
@GuardedBy("this")
@Override
protected void handlePrefix(int flags, Parcel parcel) throws StatusException {
String methodName = parcel.readString();
Metadata headers = MetadataHelper.readMetadata(parcel, attributes);
StatsTraceContext statsTraceContext =
serverTransport.createStatsTraceContext(methodName, headers);
Outbound.ServerOutbound outbound =
new Outbound.ServerOutbound(serverTransport, callId, statsTraceContext);
ServerStream stream;
if ((flags & TransactionUtils.FLAG_EXPECT_SINGLE_MESSAGE) != 0) {
stream = new SingleMessageServerStream(this, outbound, attributes);
} else {
stream = new MultiMessageServerStream(this, outbound, attributes);
}
Status status = serverTransport.startStream(stream, methodName, headers);
if (status.isOk()) {
checkNotNull(listener); // Is it ok to assume this will happen synchronously?
if (transport.isReady()) {
listener.onReady();
}
} else {
closeAbnormal(status);
}
}
@GuardedBy("this")
@Override
protected void handleSuffix(int flags, Parcel parcel) {
// Nothing to read.
}
@Override
@GuardedBy("this")
protected void deliverSuffix() {
listener.halfClosed();
}
@Override
@GuardedBy("this")
protected void deliverCloseAbnormal(Status status) {
listener.closed(status);
}
@GuardedBy("this")
void onCloseSent(Status status) {
if (!isClosed()) {
onDeliveryState(State.CLOSED);
statsTraceContext.streamClosed(status);
listener.closed(Status.OK);
}
}
}
// ======================================
// Helper methods.
private static void checkTransition(State current, State next) {
switch (next) {
case INITIALIZED:
checkState(current == State.UNINITIALIZED, "%s -> %s", current, next);
break;
case PREFIX_DELIVERED:
checkState(
current == State.INITIALIZED || current == State.UNINITIALIZED,
"%s -> %s",
current,
next);
break;
case ALL_MESSAGES_DELIVERED:
checkState(current == State.PREFIX_DELIVERED, "%s -> %s", current, next);
break;
case SUFFIX_DELIVERED:
checkState(current == State.ALL_MESSAGES_DELIVERED, "%s -> %s", current, next);
break;
case CLOSED:
break;
default:
throw new AssertionError();
}
}
// ======================================
// Message reassembly.
/** Part of an unconsumed message. */
private static final
|
ServerInbound
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/serializer/NotWriteDefaultValueTest.java
|
{
"start": 4104,
"end": 4475
}
|
class ____ {
private boolean f0;
private boolean f1;
public boolean isF0() {
return f0;
}
public void setF0(boolean f0) {
this.f0 = f0;
}
public boolean isF1() {
return f1;
}
public void setF1(boolean f1) {
this.f1 = f1;
}
}
}
|
VO_Boolean
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/convert/TypeDescriptorTests.java
|
{
"start": 39812,
"end": 39891
}
|
class ____<T> extends ArrayList<List<Set<T>>> {
}
public static
|
PassDownGeneric
|
java
|
apache__camel
|
dsl/camel-java-joor-dsl/src/test/resources/org/apache/camel/main/java/MyRoutesScan.java
|
{
"start": 857,
"end": 1024
}
|
class ____ extends RouteBuilder {
@Override
public void configure() throws Exception {
from("direct:scan")
.to("mock:scan");
}
}
|
MyRoutesScan
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/limits/SubclassLimitsTest.java
|
{
"start": 478,
"end": 1490
}
|
class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(Simple.class, SimpleInterceptor.class, Counter.class,
LargeBean.class);
@Test
public void testInterception() {
ArcContainer container = Arc.container();
LargeBean largeBean = container.instance(LargeBean.class).get();
assertEquals("0fii1", largeBean.ping1("fii"));
assertEquals("1fii2", largeBean.ping100("fii"));
}
// this could be used to generate a larger bean
// public static void main(String[] args) throws IOException {
// int count = 1500;
// StringBuilder builder = new StringBuilder();
// builder.append("package io.quarkus.arc.test.interceptors.limits;");
// builder.append("import jakarta.enterprise.context.ApplicationScoped;");
// builder.append("import io.quarkus.arc.test.interceptors.Simple;");
// builder.append("@Simple @ApplicationScoped public
|
SubclassLimitsTest
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/scheduler/ResourceRequestSetKey.java
|
{
"start": 1457,
"end": 5950
}
|
class ____ extends SchedulerRequestKey {
private static final Logger LOG =
LoggerFactory.getLogger(ResourceRequestSetKey.class);
// More ResourceRequest key fields on top of SchedulerRequestKey
private final Resource resource;
private final ExecutionType execType;
/**
* Create the key object from a {@link ResourceRequest}.
*
* @param rr Resource request object
* @throws YarnException if fails
*/
public ResourceRequestSetKey(ResourceRequest rr) throws YarnException {
this(rr.getAllocationRequestId(), rr.getPriority(), rr.getCapability(),
((rr.getExecutionTypeRequest() == null) ? ExecutionType.GUARANTEED
: rr.getExecutionTypeRequest().getExecutionType()));
if (rr.getPriority() == null) {
throw new YarnException("Null priority in RR: " + rr);
}
if (rr.getCapability() == null) {
throw new YarnException("Null resource in RR: " + rr);
}
}
/**
* Create the key object from member objects.
*
* @param allocationRequestId allocate request id of the ask
* @param priority the priority of the ask
* @param resource the resource size of the ask
* @param execType the execution type of the ask
*/
public ResourceRequestSetKey(long allocationRequestId, Priority priority,
Resource resource, ExecutionType execType) {
super(priority, allocationRequestId, null);
if (resource == null) {
this.resource = Resource.newInstance(0, 0);
} else {
this.resource = resource;
}
if (execType == null) {
this.execType = ExecutionType.GUARANTEED;
} else {
this.execType = execType;
}
}
public Resource getResource() {
return this.resource;
}
public ExecutionType getExeType() {
return this.execType;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof SchedulerRequestKey)) {
return false;
}
if (!(obj instanceof ResourceRequestSetKey)) {
return super.equals(obj);
}
ResourceRequestSetKey other = (ResourceRequestSetKey) obj;
return super.equals(other) && this.resource.equals(other.resource)
&& this.execType.equals(other.execType);
}
@Override
public int hashCode() {
return ((super.hashCode() * 37 + this.resource.hashCode()) * 41)
+ this.execType.hashCode();
}
@Override
public int compareTo(SchedulerRequestKey other) {
int ret = super.compareTo(other);
if (ret != 0) {
return ret;
}
if (!(other instanceof ResourceRequestSetKey)) {
return ret;
}
ResourceRequestSetKey otherKey = (ResourceRequestSetKey) other;
ret = this.resource.compareTo(otherKey.resource);
if (ret != 0) {
return ret;
}
return this.execType.compareTo(otherKey.execType);
}
/**
* Extract the corresponding ResourceRequestSetKey for an allocated container
* from a given set. Return null if not found.
*
* @param container the allocated container
* @param keys the set of keys to look from
* @return ResourceRequestSetKey
*/
public static ResourceRequestSetKey extractMatchingKey(Container container,
Set<ResourceRequestSetKey> keys) {
ResourceRequestSetKey resourceRequestSetKey = new ResourceRequestSetKey(
container.getAllocationRequestId(), container.getPriority(),
container.getResource(), container.getExecutionType());
if (keys.contains(resourceRequestSetKey)) {
return resourceRequestSetKey;
}
if (container.getAllocationRequestId() > 0) {
// If no exact match, look for the one with the same (non-zero)
// allocationRequestId
for (ResourceRequestSetKey candidate : keys) {
if (candidate.getAllocationRequestId() == container.getAllocationRequestId()) {
if (LOG.isDebugEnabled()) {
LOG.debug("Using possible match for {} : {}", resourceRequestSetKey, candidate);
}
return candidate;
}
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("not match found for container {}.", container.getId());
for (ResourceRequestSetKey candidate : keys) {
LOG.debug("candidate set keys: {}.", candidate.toString());
}
}
return null;
}
@Override
public String toString() {
return "[id:" + getAllocationRequestId() + " p:"
+ getPriority().getPriority()
+ (this.execType.equals(ExecutionType.GUARANTEED) ? " G"
: " O" + " r:" + this.resource + "]");
}
}
|
ResourceRequestSetKey
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/argument/AnnotationBaseArgumentResolver.java
|
{
"start": 1240,
"end": 1874
}
|
interface ____<T extends Annotation> extends ArgumentResolver {
Class<T> accept();
NamedValueMeta getNamedValueMeta(ParameterMeta parameter, AnnotationMeta<Annotation> annotation);
Object resolve(ParameterMeta parameter, AnnotationMeta<T> annotation, HttpRequest request, HttpResponse response);
default boolean accept(ParameterMeta parameter) {
return true;
}
@Override
default Object resolve(ParameterMeta parameter, HttpRequest request, HttpResponse response) {
throw new UnsupportedOperationException("Resolve without annotation is unsupported");
}
}
|
AnnotationBaseArgumentResolver
|
java
|
alibaba__nacos
|
common/src/main/java/com/alibaba/nacos/common/cache/Cache.java
|
{
"start": 1511,
"end": 2036
}
|
interface ____ exception
*/
V get(K key, Callable<? extends V> call) throws Exception;
/**
* Take the corresponding value from the cache according to the cache key, and remove this record from the cache.
* @param key cache key
* @return cache value
*/
V remove(K key);
/**
* Clear the entire cache.
*/
void clear();
/**
* Returns the number of key-value pairs in the cache.
* @return number of key-value pairs
*/
int getSize();
}
|
throw
|
java
|
spring-projects__spring-boot
|
module/spring-boot-micrometer-metrics/src/dockerTest/java/org/springframework/boot/micrometer/metrics/docker/compose/otlp/GrafanaOpenTelemetryMetricsDockerComposeConnectionDetailsFactoryIntegrationTests.java
|
{
"start": 1195,
"end": 1555
}
|
class ____ {
@DockerComposeTest(composeFile = "otlp-compose.yaml", image = TestImage.GRAFANA_OTEL_LGTM)
void runCreatesConnectionDetails(OtlpMetricsConnectionDetails connectionDetails) {
assertThat(connectionDetails.getUrl()).startsWith("http://").endsWith("/v1/metrics");
}
}
|
GrafanaOpenTelemetryMetricsDockerComposeConnectionDetailsFactoryIntegrationTests
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/metrics/NumericMetricsAggregator.java
|
{
"start": 1632,
"end": 2583
}
|
class ____ extends NumericMetricsAggregator {
protected SingleValue(String name, AggregationContext context, Aggregator parent, Map<String, Object> metadata) throws IOException {
super(name, context, parent, metadata);
}
public abstract double metric(long owningBucketOrd);
@Override
public BucketComparator bucketComparator(String key, SortOrder order) {
if (key != null && false == "value".equals(key)) {
throw new IllegalArgumentException(String.format(Locale.ROOT, """
Ordering on a single-value metrics aggregation can only be done on its value. \
Either drop the key (a la "%s") or change it to "value" (a la "%s.value")""", name(), name()));
}
return (lhs, rhs) -> Comparators.compareDiscardNaN(metric(lhs), metric(rhs), order == SortOrder.ASC);
}
}
public abstract static
|
SingleValue
|
java
|
apache__flink
|
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/handler/operation/GetOperationStatusHandler.java
|
{
"start": 1315,
"end": 1799
}
|
class ____ extends AbstractOperationHandler {
public GetOperationStatusHandler(
SqlGatewayService service,
Map<String, String> responseHeaders,
MessageHeaders<
EmptyRequestBody,
OperationStatusResponseBody,
OperationMessageParameters>
messageHeaders) {
super(service, responseHeaders, messageHeaders);
}
}
|
GetOperationStatusHandler
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/params/ParameterizedTestIntegrationTests.java
|
{
"start": 85807,
"end": 86014
}
|
interface ____ {
}
@ParameterizedTest(quoteTextArguments = false)
@TwoEnumSources
void testWithRepeatableEnumSourceAsMetaAnnotation(Action argument) {
fail(argument.toString());
}
|
TwoEnumSources
|
java
|
mockito__mockito
|
mockito-core/src/main/java/org/mockito/stubbing/VoidAnswer2.java
|
{
"start": 1032,
"end": 1292
}
|
interface ____<A0, A1> {
/**
* @param argument0 the first argument.
* @param argument1 the second argument.
*
* @throws Throwable the throwable to be thrown
*/
void answer(A0 argument0, A1 argument1) throws Throwable;
}
|
VoidAnswer2
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java
|
{
"start": 23653,
"end": 27076
}
|
class ____ {
static AscendingFunction from(MvEvaluatorImplementer impl, TypeElement declarationType, TypeName workType, String name) {
if (name.equals("")) {
return null;
}
// check for index lookup
ExecutableElement fn = findMethod(
declarationType,
new String[] { name },
m -> m.getParameters().size() == 1 && m.getParameters().get(0).asType().getKind() == TypeKind.INT
);
if (fn != null) {
return impl.new AscendingFunction(fn, false, false);
}
// Block mode without work parameter
fn = findMethod(
declarationType,
new String[] { name },
m -> m.getParameters().size() == 3
&& m.getParameters().get(1).asType().getKind() == TypeKind.INT
&& m.getParameters().get(2).asType().getKind() == TypeKind.INT
);
if (fn != null) {
return impl.new AscendingFunction(fn, true, false);
}
// Block mode with work parameter
fn = findMethod(
declarationType,
new String[] { name },
m -> m.getParameters().size() == 4
&& TypeName.get(m.getParameters().get(0).asType()).equals(workType)
&& m.getParameters().get(2).asType().getKind() == TypeKind.INT
&& m.getParameters().get(3).asType().getKind() == TypeKind.INT
);
if (fn != null) {
return impl.new AscendingFunction(fn, true, true);
}
throw new IllegalArgumentException("Couldn't find " + declarationType + "#" + name + "(block, int, int)");
}
private final List<Object> invocationArgs = new ArrayList<>();
private final boolean blockMode;
private final boolean withWorkParameter;
private AscendingFunction(ExecutableElement fn, boolean blockMode, boolean withWorkParameter) {
this.blockMode = blockMode;
this.withWorkParameter = withWorkParameter;
if (blockMode) {
invocationArgs.add(resultType);
}
invocationArgs.add(declarationType);
invocationArgs.add(fn.getSimpleName());
}
private void call(MethodSpec.Builder builder) {
if (blockMode) {
if (withWorkParameter) {
builder.addStatement("$T result = $T.$L(work, v, first, valueCount)", invocationArgs.toArray());
} else {
builder.addStatement("$T result = $T.$L(v, first, valueCount)", invocationArgs.toArray());
}
} else {
builder.addStatement("int idx = $T.$L(valueCount)", invocationArgs.toArray());
fetch(builder, "result", resultType, "first + idx", workType.equals(fieldType) ? "firstScratch" : "valueScratch");
}
}
}
MethodSpec baseRamBytesUsed() {
MethodSpec.Builder builder = MethodSpec.methodBuilder("baseRamBytesUsed").addAnnotation(Override.class);
builder.addModifiers(Modifier.PUBLIC).returns(TypeName.LONG);
builder.addStatement("return BASE_RAM_BYTES_USED + field.baseRamBytesUsed()");
return builder.build();
}
}
|
AscendingFunction
|
java
|
micronaut-projects__micronaut-core
|
function-client/src/main/java/io/micronaut/function/client/FunctionDefinition.java
|
{
"start": 794,
"end": 1056
}
|
interface ____ {
/**
* @return The name of the function
*/
String getName();
/**
* @return An optional URI endpoint to the function
*/
default Optional<URI> getURI() {
return Optional.empty();
}
}
|
FunctionDefinition
|
java
|
elastic__elasticsearch
|
test/framework/src/main/java/org/elasticsearch/indices/SystemIndexDescriptorUtils.java
|
{
"start": 536,
"end": 1517
}
|
class ____ {
/**
* Creates a descriptor for system indices matching the supplied pattern. These indices will not be managed
* by Elasticsearch internally.
* @param indexPattern The pattern of index names that this descriptor will be used for. Must start with a '.' character, must not
* overlap with any other descriptor patterns, and must allow a suffix (see note on
* {@link SystemIndexDescriptor} for details).
* @param description The name of the plugin responsible for this system index.
*/
public static SystemIndexDescriptor createUnmanaged(String indexPattern, String description) {
return SystemIndexDescriptor.builder()
.setIndexPattern(indexPattern)
.setDescription(description)
.setType(SystemIndexDescriptor.Type.INTERNAL_UNMANAGED)
.setAllowedElasticProductOrigins(List.of())
.build();
}
}
|
SystemIndexDescriptorUtils
|
java
|
elastic__elasticsearch
|
x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/slack/SlackAction.java
|
{
"start": 887,
"end": 5001
}
|
class ____ implements Action {
public static final String TYPE = "slack";
final SlackMessage.Template message;
@Nullable
final String account;
@Nullable
final HttpProxy proxy;
public SlackAction(@Nullable String account, SlackMessage.Template message, HttpProxy proxy) {
this.account = account;
this.message = message;
this.proxy = proxy;
}
@Override
public String type() {
return TYPE;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
SlackAction that = (SlackAction) o;
return Objects.equals(account, that.account) && Objects.equals(message, that.message) && Objects.equals(proxy, that.proxy);
}
@Override
public int hashCode() {
return Objects.hash(account, message, proxy);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (account != null) {
builder.field(Field.ACCOUNT.getPreferredName(), account);
}
if (proxy != null) {
proxy.toXContent(builder, params);
}
builder.field(Field.MESSAGE.getPreferredName(), message);
return builder.endObject();
}
public static SlackAction parse(String watchId, String actionId, XContentParser parser) throws IOException {
String account = null;
SlackMessage.Template message = null;
HttpProxy proxy = null;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (Field.ACCOUNT.match(currentFieldName, parser.getDeprecationHandler())) {
if (token == XContentParser.Token.VALUE_STRING) {
account = parser.text();
} else {
throw new ElasticsearchParseException(
"failed to parse [{}] action [{}/{}]. expected [{}] to be of type string, but " + "found [{}] instead",
TYPE,
watchId,
actionId,
Field.ACCOUNT.getPreferredName(),
token
);
}
} else if (Field.PROXY.match(currentFieldName, parser.getDeprecationHandler())) {
proxy = HttpProxy.parse(parser);
} else if (Field.MESSAGE.match(currentFieldName, parser.getDeprecationHandler())) {
try {
message = SlackMessage.Template.parse(parser);
} catch (Exception e) {
throw new ElasticsearchParseException(
"failed to parse [{}] action [{}/{}]. failed to parse [{}] field",
e,
TYPE,
watchId,
actionId,
Field.MESSAGE.getPreferredName()
);
}
} else {
throw new ElasticsearchParseException(
"failed to parse [{}] action [{}/{}]. unexpected token [{}]",
TYPE,
watchId,
actionId,
token
);
}
}
if (message == null) {
throw new ElasticsearchParseException(
"failed to parse [{}] action [{}/{}]. missing required [{}] field",
TYPE,
watchId,
actionId,
Field.MESSAGE.getPreferredName()
);
}
return new SlackAction(account, message, proxy);
}
public static Builder builder(String account, SlackMessage.Template message) {
return new Builder(new SlackAction(account, message, null));
}
public
|
SlackAction
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/TaggedInputSplit.java
|
{
"start": 2837,
"end": 2896
}
|
class ____ use for this split.
*
* @return The Mapper
|
to
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/sqm/internal/ConcreteSqmSelectQueryPlan.java
|
{
"start": 22215,
"end": 23002
}
|
class ____ extends SqmJdbcExecutionContextAdapter {
private final SubselectFetch.RegistrationHandler subSelectFetchKeyHandler;
private final String hql;
public MySqmJdbcExecutionContextAdapter(
DomainQueryExecutionContext executionContext,
JdbcSelect jdbcSelect,
SubselectFetch.RegistrationHandler subSelectFetchKeyHandler,
String hql) {
super( executionContext, jdbcSelect );
this.subSelectFetchKeyHandler = subSelectFetchKeyHandler;
this.hql = hql;
}
@Override
public void registerLoadingEntityHolder(EntityHolder holder) {
subSelectFetchKeyHandler.addKey( holder );
}
@Override
public String getQueryIdentifier(String sql) {
return CRITERIA_HQL_STRING.equals( hql ) ? "[CRITERIA] " + sql : hql;
}
}
}
|
MySqmJdbcExecutionContextAdapter
|
java
|
google__gson
|
gson/src/test/java/com/google/gson/functional/MapTest.java
|
{
"start": 17033,
"end": 26369
}
|
class ____ extends LinkedHashMap<String, String> {
private static final long serialVersionUID = 1L;
@SuppressWarnings("unused")
int foo = 10;
}
/** From bug report http://code.google.com/p/google-gson/issues/detail?id=95 */
@Test
public void testMapOfMapSerialization() {
Map<String, Map<String, String>> map = new HashMap<>();
Map<String, String> nestedMap = new HashMap<>();
nestedMap.put("1", "1");
nestedMap.put("2", "2");
map.put("nestedMap", nestedMap);
String json = gson.toJson(map);
assertThat(json).contains("nestedMap");
assertThat(json).contains("\"1\":\"1\"");
assertThat(json).contains("\"2\":\"2\"");
}
/** From bug report http://code.google.com/p/google-gson/issues/detail?id=95 */
@Test
public void testMapOfMapDeserialization() {
String json = "{nestedMap:{'2':'2','1':'1'}}";
Type type = new TypeToken<Map<String, Map<String, String>>>() {}.getType();
Map<String, Map<String, String>> map = gson.fromJson(json, type);
Map<String, String> nested = map.get("nestedMap");
assertThat(nested.get("1")).isEqualTo("1");
assertThat(nested.get("2")).isEqualTo("2");
}
/** From bug report http://code.google.com/p/google-gson/issues/detail?id=178 */
@Test
public void testMapWithQuotes() {
Map<String, String> map = new HashMap<>();
map.put("a\"b", "c\"d");
String json = gson.toJson(map);
assertThat(json).isEqualTo("{\"a\\\"b\":\"c\\\"d\"}");
}
/** From issue 227. */
@Test
public void testWriteMapsWithEmptyStringKey() {
Map<String, Boolean> map = new HashMap<>();
map.put("", true);
assertThat(gson.toJson(map)).isEqualTo("{\"\":true}");
}
@Test
public void testReadMapsWithEmptyStringKey() {
Map<String, Boolean> map =
gson.fromJson("{\"\":true}", new TypeToken<Map<String, Boolean>>() {}.getType());
assertThat(map.get("")).isEqualTo(true);
}
/** From bug report http://code.google.com/p/google-gson/issues/detail?id=204 */
@Test
public void testSerializeMaps() {
Map<String, Object> map = new LinkedHashMap<>();
map.put("a", 12);
map.put("b", null);
LinkedHashMap<String, Object> innerMap = new LinkedHashMap<>();
innerMap.put("test", 1);
innerMap.put("TestStringArray", new String[] {"one", "two"});
map.put("c", innerMap);
assertThat(new GsonBuilder().serializeNulls().create().toJson(map))
.isEqualTo(
"{\"a\":12,\"b\":null,\"c\":{\"test\":1,\"TestStringArray\":[\"one\",\"two\"]}}");
assertThat(new GsonBuilder().setPrettyPrinting().serializeNulls().create().toJson(map))
.isEqualTo(
"{\n \"a\": 12,\n \"b\": null,\n \"c\": "
+ "{\n \"test\": 1,\n \"TestStringArray\": "
+ "[\n \"one\",\n \"two\"\n ]\n }\n}");
assertThat(new GsonBuilder().create().toJson(map))
.isEqualTo("{\"a\":12,\"c\":{\"test\":1,\"TestStringArray\":[\"one\",\"two\"]}}");
assertThat(new GsonBuilder().setPrettyPrinting().create().toJson(map))
.isEqualTo(
"{\n \"a\": 12,\n \"c\": "
+ "{\n \"test\": 1,\n \"TestStringArray\": "
+ "[\n \"one\",\n \"two\"\n ]\n }\n}");
innerMap.put("d", "e");
assertThat(new Gson().toJson(map))
.isEqualTo(
"{\"a\":12,\"c\":{\"test\":1,\"TestStringArray\":[\"one\",\"two\"],\"d\":\"e\"}}");
}
@Test
public final void testInterfaceTypeMap() {
MapClass element = new MapClass();
TestTypes.Sub subType = new TestTypes.Sub();
element.addBase("Test", subType);
element.addSub("Test", subType);
String subTypeJson = new Gson().toJson(subType);
String expected =
"{\"bases\":{\"Test\":" + subTypeJson + "},\"subs\":{\"Test\":" + subTypeJson + "}}";
Gson gsonWithComplexKeys = new GsonBuilder().enableComplexMapKeySerialization().create();
String json = gsonWithComplexKeys.toJson(element);
assertThat(json).isEqualTo(expected);
Gson gson = new Gson();
json = gson.toJson(element);
assertThat(json).isEqualTo(expected);
}
@Test
public final void testInterfaceTypeMapWithSerializer() {
MapClass element = new MapClass();
TestTypes.Sub subType = new TestTypes.Sub();
element.addBase("Test", subType);
element.addSub("Test", subType);
Gson tempGson = new Gson();
String subTypeJson = tempGson.toJson(subType);
JsonElement baseTypeJsonElement = tempGson.toJsonTree(subType, TestTypes.Base.class);
String baseTypeJson = tempGson.toJson(baseTypeJsonElement);
String expected =
"{\"bases\":{\"Test\":" + baseTypeJson + "},\"subs\":{\"Test\":" + subTypeJson + "}}";
JsonSerializer<TestTypes.Base> baseTypeAdapter =
(src, typeOfSrc, context) -> baseTypeJsonElement;
Gson gson =
new GsonBuilder()
.enableComplexMapKeySerialization()
.registerTypeAdapter(TestTypes.Base.class, baseTypeAdapter)
.create();
String json = gson.toJson(element);
assertThat(json).isEqualTo(expected);
gson = new GsonBuilder().registerTypeAdapter(TestTypes.Base.class, baseTypeAdapter).create();
json = gson.toJson(element);
assertThat(json).isEqualTo(expected);
}
@Test
public void testGeneralMapField() {
MapWithGeneralMapParameters map = new MapWithGeneralMapParameters();
map.map.put("string", "testString");
map.map.put("stringArray", new String[] {"one", "two"});
map.map.put("objectArray", new Object[] {1, 2L, "three"});
String expected =
"{\"map\":{\"string\":\"testString\",\"stringArray\":"
+ "[\"one\",\"two\"],\"objectArray\":[1,2,\"three\"]}}";
assertThat(gson.toJson(map)).isEqualTo(expected);
gson = new GsonBuilder().enableComplexMapKeySerialization().create();
assertThat(gson.toJson(map)).isEqualTo(expected);
}
@Test
public void testComplexKeysSerialization() {
Map<Point, String> map = new LinkedHashMap<>();
map.put(new Point(2, 3), "a");
map.put(new Point(5, 7), "b");
String json = "{\"2,3\":\"a\",\"5,7\":\"b\"}";
assertThat(gson.toJson(map, new TypeToken<Map<Point, String>>() {}.getType())).isEqualTo(json);
assertThat(gson.toJson(map, Map.class)).isEqualTo(json);
}
@Test
public void testComplexKeysDeserialization() {
String json = "{'2,3':'a','5,7':'b'}";
Type type = new TypeToken<Map<Point, String>>() {}.getType();
var e = assertThrows(JsonParseException.class, () -> gson.fromJson(json, type));
assertThat(e)
.hasCauseThat()
.hasMessageThat()
.startsWith("Expected BEGIN_OBJECT but was STRING at line 1 column 3 path $.\n");
}
@Test
public void testStringKeyDeserialization() {
String json = "{'2,3':'a','5,7':'b'}";
Map<String, String> map = new LinkedHashMap<>();
map.put("2,3", "a");
map.put("5,7", "b");
assertThat(gson.fromJson(json, new TypeToken<Map<String, String>>() {})).isEqualTo(map);
}
@Test
public void testNumberKeyDeserialization() {
String json = "{'2.3':'a','5.7':'b'}";
Map<Double, String> map = new LinkedHashMap<>();
map.put(2.3, "a");
map.put(5.7, "b");
assertThat(gson.fromJson(json, new TypeToken<Map<Double, String>>() {})).isEqualTo(map);
}
@Test
public void testBooleanKeyDeserialization() {
String json = "{'true':'a','false':'b'}";
Map<Boolean, String> map = new LinkedHashMap<>();
map.put(true, "a");
map.put(false, "b");
assertThat(gson.fromJson(json, new TypeToken<Map<Boolean, String>>() {})).isEqualTo(map);
}
@Test
public void testMapDeserializationWithDuplicateKeys() {
Type type = new TypeToken<Map<String, Integer>>() {}.getType();
var e = assertThrows(JsonSyntaxException.class, () -> gson.fromJson("{'a':1,'a':2}", type));
assertThat(e).hasMessageThat().isEqualTo("duplicate key: a");
}
@Test
public void testSerializeMapOfMaps() {
Type type = new TypeToken<Map<String, Map<String, String>>>() {}.getType();
Map<String, Map<String, String>> map =
newMap(
"a", newMap("ka1", "va1", "ka2", "va2"),
"b", newMap("kb1", "vb1", "kb2", "vb2"));
assertThat(gson.toJson(map, type).replace('"', '\''))
.isEqualTo("{'a':{'ka1':'va1','ka2':'va2'},'b':{'kb1':'vb1','kb2':'vb2'}}");
}
@Test
public void testDeserializeMapOfMaps() {
    // Round-trip counterpart of testSerializeMapOfMaps: nested JSON objects
    // deserialize back into nested maps.
    TypeToken<Map<String, Map<String, String>>> type = new TypeToken<>() {};
    Map<String, Map<String, String>> map =
        newMap(
            "a", newMap("ka1", "va1", "ka2", "va2"),
            "b", newMap("kb1", "vb1", "kb2", "vb2"));
    String json = "{'a':{'ka1':'va1','ka2':'va2'},'b':{'kb1':'vb1','kb2':'vb2'}}";
    assertThat(gson.fromJson(json, type)).isEqualTo(map);
}
/** Builds a mutable, insertion-ordered map holding exactly the two given entries. */
private static <K, V> Map<K, V> newMap(K firstKey, V firstValue, K secondKey, V secondValue) {
    Map<K, V> map = new LinkedHashMap<>();
    map.put(firstKey, firstValue);
    map.put(secondKey, secondValue);
    return map;
}
@Test
public void testMapNamePromotionWithJsonElementReader() {
    // Same numeric-key promotion as testNumberKeyDeserialization, but going
    // through a pre-parsed JsonElement tree instead of a raw JSON string.
    String json = "{'2.3':'a'}";
    Map<Double, String> map = new LinkedHashMap<>();
    map.put(2.3, "a");
    JsonElement tree = JsonParser.parseString(json);
    assertThat(gson.fromJson(tree, new TypeToken<Map<Double, String>>() {})).isEqualTo(map);
}
static
|
MyMap
|
java
|
spring-projects__spring-framework
|
spring-beans/src/testFixtures/java/org/springframework/beans/testfixture/beans/factory/generator/lifecycle/InferredDestroyBean.java
|
{
"start": 715,
"end": 775
}
|
class ____ {

    // Public no-arg close() with no explicit destroy-method configuration —
    // presumably exercised as a container-inferred destruction callback (TODO confirm
    // against the tests using this fixture).
    public void close() {
    }
}
|
InferredDestroyBean
|
java
|
dropwizard__dropwizard
|
dropwizard-auth/src/test/java/io/dropwizard/auth/OptionalAuthFilterOrderingTest.java
|
{
"start": 915,
"end": 1624
}
|
class ____ extends JerseyTest {
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
}
@Override
@AfterEach
public void tearDown() throws Exception {
super.tearDown();
}
@Override
protected DeploymentContext configureDeployment() {
forceSet(TestProperties.CONTAINER_PORT, "0");
return ServletDeploymentContext
.builder(new BasicAuthResourceConfigWithAuthorizationFilter())
.initParam(ServletProperties.JAXRS_APPLICATION_CLASS,
BasicAuthResourceConfigWithAuthorizationFilter.class.getName())
.build();
}
public static
|
OptionalAuthFilterOrderingTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/broadcast/InitializationTypeConflictException.java
|
{
"start": 1185,
"end": 1513
}
|
/**
 * Exception carrying the {@link Class} that triggered an initialization type
 * conflict, so callers can inspect the offending type via {@link #getType()}.
 */
class ____ extends Exception {

    private static final long serialVersionUID = -3930913982433642882L;

    /** The type that caused the conflict. */
    private final Class<?> conflictingType;

    public ____(Class<?> type) {
        this.conflictingType = type;
    }

    /** Returns the type that caused the conflict. */
    public Class<?> getType() {
        return conflictingType;
    }
}
|
InitializationTypeConflictException
|
java
|
resilience4j__resilience4j
|
resilience4j-core/src/test/java/io/github/resilience4j/core/ClassUtilsTest.java
|
{
"start": 2478,
"end": 2638
}
|
class ____ extends PublicBiConsumer {

    // Sole constructor takes an argument, so this class deliberately has no
    // no-arg (default) constructor — presumably used to exercise reflective
    // instantiation failures in ClassUtils tests.
    public NoDefaultConstructorBiConsumer(String foo) {
    }
}
public static
|
NoDefaultConstructorBiConsumer
|
java
|
apache__kafka
|
connect/runtime/src/main/java/org/apache/kafka/connect/runtime/distributed/IncrementalCooperativeAssignor.java
|
{
"start": 2633,
"end": 2866
}
|
class ____ NOT thread-safe.
* @see <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-415%3A+Incremental+Cooperative+Rebalancing+in+Kafka+Connect">
* KIP-415 for a description of the assignment policy. </a>
*
*/
public
|
is
|
java
|
playframework__playframework
|
core/play/src/main/java/play/mvc/MultipartFormatter.java
|
{
"start": 493,
"end": 2664
}
|
class ____ {

    /** Generates a random multipart boundary string (18 chars, thread-local RNG). */
    public static String randomBoundary() {
        return Multipart.randomBoundary(18, ThreadLocalRandom.current());
    }

    /** Builds the {@code multipart/form-data} Content-Type header value for a boundary. */
    public static String boundaryToContentType(String boundary) {
        return "multipart/form-data; boundary=" + boundary;
    }

    /**
     * Converts a stream of Java-API multipart parts into the raw multipart/form-data
     * byte stream delimited by {@code boundary}. Each Java part is mapped to its
     * Scala counterpart before being fed to the Scala formatter.
     *
     * @throws UnsupportedOperationException for part types other than DataPart or a
     *     FilePart whose payload is a {@code Source}
     */
    public static Source<ByteString, ?> transform(
            Source<? super Http.MultipartFormData.Part<Source<ByteString, ?>>, ?> parts,
            String boundary) {
        @SuppressWarnings("unchecked")
        Source<MultipartFormData.Part<org.apache.pekko.stream.scaladsl.Source<ByteString, ?>>, ?>
            source =
                parts.map(
                    part -> {
                        if (part instanceof Http.MultipartFormData.DataPart) {
                            // Simple key/value part.
                            Http.MultipartFormData.DataPart dp = (Http.MultipartFormData.DataPart) part;
                            return (MultipartFormData.Part)
                                new MultipartFormData.DataPart(dp.getKey(), dp.getValue());
                        } else if (part instanceof Http.MultipartFormData.FilePart) {
                            // Only file parts backed by a streaming Source payload are supported.
                            if (((Http.MultipartFormData.FilePart) part).ref instanceof Source) {
                                @SuppressWarnings("unchecked")
                                Http.MultipartFormData.FilePart<Source<ByteString, ?>> fp =
                                    (Http.MultipartFormData.FilePart<Source<ByteString, ?>>) part;
                                Option<String> ct = Option.apply(fp.getContentType());
                                return new MultipartFormData.FilePart<
                                        org.apache.pekko.stream.scaladsl.Source<ByteString, ?>>(
                                    fp.getKey(),
                                    fp.getFilename(),
                                    ct,
                                    fp.ref.asScala(),
                                    fp.getFileSize(),
                                    fp.getDispositionType(),
                                    byteSource ->
                                        OptionConverters.toScala(fp.refToBytes.apply(byteSource.asJava())));
                            }
                        }
                        throw new UnsupportedOperationException("Unsupported Part Class");
                    });
        // NOTE(review): uses the JVM default charset and a 4096 buffer size —
        // confirm the default charset matches the intended request encoding.
        return source.via(Multipart.format(boundary, Charset.defaultCharset(), 4096));
    }
}
|
MultipartFormatter
|
java
|
google__dagger
|
dagger-compiler/main/java/dagger/internal/codegen/model/ComponentPath.java
|
{
"start": 1121,
"end": 3506
}
|
class ____ {
    /** Returns a new {@link ComponentPath} from {@code components}. */
    public static ComponentPath create(Iterable<DaggerTypeElement> components) {
        return new AutoValue_ComponentPath(ImmutableList.copyOf(components));
    }

    /**
     * Returns the component types, starting from the {@linkplain #rootComponent() root
     * component} and ending with the {@linkplain #currentComponent() current component}.
     */
    public abstract ImmutableList<DaggerTypeElement> components();

    /**
     * Returns the root {@link dagger.Component}- or {@link
     * dagger.producers.ProductionComponent}-annotated type
     */
    public final DaggerTypeElement rootComponent() {
        return components().get(0);
    }

    /** Returns the component at the end of the path. */
    @Memoized
    public DaggerTypeElement currentComponent() {
        return getLast(components());
    }

    /**
     * Returns the parent of the {@linkplain #currentComponent() current component}.
     *
     * @throws IllegalStateException if the current graph is the {@linkplain #atRoot() root component}
     */
    public final DaggerTypeElement parentComponent() {
        checkState(!atRoot());
        // components() is root-first, so the parent is second-from-last.
        return components().reverse().get(1);
    }

    /**
     * Returns this path's parent path.
     *
     * @throws IllegalStateException if the current graph is the {@linkplain #atRoot() root component}
     */
    // TODO(ronshapiro): consider memoizing this
    public final ComponentPath parent() {
        checkState(!atRoot());
        return create(components().subList(0, components().size() - 1));
    }

    /** Returns the path from the root component to the {@code child} of the current component. */
    public final ComponentPath childPath(DaggerTypeElement child) {
        return create(
            ImmutableList.<DaggerTypeElement>builder().addAll(components()).add(child).build());
    }

    /**
     * Returns {@code true} if the {@linkplain #currentComponent() current component} is the
     * {@linkplain #rootComponent() root component}.
     */
    public final boolean atRoot() {
        return components().size() == 1;
    }

    @Override
    public final String toString() {
        return components().stream()
            .map(DaggerTypeElement::xprocessing)
            .map(XTypeElement::getQualifiedName)
            .collect(joining(" → "));
    }

    @Memoized
    @Override
    public abstract int hashCode();

    @Override
    public abstract boolean equals(Object obj);
}
|
ComponentPath
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-data-jpa/src/main/java/smoketest/data/jpa/domain/Hotel.java
|
{
"start": 1104,
"end": 2265
}
|
/**
 * JPA entity for a hotel; city + name form its natural id.
 */
class ____ implements Serializable {

    private static final long serialVersionUID = 1L;

    // Primary key generated from a dedicated sequence starting at 28.
    @Id
    @SequenceGenerator(name = "hotel_generator", sequenceName = "hotel_sequence", initialValue = 28)
    @GeneratedValue(generator = "hotel_generator")
    @SuppressWarnings("NullAway.Init")
    private Long id;

    @ManyToOne(optional = false)
    @NaturalId
    @SuppressWarnings("NullAway.Init")
    private City city;

    @Column(nullable = false)
    @NaturalId
    @SuppressWarnings("NullAway.Init")
    private String name;

    @Column(nullable = false)
    @SuppressWarnings("NullAway.Init")
    private String address;

    @Column(nullable = false)
    @SuppressWarnings("NullAway.Init")
    private String zip;

    // Lazily-loaded; owned by the "hotel" property of Review. No accessor is exposed.
    @OneToMany(fetch = FetchType.LAZY, mappedBy = "hotel")
    private Set<Review> reviews = new HashSet<>();

    // Protected no-arg constructor for the persistence provider; not for application use.
    protected Hotel() {
    }

    public Hotel(City city, String name, String address, String zip) {
        this.city = city;
        this.name = name;
        this.address = address;
        this.zip = zip;
    }

    public City getCity() {
        return this.city;
    }

    public String getName() {
        return this.name;
    }

    public String getAddress() {
        return this.address;
    }

    public String getZip() {
        return this.zip;
    }
}
|
Hotel
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindowedSerializer.java
|
{
"start": 3532,
"end": 3965
}
|
class ____ set using constructor "
+ "(" + inner.getClass().getName() + ")" +
" is different from the one set in " + serializerConfigKey + " config " +
"(" + windowedInnerSerializerClass.serializer().getClass().getName() + ").");
}
} else if (inner == null && serializerConfigValue == null) {
throw new IllegalArgumentException("Inner
|
serializer
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java
|
{
"start": 3039,
"end": 16709
}
|
class ____ extends MlNativeAutodetectIntegTestCase {
private static final long DATA_START_TIME = 1761955200000L;
@After
public void tearDownData() {
cleanUp();
}
public void testRevertModelSnapshot() throws Exception {
testRunJobInTwoPartsAndRevertSnapshotAndRunToCompletion("revert-model-snapshot-it-job", false);
}
public void testRevertModelSnapshot_DeleteInterveningResults() throws Exception {
// Create and run an unrelated job to check it is not affected by reverting a different job
String jobId = "revert-snapshot-delete-intervening-unrelated-job";
TimeValue bucketSpan = TimeValue.timeValueHours(1);
long startTime = DATA_START_TIME - (bucketSpan.getMillis() * 2);
String data = String.join("", generateData(startTime, bucketSpan, 23, List.of("foo"), (bucketIndex, series) -> 10.0));
Job.Builder job = buildAndRegisterJob(jobId, bucketSpan);
openJob(job.getId());
postData(job.getId(), data);
flushJob(job.getId(), true);
closeJob(job.getId());
String snapShotId = getJob(jobId).get(0).getModelSnapshotId();
assertThat(snapShotId, is(notNullValue()));
List<Bucket> buckets = getBuckets(jobId);
assertThat(buckets.size(), greaterThan(0));
// Run another job and revert to a previous snapshot
testRunJobInTwoPartsAndRevertSnapshotAndRunToCompletion("revert-model-snapshot-it-job-delete-intervening-results", true);
// Check snapshot Id and buckets have not changed
assertThat(getJob(jobId).getFirst().getModelSnapshotId(), is(snapShotId));
List<Bucket> bucketsAfterRevert = getBuckets(jobId);
assertThat(bucketsAfterRevert.size(), is(buckets.size()));
assertThat(bucketsAfterRevert, is(buckets));
}
public void testRevertToEmptySnapshot() throws Exception {
String jobId = "revert-to-empty-snapshot-test";
TimeValue bucketSpan = TimeValue.timeValueHours(1);
long startTime = 1491004800000L;
String data = generateData(
startTime,
bucketSpan,
20,
Arrays.asList("foo"),
(bucketIndex, series) -> bucketIndex == 19 ? 100.0 : 10.0
).stream().collect(Collectors.joining());
Job.Builder job = buildAndRegisterJob(jobId, bucketSpan);
openJob(job.getId());
postData(job.getId(), data);
flushJob(job.getId(), true);
closeJob(job.getId());
assertThat(getJob(jobId).get(0).getModelSnapshotId(), is(notNullValue()));
List<Bucket> expectedBuckets = getBuckets(jobId);
assertThat(expectedBuckets.size(), equalTo(20));
List<AnomalyRecord> expectedRecords = getRecords(jobId);
assertThat(expectedBuckets.isEmpty(), is(false));
assertThat(expectedRecords.isEmpty(), is(false));
RevertModelSnapshotAction.Response revertResponse = revertModelSnapshot(jobId, "empty", true);
assertThat(revertResponse.getModel().getSnapshotId(), equalTo("empty"));
assertThat(getJob(jobId).get(0).getModelSnapshotId(), is(nullValue()));
assertThat(getBuckets(jobId).isEmpty(), is(true));
assertThat(getRecords(jobId).isEmpty(), is(true));
assertThat(getJobStats(jobId).get(0).getDataCounts().getLatestRecordTimeStamp(), is(nullValue()));
// Now run again and see we get same results
openJob(job.getId());
DataCounts dataCounts = postData(job.getId(), data);
assertThat(dataCounts.getOutOfOrderTimeStampCount(), equalTo(0L));
flushJob(job.getId(), true);
closeJob(job.getId());
assertThat(getBuckets(jobId).size(), equalTo(expectedBuckets.size()));
assertThat(getRecords(jobId), equalTo(expectedRecords));
}
private void testRunJobInTwoPartsAndRevertSnapshotAndRunToCompletion(String jobId, boolean deleteInterveningResults) throws Exception {
TimeValue bucketSpan = TimeValue.timeValueHours(1);
Job.Builder job = buildAndRegisterJob(jobId, bucketSpan);
openJob(job.getId());
postData(
job.getId(),
generateData(DATA_START_TIME, bucketSpan, 10, Arrays.asList("foo"), (bucketIndex, series) -> bucketIndex == 5 ? 100.0 : 10.0)
.stream()
.collect(Collectors.joining())
);
flushJob(job.getId(), true);
String forecastId = forecast(job.getId(), TimeValue.timeValueHours(10), TimeValue.timeValueDays(100));
waitForecastToFinish(job.getId(), forecastId);
closeJob(job.getId());
long numForecastDocs = countForecastDocs(job.getId(), forecastId);
assertThat(numForecastDocs, greaterThan(0L));
ModelSizeStats modelSizeStats1 = getJobStats(job.getId()).get(0).getModelSizeStats();
Quantiles quantiles1 = getQuantiles(job.getId());
List<Bucket> midwayBuckets = getBuckets(job.getId());
Bucket revertPointBucket = midwayBuckets.get(midwayBuckets.size() - 1);
assertThat(revertPointBucket.isInterim(), is(true));
// We need to wait a second to ensure the second time around model snapshot will have a different ID (it depends on epoch seconds)
waitUntil(() -> false, 1, TimeUnit.SECONDS);
openJob(job.getId());
postData(
job.getId(),
generateData(
DATA_START_TIME + 10 * bucketSpan.getMillis(),
bucketSpan,
10,
Arrays.asList("foo", "bar"),
(bucketIndex, series) -> 10.0
).stream().collect(Collectors.joining())
);
closeJob(job.getId());
ModelSizeStats modelSizeStats2 = getJobStats(job.getId()).get(0).getModelSizeStats();
Quantiles quantiles2 = getQuantiles(job.getId());
// Check model has grown since a new series was introduced
assertThat(modelSizeStats2.getModelBytes(), greaterThan(modelSizeStats1.getModelBytes()));
// Check quantiles have changed
assertThat(quantiles2, not(equalTo(quantiles1)));
List<Bucket> finalPreRevertBuckets = getBuckets(job.getId());
Bucket finalPreRevertPointBucket = finalPreRevertBuckets.get(midwayBuckets.size() - 1);
assertThat(finalPreRevertPointBucket.isInterim(), is(false));
List<ModelSnapshot> modelSnapshots = getModelSnapshots(job.getId());
assertThat(modelSnapshots.size(), equalTo(2));
// Snapshots are sorted in descending timestamp order so we revert to the last of the list/earliest.
assertThat(modelSnapshots.get(0).getTimestamp().getTime(), greaterThan(modelSnapshots.get(1).getTimestamp().getTime()));
assertThat(getJob(job.getId()).get(0).getModelSnapshotId(), equalTo(modelSnapshots.get(0).getSnapshotId()));
ModelSnapshot revertSnapshot = modelSnapshots.get(1);
// Check there are 2 annotations (one per model snapshot)
assertThatNumberOfAnnotationsIsEqualTo(jobId, 2);
// Add 3 new annotations...
Instant lastResultTimestamp = revertSnapshot.getLatestResultTimeStamp().toInstant();
client().index(randomAnnotationIndexRequest(job.getId(), lastResultTimestamp.plusSeconds(10), Event.DELAYED_DATA)).actionGet();
client().index(randomAnnotationIndexRequest(job.getId(), lastResultTimestamp.plusSeconds(20), Event.MODEL_CHANGE)).actionGet();
client().index(randomAnnotationIndexRequest(job.getId(), lastResultTimestamp.minusSeconds(10), Event.MODEL_CHANGE)).actionGet();
// ... and check there are 5 annotations in total now
assertThatNumberOfAnnotationsIsEqualTo(jobId, 5);
GetJobsStatsAction.Response.JobStats statsBeforeRevert = getJobStats(jobId).get(0);
Instant timeBeforeRevert = Instant.now();
revertModelSnapshot(job.getId(), revertSnapshot.getSnapshotId(), deleteInterveningResults);
GetJobsStatsAction.Response.JobStats statsAfterRevert = getJobStats(job.getId()).get(0);
// Check model_size_stats has been reverted
assertThat(statsAfterRevert.getModelSizeStats().getModelBytes(), equalTo(modelSizeStats1.getModelBytes()));
if (deleteInterveningResults) {
// Check data counts have been reverted
assertThat(statsAfterRevert.getDataCounts().getLatestRecordTimeStamp(), equalTo(revertSnapshot.getLatestRecordTimeStamp()));
assertThat(statsAfterRevert.getDataCounts().getLogTime(), greaterThanOrEqualTo(timeBeforeRevert));
} else {
assertThat(statsAfterRevert.getDataCounts(), equalTo(statsBeforeRevert.getDataCounts()));
}
// Check quantiles have been reverted
assertThat(getQuantiles(job.getId()).getTimestamp(), equalTo(revertSnapshot.getLatestResultTimeStamp()));
// Check annotations with event type from {delayed_data, model_change} have been removed if deleteInterveningResults flag is set
assertThatNumberOfAnnotationsIsEqualTo(jobId, deleteInterveningResults ? 3 : 5);
// Reverting should not have deleted any forecast docs
assertThat(countForecastDocs(job.getId(), forecastId), is(numForecastDocs));
// Re-run 2nd half of data
openJob(job.getId());
postData(
job.getId(),
generateData(
DATA_START_TIME + 10 * bucketSpan.getMillis(),
bucketSpan,
10,
Arrays.asList("foo", "bar"),
(bucketIndex, series) -> 10.0
).stream().collect(Collectors.joining())
);
closeJob(job.getId());
List<Bucket> finalPostRevertBuckets = getBuckets(job.getId());
Bucket finalPostRevertPointBucket = finalPostRevertBuckets.get(midwayBuckets.size() - 1);
assertThat(finalPostRevertPointBucket.getTimestamp(), equalTo(finalPreRevertPointBucket.getTimestamp()));
assertThat(finalPostRevertPointBucket.getAnomalyScore(), equalTo(finalPreRevertPointBucket.getAnomalyScore()));
assertThat(finalPostRevertPointBucket.getEventCount(), equalTo(finalPreRevertPointBucket.getEventCount()));
// Re-running should not have deleted any forecast docs
assertThat(countForecastDocs(job.getId(), forecastId), is(numForecastDocs));
}
private Job.Builder buildAndRegisterJob(String jobId, TimeValue bucketSpan) throws Exception {
Detector.Builder detector = new Detector.Builder("mean", "value");
detector.setPartitionFieldName("series");
AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Arrays.asList(detector.build()));
analysisConfig.setBucketSpan(bucketSpan);
Job.Builder job = new Job.Builder(jobId);
job.setAnalysisConfig(analysisConfig);
DataDescription.Builder dataDescription = new DataDescription.Builder();
job.setDataDescription(dataDescription);
putJob(job);
return job;
}
private static List<String> generateData(
long timestamp,
TimeValue bucketSpan,
int bucketCount,
List<String> series,
BiFunction<Integer, String, Double> timeAndSeriesToValueFunction
) throws IOException {
List<String> data = new ArrayList<>();
long now = timestamp;
for (int i = 0; i < bucketCount; i++) {
for (String field : series) {
Map<String, Object> record = new HashMap<>();
record.put("time", now);
record.put("value", timeAndSeriesToValueFunction.apply(i, field));
record.put("series", field);
data.add(createJsonRecord(record));
record = new HashMap<>();
record.put("time", now + bucketSpan.getMillis() / 2);
record.put("value", timeAndSeriesToValueFunction.apply(i, field));
record.put("series", field);
data.add(createJsonRecord(record));
}
now += bucketSpan.getMillis();
}
return data;
}
private Quantiles getQuantiles(String jobId) throws Exception {
SetOnce<Quantiles> quantilesSetOnce = new SetOnce<>();
assertCheckedResponse(
prepareSearch(".ml-state*").setQuery(QueryBuilders.idsQuery().addIds(Quantiles.documentId(jobId))).setSize(1),
response -> {
SearchHits hits = response.getHits();
assertThat(hits.getTotalHits().value(), equalTo(1L));
try (
XContentParser parser = JsonXContent.jsonXContent.createParser(
XContentParserConfiguration.EMPTY,
hits.getAt(0).getSourceAsString()
)
) {
quantilesSetOnce.set(Quantiles.LENIENT_PARSER.apply(parser, null));
}
}
);
return quantilesSetOnce.get();
}
private static IndexRequest randomAnnotationIndexRequest(String jobId, Instant timestamp, Event event) throws IOException {
Annotation annotation = new Annotation.Builder(randomAnnotation(jobId)).setTimestamp(Date.from(timestamp))
.setCreateUsername(InternalUsers.XPACK_USER.principal())
.setEvent(event)
.build();
try (XContentBuilder xContentBuilder = annotation.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) {
return new IndexRequest(AnnotationIndex.WRITE_ALIAS_NAME).source(xContentBuilder)
.setRequireAlias(true)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
}
}
}
|
RevertModelSnapshotIT
|
java
|
apache__rocketmq
|
test/src/test/java/org/apache/rocketmq/test/client/consumer/broadcast/normal/BroadcastNormalMsgRecvStartLaterIT.java
|
{
"start": 1524,
"end": 3812
}
|
class ____ extends BaseBroadcast {

    // NOTE(review): logger is created for NormalMsgTwoSameGroupConsumerIT — looks
    // like a copy-paste; it should presumably reference this test class instead.
    private static Logger logger = LoggerFactory
        .getLogger(NormalMsgTwoSameGroupConsumerIT.class);
    private RMQNormalProducer producer = null;
    private String topic = null;

    @Before
    public void setUp() {
        printSeparator();
        topic = initTopic();
        logger.info(String.format("use topic: %s;", topic));
        producer = getProducer(NAMESRV_ADDR, topic);
    }

    @After
    public void tearDown() {
        super.shutdown();
    }

    /**
     * Verifies broadcast delivery when a second consumer in the same group starts
     * after the first has already consumed a batch: both consumers must receive
     * the messages sent after the second one joins.
     */
    @Test
    public void testStartOneAndStartAnotherLater() {
        int msgSize = 16;
        String group = initConsumerGroup();
        RMQBroadCastConsumer consumer1 = getBroadCastConsumer(NAMESRV_ADDR, group, topic, "*",
            new RMQNormalListener(group + "_1"));
        TestUtils.waitForSeconds(WAIT_TIME);
        // First round: only consumer1 is online.
        producer.send(msgSize);
        Assert.assertEquals("Not all sent succeeded", msgSize, producer.getAllUndupMsgBody().size());
        consumer1.getListener().waitForMessageConsume(producer.getAllMsgBody(), CONSUME_TIME);
        assertThat(VerifyUtils.getFilterdMessage(producer.getAllMsgBody(),
            consumer1.getListener().getAllMsgBody()))
            .containsExactlyElementsIn(producer.getAllMsgBody());
        // Reset state before the second round so only new messages are compared.
        producer.clearMsg();
        consumer1.clearMsg();
        // Second round: a late-starting consumer joins the same broadcast group.
        RMQBroadCastConsumer consumer2 = getBroadCastConsumer(NAMESRV_ADDR,
            consumer1.getConsumerGroup(), topic, "*", new RMQNormalListener(group + "_2"));
        TestUtils.waitForSeconds(WAIT_TIME);
        producer.send(msgSize);
        Assert.assertEquals("Not all sent succeeded", msgSize, producer.getAllUndupMsgBody().size());
        consumer1.getListener().waitForMessageConsume(producer.getAllMsgBody(), CONSUME_TIME);
        consumer2.getListener().waitForMessageConsume(producer.getAllMsgBody(), CONSUME_TIME);
        // In broadcast mode each consumer must receive every message.
        assertThat(VerifyUtils.getFilterdMessage(producer.getAllMsgBody(),
            consumer1.getListener().getAllMsgBody()))
            .containsExactlyElementsIn(producer.getAllMsgBody());
        assertThat(VerifyUtils.getFilterdMessage(producer.getAllMsgBody(),
            consumer2.getListener().getAllMsgBody()))
            .containsExactlyElementsIn(producer.getAllMsgBody());
    }
}
|
BroadcastNormalMsgRecvStartLaterIT
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/GoogleSheetsStreamComponentBuilderFactory.java
|
{
"start": 1388,
"end": 1924
}
|
interface ____ {
/**
* Google Sheets Stream (camel-google-sheets)
* Poll for changes in Google Sheets.
*
* Category: cloud,document
* Since: 2.23
* Maven coordinates: org.apache.camel:camel-google-sheets
*
* @return the dsl builder
*/
static GoogleSheetsStreamComponentBuilder googleSheetsStream() {
return new GoogleSheetsStreamComponentBuilderImpl();
}
/**
* Builder for the Google Sheets Stream component.
*/
|
GoogleSheetsStreamComponentBuilderFactory
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/SubApplicationEntity.java
|
{
"start": 1152,
"end": 1802
}
|
class ____ extends HierarchicalTimelineEntity {

    /** Info-map key under which the owning YARN application id is stored. */
    public static final String YARN_APPLICATION_ID = "YARN_APPLICATION_ID";

    public SubApplicationEntity(TimelineEntity entity) {
        super(entity);
    }

    /**
     * Checks if the input TimelineEntity object is a SubApplicationEntity.
     *
     * @param te TimelineEntity object.
     * @return true if input is a SubApplicationEntity, false otherwise
     */
    public static boolean isSubApplicationEntity(TimelineEntity te) {
        // instanceof already yields false for null, so the former explicit
        // null check was redundant.
        return te instanceof SubApplicationEntity;
    }

    /** Records the owning YARN application id in this entity's info map. */
    public void setApplicationId(String appId) {
        addInfo(YARN_APPLICATION_ID, appId);
    }
}
|
SubApplicationEntity
|
java
|
apache__camel
|
core/camel-management/src/test/java/org/apache/camel/management/ManagedRouteAddSecondRouteNotRegisterNewRoutesTest.java
|
{
"start": 1525,
"end": 2993
}
|
class ____ extends ManagementTestSupport {

    @Override
    protected RouteBuilder createRouteBuilder() {
        // do not register new routes
        context.getManagementStrategy().getManagementAgent().setRegisterNewRoutes(false);
        return new RouteBuilder() {
            @Override
            public void configure() {
                from("direct:start").routeId("foo").to("mock:result");
            }
        };
    }

    /**
     * With registerNewRoutes disabled, a route added after startup must not get
     * a JMX MBean, while the initially configured route remains registered.
     */
    @Test
    public void testRouteAddSecondRoute() throws Exception {
        MBeanServer mbeanServer = getMBeanServer();
        ObjectName route1 = getCamelObjectName(TYPE_ROUTE, "foo");

        // should be started
        String state = (String) mbeanServer.getAttribute(route1, "State");
        assertEquals(ServiceStatus.Started.name(), state, "Should be started");

        log.info(">>>>>>>>>>>>>>>>> adding 2nd route <<<<<<<<<<<<<<");
        // add a 2nd route
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() {
                from("direct:bar").routeId("bar").to("mock:bar");
            }
        });
        log.info(">>>>>>>>>>>>>>>>> adding 2nd route DONE <<<<<<<<<<<<<<");

        // find the 2nd route
        ObjectName route2 = getCamelObjectName(TYPE_ROUTE, "bar");
        // should not be registered
        assertFalse(mbeanServer.isRegistered(route2), "2nd route should not be registered");
    }
}
|
ManagedRouteAddSecondRouteNotRegisterNewRoutesTest
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/SubcomponentValidationTest.java
|
{
"start": 9434,
"end": 10332
}
|
interface ____ {}");
CompilerTests.daggerCompiler(moduleFile, componentFile, childComponentFile)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining(
"test.TestModule is present as an argument to the test.ChildComponent "
+ "factory method, but is not one of the modules used to implement the "
+ "subcomponent.")
.onSource(componentFile)
.onLine(7);
});
}
@Test public void missingBinding() {
Source moduleFile =
CompilerTests.javaSource("test.TestModule",
"package test;",
"",
"import dagger.Module;",
"import dagger.Provides;",
"",
"@Module",
"final
|
ChildComponent
|
java
|
apache__rocketmq
|
remoting/src/main/java/org/apache/rocketmq/remoting/protocol/header/controller/register/GetNextBrokerIdResponseHeader.java
|
{
"start": 1015,
"end": 2479
}
|
/**
 * Response header for the get-next-broker-id controller request, identifying the
 * target broker by cluster and broker name and carrying the allocated next id.
 */
class ____ implements CommandCustomHeader {

    private String clusterName;
    private String brokerName;
    // May be null until assigned (see the two-arg constructor).
    private Long nextBrokerId;

    public GetNextBrokerIdResponseHeader() {
    }

    public GetNextBrokerIdResponseHeader(String clusterName, String brokerName) {
        this(clusterName, brokerName, null);
    }

    public GetNextBrokerIdResponseHeader(String clusterName, String brokerName, Long nextBrokerId) {
        this.clusterName = clusterName;
        this.brokerName = brokerName;
        this.nextBrokerId = nextBrokerId;
    }

    @Override
    public String toString() {
        return "GetNextBrokerIdResponseHeader{" +
            "clusterName='" + clusterName + '\'' +
            ", brokerName='" + brokerName + '\'' +
            ", nextBrokerId=" + nextBrokerId +
            '}';
    }

    // No field validation required for this header.
    @Override
    public void checkFields() throws RemotingCommandException {
    }

    public void setNextBrokerId(Long nextBrokerId) {
        this.nextBrokerId = nextBrokerId;
    }

    public Long getNextBrokerId() {
        return nextBrokerId;
    }

    public String getClusterName() {
        return clusterName;
    }

    public void setClusterName(String clusterName) {
        this.clusterName = clusterName;
    }

    public String getBrokerName() {
        return brokerName;
    }

    public void setBrokerName(String brokerName) {
        this.brokerName = brokerName;
    }
}
|
GetNextBrokerIdResponseHeader
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-data-elasticsearch/src/main/java/smoketest/data/elasticsearch/SampleDataElasticsearchApplication.java
|
{
"start": 970,
"end": 1931
}
|
class ____ {

    /**
     * Boots the application, writes one sample document, lists all documents,
     * then cleans up. The context is opened in try-with-resources so it is
     * closed even if any repository call throws (the original only closed it
     * on the happy path).
     */
    public static void main(String[] args) {
        try (ConfigurableApplicationContext context =
                SpringApplication.run(SampleDataElasticsearchApplication.class, args)) {
            SampleRepository repository = context.getBean(SampleRepository.class);
            createDocument(repository);
            listDocuments(repository);
            repository.deleteAll();
        }
    }

    /** Prints every document currently in the repository to stdout. */
    private static void listDocuments(SampleRepository repository) {
        System.out.println("Documents:");
        for (SampleDocument foundDocument : repository.findAll()) {
            System.out.println("  " + foundDocument);
        }
    }

    /** Saves one sample document with a random UUID id and prints it. */
    private static void createDocument(SampleRepository repository) {
        SampleDocument document = new SampleDocument();
        document.setText("Look, new @DataElasticsearchTest!");
        String id = UUID.randomUUID().toString();
        document.setId(id);
        SampleDocument savedDocument = repository.save(document);
        System.out.println("Saved document " + savedDocument);
    }
}
|
SampleDataElasticsearchApplication
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/InconsistentCapitalization.java
|
{
"start": 5610,
"end": 6977
}
|
/**
 * Tree scanner that collects the field symbols declared directly on a class,
 * excluding upper-case static fields (conventionally constants).
 */
class ____ extends TreeScanner<Void, Void> {

    /** Scans {@code tree} and returns the set of eligible field symbols it declares. */
    static ImmutableSet<Symbol> findFields(ClassTree tree) {
        ImmutableSet.Builder<Symbol> fieldsBuilder = ImmutableSet.builder();
        new FieldScanner(fieldsBuilder, tree).scan(tree, null);
        return fieldsBuilder.build();
    }

    private final ImmutableSet.Builder<Symbol> fields;
    // Symbol of the class whose own fields we collect; fields of nested/other
    // classes encountered during the scan are filtered out against this.
    private final Symbol classSymbol;

    private FieldScanner(ImmutableSet.Builder<Symbol> fields, Tree classTree) {
        this.fields = fields;
        this.classSymbol = ASTHelpers.getSymbol(classTree);
    }

    @Override
    public Void visitVariable(VariableTree tree, Void unused) {
        Symbol symbol = ASTHelpers.getSymbol(tree);
        /* It is quite common to have upper case static field names that match variable names,
         * as for example between HTTP request parameters name definitions and their corresponding
         * extracted value. */
        if (symbol.getKind().equals(ElementKind.FIELD)
            && !isUpperCaseAndStatic(symbol)
            && ASTHelpers.enclosingClass(symbol).equals(classSymbol)) {
            fields.add(symbol);
        }
        return super.visitVariable(tree, null);
    }
}
/**
* Matcher for all parameters (methods, constructors, lambda expressions) that have the same name
* as one of the provided fields but with different capitalization.
*/
private static
|
FieldScanner
|
java
|
google__dagger
|
dagger-compiler/main/java/dagger/internal/codegen/writing/ComponentCreatorImplementationFactory.java
|
{
"start": 21498,
"end": 22141
}
|
/** Whether and how a component requirement may be supplied through the generated creator. */
enum ____ {
    /** An instance is needed to create the component. */
    NEEDED,

    /**
     * An instance is not needed to create the component, but the requirement is for a module owned
     * by the component. Setting the requirement is a no-op and any setter method should be marked
     * deprecated on the generated type as a warning to the user.
     */
    UNNEEDED,

    /**
     * The requirement may not be set in this creator because the module it is for is already
     * inherited from an ancestor component. Any setter method for it should throw an exception.
     */
    UNSETTABLE_REPEATED_MODULE,
    ;
}
}
|
RequirementStatus
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/ProjectSerializationTests.java
|
{
"start": 489,
"end": 1483
}
|
class ____ extends AbstractLogicalPlanSerializationTests<Project> {

    // Builds a Project with a random source, random child plan, and 0-10 random
    // field-attribute projections.
    @Override
    protected Project createTestInstance() {
        Source source = randomSource();
        LogicalPlan child = randomChild(0);
        List<? extends NamedExpression> projections = randomFieldAttributes(0, 10, false);
        return new Project(source, child, projections);
    }

    // Mutates exactly one of {child, projections} so the result differs from the
    // input, as required by serialization round-trip testing.
    @Override
    protected Project mutateInstance(Project instance) throws IOException {
        LogicalPlan child = instance.child();
        List<? extends NamedExpression> projections = instance.projections();
        if (randomBoolean()) {
            child = randomValueOtherThan(child, () -> randomChild(0));
        } else {
            projections = randomValueOtherThan(projections, () -> randomFieldAttributes(0, 10, false));
        }
        return new Project(instance.source(), child, projections);
    }

    @Override
    protected boolean alwaysEmptySource() {
        return true;
    }
}
|
ProjectSerializationTests
|
java
|
google__guice
|
core/test/com/google/inject/BinderTestSuite.java
|
{
"start": 23534,
"end": 24093
}
|
class ____ implements A {
final int value;
PlainA() {
value = nextId.getAndIncrement();
if (value < 0) {
throw new RuntimeException("Illegal value: " + value);
}
}
PlainA(int value) {
this.value = value;
}
@Override
public boolean equals(Object obj) {
return obj instanceof PlainA && value == ((PlainA) obj).value;
}
@Override
public int hashCode() {
return value;
}
@Override
public String toString() {
return "PlainA#" + value;
}
}
static
|
PlainA
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ColumnExtractOperator.java
|
{
"start": 696,
"end": 3493
}
|
class ____ extends AbstractPageMappingOperator {
public record Factory(
ElementType[] types,
ExpressionEvaluator.Factory inputEvalSupplier,
Supplier<ColumnExtractOperator.Evaluator> evaluatorSupplier
) implements OperatorFactory {
@Override
public Operator get(DriverContext driverContext) {
return new ColumnExtractOperator(types, inputEvalSupplier.get(driverContext), evaluatorSupplier.get(), driverContext);
}
@Override
public String describe() {
return "ColumnExtractOperator[evaluator=" + evaluatorSupplier.get() + "]";
}
}
private final ElementType[] types;
private final EvalOperator.ExpressionEvaluator inputEvaluator;
private final ColumnExtractOperator.Evaluator evaluator;
private final DriverContext driverContext;
public ColumnExtractOperator(
ElementType[] types,
ExpressionEvaluator inputEvaluator,
Evaluator evaluator,
DriverContext driverContext
) {
this.types = types;
this.inputEvaluator = inputEvaluator;
this.evaluator = evaluator;
this.driverContext = driverContext;
}
@Override
protected Page process(Page page) {
int rowsCount = page.getPositionCount();
Block.Builder[] blockBuilders = new Block.Builder[types.length];
try {
for (int i = 0; i < types.length; i++) {
blockBuilders[i] = types[i].newBlockBuilder(rowsCount, driverContext.blockFactory());
}
try (BytesRefBlock input = (BytesRefBlock) inputEvaluator.eval(page)) {
BytesRef spare = new BytesRef();
for (int row = 0; row < rowsCount; row++) {
if (input.isNull(row)) {
for (int i = 0; i < blockBuilders.length; i++) {
blockBuilders[i].appendNull();
}
continue;
}
evaluator.computeRow(input, row, blockBuilders, spare);
}
Block[] blocks = new Block[blockBuilders.length];
for (int i = 0; i < blockBuilders.length; i++) {
blocks[i] = blockBuilders[i].build();
}
return page.appendBlocks(blocks);
}
} finally {
Releasables.closeExpectNoException(blockBuilders);
}
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(this.getClass().getSimpleName()).append("[");
sb.append("evaluator=");
sb.append(evaluator.toString());
sb.append("]");
return sb.toString();
}
public
|
ColumnExtractOperator
|
java
|
apache__maven
|
compat/maven-compat/src/test/java/org/apache/maven/repository/legacy/resolver/conflict/OldestConflictResolverTest.java
|
{
"start": 1065,
"end": 2844
}
|
class ____ extends AbstractConflictResolverTest {
// constructors -----------------------------------------------------------
OldestConflictResolverTest() throws Exception {
super("oldest");
}
// tests ------------------------------------------------------------------
/**
* Tests that <code>a:1.0</code> wins in the scenario:
* <pre>
* a:1.0
* b:1.0 -> a:2.0
* </pre>
*/
@Test
void testDepth() {
ResolutionNode a1n = createResolutionNode(a1);
ResolutionNode b1n = createResolutionNode(b1);
ResolutionNode a2n = createResolutionNode(a2, b1n);
assertResolveConflict(a1n, a1n, a2n);
}
/**
* Tests that <code>a:1.0</code> wins in the scenario:
* <pre>
* b:1.0 -> a:2.0
* a:1.0
* </pre>
*/
@Test
void testDepthReversed() {
ResolutionNode b1n = createResolutionNode(b1);
ResolutionNode a2n = createResolutionNode(a2, b1n);
ResolutionNode a1n = createResolutionNode(a1);
assertResolveConflict(a1n, a2n, a1n);
}
/**
* Tests that <code>a:1.0</code> wins in the scenario:
* <pre>
* a:1.0
* a:2.0
* </pre>
*/
@Test
void testEqual() {
ResolutionNode a1n = createResolutionNode(a1);
ResolutionNode a2n = createResolutionNode(a2);
assertResolveConflict(a1n, a1n, a2n);
}
/**
* Tests that <code>a:1.0</code> wins in the scenario:
* <pre>
* a:2.0
* a:1.0
* </pre>
*/
@Test
void testEqualReversed() {
ResolutionNode a2n = createResolutionNode(a2);
ResolutionNode a1n = createResolutionNode(a1);
assertResolveConflict(a1n, a2n, a1n);
}
}
|
OldestConflictResolverTest
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/core/fs/PluginFileSystemFactory.java
|
{
"start": 1213,
"end": 2392
}
|
class ____ implements FileSystemFactory {
private final FileSystemFactory inner;
private final ClassLoader loader;
private PluginFileSystemFactory(final FileSystemFactory inner, final ClassLoader loader) {
this.inner = inner;
this.loader = loader;
}
public static PluginFileSystemFactory of(final FileSystemFactory inner) {
return new PluginFileSystemFactory(inner, inner.getClass().getClassLoader());
}
@Override
public String getScheme() {
return inner.getScheme();
}
@Override
public ClassLoader getClassLoader() {
return inner.getClassLoader();
}
@Override
public void configure(final Configuration config) {
inner.configure(config);
}
@Override
public FileSystem create(final URI fsUri) throws IOException {
try (TemporaryClassLoaderContext ignored = TemporaryClassLoaderContext.of(loader)) {
return new ClassLoaderFixingFileSystem(inner.create(fsUri), loader);
}
}
@Override
public String toString() {
return String.format("Plugin %s", inner.getClass().getName());
}
static
|
PluginFileSystemFactory
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/io/parsing/Symbol.java
|
{
"start": 1202,
"end": 1262
}
|
class ____ {
/*
* The type of symbol.
*/
public
|
Symbol
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_containsAnyOf_Test.java
|
{
"start": 977,
"end": 1383
}
|
class ____ extends IterableAssertBaseTest {
@Override
protected ConcreteIterableAssert<Object> invoke_api_method() {
return assertions.containsAnyOf("foo", "bar");
}
@Override
protected void verify_internal_effects() {
Object[] values = { "foo", "bar" };
verify(iterables).assertContainsAnyOf(getInfo(assertions), getActual(assertions), values);
}
}
|
IterableAssert_containsAnyOf_Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/enumeratedvalue/EnumAndColumnDefinitionTest.java
|
{
"start": 3212,
"end": 3384
}
|
class ____ {
@Id
Long id;
@Enumerated(value = EnumType.ORDINAL)
@Column(name = "my_enum", columnDefinition = "VARCHAR(255) NOT NULL")
/*
Annotating the
|
TestEntity
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/data/BinaryStringDataTest.java
|
{
"start": 24904,
"end": 40882
}
|
class ____ {
private String str;
private int precision, scale;
private DecimalTestData(String str, int precision, int scale) {
this.str = str;
this.precision = precision;
this.scale = scale;
}
}
DecimalTestData[] data = {
new DecimalTestData("12.345", 5, 3),
new DecimalTestData("-12.345", 5, 3),
new DecimalTestData("+12345", 5, 0),
new DecimalTestData("-12345", 5, 0),
new DecimalTestData("12345.", 5, 0),
new DecimalTestData("-12345.", 5, 0),
new DecimalTestData(".12345", 5, 5),
new DecimalTestData("-.12345", 5, 5),
new DecimalTestData("+12.345E3", 5, 0),
new DecimalTestData("-12.345e3", 5, 0),
new DecimalTestData("12.345e-3", 6, 6),
new DecimalTestData("-12.345E-3", 6, 6),
new DecimalTestData("12345E3", 8, 0),
new DecimalTestData("-12345e3", 8, 0),
new DecimalTestData("12345e-3", 5, 3),
new DecimalTestData("-12345E-3", 5, 3),
new DecimalTestData("+.12345E3", 5, 2),
new DecimalTestData("-.12345e3", 5, 2),
new DecimalTestData(".12345e-3", 8, 8),
new DecimalTestData("-.12345E-3", 8, 8),
new DecimalTestData("1234512345.1234", 18, 8),
new DecimalTestData("-1234512345.1234", 18, 8),
new DecimalTestData("1234512345.1234", 12, 2),
new DecimalTestData("-1234512345.1234", 12, 2),
new DecimalTestData("1234512345.1299", 12, 2),
new DecimalTestData("-1234512345.1299", 12, 2),
new DecimalTestData("999999999999999999", 18, 0),
new DecimalTestData("1234512345.1234512345", 20, 10),
new DecimalTestData("-1234512345.1234512345", 20, 10),
new DecimalTestData("1234512345.1234512345", 15, 5),
new DecimalTestData("-1234512345.1234512345", 15, 5),
new DecimalTestData("12345123451234512345E-10", 20, 10),
new DecimalTestData("-12345123451234512345E-10", 20, 10),
new DecimalTestData("12345123451234512345E-10", 15, 5),
new DecimalTestData("-12345123451234512345E-10", 15, 5),
new DecimalTestData("999999999999999999999", 21, 0),
new DecimalTestData("-999999999999999999999", 21, 0),
new DecimalTestData("0.00000000000000000000123456789123456789", 38, 38),
new DecimalTestData("-0.00000000000000000000123456789123456789", 38, 38),
new DecimalTestData("0.00000000000000000000123456789123456789", 29, 29),
new DecimalTestData("-0.00000000000000000000123456789123456789", 29, 29),
new DecimalTestData("123456789123E-27", 18, 18),
new DecimalTestData("-123456789123E-27", 18, 18),
new DecimalTestData("123456789999E-27", 18, 18),
new DecimalTestData("-123456789999E-27", 18, 18),
new DecimalTestData("123456789123456789E-36", 18, 18),
new DecimalTestData("-123456789123456789E-36", 18, 18),
new DecimalTestData("123456789999999999E-36", 18, 18),
new DecimalTestData("-123456789999999999E-36", 18, 18)
};
for (DecimalTestData d : data) {
assertThat(toDecimal(fromString(d.str), d.precision, d.scale))
.isEqualTo(
DecimalData.fromBigDecimal(
new BigDecimal(d.str), d.precision, d.scale));
}
BinaryRowData row = new BinaryRowData(data.length);
BinaryRowWriter writer = new BinaryRowWriter(row);
for (int i = 0; i < data.length; i++) {
writer.writeString(i, BinaryStringData.fromString(data[i].str));
}
writer.complete();
for (int i = 0; i < data.length; i++) {
DecimalTestData d = data[i];
assertThat(toDecimal((BinaryStringData) row.getString(i), d.precision, d.scale))
.isEqualTo(
DecimalData.fromBigDecimal(
new BigDecimal(d.str), d.precision, d.scale));
}
}
@TestTemplate
void testEmptyString() {
BinaryStringData str2 = fromString("hahahahah");
BinaryStringData str3;
{
MemorySegment[] segments = new MemorySegment[2];
segments[0] = MemorySegmentFactory.wrap(new byte[10]);
segments[1] = MemorySegmentFactory.wrap(new byte[10]);
str3 = BinaryStringData.fromAddress(segments, 15, 0);
}
assertThat(BinaryStringData.EMPTY_UTF8.compareTo(str2)).isLessThan(0);
assertThat(str2.compareTo(BinaryStringData.EMPTY_UTF8)).isGreaterThan(0);
assertThat(BinaryStringData.EMPTY_UTF8.compareTo(str3)).isEqualTo(0);
assertThat(str3.compareTo(BinaryStringData.EMPTY_UTF8)).isEqualTo(0);
assertThat(str2).isNotEqualTo(BinaryStringData.EMPTY_UTF8);
assertThat(BinaryStringData.EMPTY_UTF8).isNotEqualTo(str2);
assertThat(str3).isEqualTo(BinaryStringData.EMPTY_UTF8);
assertThat(BinaryStringData.EMPTY_UTF8).isEqualTo(str3);
}
@TestTemplate
void testEncodeWithIllegalCharacter() throws UnsupportedEncodingException {
// Tis char array has some illegal character, such as 55357
// the jdk ignores theses character and cast them to '?'
// which StringUtf8Utils'encodeUTF8 should follow
char[] chars =
new char[] {
20122, 40635, 124, 38271, 34966, 124, 36830, 34915, 35033, 124, 55357, 124,
56407
};
String str = new String(chars);
assertThat(StringUtf8Utils.encodeUTF8(str)).isEqualTo(str.getBytes("UTF-8"));
}
@TestTemplate
void testKeyValue() {
assertThat(
keyValue(
fromString("k1:v1|k2:v2"),
fromString("|").byteAt(0),
fromString(":").byteAt(0),
fromString("k3")))
.isNull();
assertThat(
keyValue(
fromString("k1:v1|k2:v2|"),
fromString("|").byteAt(0),
fromString(":").byteAt(0),
fromString("k3")))
.isNull();
assertThat(
keyValue(
fromString("|k1:v1|k2:v2|"),
fromString("|").byteAt(0),
fromString(":").byteAt(0),
fromString("k3")))
.isNull();
String tab = org.apache.commons.lang3.StringEscapeUtils.unescapeJava("\t");
assertThat(
keyValue(
fromString("k1:v1" + tab + "k2:v2"),
fromString("\t").byteAt(0),
fromString(":").byteAt(0),
fromString("k2")))
.isEqualTo(fromString("v2"));
assertThat(
keyValue(
fromString("k1:v1|k2:v2"),
fromString("|").byteAt(0),
fromString(":").byteAt(0),
null))
.isNull();
assertThat(
keyValue(
fromString("k1=v1;k2=v2"),
fromString(";").byteAt(0),
fromString("=").byteAt(0),
fromString("k2")))
.isEqualTo(fromString("v2"));
assertThat(
keyValue(
fromString("|k1=v1|k2=v2|"),
fromString("|").byteAt(0),
fromString("=").byteAt(0),
fromString("k2")))
.isEqualTo(fromString("v2"));
assertThat(
keyValue(
fromString("k1=v1||k2=v2"),
fromString("|").byteAt(0),
fromString("=").byteAt(0),
fromString("k2")))
.isEqualTo(fromString("v2"));
assertThat(
keyValue(
fromString("k1=v1;k2"),
fromString(";").byteAt(0),
fromString("=").byteAt(0),
fromString("k2")))
.isNull();
assertThat(
keyValue(
fromString("k1;k2=v2"),
fromString(";").byteAt(0),
fromString("=").byteAt(0),
fromString("k1")))
.isNull();
assertThat(
keyValue(
fromString("k=1=v1;k2=v2"),
fromString(";").byteAt(0),
fromString("=").byteAt(0),
fromString("k=")))
.isNull();
assertThat(
keyValue(
fromString("k1==v1;k2=v2"),
fromString(";").byteAt(0),
fromString("=").byteAt(0),
fromString("k1")))
.isEqualTo(fromString("=v1"));
assertThat(
keyValue(
fromString("k1==v1;k2=v2"),
fromString(";").byteAt(0),
fromString("=").byteAt(0),
fromString("k1=")))
.isNull();
assertThat(
keyValue(
fromString("k1=v1;k2=v2"),
fromString(";").byteAt(0),
fromString("=").byteAt(0),
fromString("k1=")))
.isNull();
assertThat(
keyValue(
fromString("k1k1=v1;k2=v2"),
fromString(";").byteAt(0),
fromString("=").byteAt(0),
fromString("k1")))
.isNull();
assertThat(
keyValue(
fromString("k1=v1;k2=v2"),
fromString(";").byteAt(0),
fromString("=").byteAt(0),
fromString("k1k1k1k1k1k1k1k1k1k1")))
.isNull();
assertThat(
keyValue(
fromString("k1:v||k2:v2"),
fromString("|").byteAt(0),
fromString(":").byteAt(0),
fromString("k2")))
.isEqualTo(fromString("v2"));
assertThat(
keyValue(
fromString("k1:v||k2:v2"),
fromString("|").byteAt(0),
fromString(":").byteAt(0),
fromString("k2")))
.isEqualTo(fromString("v2"));
}
@TestTemplate
void testDecodeWithIllegalUtf8Bytes() throws UnsupportedEncodingException {
// illegal utf-8 bytes
byte[] bytes =
new byte[] {
(byte) 20122,
(byte) 40635,
124,
(byte) 38271,
(byte) 34966,
124,
(byte) 36830,
(byte) 34915,
(byte) 35033,
124,
(byte) 55357,
124,
(byte) 56407
};
String str = new String(bytes, StandardCharsets.UTF_8);
assertThat(StringUtf8Utils.decodeUTF8(bytes, 0, bytes.length)).isEqualTo(str);
assertThat(StringUtf8Utils.decodeUTF8(MemorySegmentFactory.wrap(bytes), 0, bytes.length))
.isEqualTo(str);
byte[] newBytes = new byte[bytes.length + 5];
System.arraycopy(bytes, 0, newBytes, 5, bytes.length);
assertThat(StringUtf8Utils.decodeUTF8(MemorySegmentFactory.wrap(newBytes), 5, bytes.length))
.isEqualTo(str);
}
@TestTemplate
void skipWrongFirstByte() {
int[] wrongFirstBytes = {
0x80,
0x9F,
0xBF, // Skip Continuation bytes
0xC0,
0xC2, // 0xC0..0xC1 - disallowed in UTF-8
// 0xF5..0xFF - disallowed in UTF-8
0xF5,
0xF6,
0xF7,
0xF8,
0xF9,
0xFA,
0xFB,
0xFC,
0xFD,
0xFE,
0xFF
};
byte[] c = new byte[1];
for (int wrongFirstByte : wrongFirstBytes) {
c[0] = (byte) wrongFirstByte;
assertThat(1).isEqualTo(fromBytes(c).numChars());
}
}
@TestTemplate
void testSplit() {
assertThat(splitByWholeSeparatorPreserveAllTokens(fromString(""), fromString("")))
.isEqualTo(EMPTY_STRING_ARRAY);
assertThat(splitByWholeSeparatorPreserveAllTokens(fromString("ab de fg"), null))
.isEqualTo(
new BinaryStringData[] {
fromString("ab"), fromString("de"), fromString("fg")
});
assertThat(splitByWholeSeparatorPreserveAllTokens(fromString("ab de fg"), null))
.isEqualTo(
new BinaryStringData[] {
fromString("ab"),
fromString(""),
fromString(""),
fromString("de"),
fromString("fg")
});
assertThat(splitByWholeSeparatorPreserveAllTokens(fromString("ab:cd:ef"), fromString(":")))
.isEqualTo(
new BinaryStringData[] {
fromString("ab"), fromString("cd"), fromString("ef")
});
assertThat(
splitByWholeSeparatorPreserveAllTokens(
fromString("ab-!-cd-!-ef"), fromString("-!-")))
.isEqualTo(
new BinaryStringData[] {
fromString("ab"), fromString("cd"), fromString("ef")
});
}
@TestTemplate
void testLazy() {
String javaStr = "haha";
BinaryStringData str = BinaryStringData.fromString(javaStr);
str.ensureMaterialized();
// check reference same.
assertThat(javaStr).isSameAs(str.toString());
}
@TestTemplate
void testIsEmpty() {
assertThat(isEmpty(fromString(""))).isEqualTo(true);
assertThat(isEmpty(BinaryStringData.fromBytes(new byte[] {}))).isEqualTo(true);
assertThat(isEmpty(fromString("hello"))).isEqualTo(false);
assertThat(isEmpty(BinaryStringData.fromBytes("hello".getBytes()))).isEqualTo(false);
assertThat(isEmpty(fromString("中文"))).isEqualTo(false);
assertThat(isEmpty(BinaryStringData.fromBytes("中文".getBytes()))).isEqualTo(false);
assertThat(isEmpty(new BinaryStringData())).isEqualTo(true);
}
}
|
DecimalTestData
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/window/tvf/combines/RecordsCombiner.java
|
{
"start": 3293,
"end": 3489
}
|
interface ____ extends Serializable {
RecordsCombiner createRecordsCombiner(
RuntimeContext runtimeContext, Collector<RowData> collector) throws Exception;
}
}
|
LocalFactory
|
java
|
apache__camel
|
test-infra/camel-test-infra-nats/src/test/java/org/apache/camel/test/infra/nats/services/NatsLocalContainerService.java
|
{
"start": 862,
"end": 960
}
|
class ____ extends NatsLocalContainerInfraService implements NatsService {
}
|
NatsLocalContainerService
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/main/java/org/springframework/messaging/MessageDeliveryException.java
|
{
"start": 863,
"end": 1515
}
|
class ____ extends MessagingException {
public MessageDeliveryException(String description) {
super(description);
}
public MessageDeliveryException(Message<?> undeliveredMessage) {
super(undeliveredMessage);
}
public MessageDeliveryException(Message<?> undeliveredMessage, String description) {
super(undeliveredMessage, description);
}
public MessageDeliveryException(Message<?> message, @Nullable Throwable cause) {
super(message, cause);
}
public MessageDeliveryException(Message<?> undeliveredMessage, String description, @Nullable Throwable cause) {
super(undeliveredMessage, description, cause);
}
}
|
MessageDeliveryException
|
java
|
quarkusio__quarkus
|
independent-projects/qute/debug/src/main/java/io/quarkus/qute/debug/agent/evaluations/ConditionalExpressionHelper.java
|
{
"start": 134,
"end": 281
}
|
class ____ parse conditional expressions into Qute TemplateNodes.
* <p>
* In the debugger, breakpoints can have conditions (like `x > 5`).
* This
|
to
|
java
|
dropwizard__dropwizard
|
dropwizard-jersey/src/test/java/io/dropwizard/jersey/DropwizardResourceConfigTest.java
|
{
"start": 14504,
"end": 14683
}
|
class ____ implements ResourceInterface {
@Override
public String bar() {
return "";
}
}
@Path("/")
public static
|
ImplementingResource
|
java
|
elastic__elasticsearch
|
x-pack/plugin/fleet/src/test/java/org/elasticsearch/xpack/fleet/action/DeleteSecretResponseTests.java
|
{
"start": 425,
"end": 1032
}
|
class ____ extends AbstractWireSerializingTestCase<DeleteSecretResponse> {
@Override
protected Writeable.Reader<DeleteSecretResponse> instanceReader() {
return DeleteSecretResponse::new;
}
@Override
protected DeleteSecretResponse createTestInstance() {
return new DeleteSecretResponse(randomBoolean());
}
@Override
protected DeleteSecretResponse mutateInstance(DeleteSecretResponse instance) {
// return a response with the opposite boolean value
return new DeleteSecretResponse(instance.isDeleted() == false);
}
}
|
DeleteSecretResponseTests
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/jobgraph/JobVertex.java
|
{
"start": 10174,
"end": 10860
}
|
class ____ represents the task of this vertex.
*
* @param cl The classloader used to resolve user-defined classes
* @return The invokable class, <code>null</code> if it is not set
*/
public Class<? extends TaskInvokable> getInvokableClass(ClassLoader cl) {
if (cl == null) {
throw new NullPointerException("The classloader must not be null.");
}
if (invokableClassName == null) {
return null;
}
try {
return Class.forName(invokableClassName, true, cl).asSubclass(TaskInvokable.class);
} catch (ClassNotFoundException e) {
throw new RuntimeException("The user-code
|
which
|
java
|
apache__dubbo
|
dubbo-common/src/test/java/org/apache/dubbo/common/logger/LoggerTest.java
|
{
"start": 1690,
"end": 4393
}
|
class ____ {
static Stream<Arguments> data() {
return Stream.of(
Arguments.of(JclLoggerAdapter.class),
Arguments.of(JdkLoggerAdapter.class),
Arguments.of(Log4jLoggerAdapter.class),
Arguments.of(Slf4jLoggerAdapter.class),
Arguments.of(Log4j2LoggerAdapter.class));
}
@ParameterizedTest
@MethodSource("data")
void testAllLogMethod(Class<? extends LoggerAdapter> loggerAdapter) throws Exception {
LoggerAdapter adapter = loggerAdapter.getDeclaredConstructor().newInstance();
adapter.setLevel(Level.ALL);
Logger logger =
new FailsafeErrorTypeAwareLogger(adapter.getLogger(FailsafeLogger.class.getName(), this.getClass()));
logger.error("error");
logger.warn("warn");
logger.info("info");
logger.debug("debug");
logger.trace("trace");
logger.error("error:{}", "arg1");
logger.warn("warn:{}", "arg1");
logger.info("info:{}", "arg1");
logger.debug("debug:{}", "arg1");
logger.trace("trace:{}", "arg1");
logger.error(new Exception("error"));
logger.warn(new Exception("warn"));
logger.info(new Exception("info"));
logger.debug(new Exception("debug"));
logger.trace(new Exception("trace"));
logger.error("error", new Exception("error"));
logger.warn("warn", new Exception("warn"));
logger.info("info", new Exception("info"));
logger.debug("debug", new Exception("debug"));
logger.trace("trace", new Exception("trace"));
logger.error("error:{}", "arg1", new Exception("error"));
logger.warn("warn:{}", "arg1", new Exception("warn"));
logger.info("info:{}", "arg1", new Exception("info"));
logger.debug("debug:{}", "arg1", new Exception("debug"));
logger.trace("trace:{}", "arg1", new Exception("trace"));
}
@ParameterizedTest
@MethodSource("data")
void testLevelEnable(Class<? extends LoggerAdapter> loggerAdapter)
throws IllegalAccessException, InstantiationException, NoSuchMethodException, InvocationTargetException {
LoggerAdapter adapter = loggerAdapter.getDeclaredConstructor().newInstance();
adapter.setLevel(Level.ALL);
Logger logger = adapter.getLogger(this.getClass());
assertThat(logger.isWarnEnabled(), not(nullValue()));
assertThat(logger.isTraceEnabled(), not(nullValue()));
assertThat(logger.isErrorEnabled(), not(nullValue()));
assertThat(logger.isInfoEnabled(), not(nullValue()));
assertThat(logger.isDebugEnabled(), not(nullValue()));
}
}
|
LoggerTest
|
java
|
quarkusio__quarkus
|
integration-tests/mongodb-panache/src/main/java/io/quarkus/it/mongodb/panache/transaction/TransactionPerson.java
|
{
"start": 267,
"end": 415
}
|
class ____ extends PanacheMongoEntityBase {
@BsonId
public Long id;
public String firstname;
public String lastname;
}
|
TransactionPerson
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/operators/observable/ObservableRefCountTest.java
|
{
"start": 24837,
"end": 27284
}
|
class ____ extends Exception {
private static final long serialVersionUID = -6763898015338136119L;
public final Object data;
ExceptionData(Object data) {
this.data = data;
}
}
static final int GC_SLEEP_TIME = 250;
@Test
public void publishNoLeak() throws Exception {
System.gc();
Thread.sleep(GC_SLEEP_TIME);
source = Observable.fromCallable(new Callable<Object>() {
@Override
public Object call() throws Exception {
throw new ExceptionData(new byte[100 * 1000 * 1000]);
}
})
.publish()
.refCount();
long start = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed();
source.subscribe(Functions.emptyConsumer(), Functions.emptyConsumer());
long after = TestHelper.awaitGC(GC_SLEEP_TIME, 20, start + 20 * 1000 * 1000);
source = null;
assertTrue(String.format("%,3d -> %,3d%n", start, after), start + 20 * 1000 * 1000 > after);
}
@Test
public void publishNoLeak2() throws Exception {
System.gc();
Thread.sleep(GC_SLEEP_TIME);
long start = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed();
source = Observable.fromCallable(new Callable<Object>() {
@Override
public Object call() throws Exception {
return new byte[100 * 1000 * 1000];
}
}).concatWith(Observable.never())
.publish()
.refCount();
Disposable d1 = source.test();
Disposable d2 = source.test();
d1.dispose();
d2.dispose();
d1 = null;
d2 = null;
long after = TestHelper.awaitGC(GC_SLEEP_TIME, 20, start + 20 * 1000 * 1000);
source = null;
assertTrue(String.format("%,3d -> %,3d%n", start, after), start + 20 * 1000 * 1000 > after);
}
@Test
public void replayIsUnsubscribed() {
ConnectableObservable<Integer> co = Observable.just(1).concatWith(Observable.<Integer>never())
.replay();
if (co instanceof Disposable) {
assertTrue(((Disposable)co).isDisposed());
Disposable connection = co.connect();
assertFalse(((Disposable)co).isDisposed());
connection.dispose();
assertTrue(((Disposable)co).isDisposed());
}
}
static final
|
ExceptionData
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/bind/MissingServletRequestParameterException.java
|
{
"start": 934,
"end": 3274
}
|
class ____ extends MissingRequestValueException {
private final String parameterName;
private final String parameterType;
private final @Nullable MethodParameter parameter;
/**
* Constructor for MissingServletRequestParameterException.
* @param parameterName the name of the missing parameter
* @param parameterType the expected type of the missing parameter
*/
public MissingServletRequestParameterException(String parameterName, String parameterType) {
super("", false, null, new Object[] {parameterName});
this.parameterName = parameterName;
this.parameterType = parameterType;
this.parameter = null;
getBody().setDetail(initBodyDetail(this.parameterName));
}
/**
* Constructor with a {@link MethodParameter} instead of a String parameterType.
* @param parameterName the name of the missing parameter
* @param parameter the target method parameter for the missing value
* @param missingAfterConversion whether the value became null after conversion
* @since 6.1
*/
public MissingServletRequestParameterException(
String parameterName, MethodParameter parameter, boolean missingAfterConversion) {
super("", missingAfterConversion, null, new Object[] {parameterName});
this.parameterName = parameterName;
this.parameterType = parameter.getNestedParameterType().getSimpleName();
this.parameter = parameter;
getBody().setDetail(initBodyDetail(this.parameterName));
}
private static String initBodyDetail(String name) {
return "Required parameter '" + name + "' is not present.";
}
@Override
public String getMessage() {
return "Required request parameter '" + this.parameterName + "' for method parameter type " +
this.parameterType + " is " +
(isMissingAfterConversion() ? "present but converted to null" : "not present");
}
/**
* Return the name of the offending parameter.
*/
public final String getParameterName() {
return this.parameterName;
}
/**
* Return the expected type of the offending parameter.
*/
public final String getParameterType() {
return this.parameterType;
}
/**
* Return the target {@link MethodParameter} if the exception was raised for
* a controller method argument.
* @since 6.1
*/
public @Nullable MethodParameter getMethodParameter() {
return this.parameter;
}
}
|
MissingServletRequestParameterException
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/runtime/operators/lifecycle/event/OperatorStartedEvent.java
|
{
"start": 935,
"end": 1413
}
|
class ____ extends TestEvent {
private final int attemptNumber;
public OperatorStartedEvent(String operatorId, int subtaskIndex, int attemptNumber) {
super(operatorId, subtaskIndex, attemptNumber);
this.attemptNumber = attemptNumber;
}
public int getAttemptNumber() {
return attemptNumber;
}
@Override
public String toString() {
return super.toString() + ", attemptNumber=" + attemptNumber;
}
}
|
OperatorStartedEvent
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/map/MapAssert_size_Test.java
|
{
"start": 951,
"end": 1713
}
|
class ____ {
@Test
void should_be_able_to_use_integer_assertions_on_size_the_map_size() {
Map<String, String> stringToString = mapOf(entry("a", "1"), entry("b", "2"));
// @format:off
assertThat(stringToString).size().isGreaterThan(0)
.isLessThanOrEqualTo(3)
.returnToMap().contains(entry("a", "1"));
// @format:on
}
@Test
void should_have_an_helpful_error_message_when_size_is_used_on_a_null_map() {
Map<String, String> nullMap = null;
assertThatNullPointerException().isThrownBy(() -> assertThat(nullMap).size().isGreaterThan(1))
.withMessage("Can not perform assertions on the size of a null map.");
}
}
|
MapAssert_size_Test
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/builder/BuilderWithTypeParametersTest.java
|
{
"start": 844,
"end": 1138
}
|
class ____ {
public String x;
public String y;
@JsonCreator
public MyPOJO(@JsonProperty("x") String x, @JsonProperty("y") String y) {
this.x = x;
this.y = y;
}
}
@JsonDeserialize(builder = MyGenericPOJO.Builder.class)
public static
|
MyPOJO
|
java
|
apache__dubbo
|
dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/Constants.java
|
{
"start": 848,
"end": 4054
}
|
interface ____ {
String FAIL_BACK_TASKS_KEY = "failbacktasks";
int DEFAULT_FAILBACK_TASKS = 100;
int DEFAULT_FORKS = 2;
String WEIGHT_KEY = "weight";
int DEFAULT_WEIGHT = 100;
String MOCK_PROTOCOL = "mock";
String FORCE_KEY = "force";
String RAW_RULE_KEY = "rawRule";
String VALID_KEY = "valid";
String ENABLED_KEY = "enabled";
String DYNAMIC_KEY = "dynamic";
String SCOPE_KEY = "scope";
String KEY_KEY = "key";
String CONDITIONS_KEY = "conditions";
String AFFINITY_KEY = "affinityAware";
String TAGS_KEY = "tags";
/**
* To decide whether to exclude unavailable invoker from the cluster
*/
String CLUSTER_AVAILABLE_CHECK_KEY = "cluster.availablecheck";
/**
* The default value of cluster.availablecheck
*
* @see #CLUSTER_AVAILABLE_CHECK_KEY
*/
boolean DEFAULT_CLUSTER_AVAILABLE_CHECK = true;
/**
* To decide whether to enable sticky strategy for cluster
*/
String CLUSTER_STICKY_KEY = "sticky";
/**
* The default value of sticky
*
* @see #CLUSTER_STICKY_KEY
*/
boolean DEFAULT_CLUSTER_STICKY = false;
/**
* When this attribute appears in invocation's attachment, mock invoker will be used
*/
String INVOCATION_NEED_MOCK = "invocation.need.mock";
/**
* when ROUTER_KEY's value is set to ROUTER_TYPE_CLEAR, RegistryDirectory will clean all current routers
*/
String ROUTER_TYPE_CLEAR = "clean";
String DEFAULT_SCRIPT_TYPE_KEY = "javascript";
String PRIORITY_KEY = "priority";
String RULE_KEY = "rule";
String TYPE_KEY = "type";
String RUNTIME_KEY = "runtime";
String WARMUP_KEY = "warmup";
int DEFAULT_WARMUP = 10 * 60 * 1000;
String CONFIG_VERSION_KEY = "configVersion";
String OVERRIDE_PROVIDERS_KEY = "providerAddresses";
/**
* key for router type, for e.g., "script"/"file", corresponding to ScriptRouterFactory.NAME, FileRouterFactory.NAME
*/
String ROUTER_KEY = "router";
/**
* The key name for reference URL in register center
*/
String REFER_KEY = "refer";
String ATTRIBUTE_KEY = "attribute";
/**
* The key name for export URL in register center
*/
String EXPORT_KEY = "export";
String PEER_KEY = "peer";
String CONSUMER_URL_KEY = "CONSUMER_URL";
/**
* prefix of arguments router key
*/
String ARGUMENTS = "arguments";
String NEED_REEXPORT = "need-reexport";
/**
* The key of shortestResponseSlidePeriod
*/
String SHORTEST_RESPONSE_SLIDE_PERIOD = "shortestResponseSlidePeriod";
String SHOULD_FAIL_FAST_KEY = "dubbo.router.should-fail-fast";
String RULE_VERSION_V27 = "v2.7";
String RULE_VERSION_V30 = "v3.0";
String RULE_VERSION_V31 = "v3.1";
public static final String TRAFFIC_DISABLE_KEY = "trafficDisable";
public static final String RATIO_KEY = "ratio";
public static final int DefaultRouteRatio = 0;
public static final int DefaultRouteConditionSubSetWeight = 100;
public static final int DefaultRoutePriority = 0;
public static final double DefaultAffinityRatio = 0;
}
|
Constants
|
java
|
apache__camel
|
components/camel-infinispan/camel-infinispan/src/main/java/org/apache/camel/component/infinispan/remote/InfinispanRemoteConsumer.java
|
{
"start": 4457,
"end": 6081
}
|
class ____ extends ServiceSupport {
private InfinispanEventListener<ClientEvent.Type> listener;
@SuppressWarnings("unchecked")
@Override
public void doStart() {
final RemoteCache<?, ?> cache = getCache(RemoteCache.class);
final InfinispanRemoteConfiguration configuration = getConfiguration();
listener = configuration.getCustomListener();
if (listener == null) {
Set<ClientEvent.Type> events = new HashSet<>();
if (configuration.getEventTypes() != null) {
String eventTypes = configuration.getEventTypes();
for (String event : eventTypes.split(",")) {
events.add(ClientEvent.Type.valueOf(event));
}
}
listener = new InfinispanRemoteEventListener(events);
}
listener.setCacheName(cache.getName());
listener.setEventProcessor(InfinispanRemoteConsumer.this);
cache.addClientListener(listener);
}
@SuppressWarnings("unchecked")
@Override
public void doStop() {
final RemoteCache<Object, Object> cache = getCache(RemoteCache.class);
if (cache != null) {
try {
cache.removeClientListener(listener);
} catch (RemoteCacheManagerNotStartedException e) {
LOG.debug("Cannot remote the listener because the cache manager is not started: {}", e.getMessage(), e);
}
}
}
}
}
|
ConsumerHandler
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/cache/spi/ExtendedStatisticsSupport.java
|
{
"start": 264,
"end": 395
}
|
interface ____ {
long getElementCountInMemory();
long getElementCountOnDisk();
long getSizeInMemory();
}
|
ExtendedStatisticsSupport
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridMixClasses.java
|
{
"start": 17920,
"end": 19087
}
|
class ____ extends InputStream implements Seekable,
PositionedReadable {
private long counter;
@Override
public int read() throws IOException {
return 0;
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
int realLen = len - off;
counter += realLen;
for (int i = 0; i < b.length; i++) {
b[i] = 0;
}
return realLen;
}
public long getCounter() {
return counter;
}
@Override
public void seek(long pos) throws IOException {
}
@Override
public long getPos() throws IOException {
return counter;
}
@Override
public boolean seekToNewSource(long targetPos) throws IOException {
return false;
}
@Override
public int read(long position, byte[] buffer, int offset, int length)
throws IOException {
return 0;
}
@Override
public void readFully(long position, byte[] buffer, int offset, int length)
throws IOException {
}
@Override
public void readFully(long position, byte[] buffer) throws IOException {
}
}
private
|
FakeInputStream
|
java
|
apache__kafka
|
generator/src/main/java/org/apache/kafka/message/SchemaGenerator.java
|
{
"start": 1003,
"end": 1106
}
|
class ____ {
/**
* Schema information for a particular message.
*/
static
|
SchemaGenerator
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java
|
{
"start": 4251,
"end": 4571
}
|
interface ____ {
/**
* Called before start()
*/
void preStart();
/**
* Called after start()
*/
void postStart();
/**
* Called before stop()
*/
void preStop();
/**
* Called after stop()
*/
void postStop();
}
/**
* Convenient abstract
|
Callback
|
java
|
apache__camel
|
core/camel-util/src/main/java/org/apache/camel/util/concurrent/AsyncCompletionService.java
|
{
"start": 4714,
"end": 5486
}
|
class ____ implements Runnable, Comparable<Task>, Consumer<V> {
private final int id;
private final Consumer<Consumer<V>> runner;
private V result;
Task(int id, Consumer<Consumer<V>> runner) {
this.id = id;
this.runner = runner;
}
@Override
public void run() {
runner.accept(this);
}
@Override
public void accept(V result) {
this.result = result;
complete(this);
}
@Override
public int compareTo(Task other) {
return Integer.compare(this.id, other.id);
}
@Override
public String toString() {
return "SubmitOrderedTask[" + this.id + "]";
}
}
}
|
Task
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querylog/EsqlQueryLogTests.java
|
{
"start": 2129,
"end": 9272
}
|
class ____ extends ESTestCase {
private static ClusterSettings settings = new ClusterSettings(
Settings.builder()
.put(EsqlPlugin.ESQL_QUERYLOG_THRESHOLD_WARN_SETTING.getKey(), "40ms")
.put(EsqlPlugin.ESQL_QUERYLOG_THRESHOLD_INFO_SETTING.getKey(), "30ms")
.put(EsqlPlugin.ESQL_QUERYLOG_THRESHOLD_DEBUG_SETTING.getKey(), "20ms")
.put(EsqlPlugin.ESQL_QUERYLOG_THRESHOLD_TRACE_SETTING.getKey(), "10ms")
.put(EsqlPlugin.ESQL_QUERYLOG_INCLUDE_USER_SETTING.getKey(), true)
.build(),
new HashSet<>(
Arrays.asList(
EsqlPlugin.ESQL_QUERYLOG_THRESHOLD_WARN_SETTING,
EsqlPlugin.ESQL_QUERYLOG_THRESHOLD_INFO_SETTING,
EsqlPlugin.ESQL_QUERYLOG_THRESHOLD_DEBUG_SETTING,
EsqlPlugin.ESQL_QUERYLOG_THRESHOLD_TRACE_SETTING,
EsqlPlugin.ESQL_QUERYLOG_INCLUDE_USER_SETTING
)
)
);
static MockAppender appender;
static Logger queryLog = LogManager.getLogger(EsqlQueryLog.LOGGER_NAME);
static Level origQueryLogLevel = queryLog.getLevel();
@BeforeClass
public static void init() throws IllegalAccessException {
appender = new MockAppender("test_appender");
appender.start();
Loggers.addAppender(queryLog, appender);
Loggers.setLevel(queryLog, Level.TRACE);
}
@AfterClass
public static void cleanup() {
Loggers.removeAppender(queryLog, appender);
appender.stop();
Loggers.setLevel(queryLog, origQueryLogLevel);
}
public void testPrioritiesOnSuccess() {
EsqlQueryLog queryLog = new EsqlQueryLog(settings, mockFieldProvider());
String query = "from " + randomAlphaOfLength(10);
long[] actualTook = {
randomLongBetween(10_000_000, 20_000_000),
randomLongBetween(20_000_000, 30_000_000),
randomLongBetween(30_000_000, 40_000_000),
randomLongBetween(40_000_000, 50_000_000),
randomLongBetween(0, 9_999_999) };
long[] actualPlanningTook = {
randomLongBetween(0, 1_000_000),
randomLongBetween(0, 1_000_000),
randomLongBetween(0, 1_000_000),
randomLongBetween(0, 1_000_000),
randomLongBetween(0, 1_000_000), };
Level[] expectedLevel = { Level.TRACE, Level.DEBUG, Level.INFO, Level.WARN, null };
for (int i = 0; i < actualTook.length; i++) {
EsqlExecutionInfo warnQuery = getEsqlExecutionInfo(actualTook[i], actualPlanningTook[i]);
queryLog.onQueryPhase(new Result(List.of(), List.of(), DriverCompletionInfo.EMPTY, warnQuery), query);
if (expectedLevel[i] != null) {
assertThat(appender.lastEvent(), is(not(nullValue())));
var msg = (ESLogMessage) appender.lastMessage();
long took = Long.valueOf(msg.get(ELASTICSEARCH_QUERYLOG_TOOK));
long tookMillisExpected = took / 1_000_000L;
long tookMillis = Long.valueOf(msg.get(ELASTICSEARCH_QUERYLOG_TOOK_MILLIS));
assertThat(took, is(actualTook[i]));
assertThat(tookMillis, is(tookMillisExpected));
long planningTook = Long.valueOf(msg.get(ELASTICSEARCH_QUERYLOG_PLANNING_TOOK));
long planningTookMillisExpected = planningTook / 1_000_000;
long planningTookMillis = Long.valueOf(msg.get(ELASTICSEARCH_QUERYLOG_PLANNING_TOOK_MILLIS));
assertThat(planningTook, is(actualPlanningTook[i]));
assertThat(planningTookMillis, is(planningTookMillisExpected));
assertThat(took, greaterThan(planningTook));
assertThat(msg.get(ELASTICSEARCH_QUERYLOG_QUERY), is(query));
assertThat(appender.getLastEventAndReset().getLevel(), equalTo(expectedLevel[i]));
} else {
assertThat(appender.lastEvent(), is(nullValue()));
}
}
}
private SlowLogFieldProvider mockFieldProvider() {
return new SlowLogFieldProvider() {
@Override
public SlowLogFields create(IndexSettings indexSettings) {
return create();
}
@Override
public SlowLogFields create() {
return new SlowLogFields() {
@Override
public Map<String, String> indexFields() {
return Map.of();
}
@Override
public Map<String, String> searchFields() {
return Map.of();
}
};
}
};
}
public void testPrioritiesOnFailure() {
EsqlQueryLog queryLog = new EsqlQueryLog(settings, mockFieldProvider());
String query = "from " + randomAlphaOfLength(10);
long[] actualTook = {
randomLongBetween(10_000_000, 20_000_000),
randomLongBetween(20_000_000, 30_000_000),
randomLongBetween(30_000_000, 40_000_000),
randomLongBetween(40_000_000, 50_000_000),
randomLongBetween(0, 9_999_999) };
Level[] expectedLevel = { Level.TRACE, Level.DEBUG, Level.INFO, Level.WARN, null };
String validationError = randomAlphaOfLength(10);
ValidationException ex = new ValidationException().addValidationError(validationError);
for (int i = 0; i < actualTook.length; i++) {
queryLog.onQueryFailure(query, ex, actualTook[i]);
if (expectedLevel[i] != null) {
assertThat(appender.lastEvent(), is(not(nullValue())));
var msg = (ESLogMessage) appender.lastMessage();
long took = Long.valueOf(msg.get(ELASTICSEARCH_QUERYLOG_TOOK));
long tookMillisExpected = took / 1_000_000L;
long tookMillis = Long.valueOf(msg.get(ELASTICSEARCH_QUERYLOG_TOOK_MILLIS));
assertThat(took, is(actualTook[i]));
assertThat(tookMillis, is(tookMillisExpected));
assertThat(msg.get(ELASTICSEARCH_QUERYLOG_PLANNING_TOOK), is(nullValue()));
assertThat(msg.get(ELASTICSEARCH_QUERYLOG_PLANNING_TOOK_MILLIS), is(nullValue()));
assertThat(msg.get(ELASTICSEARCH_QUERYLOG_QUERY), is(query));
assertThat(appender.getLastEventAndReset().getLevel(), equalTo(expectedLevel[i]));
} else {
assertThat(appender.lastEvent(), is(nullValue()));
}
}
}
private static EsqlExecutionInfo getEsqlExecutionInfo(long tookNanos, long planningTookNanos) {
EsqlExecutionInfo info = new EsqlExecutionInfo(false) {
@Override
public TimeValue overallTook() {
return new TimeValue(tookNanos, TimeUnit.NANOSECONDS);
}
@Override
public TimeValue planningTookTime() {
return new TimeValue(planningTookNanos, TimeUnit.NANOSECONDS);
}
};
return info;
}
}
|
EsqlQueryLogTests
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/RequestMatcherConfigurerTests.java
|
{
"start": 2768,
"end": 3264
}
|
class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.securityMatchers((security) -> security
.requestMatchers(pathPattern("/api/**")))
.securityMatchers((security) -> security
.requestMatchers(pathPattern("/oauth/**")))
.authorizeHttpRequests((requests) -> requests
.anyRequest().denyAll());
return http.build();
// @formatter:on
}
}
@Configuration
@EnableWebSecurity
static
|
Sec2908Config
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/AvroComponentBuilderFactory.java
|
{
"start": 1813,
"end": 2418
}
|
interface ____ extends ComponentBuilder<AvroComponent> {
/**
* Avro protocol to use.
*
* The option is a: <code>org.apache.avro.Protocol</code>
* type.
*
* Group: common
*
* @param protocol the value to set
* @return the dsl builder
*/
default AvroComponentBuilder protocol(org.apache.avro.Protocol protocol) {
doSetProperty("protocol", protocol);
return this;
}
/**
* Avro protocol to use defined by the FQN
|
AvroComponentBuilder
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/TestContextBootstrapper.java
|
{
"start": 2396,
"end": 3072
}
|
interface ____ {
/**
* Set the {@link BootstrapContext} to be used by this bootstrapper.
*/
void setBootstrapContext(BootstrapContext bootstrapContext);
/**
* Get the {@link BootstrapContext} associated with this bootstrapper.
*/
BootstrapContext getBootstrapContext();
/**
* Build the {@link TestContext} for the {@link BootstrapContext}
* associated with this bootstrapper.
* @return a new {@link TestContext}, never {@code null}
* @since 4.2
* @see #buildMergedContextConfiguration()
*/
TestContext buildTestContext();
/**
* Build the {@linkplain MergedContextConfiguration merged context configuration}
* for the test
|
TestContextBootstrapper
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/config/ShardedSubscriptionMode.java
|
{
"start": 688,
"end": 909
}
|
enum ____ {
/**
* Use sharded pubsub only if it's available.
*/
AUTO,
/**
* Use sharded pubsub.
*/
ON,
/**
* Don't use sharded pubsub.
*/
OFF
}
|
ShardedSubscriptionMode
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/cfg/JdbcSettings.java
|
{
"start": 9501,
"end": 14104
}
|
class ____ implements {@code ConnectionProvider}.
* </ul>
* <p>
* If this property is not explicitly set, a connection provider is chosen
* automatically:
* <ul>
* <li>if {@link #JAKARTA_JTA_DATASOURCE} or {@link #JAKARTA_NON_JTA_DATASOURCE}
* is set, {@linkplain org.hibernate.engine.jdbc.connections.internal.DataSourceConnectionProvider
* a datasource-based implementation} is used;
* <li>otherwise, a {@code ConnectionProvider} is loaded automatically as a
* {@linkplain java.util.ServiceLoader Java service};
* <li>but if no service is found, or if more than one service is available,
* {@linkplain org.hibernate.engine.jdbc.connections.internal.DriverManagerConnectionProvider
* a default implementation} is used as a fallback.
* </ul>
* <p>
* The default implementation is not recommended for use in production.
*
* @apiNote The term {@code "class"} appears in the setting name due to legacy reasons;
* however, it can accept instances.
*/
String CONNECTION_PROVIDER = "hibernate.connection.provider_class";
/**
* Specifies the maximum number of inactive connections for any
* {@linkplain ConnectionProvider connection pool} which respects this
* setting, including every built-in implementation except for
* {@link org.hibernate.engine.jdbc.connections.internal.DataSourceConnectionProvider}.
* <p>
* The default pool size depends on the connection provider.
*/
String POOL_SIZE = "hibernate.connection.pool_size";
/**
* Specifies the JDBC transaction isolation level for connections obtained
* from any {@link ConnectionProvider} implementation which respects this
* setting, including every built-in implementation except for
* {@link org.hibernate.engine.jdbc.connections.internal.DataSourceConnectionProvider}.
* <p>
* Possible values are enumerated by {@link java.sql.Connection}:
* {@code READ_UNCOMMITTED}, {@code READ_COMMITTED},
* {@code REPEATABLE_READ}, {@code SERIALIZABLE}.
* <p>
* If this setting is not explicitly specified, Hibernate does not modify
* the transaction isolation level of the JDBC connection.
*
* @see java.sql.Connection#setTransactionIsolation(int)
*/
String ISOLATION = "hibernate.connection.isolation";
/**
* Controls the autocommit mode of JDBC connections obtained from any
* {@link ConnectionProvider} implementation which respects this setting,
* including every built-in implementation except for
* {@link org.hibernate.engine.jdbc.connections.internal.DataSourceConnectionProvider}.
*
* @see java.sql.Connection#setAutoCommit(boolean)
*
* @settingDefault {@code false}
*/
String AUTOCOMMIT = "hibernate.connection.autocommit";
/**
* Indicates that connections obtained from the configured {@link ConnectionProvider} have
* {@linkplain java.sql.Connection#getAutoCommit auto-commit} already disabled when they
* are acquired.
* <p>
* It is inappropriate to set this value to {@code true} when the connections returned by
* the provider do not, in fact, have auto-commit disabled. Doing so may lead to Hibernate
* executing SQL operations outside the scope of any transaction.
*
* @apiNote By default, Hibernate calls {@link java.sql.Connection#setAutoCommit(boolean)}
* on newly obtained connections. With this setting enabled, that call is skipped, along
* with some other operations, in the interest of performance.
*
* @settingDefault {@code false}
*
* @see org.hibernate.boot.SessionFactoryBuilder#applyConnectionProviderDisablesAutoCommit(boolean)
*
* @since 5.2.10
*/
String CONNECTION_PROVIDER_DISABLES_AUTOCOMMIT = "hibernate.connection.provider_disables_autocommit";
/**
* A prefix for properties specifying arbitrary JDBC connection properties. These
* properties are simply passed along to the provider when creating a connection.
* <p>
* For example, declaring {@code hibernate.connection.foo=bar} tells Hibernate to
* append {@code foo=bar} to the JDBC connection URL.
*
* @deprecated This setting is only supported by {@code C3P0ConnectionProvider}
* and {@link org.hibernate.engine.jdbc.connections.internal.DriverManagerConnectionProvider}.
*/
@Deprecated(since="7")
@SuppressWarnings("DeprecatedIsStillUsed")
String CONNECTION_PREFIX = "hibernate.connection";
/**
* Specifies a {@link org.hibernate.resource.jdbc.spi.StatementInspector}
* implementation associated with the {@link org.hibernate.SessionFactory},
* either:
* <ul>
* <li>an instance of {@code StatementInspector},
* <li>a {@link Class} representing a
|
that
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/conversion/locale/LocaleMapper.java
|
{
"start": 281,
"end": 488
}
|
interface ____ {
LocaleMapper INSTANCE = Mappers.getMapper( LocaleMapper.class );
LocaleTarget sourceToTarget(LocaleSource source);
LocaleSource targetToSource(LocaleTarget target);
}
|
LocaleMapper
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/registration/RetryingRegistrationTest.java
|
{
"start": 2257,
"end": 18025
}
|
class ____ {
@RegisterExtension
public static final TestExecutorExtension<ScheduledExecutorService> EXECUTOR_EXTENSION =
TestingUtils.defaultExecutorExtension();
private TestingRpcService rpcService;
@BeforeEach
void setup() {
rpcService = new TestingRpcService();
}
@AfterEach
void tearDown() throws ExecutionException, InterruptedException {
if (rpcService != null) {
rpcService.closeAsync().get();
}
}
@Test
void testSimpleSuccessfulRegistration() throws Exception {
final String testId = "laissez les bon temps roulez";
final String testEndpointAddress = "<test-address>";
final UUID leaderId = UUID.randomUUID();
// an endpoint that immediately returns success
ManualResponseTestRegistrationGateway testGateway =
new ManualResponseTestRegistrationGateway(new TestRegistrationSuccess(testId));
try {
rpcService.registerGateway(testEndpointAddress, testGateway);
TestRetryingRegistration registration =
new TestRetryingRegistration(rpcService, testEndpointAddress, leaderId);
registration.startRegistration();
CompletableFuture<
RetryingRegistration.RetryingRegistrationResult<
TestRegistrationGateway,
TestRegistrationSuccess,
TestRegistrationRejection>>
future = registration.getFuture();
assertThat(future).isNotNull();
// multiple accesses return the same future
assertThat(registration.getFuture()).isEqualTo(future);
RetryingRegistration.RetryingRegistrationResult<
TestRegistrationGateway,
TestRegistrationSuccess,
TestRegistrationRejection>
registrationResponse = future.get(10L, TimeUnit.SECONDS);
// validate correct invocation and result
assertThat(registrationResponse.getSuccess().getCorrelationId()).isEqualTo(testId);
assertThat(testGateway.getInvocations().take().leaderId()).isEqualTo(leaderId);
} finally {
testGateway.stop();
}
}
@Test
void testPropagateFailures() throws Exception {
final String testExceptionMessage = "testExceptionMessage";
// RPC service that fails with exception upon the connection
RpcService rpc = mock(RpcService.class);
when(rpc.connect(anyString(), any(Class.class)))
.thenThrow(new RuntimeException(testExceptionMessage));
TestRetryingRegistration registration =
new TestRetryingRegistration(rpc, "testaddress", UUID.randomUUID());
registration.startRegistration();
CompletableFuture<?> future = registration.getFuture();
assertThat(future).isDone();
assertThatThrownBy(future::get)
.withFailMessage("We expected an ExecutionException.")
.isInstanceOf(ExecutionException.class)
.cause()
.hasMessage(testExceptionMessage);
}
@Test
void testRetryConnectOnFailure() throws Exception {
final String testId = "laissez les bon temps roulez";
final UUID leaderId = UUID.randomUUID();
ScheduledExecutorServiceAdapter executor =
new ScheduledExecutorServiceAdapter(EXECUTOR_EXTENSION.getExecutor());
ManualResponseTestRegistrationGateway testGateway =
new ManualResponseTestRegistrationGateway(new TestRegistrationSuccess(testId));
try {
// RPC service that fails upon the first connection, but succeeds on the second
RpcService rpc = mock(RpcService.class);
when(rpc.connect(anyString(), any(Class.class)))
.thenReturn(
FutureUtils.completedExceptionally(
new Exception(
"test connect failure")), // first connection attempt
// fails
CompletableFuture.completedFuture(
testGateway) // second connection attempt succeeds
);
when(rpc.getScheduledExecutor()).thenReturn(executor);
TestRetryingRegistration registration =
new TestRetryingRegistration(rpc, "foobar address", leaderId);
long start = System.currentTimeMillis();
registration.startRegistration();
RetryingRegistration.RetryingRegistrationResult<
TestRegistrationGateway,
TestRegistrationSuccess,
TestRegistrationRejection>
registrationResponse = registration.getFuture().get(10L, TimeUnit.SECONDS);
// measure the duration of the registration --> should be longer than the error delay
long duration = System.currentTimeMillis() - start;
assertThat(duration)
.withFailMessage(
"The registration should have failed the first time. Thus the duration should be longer than at least a single error delay.")
.isGreaterThan(TestRetryingRegistration.DELAY_ON_ERROR);
// validate correct invocation and result
assertThat(registrationResponse.getSuccess().getCorrelationId()).isEqualTo(testId);
assertThat(testGateway.getInvocations().take().leaderId()).isEqualTo(leaderId);
} finally {
testGateway.stop();
}
}
@Test
@Timeout(10000)
void testRetriesOnTimeouts() throws Exception {
final String testId = "rien ne va plus";
final String testEndpointAddress = "<test-address>";
final UUID leaderId = UUID.randomUUID();
// an endpoint that immediately returns futures with timeouts before returning a successful
// future
ManualResponseTestRegistrationGateway testGateway =
new ManualResponseTestRegistrationGateway(
null, // timeout
null, // timeout
new TestRegistrationSuccess(testId) // success
);
try {
rpcService.registerGateway(testEndpointAddress, testGateway);
final long initialTimeout = 20L;
TestRetryingRegistration registration =
new TestRetryingRegistration(
rpcService,
testEndpointAddress,
leaderId,
new RetryingRegistrationConfiguration(
initialTimeout,
1000L,
15000L, // make sure that we timeout in case of an error
15000L));
long started = System.nanoTime();
registration.startRegistration();
CompletableFuture<
RetryingRegistration.RetryingRegistrationResult<
TestRegistrationGateway,
TestRegistrationSuccess,
TestRegistrationRejection>>
future = registration.getFuture();
RetryingRegistration.RetryingRegistrationResult<
TestRegistrationGateway,
TestRegistrationSuccess,
TestRegistrationRejection>
registrationResponse = future.get(10L, TimeUnit.SECONDS);
long finished = System.nanoTime();
long elapsedMillis = (finished - started) / 1000000;
// validate correct invocation and result
assertThat(registrationResponse.getSuccess().getCorrelationId()).isEqualTo(testId);
assertThat(testGateway.getInvocations().take().leaderId()).isEqualTo(leaderId);
// validate that some retry-delay / back-off behavior happened
assertThat(elapsedMillis)
.withFailMessage("retries did not properly back off")
.isGreaterThanOrEqualTo(3 * initialTimeout);
} finally {
testGateway.stop();
}
}
@Test
void testFailure() throws Exception {
final String testId = "qui a coupe le fromage";
final String testEndpointAddress = "<test-address>";
final UUID leaderId = UUID.randomUUID();
ManualResponseTestRegistrationGateway testGateway =
new ManualResponseTestRegistrationGateway(
null, // timeout
new RegistrationResponse.Failure(new FlinkException("no reason")),
null, // timeout
new TestRegistrationSuccess(testId) // success
);
try {
rpcService.registerGateway(testEndpointAddress, testGateway);
TestRetryingRegistration registration =
new TestRetryingRegistration(rpcService, testEndpointAddress, leaderId);
long started = System.nanoTime();
registration.startRegistration();
CompletableFuture<
RetryingRegistration.RetryingRegistrationResult<
TestRegistrationGateway,
TestRegistrationSuccess,
TestRegistrationRejection>>
future = registration.getFuture();
RetryingRegistration.RetryingRegistrationResult<
TestRegistrationGateway,
TestRegistrationSuccess,
TestRegistrationRejection>
registrationResponse = future.get(10L, TimeUnit.SECONDS);
long finished = System.nanoTime();
long elapsedMillis = (finished - started) / 1000000;
// validate correct invocation and result
assertThat(registrationResponse.getSuccess().getCorrelationId()).isEqualTo(testId);
assertThat(testGateway.getInvocations().take().leaderId()).isEqualTo(leaderId);
// validate that some retry-delay / back-off behavior happened
assertThat(elapsedMillis)
.withFailMessage("retries did not properly back off")
.isGreaterThanOrEqualTo(
2 * TestRetryingRegistration.INITIAL_TIMEOUT
+ TestRetryingRegistration.DELAY_ON_FAILURE);
} finally {
testGateway.stop();
}
}
@Test
void testRegistrationRejection() {
final TestRegistrationGateway testRegistrationGateway =
new ManualResponseTestRegistrationGateway(
new TestRegistrationRejection(
TestRegistrationRejection.RejectionReason.REJECTED));
rpcService.registerGateway(testRegistrationGateway.getAddress(), testRegistrationGateway);
final TestRetryingRegistration testRetryingRegistration =
new TestRetryingRegistration(
rpcService, testRegistrationGateway.getAddress(), UUID.randomUUID());
testRetryingRegistration.startRegistration();
final RetryingRegistration.RetryingRegistrationResult<
TestRegistrationGateway, TestRegistrationSuccess, TestRegistrationRejection>
response = testRetryingRegistration.getFuture().join();
assertThat(response.isRejection()).isTrue();
assertThat(response.getRejection().getRejectionReason())
.isEqualTo(TestRegistrationRejection.RejectionReason.REJECTED);
}
@Test
@SuppressWarnings("unchecked")
void testRetryOnError() throws Exception {
final String testId = "Petit a petit, l'oiseau fait son nid";
final String testEndpointAddress = "<test-address>";
final UUID leaderId = UUID.randomUUID();
// gateway that upon calls first responds with a failure, then with a success
final Queue<CompletableFuture<RegistrationResponse>> responses = new ArrayDeque<>(2);
responses.add(FutureUtils.completedExceptionally(new Exception("test exception")));
responses.add(CompletableFuture.completedFuture(new TestRegistrationSuccess(testId)));
TestRegistrationGateway testGateway =
DefaultTestRegistrationGateway.newBuilder()
.setRegistrationFunction((uuid, aLong) -> responses.poll())
.build();
rpcService.registerGateway(testEndpointAddress, testGateway);
TestRetryingRegistration registration =
new TestRetryingRegistration(rpcService, testEndpointAddress, leaderId);
long started = System.nanoTime();
registration.startRegistration();
CompletableFuture<
RetryingRegistration.RetryingRegistrationResult<
TestRegistrationGateway,
TestRegistrationSuccess,
TestRegistrationRejection>>
future = registration.getFuture();
RetryingRegistration.RetryingRegistrationResult<
TestRegistrationGateway, TestRegistrationSuccess, TestRegistrationRejection>
registrationResponse = future.get(10, TimeUnit.SECONDS);
long finished = System.nanoTime();
long elapsedMillis = (finished - started) / 1000000;
assertThat(registrationResponse.getSuccess().getCorrelationId()).isEqualTo(testId);
// validate that some retry-delay / back-off behavior happened
assertThat(elapsedMillis)
.withFailMessage("retries did not properly back off")
.isGreaterThanOrEqualTo(TestRetryingRegistration.DELAY_ON_ERROR);
}
@Test
void testCancellation() throws Exception {
final String testEndpointAddress = "my-test-address";
final UUID leaderId = UUID.randomUUID();
CompletableFuture<RegistrationResponse> result = new CompletableFuture<>();
AtomicInteger registrationCallCounter = new AtomicInteger(0);
TestRegistrationGateway testGateway =
DefaultTestRegistrationGateway.newBuilder()
.setRegistrationFunction(
(uuid, aLong) -> {
registrationCallCounter.incrementAndGet();
return result;
})
.build();
rpcService.registerGateway(testEndpointAddress, testGateway);
TestRetryingRegistration registration =
new TestRetryingRegistration(rpcService, testEndpointAddress, leaderId);
registration.startRegistration();
// cancel and fail the current registration attempt
registration.cancel();
result.completeExceptionally(new TimeoutException());
// there should not be a second registration attempt
assertThat(registrationCallCounter).hasValueLessThanOrEqualTo(1);
}
// ------------------------------------------------------------------------
// test registration
// ------------------------------------------------------------------------
static
|
RetryingRegistrationTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/id/array/PrimitiveByteArrayIdTest.java
|
{
"start": 3849,
"end": 3921
}
|
class ____ {
@Id
public byte[] id;
public String name;
}
}
|
DemoEntity
|
java
|
netty__netty
|
microbench/src/main/java/io/netty/microbench/channel/DefaultChannelPipelineBenchmark.java
|
{
"start": 1927,
"end": 2134
}
|
class ____ extends ChannelInboundHandlerAdapter {
@Override
public final boolean isSharable() {
return true;
}
}
private abstract static
|
SharableInboundHandlerAdapter
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/MembersInjectionTest.java
|
{
"start": 11123,
"end": 11965
}
|
class ____ {",
" @Inject @Nullable String string;",
"}");
CompilerTests.daggerCompiler(file, NON_TYPE_USE_NULLABLE)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(0);
subject.generatedSource(
goldenFileRule.goldenSource("test/FieldInjection_MembersInjector"));
});
}
@Test
public void fieldInjectionWithQualifier() {
Source file =
CompilerTests.javaSource(
"test.FieldInjectionWithQualifier",
"package test;",
"",
"import dagger.Lazy;",
"import javax.inject.Inject;",
"import javax.inject.Named;",
"import javax.inject.Provider;",
"",
"
|
FieldInjection
|
java
|
dropwizard__dropwizard
|
dropwizard-jersey/src/test/java/io/dropwizard/jersey/caching/CacheControlledResponseFeatureTest.java
|
{
"start": 401,
"end": 3736
}
|
class ____ extends AbstractJerseyTest {
@Override
protected Application configure() {
ResourceConfig rc = DropwizardResourceConfig.forTesting();
rc = rc.register(CachingResource.class);
return rc;
}
@Test
void immutableResponsesHaveCacheControlHeaders() throws Exception {
final Response response = target("/caching/immutable").request().get();
assertThat(response.getHeaders().get(HttpHeaders.CACHE_CONTROL))
.containsOnly("no-transform, max-age=31536000");
}
@Test
void privateResponsesHaveCacheControlHeaders() throws Exception {
final Response response = target("/caching/private").request().get();
assertThat(response.getHeaders().get(HttpHeaders.CACHE_CONTROL))
.containsOnly("private, no-transform");
}
@Test
void maxAgeResponsesHaveCacheControlHeaders() throws Exception {
final Response response = target("/caching/max-age").request().get();
assertThat(response.getHeaders().get(HttpHeaders.CACHE_CONTROL))
.containsOnly("no-transform, max-age=1123200");
}
@Test
void noCacheResponsesHaveCacheControlHeaders() throws Exception {
final Response response = target("/caching/no-cache").request().get();
assertThat(response.getHeaders().get(HttpHeaders.CACHE_CONTROL))
.containsOnly("no-cache, no-transform");
}
@Test
void noStoreResponsesHaveCacheControlHeaders() throws Exception {
final Response response = target("/caching/no-store").request().get();
assertThat(response.getHeaders().get(HttpHeaders.CACHE_CONTROL))
.containsOnly("no-store, no-transform");
}
@Test
void noTransformResponsesHaveCacheControlHeaders() throws Exception {
final Response response = target("/caching/no-transform").request().get();
assertThat(response.getHeaders().get(HttpHeaders.CACHE_CONTROL))
.isNull();
}
@Test
void mustRevalidateResponsesHaveCacheControlHeaders() throws Exception {
final Response response = target("/caching/must-revalidate").request().get();
assertThat(response.getHeaders().get(HttpHeaders.CACHE_CONTROL))
.containsOnly("no-transform, must-revalidate");
}
@Test
void proxyRevalidateResponsesHaveCacheControlHeaders() throws Exception {
final Response response = target("/caching/proxy-revalidate").request().get();
assertThat(response.getHeaders().get(HttpHeaders.CACHE_CONTROL))
.containsOnly("no-transform, proxy-revalidate");
}
@Test
void sharedMaxAgeResponsesHaveCacheControlHeaders() throws Exception {
final Response response = target("/caching/shared-max-age").request().get();
assertThat(response.getHeaders().get(HttpHeaders.CACHE_CONTROL))
.containsOnly("no-transform, s-maxage=46800");
}
@Test
void staleWhileRevalidateResponsesHaveCacheControlHeaders() throws Exception {
final Response response = target("/caching/stale-while-revalidate").request().get();
assertThat(response.getHeaders().get(HttpHeaders.CACHE_CONTROL))
.containsOnly("no-transform, stale-while-revalidate=46800");
}
}
|
CacheControlledResponseFeatureTest
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/resolution/RuntimeResolutionTest.java
|
{
"start": 614,
"end": 1517
}
|
class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(MyList.class, ArrayProducer.class);
@SuppressWarnings("serial")
@Test
public void testResolution() throws IOException {
ArcContainer arc = Arc.container();
// MyList bean types: MyList, AbstractList<Integer>, List<Integer>, AbstractCollection<Integer>, Iterable<Integer>, Object
InstanceHandle<List<? extends Number>> list = arc.instance(new TypeLiteral<List<? extends Number>>() {
});
assertTrue(list.isAvailable());
assertEquals(Integer.valueOf(7), list.get().get(1));
InstanceHandle<MyList[]> array = arc.instance(MyList[].class);
assertTrue(array.isAvailable());
assertEquals(1, array.get().length);
assertEquals(Integer.valueOf(7), array.get()[0].get(1));
}
@Singleton
static
|
RuntimeResolutionTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/persister/entity/JoinedSubclassEntityPersisterTest.java
|
{
"start": 947,
"end": 1588
}
|
class ____ {
@Test
void the_table_name_must_match_the_attribute_s_column(EntityManagerFactoryScope scope) {
scope.inTransaction(
entityManager -> {
JpaMetamodelImpl metamodel = (JpaMetamodelImpl) entityManager.getMetamodel();
MappingMetamodel mappingMetamodel = metamodel.getMappingMetamodel();
EntityPersister entityDescriptor = mappingMetamodel.getEntityDescriptor( Dog.class );
String table = entityDescriptor.getTableNameForColumn( "name" );
assertEquals( "TANIMAL", table );
}
);
}
@Entity
@Inheritance(strategy = JOINED)
@Table(name = "TANIMAL")
public static
|
JoinedSubclassEntityPersisterTest
|
java
|
quarkusio__quarkus
|
extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/UserRouteRegistrationTest.java
|
{
"start": 1990,
"end": 2294
}
|
class ____ {
public void register(@Observes io.vertx.mutiny.ext.web.Router router) {
router.route("/observes-mutiny").handler(rc -> rc.response().endAndForget("observers mutiny - ok"));
}
}
@ApplicationScoped
static
|
BeanRegisteringRouteUsingObservesWithMutinyRouter
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-secure-webflux/src/test/java/smoketest/secure/webflux/SampleSecureWebFluxCustomSecurityTests.java
|
{
"start": 2155,
"end": 4209
}
|
class ____ {
@Autowired
private WebTestClient webClient;
@Test
void userDefinedMappingsSecure() {
this.webClient.get()
.uri("/")
.accept(MediaType.APPLICATION_JSON)
.exchange()
.expectStatus()
.isEqualTo(HttpStatus.UNAUTHORIZED);
}
@Test
void healthDoesNotRequireAuthentication() {
this.webClient.get()
.uri("/actuator/health")
.accept(MediaType.APPLICATION_JSON)
.exchange()
.expectStatus()
.isOk();
}
@Test
void actuatorsSecuredByRole() {
this.webClient.get()
.uri("/actuator/env")
.accept(MediaType.APPLICATION_JSON)
.header("Authorization", getBasicAuth())
.exchange()
.expectStatus()
.isForbidden();
}
@Test
void actuatorsAccessibleOnCorrectLogin() {
this.webClient.get()
.uri("/actuator/env")
.accept(MediaType.APPLICATION_JSON)
.header("Authorization", getBasicAuthForAdmin())
.exchange()
.expectStatus()
.isOk();
}
@Test
void actuatorExcludedFromEndpointRequestMatcher() {
this.webClient.get()
.uri("/actuator/mappings")
.accept(MediaType.APPLICATION_JSON)
.header("Authorization", getBasicAuth())
.exchange()
.expectStatus()
.isOk();
}
@Test
void staticResourceShouldBeAccessible() {
this.webClient.get()
.uri("/css/bootstrap.min.css")
.accept(MediaType.APPLICATION_JSON)
.exchange()
.expectStatus()
.isOk();
}
@Test
void actuatorLinksIsSecure() {
this.webClient.get()
.uri("/actuator")
.accept(MediaType.APPLICATION_JSON)
.exchange()
.expectStatus()
.isUnauthorized();
this.webClient.get()
.uri("/actuator")
.accept(MediaType.APPLICATION_JSON)
.header("Authorization", getBasicAuthForAdmin())
.exchange()
.expectStatus()
.isOk();
}
private String getBasicAuth() {
return "Basic " + Base64.getEncoder().encodeToString("user:password".getBytes());
}
private String getBasicAuthForAdmin() {
return "Basic " + Base64.getEncoder().encodeToString("admin:admin".getBytes());
}
@Configuration(proxyBeanMethods = false)
static
|
SampleSecureWebFluxCustomSecurityTests
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/TimeoutExtensionTests.java
|
{
"start": 27791,
"end": 28054
}
|
class ____ {
@Test
void testMethod() {
}
@RepeatedTest(1)
void testTemplateMethod() {
}
@TestFactory
Stream<DynamicTest> testFactoryMethod() {
return Stream.of(dynamicTest("dynamicTest", () -> {
}));
}
}
static
|
NonTimeoutExceedingTestCase
|
java
|
apache__camel
|
components/camel-sql/src/test/java/org/apache/camel/component/sql/SqlConsumerDeleteTest.java
|
{
"start": 1512,
"end": 3576
}
|
class ____ extends CamelTestSupport {
private EmbeddedDatabase db;
private JdbcTemplate jdbcTemplate;
@Override
public void doPreSetup() throws Exception {
db = new EmbeddedDatabaseBuilder()
.setName(getClass().getSimpleName())
.setType(EmbeddedDatabaseType.H2)
.addScript("sql/createAndPopulateDatabase.sql").build();
jdbcTemplate = new JdbcTemplate(db);
}
@Override
public void doPostTearDown() throws Exception {
if (db != null) {
db.shutdown();
}
}
@Test
public void testConsume() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(3);
MockEndpoint.assertIsSatisfied(context);
List<Exchange> exchanges = mock.getReceivedExchanges();
assertEquals(3, exchanges.size());
assertEquals(1, exchanges.get(0).getIn().getBody(Map.class).get("ID"));
assertEquals("Camel", exchanges.get(0).getIn().getBody(Map.class).get("PROJECT"));
assertEquals(2, exchanges.get(1).getIn().getBody(Map.class).get("ID"));
assertEquals("AMQ", exchanges.get(1).getIn().getBody(Map.class).get("PROJECT"));
assertEquals(3, exchanges.get(2).getIn().getBody(Map.class).get("ID"));
assertEquals("Linux", exchanges.get(2).getIn().getBody(Map.class).get("PROJECT"));
await("Should have deleted all 3 rows")
.until(() -> jdbcTemplate.queryForObject("select count(*) from projects", Integer.class) == 0);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
getContext().getComponent("sql", SqlComponent.class).setDataSource(db);
from("sql:select * from projects order by id?initialDelay=0&delay=50&consumer.onConsume=delete from projects where id = :#id")
.to("mock:result");
}
};
}
}
|
SqlConsumerDeleteTest
|
java
|
google__guice
|
extensions/servlet/test/com/google/inject/servlet/FilterDispatchIntegrationTest.java
|
{
"start": 11439,
"end": 14236
}
|
class ____ implements Filter {
private final AtomicInteger counter;
private int calledAt = -1;
public CountFilter(AtomicInteger counter) {
this.counter = counter;
}
@Override
public void destroy() {}
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
throws ServletException, IOException {
if (calledAt != -1) {
fail("not expecting to be called twice");
}
calledAt = counter.getAndIncrement();
chain.doFilter(request, response);
}
@Override
public void init(FilterConfig filterConfig) {}
}
public final void testFilterExceptionPrunesStack() throws Exception {
Injector injector =
Guice.createInjector(
new ServletModule() {
@Override
protected void configureServlets() {
filter("/").through(TestFilter.class);
filter("/nothing").through(TestFilter.class);
filter("/").through(ThrowingFilter.class);
}
});
HttpServletRequest request = newFakeHttpServletRequest();
FilterPipeline pipeline = injector.getInstance(FilterPipeline.class);
pipeline.initPipeline(null);
try {
pipeline.dispatch(request, null, null);
fail("expected exception");
} catch (ServletException ex) {
for (StackTraceElement element : ex.getStackTrace()) {
String className = element.getClassName();
assertTrue(
"was: " + element,
!className.equals(FilterChainInvocation.class.getName())
&& !className.equals(FilterDefinition.class.getName()));
}
}
}
public final void testServletExceptionPrunesStack() throws Exception {
Injector injector =
Guice.createInjector(
new ServletModule() {
@Override
protected void configureServlets() {
filter("/").through(TestFilter.class);
filter("/nothing").through(TestFilter.class);
serve("/").with(ThrowingServlet.class);
}
});
HttpServletRequest request = newFakeHttpServletRequest();
FilterPipeline pipeline = injector.getInstance(FilterPipeline.class);
pipeline.initPipeline(null);
try {
pipeline.dispatch(request, null, null);
fail("expected exception");
} catch (ServletException ex) {
for (StackTraceElement element : ex.getStackTrace()) {
String className = element.getClassName();
assertTrue(
"was: " + element,
!className.equals(FilterChainInvocation.class.getName())
&& !className.equals(FilterDefinition.class.getName()));
}
}
}
@Singleton
private static
|
CountFilter
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/MonoJust.java
|
{
"start": 953,
"end": 1683
}
|
class ____<T>
extends Mono<T>
implements Fuseable.ScalarCallable<T>, Fuseable, SourceProducer<T> {
final T value;
MonoJust(T value) {
this.value = Objects.requireNonNull(value, "value");
}
@Override
public T call() throws Exception {
return value;
}
@Override
public T block(Duration m) {
return value;
}
@Override
public T block() {
return value;
}
@Override
public void subscribe(CoreSubscriber<? super T> actual) {
actual.onSubscribe(Operators.scalarSubscription(actual, value));
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.BUFFERED) return 1;
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return SourceProducer.super.scanUnsafe(key);
}
}
|
MonoJust
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/serializer/filters/PropertyPathTest3.java
|
{
"start": 417,
"end": 1395
}
|
class ____ extends TestCase {
/** 测试只输出子集合中的特定属性 */
public void test_path() throws Exception {
Person p1 = new Person();
p1.setId(100);
Person c1 = new Person();
c1.setId(1000);
Person c2 = new Person();
c2.setId(2000);
p1.getChildren().add(c1);
p1.getChildren().add(c2);
//只输出children.id以及根上的id
String s = JSON.toJSONString(p1, new MyPropertyPreFilter(new String[] {"children.id", "id"}));
Assert.assertEquals("{\"children\":[{\"id\":1000},{\"id\":2000}],\"id\":100}", s);
}
/** 测试只输出子字段map中的特定属性 */
public void test_path2() throws Exception {
Person2 p1 = new Person2();
p1.setId(1);
Map<String, String> infoMap = new HashMap<String, String>();
infoMap.put("name", "李三");
infoMap.put("height", "168");
p1.setInfoMap(infoMap);
//只输出infoMap.name
String s = JSON.toJSONString(p1, new MyPropertyPreFilter(new String[] {"infoMap.name"}));
Assert.assertEquals("{\"infoMap\":{\"name\":\"李三\"}}", s);
}
public static
|
PropertyPathTest3
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/spi/Language.java
|
{
"start": 1011,
"end": 2761
}
|
interface ____ {
/**
* Creates a predicate based on <b>only</b> the given string input
*
* @param expression the expression as a string input
* @return the created predicate
*/
Predicate createPredicate(String expression);
/**
* Creates an expression based on <b>only</b> the given string input
*
* @param expression the expression as a string input
* @return the created expression
*/
Expression createExpression(String expression);
/**
* Creates an expression based on the input with properties
* <p>
* This is used for languages that have been configured with custom properties most noticeable for
* xpath/xquery/tokenizer languages that have several options.
*
* @param expression the expression
* @param properties configuration properties (optimized as object array with hardcoded positions for properties)
* @return the created predicate
*/
default Predicate createPredicate(String expression, Object[] properties) {
return createPredicate(expression);
}
/**
* Creates an expression based on the input with properties
* <p>
* This is used for languages that have been configured with custom properties most noticeable for
* xpath/xquery/tokenizer languages that have several options.
*
* @param expression the expression
* @param properties configuration properties (optimized as object array with hardcoded positions for properties)
* @return the created expression
*/
default Expression createExpression(String expression, Object[] properties) {
return createExpression(expression);
}
}
|
Language
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-mcp/src/test/java/org/apache/dubbo/mcp/core/McpServiceExportListenerTest.java
|
{
"start": 3696,
"end": 3796
}
|
class ____ {
public void testMethod() {
// Test method
}
}
}
|
TestService
|
java
|
processing__processing4
|
app/src/processing/app/syntax/JEditTextArea.java
|
{
"start": 2167,
"end": 37119
}
|
class ____ extends JComponent
{
/**
* Adding components with this name to the text area will place
* them left of the horizontal scroll bar. In jEdit, the status
* bar is added this way.
*/
public static String LEFT_OF_SCROLLBAR = "los";
/** The size of the offset between the leftmost padding and the code */
public static final int leftHandGutter = 6;
private final Segment TEST_SEGMENT;
private InputMethodSupport inputMethodSupport;
private final TextAreaDefaults defaults;
private final Brackets bracketHelper = new Brackets();
private FontMetrics cachedPartialPixelWidthFont;
private float partialPixelWidth;
/**
* Creates a new JEditTextArea with the specified settings.
* @param defaults The default settings
*/
public JEditTextArea(TextAreaDefaults defaults, InputHandler inputHandler) {
this.defaults = defaults;
char[] testSegmentContents = {'w'};
TEST_SEGMENT = new Segment(testSegmentContents, 0, 1);
// Enable the necessary events
enableEvents(AWTEvent.KEY_EVENT_MASK);
if (!DISABLE_CARET) {
caretTimer = new Timer(500, e -> {
if (hasFocus()) {
blinkCaret();
}
});
caretTimer.setInitialDelay(500);
caretTimer.start();
}
// Initialize some misc. stuff
painter = createPainter(defaults);
documentHandler = new DocumentHandler();
eventListenerList = new EventListenerList();
caretEvent = new MutableCaretEvent();
lineSegment = new Segment();
bracketLine = bracketPosition = -1;
blink = true;
cachedPartialPixelWidthFont = null;
partialPixelWidth = 0;
// Initialize the GUI
/*
setLayout(new ScrollLayout());
add(CENTER, painter);
add(RIGHT, vertical = new JScrollBar(Adjustable.VERTICAL));
add(BOTTOM, horizontal = new JScrollBar(Adjustable.HORIZONTAL));
*/
setLayout(new BorderLayout());
add(painter, BorderLayout.CENTER);
add(vertical = new JScrollBar(Adjustable.VERTICAL), BorderLayout.EAST);
add(horizontal = new JScrollBar(Adjustable.HORIZONTAL), BorderLayout.SOUTH);
// what a dreadful hack to get the scrollbar to align
horizontal.setBorder(new EmptyBorder(0, 0, 0, vertical.getPreferredSize().width));
/*
// this fixes the glitch at the lower-right of the scrollbars,
// but results in the scrolling area behaving very oddly,
// presumably due to quirks in this very old JEditSyntax code.
JScrollPane pane = new JScrollPane(painter);
pane.setBorder(BorderFactory.createEmptyBorder());
horizontal = pane.getHorizontalScrollBar();
vertical = pane.getVerticalScrollBar();
setLayout(new BorderLayout());
add(pane, BorderLayout.CENTER);
*/
// Add some event listeners
vertical.addAdjustmentListener(new AdjustHandler());
horizontal.addAdjustmentListener(new AdjustHandler());
painter.addComponentListener(new ComponentHandler());
painter.addMouseListener(new MouseHandler());
painter.addMouseMotionListener(new DragHandler());
addFocusListener(new FocusHandler());
// send tab keys through to the text area
// https://download.processing.org/bugzilla/1267.html
setFocusTraversalKeysEnabled(false);
// Load the defaults
setInputHandler(inputHandler);
setDocument(defaults.document);
// editable = defaults.editable;
caretVisible = defaults.caretVisible;
caretBlinks = defaults.caretBlinks;
electricScroll = defaults.electricScroll;
// We don't seem to get the initial focus event?
// focusedComponent = this;
addMouseWheelListener(e -> {
if (scrollBarsInitialized) {
if (e.getScrollType() == MouseWheelEvent.WHEEL_UNIT_SCROLL) {
int scrollAmount = e.getUnitsToScroll();
// System.out.println("rot/amt = " + e.getWheelRotation() + " " + amt);
// int max = vertical.getMaximum();
// System.out.println("UNIT SCROLL of " + amt + " at value " + vertical.getValue() + " and max " + max);
// System.out.println(" get wheel rotation is " + e.getWheelRotation());
// int ex = e.getModifiersEx();
// String mods = InputEvent.getModifiersExText(ex);
// if (ex != 0) {
// System.out.println(" 3 2 1 0");
// System.out.println(" 10987654321098765432109876543210");
// System.out.println(" " + PApplet.binary(e.getModifiersEx()));
//// if (mods.length() > 0) {
// System.out.println(" mods extext = " + mods + " " + mods.length() + " " + PApplet.hex(mods.charAt(0)));
// }
// System.out.println(" " + e);
// inertia scrolling on OS X will fire several shift-wheel events
// that are negative values.. this makes the scrolling area jump.
boolean isHorizontal = Platform.isMacOS() && e.isShiftDown();
if (isHorizontal) {
horizontal.setValue(horizontal.getValue() + scrollAmount);
}else{
vertical.setValue(vertical.getValue() + scrollAmount);
}
}
}
});
}
public void updateTheme() {
// This default version will update the fonts and not much else.
// It's expected to always be overridden by the PdeTextArea version,
// but it's here if a Mode author *really* must avoid PdeTextArea.
painter.updateTheme();
repaint();
}
/**
* Override this to provide your own painter for this {@link JEditTextArea}.
* @return a newly constructed {@link TextAreaPainter}.
*/
protected TextAreaPainter createPainter(final TextAreaDefaults defaults) {
return new TextAreaPainter(this, defaults);
}
/**
* Inline Input Method Support for Japanese.
*/
public InputMethodRequests getInputMethodRequests() {
if (Preferences.getBoolean("editor.input_method_support")) {
if (inputMethodSupport == null) {
inputMethodSupport = new InputMethodSupport(this);
}
return inputMethodSupport;
}
return null;
}
/**
* Get current position of the vertical scroll bar. [fry]
* @deprecated Use {@link #getVerticalScrollPosition()}.
*/
public int getScrollPosition() {
return getVerticalScrollPosition();
}
/**
* Set position of the vertical scroll bar. [fry]
* @deprecated Use {@link #setVerticalScrollPosition(int)}.
*/
public void setScrollPosition(int what) {
setVerticalScrollPosition(what);
}
/**
* Get current position of the vertical scroll bar.
*/
public int getVerticalScrollPosition() {
return vertical.getValue();
}
/**
* Set position of the vertical scroll bar.
*/
public void setVerticalScrollPosition(int what) {
vertical.setValue(what);
}
/**
* Get current position of the horizontal scroll bar.
*/
public int getHorizontalScrollPosition() {
return horizontal.getValue();
}
/**
* Set position of the horizontal scroll bar.
*/
public void setHorizontalScrollPosition(int what) {
horizontal.setValue(what);
}
/**
* Returns the object responsible for painting this text area.
*/
public final TextAreaPainter getPainter() {
return painter;
}
public TextAreaDefaults getDefaults() {
return defaults;
}
/**
* Returns the input handler.
*/
public final InputHandler getInputHandler() {
return inputHandler;
}
/**
* Sets the input handler.
* @param inputHandler The new input handler
*/
public void setInputHandler(InputHandler inputHandler) {
this.inputHandler = inputHandler;
}
/**
* Returns true if the caret is blinking, false otherwise.
*/
public final boolean isCaretBlinkEnabled() {
return caretBlinks;
}
/**
* Toggles caret blinking.
* @param caretBlinks True if the caret should blink, false otherwise
*/
public void setCaretBlinkEnabled(boolean caretBlinks) {
this.caretBlinks = caretBlinks;
if (!caretBlinks) {
blink = false;
}
painter.invalidateSelectedLines();
}
/**
* Returns true if the caret is visible, false otherwise.
*/
public final boolean isCaretVisible() {
return (!caretBlinks || blink) && caretVisible;
}
/**
* Sets if the caret should be visible.
* @param caretVisible True if the caret should be visible, false
* otherwise
*/
public void setCaretVisible(boolean caretVisible) {
this.caretVisible = caretVisible;
blink = true;
painter.invalidateSelectedLines();
}
/**
* Blinks the caret.
*/
public final void blinkCaret() {
if (caretBlinks) {
blink = !blink;
painter.invalidateSelectedLines();
} else {
blink = true;
}
}
/**
* Returns the number of lines from the top and button of the
* text area that are always visible.
*/
public final int getElectricScroll() {
return electricScroll;
}
/**
* Sets the number of lines from the top and bottom of the text
* area that are always visible
* @param electricScroll The number of lines always visible from
* the top or bottom
*/
public final void setElectricScroll(int electricScroll) {
this.electricScroll = electricScroll;
}
/**
* Updates the state of the scroll bars. This should be called
* if the number of lines in the document changes, or when the
* size of the text area changes.
*/
public void updateScrollBars() {
if (vertical != null && visibleLines != 0) {
vertical.setValues(firstLine,visibleLines,0,getLineCount());
vertical.setVisible(visibleLines < getLineCount());
vertical.setUnitIncrement(2);
vertical.setBlockIncrement(visibleLines);
}
//if (horizontal != null && width != 0) {
if ((horizontal != null) && (painter.getWidth() != 0)) {
//int value = horizontal.getValue();
//System.out.println("updateScrollBars");
//int width = painter.getWidth();
int lineCount = getLineCount();
int maxLineLength = 0;
for (int i = 0; i < lineCount; i++) {
int lineLength = getLineLength(i);
if (lineLength > maxLineLength) {
maxLineLength = lineLength;
}
}
int charWidth = painter.getFontMetrics().charWidth('w');
int width = maxLineLength * charWidth;
int painterWidth = painter.getScrollWidth();
// Update to how horizontal scrolling is handled
// https://github.com/processing/processing/issues/319
// https://github.com/processing/processing/issues/355
//setValues(int newValue, int newExtent, int newMin, int newMax)
horizontal.setVisible(painterWidth < width);
if (horizontalOffset < 0) {
horizontal.setValues(-horizontalOffset, painterWidth, -leftHandGutter, width);
} else {
horizontal.setValues(-leftHandGutter, painterWidth, -leftHandGutter, width);
}
//horizontal.setUnitIncrement(painter.getFontMetrics().charWidth('w'));
horizontal.setUnitIncrement(charWidth);
horizontal.setBlockIncrement(width / 2);
}
}
/**
* Returns the line displayed at the text area's origin.
*/
public final int getFirstLine() {
return firstLine;
}
/**
* Sets the line displayed at the text area's origin without
* updating the scroll bars.
*/
public void setFirstLine(int firstLine) {
if(firstLine < 0 || firstLine > getLineCount()) {
throw new IllegalArgumentException("First line out of range: "
+ firstLine + " [0, " + getLineCount() + "]");
}
if (firstLine == this.firstLine) return;
this.firstLine = firstLine;
if (firstLine != vertical.getValue()) {
updateScrollBars();
}
painter.repaint();
}
/**
* Convenience for checking what's on-screen. [fry]
*/
public final int getLastLine() {
return getFirstLine() + getVisibleLines();
}
/**
* Returns the number of lines visible in this text area.
*/
public final int getVisibleLines() {
return visibleLines;
}
/**
* Recalculates the number of visible lines. This should not
* be called directly.
*/
public final void recalculateVisibleLines() {
if (painter == null) return;
int height = painter.getHeight();
int lineHeight = painter.getFontMetrics().getHeight();
visibleLines = height / lineHeight;
updateScrollBars();
}
/**
* Returns the horizontal offset of drawn lines.
*/
public final int getHorizontalOffset() {
return horizontalOffset;
}
/**
* Sets the horizontal offset of drawn lines. This can be used to
* implement horizontal scrolling.
* @param horizontalOffset offset The new horizontal offset
*/
public void setHorizontalOffset(int horizontalOffset) {
if (horizontalOffset == this.horizontalOffset) {
return;
}
this.horizontalOffset = horizontalOffset;
if (horizontalOffset != horizontal.getValue()) {
updateScrollBars();
}
painter.repaint();
}
/**
* A fast way of changing both the first line and horizontal
* offset.
* @param firstLine The new first line
* @param horizontalOffset The new horizontal offset
* @return True if any of the values were changed, false otherwise
*/
public boolean setOrigin(int firstLine, int horizontalOffset) {
boolean changed = false;
if (horizontalOffset != this.horizontalOffset) {
this.horizontalOffset = horizontalOffset;
changed = true;
}
if (firstLine != this.firstLine) {
this.firstLine = firstLine;
changed = true;
}
if (changed) {
updateScrollBars();
painter.repaint();
}
return changed;
}
/**
* Ensures that the caret is visible by scrolling the text area if
* necessary.
* @return True if scrolling was actually performed, false if the
* caret was already visible
*/
public boolean scrollToCaret() {
int line = getCaretLine();
int lineStart = getLineStartOffset(line);
int offset = Math.max(0,Math.min(getLineLength(line) - 1,
getCaretPosition() - lineStart));
return scrollTo(line,offset);
}
/**
* Ensures that the specified line and offset is visible by scrolling
* the text area if necessary.
* @param line The line to scroll to
* @param offset The offset in the line to scroll to
* @return True if scrolling was actually performed, false if the
* line and offset was already visible
*/
public boolean scrollTo(int line, int offset) {
// visibleLines == 0 before the component is realized
// we can't do any proper scrolling then, so we have
// this hack...
if (visibleLines == 0) {
setFirstLine(Math.max(0,line - electricScroll));
return true;
}
int newFirstLine = firstLine;
int newHorizontalOffset = horizontalOffset;
if(line < firstLine + electricScroll) {
newFirstLine = Math.max(0,line - electricScroll);
} else if(line + electricScroll >= firstLine + visibleLines) {
newFirstLine = (line - visibleLines) + electricScroll + 1;
if(newFirstLine + visibleLines >= getLineCount())
newFirstLine = getLineCount() - visibleLines;
if(newFirstLine < 0)
newFirstLine = 0;
}
int x = _offsetToX(line,offset);
int width = painter.getFontMetrics().charWidth('w');
if(x < 0) {
newHorizontalOffset = Math.max(0,horizontalOffset - x + width + 5);
} else if(x + width >= painter.getWidth()) {
newHorizontalOffset = horizontalOffset +
(painter.getWidth() - x) - width - 5;
}
return setOrigin(newFirstLine,newHorizontalOffset);
}
/**
* Converts a line index to a y co-ordinate.
* @param line The line
*/
public int lineToY(int line) {
FontMetrics fm = painter.getFontMetrics();
return (line - firstLine) * fm.getHeight()
- (fm.getLeading() + fm.getMaxDescent());
}
/**
* Converts a y co-ordinate to a line index.
* @param y The y co-ordinate
*/
public int yToLine(int y) {
FontMetrics fm = painter.getFontMetrics();
int height = fm.getHeight();
return Math.max(0, Math.min(getLineCount() - 1, y / height + firstLine));
}
/**
* Converts an offset in a line into an x co-ordinate. This is a
* slow version that can be used any time.
* @param line The line
* @param offset The offset, from the start of the line
*/
public final int offsetToX(int line, int offset) {
// don't use cached tokens
painter.currentLineTokens = null;
return _offsetToX(line,offset);
}
/**
* Converts an offset in a line into an x coordinate. This is a
* fast version that should only be used if no changes were made
* to the text since the last repaint.
* @param line The line
* @param offset The offset, from the start of the line
*/
public int _offsetToX(int line, int offset) {
TokenMarkerState tokenMarker = getTokenMarker();
FontMetrics fm = painter.getFontMetrics();
getLineText(line, lineSegment);
int segmentOffset = lineSegment.offset;
int x = horizontalOffset;
// If syntax coloring is disabled, do simple translation
if (tokenMarker == null) {
lineSegment.count = offset;
return x + getTabbedTextWidth(lineSegment, fm, x, painter, 0);
} else {
// If syntax coloring is enabled, we have to do this
// because tokens can vary in width
Token tokens;
if (painter.currentLineIndex == line && painter.currentLineTokens != null) {
tokens = painter.currentLineTokens;
} else {
painter.currentLineIndex = line;
tokens = painter.currentLineTokens = tokenMarker.markTokens(lineSegment, line);
}
// Font defaultFont = painter.getFont();
SyntaxStyle[] styles = painter.getStyles();
for (;;) {
byte id = tokens.id;
if (id == Token.END) {
return x;
}
if (id == Token.NULL) {
fm = painter.getFontMetrics();
} else {
//fm = styles[id].getFontMetrics(defaultFont, this);
fm = painter.getFontMetrics(styles[id]);
}
int length = tokens.length;
if (offset + segmentOffset < lineSegment.offset + length) {
lineSegment.count = offset - (lineSegment.offset - segmentOffset);
return x + getTabbedTextWidth(lineSegment, fm, x, painter, 0);
} else {
lineSegment.count = length;
x += getTabbedTextWidth(lineSegment, fm, x, painter, 0);
lineSegment.offset += length;
}
tokens = tokens.next;
}
}
}
/**
 * Converts an x co-ordinate to an offset within a line.
 * Walks the line's characters left to right, accumulating pixel widths
 * (tab-aware) until the accumulated width passes x.
 * @param line The line
 * @param x The x co-ordinate
 */
public int xToOffset(int line, int x) {
  TokenMarkerState tokenMarker = getTokenMarker();
  /* Use painter's cached info for speed */
  FontMetrics fm = painter.getFontMetrics();
  // System.out.println("metrics: " + fm);

  getLineText(line,lineSegment);

  char[] segmentArray = lineSegment.array;
  int segmentOffset = lineSegment.offset;
  int segmentCount = lineSegment.count;

  // Start from the horizontal scroll offset so x is compared in
  // component coordinates, not text coordinates.
  int width = horizontalOffset;

  if(tokenMarker == null)
  {
    // No token marker: every character uses the default font metrics.
    for(int i = 0; i < segmentCount; i++)
    {
      char c = segmentArray[i + segmentOffset];
      int charWidth;
      if(c == '\t')
        charWidth = (int)painter.nextTabStop(width,i)
          - width;
      else
        charWidth = fm.charWidth(c);

      // Block carets snap to the character under x; line carets snap to
      // the nearest character boundary (hence the half-width test).
      if(painter.isBlockCaretEnabled())
      {
        if(x - charWidth <= width)
          return i;
      }
      else
      {
        if(x - charWidth / 2 <= width)
          return i;
      }

      width += charWidth;
    }
    // x lies past the last character.
    return segmentCount;
  } else {
    Token tokens;
    // Reuse the painter's cached token list for this line when valid;
    // otherwise tokenize now and store the result in the cache.
    if (painter.currentLineIndex == line &&
        painter.currentLineTokens != null) {
      tokens = painter.currentLineTokens;
    } else {
      painter.currentLineIndex = line;
      tokens = painter.currentLineTokens = tokenMarker.markTokens(lineSegment,line);
    }

    int offset = 0;
    // Font defaultFont = painter.getFont();
    SyntaxStyle[] styles = painter.getStyles();

    // System.out.println("painter is " + painter + ", doc is " + document);

    for (;;) {
      byte id = tokens.id;
      if(id == Token.END)
        return offset;

      // Each token can render in a different style/font, so refresh the
      // metrics per token before measuring its characters.
      if (id == Token.NULL) {
        fm = painter.getFontMetrics();
      } else {
        //fm = styles[id].getFontMetrics(defaultFont, this);
        fm = painter.getFontMetrics(styles[id]);
      }

      int length = tokens.length;

      for (int i = 0; i < length; i++) {
        // System.out.println("segmentOffset = " + segmentOffset +
        //                    ", offset = " + offset +
        //                    ", i = " + i +
        //                    ", length = " + length +
        //                    ", array len = " + segmentArray.length);
        // Guard against token lengths that run past the segment array
        // -- NOTE(review): presumably stale cached tokens; confirm.
        if (segmentOffset + offset + i >= segmentArray.length) {
          return segmentArray.length - segmentOffset - 1;
        }
        char c = segmentArray[segmentOffset + offset + i];
        int charWidth;
        if (c == '\t') {
          charWidth = (int)painter.nextTabStop(width,offset + i) - width;
        } else {
          charWidth = fm.charWidth(c);
        }

        if (painter.isBlockCaretEnabled()) {
          if (x - charWidth <= width) {
            return offset + i;
          }
        } else {
          if (x - charWidth / 2 <= width) {
            return offset + i;
          }
        }

        width += charWidth;
      }
      offset += length;
      tokens = tokens.next;
    }
  }
}
/**
 * Converts a point in component space to an offset from the start of
 * the document's text.
 * @param x The x co-ordinate of the point
 * @param y The y co-ordinate of the point
 */
public int xyToOffset(int x, int y) {
  final int line = yToLine(y);
  return getLineStartOffset(line) + xToOffset(line, x);
}
/**
 * Returns the document this text area is editing.
 */
public final SyntaxDocument getDocument() {
  return document;
}
/**
 * Sets the document this text area is editing. Detaches the document
 * listener from the old document, attaches it to the new one, resets
 * bracket/selection state, and repaints.
 * @param document The document
 */
public void setDocument(SyntaxDocument document) {
  // Re-setting the same document is a no-op; avoids listener churn.
  if (this.document == document)
    return;
  if (this.document != null)
    this.document.removeDocumentListener(documentHandler);
  this.document = document;

  document.addDocumentListener(documentHandler);

  // Cached bracket-match info refers to the old document.
  bracketHelper.invalidate();
  select(0, 0);
  updateScrollBars();
  painter.repaint();
}
/**
 * Set document with a twist, includes the old caret
 * and scroll positions, added for p5. [fry]
 * Same as {@code setDocument(SyntaxDocument)} but restores a previous
 * selection and vertical scroll position instead of resetting them.
 * @param document The document
 * @param start selection start offset to restore
 * @param stop selection end offset to restore
 * @param scroll vertical scroll position to restore
 */
public void setDocument(SyntaxDocument document,
                        int start, int stop, int scroll) {
  if (this.document == document)
    return;

  if (this.document != null)
    this.document.removeDocumentListener(documentHandler);
  this.document = document;

  document.addDocumentListener(documentHandler);

  // Cached bracket-match info refers to the old document.
  bracketHelper.invalidate();
  select(start, stop);
  updateScrollBars();
  // Restore scroll after updateScrollBars() so the bar range is current.
  setVerticalScrollPosition(scroll);
  painter.repaint();
}
/**
 * Returns the document's token marker. Equivalent to calling
 * <code>getDocument().getTokenMarker()</code>.
 */
public final TokenMarkerState getTokenMarker() {
  return document.getTokenMarker();
}
/**
 * Sets the document's token marker. Equivalent to calling
 * <code>getDocument().setTokenMarker()</code>.
 * @param tokenMarker The token marker
 */
public final void setTokenMarker(TokenMarker tokenMarker) {
  document.setTokenMarker(tokenMarker);
}
/**
 * Returns the length of the document. Equivalent to calling
 * <code>getDocument().getLength()</code>.
 */
public final int getDocumentLength() {
  return document.getLength();
}
/**
 * Returns the number of lines in the document.
 */
public final int getLineCount() {
  return document.getDefaultRootElement().getElementCount();
}
/**
 * Returns the line containing the specified offset.
 * @param offset The offset
 */
public final int getLineOfOffset(int offset) {
  return document.getDefaultRootElement().getElementIndex(offset);
}
/**
 * Returns the start offset of the specified line.
 * @param line The line
 * @return The start offset of the specified line, or -1 if the line is
 * invalid
 */
public int getLineStartOffset(int line) {
  final Element lineElement = document.getDefaultRootElement().getElement(line);
  if (lineElement == null) {
    return -1;
  }
  return lineElement.getStartOffset();
}
/**
 * Returns the offset of the first non-whitespace character on the given
 * line, or the line's end offset when the line is blank or unreadable.
 * @param line The line
 */
public int getLineStartNonWhiteSpaceOffset(int line) {
  final int start = getLineStartOffset(line);
  final int len = getLineLength(line);
  final String text = getText(start, len);
  if (text != null) {
    int i = 0;
    while (i < text.length()) {
      if (!Character.isWhitespace(text.charAt(i))) {
        return start + i;
      }
      i++;
    }
  }
  // Blank line (or text unavailable): point at the end of the line.
  return start + len;
}
/**
 * Returns the end offset of the specified line.
 * @param line The line
 * @return The end offset of the specified line, or -1 if the line is
 * invalid.
 */
public int getLineStopOffset(int line) {
  final Element lineElement = document.getDefaultRootElement().getElement(line);
  if (lineElement == null) {
    return -1;
  }
  return lineElement.getEndOffset();
}
/**
 * Returns an offset near the last non-whitespace character of the given
 * line, or the line's start offset when the line is blank.
 * @param line The line
 */
public int getLineStopNonWhiteSpaceOffset(int line) {
  // offset is the element end offset (one past the trailing newline);
  // length excludes the newline, so the line text begins at
  // offset - length - 1.
  int offset = getLineStopOffset(line);
  int length = getLineLength(line);
  String str = getText(offset - length - 1, length);
  if (str != null) {
    // Scan backwards from the end of the line for non-whitespace.
    for (int i = 0; i < length; i++) {
      if (!Character.isWhitespace(str.charAt(length - i - 1))) {
        // NOTE(review): returns offset - i, which for i == 0 is one past
        // the newline; the mirror method getLineStartNonWhiteSpaceOffset
        // returns the character's own offset. Possible off-by-one --
        // confirm against callers before changing.
        return offset - i;
      }
    }
  }
  // Blank line: fall back to the start of the line.
  return offset - length;
}
/**
 * Returns the start offset of the line after this line, or the end of
 * this line if there is no next line.
 * @param line The line
 * @return The end offset of the specified line, or -1 if the line is
 * invalid.
 */
public int getLineSelectionStopOffset(int line) {
  Element lineElement = document.getDefaultRootElement().getElement(line);
  // Clamp to the document length: the last line's element end offset
  // extends one past the end of the document.
  return (lineElement == null) ? -1 :
    Math.min(lineElement.getEndOffset(), getDocumentLength());
}
/**
 * Returns the length of the specified line, not counting the trailing
 * newline.
 * @param line The line
 */
public int getLineLength(int line) {
  final Element lineElement = document.getDefaultRootElement().getElement(line);
  if (lineElement == null) {
    return -1;
  }
  // Subtract one so the line separator is not counted.
  return lineElement.getEndOffset() - lineElement.getStartOffset() - 1;
}
/**
 * Returns the entire text of this text area, or null if the document
 * could not be read.
 */
public String getText() {
  try {
    return document.getText(0, document.getLength());
  } catch (BadLocationException e) {
    e.printStackTrace();
    return null;
  }
}
/**
 * Sets the entire text of this text area. The removal and insertion are
 * grouped into a single undoable compound edit.
 */
public void setText(String text) {
  try {
    document.beginCompoundEdit();
    document.remove(0, document.getLength());
    document.insertString(0, text, null);
  } catch (BadLocationException e) {
    e.printStackTrace();
  } finally {
    // Always close the compound edit, even if the replace failed.
    document.endCompoundEdit();
  }
}
/**
 * Returns the specified substring of the document.
 * @param start The start offset
 * @param len The length of the substring
 * @return The substring, or null if the offsets are invalid
 */
public final String getText(int start, int len) {
  try {
    return document.getText(start, len);
  } catch (BadLocationException e) {
    e.printStackTrace();
    return null;
  }
}
/**
 * Copies the specified substring of the document into a segment.
 * If the offsets are invalid, the segment will contain a null string.
 * @param start The start offset
 * @param len The length of the substring
 * @param segment The segment
 */
public final void getText(int start, int len, Segment segment) {
  try {
    document.getText(start,len,segment);
  } catch (BadLocationException bl) {
    bl.printStackTrace();
    // %n terminates the diagnostic; without it the next write to
    // System.err would run on from this message.
    System.err.format("Bad Location: %d for start %d and length %d%n",
                      bl.offsetRequested(), start, len);
    // Leave the segment pointing at an empty range so callers do not
    // read stale data.
    segment.offset = segment.count = 0;
  }
}
/**
 * Returns the text on the specified line.
 * @param lineIndex The line
 * @return The text, or null if the line is invalid
 */
public final String getLineText(int lineIndex) {
  final int start = getLineStartOffset(lineIndex);
  // Exclude the trailing newline from the returned text.
  final int len = getLineStopOffset(lineIndex) - start - 1;
  return getText(start, len);
}
/**
 * Copies the text on the specified line into a segment. If the line
 * is invalid, the segment will contain a null string.
 * @param lineIndex The line
 */
public final void getLineText(int lineIndex, Segment segment) {
  final int start = getLineStartOffset(lineIndex);
  // Exclude the trailing newline from the copied range.
  final int len = getLineStopOffset(lineIndex) - start - 1;
  getText(start, len, segment);
}
/**
 * Returns the selection start offset.
 */
public final int getSelectionStart() {
  return selectionStart;
}
/**
 * Returns the offset where the selection starts on the specified
 * line: the real selection start on the first selected line, the
 * line's start offset otherwise.
 */
public int getSelectionStart(int line)
{
  return (line == selectionStartLine)
    ? selectionStart
    : getLineStartOffset(line);
}
/**
 * Returns the selection start line.
 */
public final int getSelectionStartLine()
{
  return selectionStartLine;
}
/**
 * Sets the selection start. The new selection will be the new
 * selection start and the old selection end.
 * @param selectionStart The selection start
 * @see #select(int,int)
 */
public final void setSelectionStart(int selectionStart)
{
  select(selectionStart,selectionEnd);
}
/**
 * Returns the selection end offset.
 */
public final int getSelectionStop()
{
  return selectionEnd;
}
/**
 * Returns the offset where the selection ends on the specified
 * line: the real selection end on the last selected line, the offset
 * just before the line's newline otherwise.
 */
public int getSelectionStop(int line)
{
  return (line == selectionEndLine)
    ? selectionEnd
    : getLineStopOffset(line) - 1;
}
/**
 * Returns the selection end line.
 */
public final int getSelectionStopLine()
{
  return selectionEndLine;
}
/**
 * Sets the selection end. The new selection will be the old
 * selection start and the new selection end.
 * @param selectionEnd The selection end
 * @see #select(int,int)
 */
public final void setSelectionEnd(int selectionEnd)
{
  select(selectionStart,selectionEnd);
}
/**
 * Returns true if any text is currently selected (the selection start
 * and end offsets differ).
 */
public final boolean isSelectionActive()
{
  return selectionStart != selectionEnd;
}
/**
 * Returns the caret position. This will either be the selection
 * start or the selection end, depending on which direction the
 * selection was made in.
 */
public final int getCaretPosition()
{
  return (biasLeft ? selectionStart : selectionEnd);
}
/**
 * Returns the caret line.
 */
public final int getCaretLine()
{
  return (biasLeft ? selectionStartLine : selectionEndLine);
}
/**
 * Returns the mark position. This will be the opposite selection
 * bound to the caret position.
 * @see #getCaretPosition()
 */
public final int getMarkPosition()
{
  return (biasLeft ? selectionEnd : selectionStart);
}
/**
 * Returns the mark line.
 */
public final int getMarkLine()
{
  return (biasLeft ? selectionEndLine : selectionStartLine);
}
/**
 * Sets the caret position. The new selection will consist of the
 * caret position only (hence no text will be selected)
 * @param caret The caret position
 * @see #select(int,int)
 */
public final void setCaretPosition(int caret)
{
  select(caret,caret);
}
/**
 * Selects all text in the document.
 */
public final void selectAll()
{
  select(0,getDocumentLength());
}
/**
 * Selects all text in the given line.
 * @param line The line number to select all text in it.
 */
private void selectLine(final int line) {
  // NOTE(review): selectLine and the anchor fields are presumably read
  // by drag handling to extend the selection by whole lines -- confirm.
  selectLine = true;
  final int lineStart = getLineStartOffset(line);
  final int lineEnd = getLineSelectionStopOffset(line);
  select(lineStart, lineEnd);
  // Record the anchors after select() so they reflect the normalized
  // selection bounds.
  selectionAncorStart = selectionStart;
  selectionAncorEnd = selectionEnd;
}
/**
 * Moves the mark to the caret position, collapsing the selection.
 */
public final void selectNone()
{
  select(getCaretPosition(),getCaretPosition());
}
/**
 * Selects from the start offset to the end offset. This is the
 * general selection method used by all other selecting methods.
 * The caret ends up at the {@code end} offset (the selection is
 * anchored at {@code start}), regardless of which of the two is larger.
 * @param start The start offset
 * @param end The end offset
 */
public void select(int start, int end)
{
  int newStart, newEnd;
  boolean newBias;
  // Normalize so newStart <= newEnd; biasLeft records which bound the
  // caret sits on (true when the caret is at the smaller offset).
  if(start <= end)
  {
    newStart = start;
    newEnd = end;
    newBias = false;
  }
  else
  {
    newStart = end;
    newEnd = start;
    newBias = true;
  }

  if((newStart < 0 || newEnd > getDocumentLength()) && start != end)
  {
    throw new IllegalArgumentException("Bounds out of"
                                       + " range: " + newStart + "," +
                                       newEnd + " [" + getDocumentLength() + "]");
  }

  // If the new position is the same as the old, we don't
  // do all this crap, however we still do the stuff at
  // the end (clearing magic position, scrolling)
  if(newStart != selectionStart || newEnd != selectionEnd
     || newBias != biasLeft)
  {
    int newStartLine = getLineOfOffset(newStart);
    int newEndLine = getLineOfOffset(newEnd);

    if(painter.isBracketHighlightEnabled())
    {
      // Repaint the old bracket line, recompute the match at the new
      // caret, then repaint the (possibly different) new bracket line.
      if(bracketLine != -1)
        painter.invalidateLine(bracketLine);
      updateBracketHighlight(end);
      if(bracketLine != -1)
        painter.invalidateLine(bracketLine);
    }

    // Repaint both the old and the new selection ranges.
    painter.invalidateLineRange(selectionStartLine,selectionEndLine);
    painter.invalidateLineRange(newStartLine,newEndLine);

    // Record the old caret so the move can be undone.
    document.addUndoableEdit(new CaretUndo(selectionStart,selectionEnd));

    selectionStart = newStart;
    selectionEnd = newEnd;
    selectionStartLine = newStartLine;
    selectionEndLine = newEndLine;
    biasLeft = newBias;

    fireCaretEvent();
  }

  // When the user is typing, etc, we don't want the caret to blink
  blink = true;
  if (!DISABLE_CARET) {
    caretTimer.restart();
  }

  // Clear the `magic' caret position used by up/down
  magicCaret = -1;

  scrollToCaret();

  // // notify the line number feller
  // if (editorLineStatus != null) {
  //   editorLineStatus.set(selectionStartLine, selectionEndLine);
  //   //System.out.println("why " + selectionStartLine + " " + selectionEndLine);
  //   //System.out.println(getLineOfOffset(start) + " " +
  //   //                   getLineOfOffset(end));
  // }
}
private
|
JEditTextArea
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumReader.java
|
{
"start": 17636,
"end": 18247
}
|
class ____ {
private final Schema schema;
private final int hashcode;
public IdentitySchemaKey(Schema schema) {
this.schema = schema;
this.hashcode = System.identityHashCode(schema);
}
@Override
public int hashCode() {
return this.hashcode;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof GenericDatumReader.IdentitySchemaKey)) {
return false;
}
IdentitySchemaKey key = (IdentitySchemaKey) obj;
return this == key || this.schema == key.schema;
}
}
// VisibleForTesting
static
|
IdentitySchemaKey
|
java
|
spring-projects__spring-boot
|
module/spring-boot-r2dbc/src/main/java/org/springframework/boot/r2dbc/testcontainers/SqlServerR2dbcContainerConnectionDetailsFactory.java
|
{
"start": 1483,
"end": 2083
}
|
class ____
extends ContainerConnectionDetailsFactory<MSSQLServerContainer, R2dbcConnectionDetails> {
SqlServerR2dbcContainerConnectionDetailsFactory() {
super(ANY_CONNECTION_NAME, "io.r2dbc.spi.ConnectionFactoryOptions");
}
@Override
public R2dbcConnectionDetails getContainerConnectionDetails(
ContainerConnectionSource<MSSQLServerContainer> source) {
return new MsSqlServerR2dbcDatabaseContainerConnectionDetails(source);
}
/**
* {@link R2dbcConnectionDetails} backed by a {@link ContainerConnectionSource}.
*/
private static final
|
SqlServerR2dbcContainerConnectionDetailsFactory
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/source/internal/hbm/FetchCharacteristicsPluralAttributeImpl.java
|
{
"start": 609,
"end": 1605
}
|
class ____ implements FetchCharacteristicsPluralAttribute {
private final FetchTiming fetchTiming;
private final FetchStyle fetchStyle;
private final Integer batchSize;
private final boolean extraLazy;
public FetchCharacteristicsPluralAttributeImpl(
FetchTiming fetchTiming,
FetchStyle fetchStyle,
Integer batchSize,
boolean extraLazy) {
this.fetchTiming = fetchTiming;
this.fetchStyle = fetchStyle;
this.batchSize = batchSize;
this.extraLazy = extraLazy;
}
@Override
public FetchTiming getFetchTiming() {
return fetchTiming;
}
@Override
public FetchStyle getFetchStyle() {
return fetchStyle;
}
@Override
public Integer getBatchSize() {
return batchSize;
}
@Override
public boolean isExtraLazy() {
return getFetchTiming() == FetchTiming.DELAYED && extraLazy;
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Builder
public static
|
FetchCharacteristicsPluralAttributeImpl
|
java
|
hibernate__hibernate-orm
|
hibernate-community-dialects/src/test/java/org/hibernate/community/dialect/unit/sequence/DerbyTenSixDialectSequenceInformationExtractorTest.java
|
{
"start": 673,
"end": 1305
}
|
class ____ extends AbstractSequenceInformationExtractorTest {
@Override
public Dialect getDialect() {
return new DerbyLegacyDialect( DatabaseVersion.make( 10, 6 ) );
}
@Override
public String expectedQuerySequencesString() {
return "select sys.sysschemas.schemaname as sequence_schema,sys.syssequences.* from sys.syssequences left join sys.sysschemas on sys.syssequences.schemaid=sys.sysschemas.schemaid";
}
@Override
public Class<? extends SequenceInformationExtractor> expectedSequenceInformationExtractor() {
return SequenceInformationExtractorLegacyImpl.class;
}
}
|
DerbyTenSixDialectSequenceInformationExtractorTest
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.