language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/reflect/AvroMeta.java | {
"start": 1274,
"end": 1423
} | interface ____ {
String key();
String value();
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE, ElementType.FIELD })
@ | AvroMeta |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/invoker/wrapper/InvocationWrapperTest.java | {
"start": 648,
"end": 1426
} | class ____ {
@RegisterExtension
public ArcTestContainer container = ArcTestContainer.builder()
.beanClasses(MyService.class)
.beanRegistrars(new InvokerHelperRegistrar(MyService.class, (bean, factory, invokers) -> {
MethodInfo hello = bean.getImplClazz().firstMethod("hello");
MethodInfo doSomething = bean.getImplClazz().firstMethod("doSomething");
for (MethodInfo method : List.of(hello, doSomething)) {
invokers.put(method.name(), factory.createInvoker(bean, method)
.withInvocationWrapper(InvocationWrapper.class, "wrap")
.build());
}
}))
.build();
static | InvocationWrapperTest |
java | quarkusio__quarkus | extensions/devui/runtime/src/main/java/io/quarkus/devui/runtime/DevUIBuildTimeStaticHandler.java | {
"start": 3170,
"end": 3545
} | interface ____ {
public static final String HTML = "text/html; charset=utf-8";
public static final String JS = "text/javascript; charset=utf-8";
public static final String JSON = "application/json";
public static final String CSS = "text/css; charset=utf-8";
public static final String PLAIN = "text/plain; charset=utf-8";
}
}
| MimeType |
java | apache__camel | components/camel-cm-sms/src/main/java/org/apache/camel/component/cm/exceptions/CMDirectException.java | {
"start": 861,
"end": 1484
} | class ____ extends RuntimeException {
public CMDirectException() {
}
public CMDirectException(final String message) {
super(message);
}
public CMDirectException(final Throwable cause) {
super(cause);
}
public CMDirectException(final String message, final Throwable cause) {
super(message, cause);
}
public CMDirectException(final String message, final Throwable cause,
final boolean enableSuppression, final boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
| CMDirectException |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/error/ShouldBeAfterYear.java | {
"start": 983,
"end": 2184
} | class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link ShouldBeAfterYear}</code>.
* @param actual the actual value in the failed assertion.
* @param year the year to compare the actual date's year to.
* @param comparisonStrategy the {@link ComparisonStrategy} used.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldBeAfterYear(Date actual, int year, ComparisonStrategy comparisonStrategy) {
return new ShouldBeAfterYear(actual, year, comparisonStrategy);
}
/**
* Creates a new <code>{@link ShouldBeAfterYear}</code>.
* @param actual the actual value in the failed assertion.
* @param year the year to compare the actual date's year to.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldBeAfterYear(Date actual, int year) {
return new ShouldBeAfterYear(actual, year, StandardComparisonStrategy.instance());
}
private ShouldBeAfterYear(Date actual, int year, ComparisonStrategy comparisonStrategy) {
super("%nExpecting year of:%n %s%nto be strictly after year:%n %s%n%s", actual, year, comparisonStrategy);
}
}
| ShouldBeAfterYear |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java | {
"start": 2729,
"end": 6461
} | class ____ extends JobTaskRequest<Request> {
private final ModelPlotConfig modelPlotConfig;
private final PerPartitionCategorizationConfig perPartitionCategorizationConfig;
private List<JobUpdate.DetectorUpdate> detectorUpdates;
private final MlFilter filter;
private final boolean updateScheduledEvents;
public Request(StreamInput in) throws IOException {
super(in);
modelPlotConfig = in.readOptionalWriteable(ModelPlotConfig::new);
perPartitionCategorizationConfig = in.readOptionalWriteable(PerPartitionCategorizationConfig::new);
if (in.readBoolean()) {
detectorUpdates = in.readCollectionAsList(JobUpdate.DetectorUpdate::new);
}
filter = in.readOptionalWriteable(MlFilter::new);
updateScheduledEvents = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeOptionalWriteable(modelPlotConfig);
out.writeOptionalWriteable(perPartitionCategorizationConfig);
boolean hasDetectorUpdates = detectorUpdates != null;
out.writeBoolean(hasDetectorUpdates);
if (hasDetectorUpdates) {
out.writeCollection(detectorUpdates);
}
out.writeOptionalWriteable(filter);
out.writeBoolean(updateScheduledEvents);
}
public Request(
String jobId,
ModelPlotConfig modelPlotConfig,
PerPartitionCategorizationConfig perPartitionCategorizationConfig,
List<JobUpdate.DetectorUpdate> detectorUpdates,
MlFilter filter,
boolean updateScheduledEvents
) {
super(jobId);
this.modelPlotConfig = modelPlotConfig;
this.perPartitionCategorizationConfig = perPartitionCategorizationConfig;
this.detectorUpdates = detectorUpdates;
this.filter = filter;
this.updateScheduledEvents = updateScheduledEvents;
}
public ModelPlotConfig getModelPlotConfig() {
return modelPlotConfig;
}
public PerPartitionCategorizationConfig getPerPartitionCategorizationConfig() {
return perPartitionCategorizationConfig;
}
public List<JobUpdate.DetectorUpdate> getDetectorUpdates() {
return detectorUpdates;
}
public MlFilter getFilter() {
return filter;
}
public boolean isUpdateScheduledEvents() {
return updateScheduledEvents;
}
@Override
public int hashCode() {
return Objects.hash(
getJobId(),
modelPlotConfig,
perPartitionCategorizationConfig,
detectorUpdates,
filter,
updateScheduledEvents
);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
Request other = (Request) obj;
return Objects.equals(getJobId(), other.getJobId())
&& Objects.equals(modelPlotConfig, other.modelPlotConfig)
&& Objects.equals(perPartitionCategorizationConfig, other.perPartitionCategorizationConfig)
&& Objects.equals(detectorUpdates, other.detectorUpdates)
&& Objects.equals(filter, other.filter)
&& Objects.equals(updateScheduledEvents, other.updateScheduledEvents);
}
}
}
| Request |
java | apache__camel | core/camel-main/src/test/java/org/apache/camel/main/support/MyDummyComponent.java | {
"start": 1077,
"end": 2305
} | class ____ extends DefaultComponent {
private MyDummyConfiguration configuration;
private final boolean configurer;
private String componentValue;
public MyDummyComponent(boolean configurer) {
this.configurer = configurer;
}
public MyDummyConfiguration getConfiguration() {
return configuration;
}
public void setConfiguration(MyDummyConfiguration configuration) {
this.configuration = configuration;
}
// this method makes camel no able to find a suitable setter
public void setConfiguration(Object configuration) {
this.configuration = (MyDummyConfiguration) configuration;
}
public String getComponentValue() {
return componentValue;
}
public MyDummyComponent setComponentValue(String componentValue) {
this.componentValue = componentValue;
return this;
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) {
throw new UnsupportedOperationException();
}
@Override
public PropertyConfigurer getComponentPropertyConfigurer() {
return configurer ? new MyDummyComponentConfigurer() : null;
}
}
| MyDummyComponent |
java | grpc__grpc-java | xds/src/test/java/io/grpc/xds/GrpcXdsClientImplDataTest.java | {
"start": 8973,
"end": 62570
} | class ____ {
private static final FaultFilter.Provider FAULT_FILTER_PROVIDER = new FaultFilter.Provider();
private static final RbacFilter.Provider RBAC_FILTER_PROVIDER = new RbacFilter.Provider();
private static final RouterFilter.Provider ROUTER_FILTER_PROVIDER = new RouterFilter.Provider();
private static final ServerInfo LRS_SERVER_INFO =
ServerInfo.create("lrs.googleapis.com", InsecureChannelCredentials.create());
private static final String GRPC_EXPERIMENTAL_XDS_AUTHORITY_REWRITE =
"GRPC_EXPERIMENTAL_XDS_AUTHORITY_REWRITE";
private final FilterRegistry filterRegistry = FilterRegistry.getDefaultRegistry();
private boolean originalEnableRouteLookup;
private boolean originalEnableLeastRequest;
private boolean originalEnableUseSystemRootCerts;
@Before
public void setUp() {
originalEnableRouteLookup = XdsRouteConfigureResource.enableRouteLookup;
originalEnableLeastRequest = XdsClusterResource.enableLeastRequest;
originalEnableUseSystemRootCerts = XdsClusterResource.enableSystemRootCerts;
}
@After
public void tearDown() {
XdsRouteConfigureResource.enableRouteLookup = originalEnableRouteLookup;
XdsClusterResource.enableLeastRequest = originalEnableLeastRequest;
XdsClusterResource.enableSystemRootCerts = originalEnableUseSystemRootCerts;
}
@Test
public void parseRoute_withRouteAction() {
io.envoyproxy.envoy.config.route.v3.Route proto =
io.envoyproxy.envoy.config.route.v3.Route.newBuilder()
.setName("route-blade")
.setMatch(
io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder()
.setPath("/service/method"))
.setRoute(
io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo"))
.build();
StructOrError<Route> struct = XdsRouteConfigureResource.parseRoute(
proto, filterRegistry, ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getErrorDetail()).isNull();
assertThat(struct.getStruct())
.isEqualTo(
Route.forAction(
RouteMatch.create(PathMatcher.fromPath("/service/method", false),
Collections.<HeaderMatcher>emptyList(), null),
RouteAction.forCluster(
"cluster-foo", Collections.<HashPolicy>emptyList(), null, null, false),
ImmutableMap.<String, FilterConfig>of()));
}
@Test
public void parseRoute_withNonForwardingAction() {
io.envoyproxy.envoy.config.route.v3.Route proto =
io.envoyproxy.envoy.config.route.v3.Route.newBuilder()
.setName("route-blade")
.setMatch(
io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder()
.setPath("/service/method"))
.setNonForwardingAction(NonForwardingAction.getDefaultInstance())
.build();
StructOrError<Route> struct = XdsRouteConfigureResource.parseRoute(
proto, filterRegistry, ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getStruct())
.isEqualTo(
Route.forNonForwardingAction(
RouteMatch.create(PathMatcher.fromPath("/service/method", false),
Collections.<HeaderMatcher>emptyList(), null),
ImmutableMap.<String, FilterConfig>of()));
}
@Test
public void parseRoute_withUnsupportedActionTypes() {
StructOrError<Route> res;
io.envoyproxy.envoy.config.route.v3.Route redirectRoute =
io.envoyproxy.envoy.config.route.v3.Route.newBuilder()
.setName("route-blade")
.setMatch(io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder().setPath(""))
.setRedirect(RedirectAction.getDefaultInstance())
.build();
res = XdsRouteConfigureResource.parseRoute(
redirectRoute, filterRegistry, ImmutableMap.of(), ImmutableSet.of(),
getXdsResourceTypeArgs(true));
assertThat(res.getStruct()).isNull();
assertThat(res.getErrorDetail())
.isEqualTo("Route [route-blade] with unknown action type: REDIRECT");
io.envoyproxy.envoy.config.route.v3.Route directResponseRoute =
io.envoyproxy.envoy.config.route.v3.Route.newBuilder()
.setName("route-blade")
.setMatch(io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder().setPath(""))
.setDirectResponse(DirectResponseAction.getDefaultInstance())
.build();
res = XdsRouteConfigureResource.parseRoute(
directResponseRoute, filterRegistry, ImmutableMap.of(), ImmutableSet.of(),
getXdsResourceTypeArgs(true));
assertThat(res.getStruct()).isNull();
assertThat(res.getErrorDetail())
.isEqualTo("Route [route-blade] with unknown action type: DIRECT_RESPONSE");
io.envoyproxy.envoy.config.route.v3.Route filterRoute =
io.envoyproxy.envoy.config.route.v3.Route.newBuilder()
.setName("route-blade")
.setMatch(io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder().setPath(""))
.setFilterAction(FilterAction.getDefaultInstance())
.build();
res = XdsRouteConfigureResource.parseRoute(
filterRoute, filterRegistry, ImmutableMap.of(), ImmutableSet.of(),
getXdsResourceTypeArgs(true));
assertThat(res.getStruct()).isNull();
assertThat(res.getErrorDetail())
.isEqualTo("Route [route-blade] with unknown action type: FILTER_ACTION");
}
@Test
public void parseRoute_skipRouteWithUnsupportedMatcher() {
io.envoyproxy.envoy.config.route.v3.Route proto =
io.envoyproxy.envoy.config.route.v3.Route.newBuilder()
.setName("ignore me")
.setMatch(
io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder()
.setPath("/service/method")
.addQueryParameters(
io.envoyproxy.envoy.config.route.v3.QueryParameterMatcher
.getDefaultInstance())) // query parameter not supported
.setRoute(
io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo"))
.build();
assertThat(XdsRouteConfigureResource.parseRoute(
proto, filterRegistry, ImmutableMap.of(), ImmutableSet.of(),
getXdsResourceTypeArgs(true)))
.isNull();
}
@Test
public void parseRoute_skipRouteWithUnsupportedAction() {
io.envoyproxy.envoy.config.route.v3.Route proto =
io.envoyproxy.envoy.config.route.v3.Route.newBuilder()
.setName("ignore me")
.setMatch(
io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder()
.setPath("/service/method"))
.setRoute(
io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setClusterHeader("cluster header")) // cluster_header action not supported
.build();
assertThat(XdsRouteConfigureResource.parseRoute(
proto, filterRegistry, ImmutableMap.of(), ImmutableSet.of(),
getXdsResourceTypeArgs(true)))
.isNull();
}
@Test
@SuppressWarnings("deprecation")
public void parseRouteMatch_withHeaderMatcher() {
io.envoyproxy.envoy.config.route.v3.RouteMatch proto =
io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder()
.setPrefix("")
.addHeaders(
io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
.setName(":scheme")
.setPrefixMatch("http"))
.addHeaders(
io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
.setName(":method")
.setExactMatch("PUT"))
.build();
StructOrError<RouteMatch> struct = XdsRouteConfigureResource.parseRouteMatch(proto);
assertThat(struct.getErrorDetail()).isNull();
assertThat(struct.getStruct())
.isEqualTo(
RouteMatch.create(
PathMatcher.fromPrefix("", false),
Arrays.asList(
HeaderMatcher.forPrefix(":scheme", "http", false),
HeaderMatcher.forExactValue(":method", "PUT", false)),
null));
}
@Test
public void parseRouteMatch_withRuntimeFractionMatcher() {
io.envoyproxy.envoy.config.route.v3.RouteMatch proto =
io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder()
.setPrefix("")
.setRuntimeFraction(
RuntimeFractionalPercent.newBuilder()
.setDefaultValue(
FractionalPercent.newBuilder()
.setNumerator(30)
.setDenominator(FractionalPercent.DenominatorType.HUNDRED)))
.build();
StructOrError<RouteMatch> struct = XdsRouteConfigureResource.parseRouteMatch(proto);
assertThat(struct.getErrorDetail()).isNull();
assertThat(struct.getStruct())
.isEqualTo(
RouteMatch.create(
PathMatcher.fromPrefix( "", false), Collections.<HeaderMatcher>emptyList(),
FractionMatcher.create(30, 100)));
}
@Test
public void parsePathMatcher_withFullPath() {
io.envoyproxy.envoy.config.route.v3.RouteMatch proto =
io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder()
.setPath("/service/method")
.build();
StructOrError<PathMatcher> struct = XdsRouteConfigureResource.parsePathMatcher(proto);
assertThat(struct.getErrorDetail()).isNull();
assertThat(struct.getStruct()).isEqualTo(
PathMatcher.fromPath("/service/method", false));
}
@Test
public void parsePathMatcher_withPrefix() {
io.envoyproxy.envoy.config.route.v3.RouteMatch proto =
io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder().setPrefix("/").build();
StructOrError<PathMatcher> struct = XdsRouteConfigureResource.parsePathMatcher(proto);
assertThat(struct.getErrorDetail()).isNull();
assertThat(struct.getStruct()).isEqualTo(
PathMatcher.fromPrefix("/", false));
}
@Test
public void parsePathMatcher_withSafeRegEx() {
io.envoyproxy.envoy.config.route.v3.RouteMatch proto =
io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder()
.setSafeRegex(RegexMatcher.newBuilder().setRegex("."))
.build();
StructOrError<PathMatcher> struct = XdsRouteConfigureResource.parsePathMatcher(proto);
assertThat(struct.getErrorDetail()).isNull();
assertThat(struct.getStruct()).isEqualTo(PathMatcher.fromRegEx(Pattern.compile(".")));
}
@Test
@SuppressWarnings("deprecation")
public void parseHeaderMatcher_withExactMatch() {
io.envoyproxy.envoy.config.route.v3.HeaderMatcher proto =
io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
.setName(":method")
.setExactMatch("PUT")
.build();
StructOrError<HeaderMatcher> struct1 = XdsRouteConfigureResource.parseHeaderMatcher(proto);
assertThat(struct1.getErrorDetail()).isNull();
assertThat(struct1.getStruct()).isEqualTo(
HeaderMatcher.forExactValue(":method", "PUT", false));
}
@Test
@SuppressWarnings("deprecation")
public void parseHeaderMatcher_withSafeRegExMatch() {
io.envoyproxy.envoy.config.route.v3.HeaderMatcher proto =
io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
.setName(":method")
.setSafeRegexMatch(RegexMatcher.newBuilder().setRegex("P*"))
.build();
StructOrError<HeaderMatcher> struct3 = XdsRouteConfigureResource.parseHeaderMatcher(proto);
assertThat(struct3.getErrorDetail()).isNull();
assertThat(struct3.getStruct()).isEqualTo(
HeaderMatcher.forSafeRegEx(":method", Pattern.compile("P*"), false));
}
@Test
public void parseHeaderMatcher_withRangeMatch() {
io.envoyproxy.envoy.config.route.v3.HeaderMatcher proto =
io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
.setName("timeout")
.setRangeMatch(Int64Range.newBuilder().setStart(10L).setEnd(20L))
.build();
StructOrError<HeaderMatcher> struct4 = XdsRouteConfigureResource.parseHeaderMatcher(proto);
assertThat(struct4.getErrorDetail()).isNull();
assertThat(struct4.getStruct()).isEqualTo(
HeaderMatcher.forRange("timeout", HeaderMatcher.Range.create(10L, 20L), false));
}
@Test
public void parseHeaderMatcher_withPresentMatch() {
io.envoyproxy.envoy.config.route.v3.HeaderMatcher proto =
io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
.setName("user-agent")
.setPresentMatch(true)
.build();
StructOrError<HeaderMatcher> struct5 = XdsRouteConfigureResource.parseHeaderMatcher(proto);
assertThat(struct5.getErrorDetail()).isNull();
assertThat(struct5.getStruct()).isEqualTo(
HeaderMatcher.forPresent("user-agent", true, false));
}
@Test
@SuppressWarnings("deprecation")
public void parseHeaderMatcher_withPrefixMatch() {
io.envoyproxy.envoy.config.route.v3.HeaderMatcher proto =
io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
.setName("authority")
.setPrefixMatch("service-foo")
.build();
StructOrError<HeaderMatcher> struct6 = XdsRouteConfigureResource.parseHeaderMatcher(proto);
assertThat(struct6.getErrorDetail()).isNull();
assertThat(struct6.getStruct()).isEqualTo(
HeaderMatcher.forPrefix("authority", "service-foo", false));
}
@Test
@SuppressWarnings("deprecation")
public void parseHeaderMatcher_withSuffixMatch() {
io.envoyproxy.envoy.config.route.v3.HeaderMatcher proto =
io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
.setName("authority")
.setSuffixMatch("googleapis.com")
.build();
StructOrError<HeaderMatcher> struct7 = XdsRouteConfigureResource.parseHeaderMatcher(proto);
assertThat(struct7.getErrorDetail()).isNull();
assertThat(struct7.getStruct()).isEqualTo(
HeaderMatcher.forSuffix("authority", "googleapis.com", false));
}
@Test
@SuppressWarnings("deprecation")
public void parseHeaderMatcher_malformedRegExPattern() {
io.envoyproxy.envoy.config.route.v3.HeaderMatcher proto =
io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
.setName(":method")
.setSafeRegexMatch(RegexMatcher.newBuilder().setRegex("["))
.build();
StructOrError<HeaderMatcher> struct = XdsRouteConfigureResource.parseHeaderMatcher(proto);
assertThat(struct.getErrorDetail()).isNotNull();
assertThat(struct.getStruct()).isNull();
}
@Test
@SuppressWarnings("deprecation")
public void parseHeaderMatcher_withStringMatcher() {
io.envoyproxy.envoy.type.matcher.v3.StringMatcher stringMatcherProto =
io.envoyproxy.envoy.type.matcher.v3.StringMatcher.newBuilder()
.setPrefix("service-foo")
.setIgnoreCase(false)
.build();
io.envoyproxy.envoy.config.route.v3.HeaderMatcher proto =
io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
.setName("authority")
.setStringMatch(stringMatcherProto)
.setInvertMatch(false)
.build();
StructOrError<HeaderMatcher> struct = XdsRouteConfigureResource.parseHeaderMatcher(proto);
assertThat(struct.getErrorDetail()).isNull();
assertThat(struct.getStruct()).isEqualTo(
HeaderMatcher.forString("authority", Matchers.StringMatcher
.forPrefix("service-foo", false), false));
}
@Test
public void parseRouteAction_withCluster() {
io.envoyproxy.envoy.config.route.v3.RouteAction proto =
io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.build();
StructOrError<RouteAction> struct =
XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getErrorDetail()).isNull();
assertThat(struct.getStruct().cluster()).isEqualTo("cluster-foo");
assertThat(struct.getStruct().weightedClusters()).isNull();
assertThat(struct.getStruct().autoHostRewrite()).isFalse();
}
@Test
public void parseRouteAction_withCluster_autoHostRewriteEnabled() {
System.setProperty(GRPC_EXPERIMENTAL_XDS_AUTHORITY_REWRITE, "true");
try {
io.envoyproxy.envoy.config.route.v3.RouteAction proto =
io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setAutoHostRewrite(BoolValue.of(true))
.build();
StructOrError<RouteAction> struct =
XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getErrorDetail()).isNull();
assertThat(struct.getStruct().cluster()).isEqualTo("cluster-foo");
assertThat(struct.getStruct().weightedClusters()).isNull();
assertThat(struct.getStruct().autoHostRewrite()).isTrue();
} finally {
System.clearProperty(GRPC_EXPERIMENTAL_XDS_AUTHORITY_REWRITE);
}
}
@Test
public void parseRouteAction_withCluster_flagDisabled_autoHostRewriteNotEnabled() {
io.envoyproxy.envoy.config.route.v3.RouteAction proto =
io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setAutoHostRewrite(BoolValue.of(true))
.build();
StructOrError<RouteAction> struct =
XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getErrorDetail()).isNull();
assertThat(struct.getStruct().cluster()).isEqualTo("cluster-foo");
assertThat(struct.getStruct().weightedClusters()).isNull();
assertThat(struct.getStruct().autoHostRewrite()).isTrue();
}
@Test
public void parseRouteAction_withWeightedCluster() {
io.envoyproxy.envoy.config.route.v3.RouteAction proto =
io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setWeightedClusters(
WeightedCluster.newBuilder()
.addClusters(
WeightedCluster.ClusterWeight
.newBuilder()
.setName("cluster-foo")
.setWeight(UInt32Value.newBuilder().setValue(30)))
.addClusters(WeightedCluster.ClusterWeight
.newBuilder()
.setName("cluster-bar")
.setWeight(UInt32Value.newBuilder().setValue(70))))
.build();
StructOrError<RouteAction> struct =
XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getErrorDetail()).isNull();
assertThat(struct.getStruct().cluster()).isNull();
assertThat(struct.getStruct().weightedClusters()).containsExactly(
ClusterWeight.create("cluster-foo", 30, ImmutableMap.<String, FilterConfig>of()),
ClusterWeight.create("cluster-bar", 70, ImmutableMap.<String, FilterConfig>of()));
assertThat(struct.getStruct().autoHostRewrite()).isFalse();
}
@Test
public void parseRouteAction_withWeightedCluster_autoHostRewriteEnabled() {
System.setProperty(GRPC_EXPERIMENTAL_XDS_AUTHORITY_REWRITE, "true");
try {
io.envoyproxy.envoy.config.route.v3.RouteAction proto =
io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setWeightedClusters(
WeightedCluster.newBuilder()
.addClusters(
WeightedCluster.ClusterWeight
.newBuilder()
.setName("cluster-foo")
.setWeight(UInt32Value.newBuilder().setValue(30)))
.addClusters(WeightedCluster.ClusterWeight
.newBuilder()
.setName("cluster-bar")
.setWeight(UInt32Value.newBuilder().setValue(70))))
.setAutoHostRewrite(BoolValue.of(true))
.build();
StructOrError<RouteAction> struct =
XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getErrorDetail()).isNull();
assertThat(struct.getStruct().cluster()).isNull();
assertThat(struct.getStruct().weightedClusters()).containsExactly(
ClusterWeight.create("cluster-foo", 30, ImmutableMap.<String, FilterConfig>of()),
ClusterWeight.create("cluster-bar", 70, ImmutableMap.<String, FilterConfig>of()));
assertThat(struct.getStruct().autoHostRewrite()).isTrue();
} finally {
System.clearProperty(GRPC_EXPERIMENTAL_XDS_AUTHORITY_REWRITE);
}
}
@Test
public void parseRouteAction_withWeightedCluster_flagDisabled_autoHostRewriteDisabled() {
io.envoyproxy.envoy.config.route.v3.RouteAction proto =
io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setWeightedClusters(
WeightedCluster.newBuilder()
.addClusters(
WeightedCluster.ClusterWeight
.newBuilder()
.setName("cluster-foo")
.setWeight(UInt32Value.newBuilder().setValue(30)))
.addClusters(WeightedCluster.ClusterWeight
.newBuilder()
.setName("cluster-bar")
.setWeight(UInt32Value.newBuilder().setValue(70))))
.setAutoHostRewrite(BoolValue.of(true))
.build();
StructOrError<RouteAction> struct =
XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getErrorDetail()).isNull();
assertThat(struct.getStruct().cluster()).isNull();
assertThat(struct.getStruct().weightedClusters()).containsExactly(
ClusterWeight.create("cluster-foo", 30, ImmutableMap.<String, FilterConfig>of()),
ClusterWeight.create("cluster-bar", 70, ImmutableMap.<String, FilterConfig>of()));
assertThat(struct.getStruct().autoHostRewrite()).isTrue();
}
@Test
public void parseRouteAction_weightedClusterSum() {
io.envoyproxy.envoy.config.route.v3.RouteAction proto =
io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setWeightedClusters(
WeightedCluster.newBuilder()
.addClusters(
WeightedCluster.ClusterWeight
.newBuilder()
.setName("cluster-foo")
.setWeight(UInt32Value.newBuilder().setValue(0)))
.addClusters(WeightedCluster.ClusterWeight
.newBuilder()
.setName("cluster-bar")
.setWeight(UInt32Value.newBuilder().setValue(0))))
.build();
StructOrError<RouteAction> struct =
XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getErrorDetail()).isEqualTo("Sum of cluster weights should be above 0.");
}
@Test
public void parseRouteAction_withTimeoutByGrpcTimeoutHeaderMax() {
io.envoyproxy.envoy.config.route.v3.RouteAction proto =
io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setMaxStreamDuration(
MaxStreamDuration.newBuilder()
.setGrpcTimeoutHeaderMax(Durations.fromSeconds(5L))
.setMaxStreamDuration(Durations.fromMillis(20L)))
.build();
StructOrError<RouteAction> struct =
XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getStruct().timeoutNano()).isEqualTo(TimeUnit.SECONDS.toNanos(5L));
}
@Test
public void parseRouteAction_withTimeoutByMaxStreamDuration() {
io.envoyproxy.envoy.config.route.v3.RouteAction proto =
io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setMaxStreamDuration(
MaxStreamDuration.newBuilder()
.setMaxStreamDuration(Durations.fromSeconds(5L)))
.build();
StructOrError<RouteAction> struct =
XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getStruct().timeoutNano()).isEqualTo(TimeUnit.SECONDS.toNanos(5L));
}
@Test
public void parseRouteAction_withTimeoutUnset() {
io.envoyproxy.envoy.config.route.v3.RouteAction proto =
io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.build();
StructOrError<RouteAction> struct =
XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getStruct().timeoutNano()).isNull();
}
@Test
public void parseRouteAction_withRetryPolicy() {
RetryPolicy.Builder builder = RetryPolicy.newBuilder()
.setNumRetries(UInt32Value.of(3))
.setRetryBackOff(
RetryBackOff.newBuilder()
.setBaseInterval(Durations.fromMillis(500))
.setMaxInterval(Durations.fromMillis(600)))
.setPerTryTimeout(Durations.fromMillis(300))
.setRetryOn(
"cancelled,deadline-exceeded,internal,resource-exhausted,unavailable");
io.envoyproxy.envoy.config.route.v3.RouteAction proto =
io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setRetryPolicy(builder.build())
.build();
StructOrError<RouteAction> struct =
XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
RouteAction.RetryPolicy retryPolicy = struct.getStruct().retryPolicy();
assertThat(retryPolicy.maxAttempts()).isEqualTo(4);
assertThat(retryPolicy.initialBackoff()).isEqualTo(Durations.fromMillis(500));
assertThat(retryPolicy.maxBackoff()).isEqualTo(Durations.fromMillis(600));
// Not supporting per_try_timeout yet.
assertThat(retryPolicy.perAttemptRecvTimeout()).isEqualTo(null);
assertThat(retryPolicy.retryableStatusCodes()).containsExactly(
Code.CANCELLED, Code.DEADLINE_EXCEEDED, Code.INTERNAL, Code.RESOURCE_EXHAUSTED,
Code.UNAVAILABLE);
// empty retry_on
builder = RetryPolicy.newBuilder()
.setNumRetries(UInt32Value.of(3))
.setRetryBackOff(
RetryBackOff.newBuilder()
.setBaseInterval(Durations.fromMillis(500))
.setMaxInterval(Durations.fromMillis(600)))
.setPerTryTimeout(Durations.fromMillis(300)); // Not supporting per_try_timeout yet.
proto = io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setRetryPolicy(builder.build())
.build();
struct = XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getStruct().retryPolicy()).isNotNull();
assertThat(struct.getStruct().retryPolicy().retryableStatusCodes()).isEmpty();
// base_interval unset
builder
.setRetryOn("cancelled")
.setRetryBackOff(RetryBackOff.newBuilder().setMaxInterval(Durations.fromMillis(600)));
proto = io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setRetryPolicy(builder)
.build();
struct = XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getErrorDetail()).isEqualTo("No base_interval specified in retry_backoff");
// max_interval unset
builder.setRetryBackOff(RetryBackOff.newBuilder().setBaseInterval(Durations.fromMillis(500)));
proto = io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setRetryPolicy(builder)
.build();
struct = XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
retryPolicy = struct.getStruct().retryPolicy();
assertThat(retryPolicy.maxBackoff()).isEqualTo(Durations.fromMillis(500 * 10));
// base_interval < 0
builder.setRetryBackOff(RetryBackOff.newBuilder().setBaseInterval(Durations.fromMillis(-1)));
proto = io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setRetryPolicy(builder)
.build();
struct = XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getErrorDetail())
.isEqualTo("base_interval in retry_backoff must be positive");
// base_interval > max_interval > 1ms
builder.setRetryBackOff(
RetryBackOff.newBuilder()
.setBaseInterval(Durations.fromMillis(200)).setMaxInterval(Durations.fromMillis(100)));
proto = io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setRetryPolicy(builder)
.build();
struct = XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getErrorDetail())
.isEqualTo("max_interval in retry_backoff cannot be less than base_interval");
// 1ms > base_interval > max_interval
builder.setRetryBackOff(
RetryBackOff.newBuilder()
.setBaseInterval(Durations.fromNanos(200)).setMaxInterval(Durations.fromNanos(100)));
proto = io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setRetryPolicy(builder)
.build();
struct = XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getErrorDetail())
.isEqualTo("max_interval in retry_backoff cannot be less than base_interval");
// 1ms > max_interval > base_interval
builder.setRetryBackOff(
RetryBackOff.newBuilder()
.setBaseInterval(Durations.fromNanos(100)).setMaxInterval(Durations.fromNanos(200)));
proto = io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setRetryPolicy(builder)
.build();
struct = XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getStruct().retryPolicy().initialBackoff())
.isEqualTo(Durations.fromMillis(1));
assertThat(struct.getStruct().retryPolicy().maxBackoff())
.isEqualTo(Durations.fromMillis(1));
// retry_backoff unset
builder = RetryPolicy.newBuilder()
.setNumRetries(UInt32Value.of(3))
.setPerTryTimeout(Durations.fromMillis(300))
.setRetryOn("cancelled");
proto = io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setRetryPolicy(builder)
.build();
struct = XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
retryPolicy = struct.getStruct().retryPolicy();
assertThat(retryPolicy.initialBackoff()).isEqualTo(Durations.fromMillis(25));
assertThat(retryPolicy.maxBackoff()).isEqualTo(Durations.fromMillis(250));
// unsupported retry_on value
builder = RetryPolicy.newBuilder()
.setNumRetries(UInt32Value.of(3))
.setRetryBackOff(
RetryBackOff.newBuilder()
.setBaseInterval(Durations.fromMillis(500))
.setMaxInterval(Durations.fromMillis(600)))
.setPerTryTimeout(Durations.fromMillis(300))
.setRetryOn("cancelled,unsupported-foo");
proto = io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setRetryPolicy(builder)
.build();
struct = XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getStruct().retryPolicy().retryableStatusCodes())
.containsExactly(Code.CANCELLED);
// unsupported retry_on code
builder = RetryPolicy.newBuilder()
.setNumRetries(UInt32Value.of(3))
.setRetryBackOff(
RetryBackOff.newBuilder()
.setBaseInterval(Durations.fromMillis(500))
.setMaxInterval(Durations.fromMillis(600)))
.setPerTryTimeout(Durations.fromMillis(300))
.setRetryOn("cancelled,abort");
proto = io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setRetryPolicy(builder)
.build();
struct = XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getStruct().retryPolicy().retryableStatusCodes())
.containsExactly(Code.CANCELLED);
// whitespace in retry_on
builder = RetryPolicy.newBuilder()
.setNumRetries(UInt32Value.of(3))
.setRetryBackOff(
RetryBackOff.newBuilder()
.setBaseInterval(Durations.fromMillis(500))
.setMaxInterval(Durations.fromMillis(600)))
.setPerTryTimeout(Durations.fromMillis(300))
.setRetryOn("abort, , cancelled , ");
proto = io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
.setCluster("cluster-foo")
.setRetryPolicy(builder)
.build();
struct = XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
assertThat(struct.getStruct().retryPolicy().retryableStatusCodes())
.containsExactly(Code.CANCELLED);
}
// Verifies parsing of RouteAction.hash_policy entries: a HEADER policy (with RE2 regex
// rewrite) and a FILTER_STATE policy keyed with the expected filter-state key are kept,
// while CONNECTION_PROPERTIES and QUERY_PARAMETER policies are unsupported and dropped
// (including the "terminal" flag set on the dropped connection-properties policy).
@Test
@SuppressWarnings("deprecation")
public void parseRouteAction_withHashPolicies() {
  io.envoyproxy.envoy.config.route.v3.RouteAction proto =
      io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
          .setCluster("cluster-foo")
          .addHashPolicy(
              io.envoyproxy.envoy.config.route.v3.RouteAction.HashPolicy.newBuilder()
                  .setHeader(
                      Header.newBuilder()
                          .setHeaderName("user-agent")
                          .setRegexRewrite(
                              RegexMatchAndSubstitute.newBuilder()
                                  .setPattern(
                                      RegexMatcher.newBuilder()
                                          .setGoogleRe2(GoogleRE2.getDefaultInstance())
                                          .setRegex("grpc.*"))
                                  .setSubstitution("gRPC"))))
          .addHashPolicy(
              io.envoyproxy.envoy.config.route.v3.RouteAction.HashPolicy.newBuilder()
                  .setConnectionProperties(ConnectionProperties.newBuilder().setSourceIp(true))
                  .setTerminal(true)) // unsupported
          .addHashPolicy(
              io.envoyproxy.envoy.config.route.v3.RouteAction.HashPolicy.newBuilder()
                  .setFilterState(
                      FilterState.newBuilder()
                          .setKey(XdsRouteConfigureResource.HASH_POLICY_FILTER_STATE_KEY)))
          .addHashPolicy(
              io.envoyproxy.envoy.config.route.v3.RouteAction.HashPolicy.newBuilder()
                  .setQueryParameter(
                      QueryParameter.newBuilder().setName("param"))) // unsupported
          .build();
  StructOrError<RouteAction> struct =
      XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
          ImmutableMap.of(), ImmutableSet.of(), getXdsResourceTypeArgs(true));
  // Only the two supported entries survive: the HEADER policy at index 0 and the
  // FILTER_STATE policy, which the assertions show is surfaced as type CHANNEL_ID.
  List<HashPolicy> policies = struct.getStruct().hashPolicies();
  assertThat(policies).hasSize(2);
  assertThat(policies.get(0).type()).isEqualTo(HashPolicy.Type.HEADER);
  assertThat(policies.get(0).headerName()).isEqualTo("user-agent");
  assertThat(policies.get(0).isTerminal()).isFalse();
  assertThat(policies.get(0).regEx().pattern()).isEqualTo("grpc.*");
  assertThat(policies.get(0).regExSubstitution()).isEqualTo("gRPC");
  assertThat(policies.get(1).type()).isEqualTo(HashPolicy.Type.CHANNEL_ID);
  assertThat(policies.get(1).isTerminal()).isFalse();
}
// Parsing a RouteAction whose cluster_specifier oneof is entirely unset yields null:
// the route is silently ignored rather than reported as an error.
// NOTE(review): "custer" in the method name looks like a typo for "cluster"; the name is
// kept unchanged to avoid churning test reports and external references.
@Test
public void parseRouteAction_custerSpecifierNotSet() {
  io.envoyproxy.envoy.config.route.v3.RouteAction emptyAction =
      io.envoyproxy.envoy.config.route.v3.RouteAction.getDefaultInstance();
  StructOrError<RouteAction> parsed = XdsRouteConfigureResource.parseRouteAction(
      emptyAction, filterRegistry, ImmutableMap.of(), ImmutableSet.of(),
      getXdsResourceTypeArgs(true));
  assertThat(parsed).isNull();
}
// With route-lookup (RLS) support switched off, a route using cluster_specifier_plugin is
// ignored entirely: the parser returns null (neither a struct nor an error).
@Test
public void parseRouteAction_clusterSpecifier_routeLookupDisabled() {
  XdsRouteConfigureResource.enableRouteLookup = false;
  io.envoyproxy.envoy.config.route.v3.RouteAction rlsAction =
      io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
          .setClusterSpecifierPlugin(CLUSTER_SPECIFIER_PLUGIN.name())
          .build();
  StructOrError<RouteAction> parsed = XdsRouteConfigureResource.parseRouteAction(
      rlsAction, filterRegistry, ImmutableMap.of(), ImmutableSet.of(),
      getXdsResourceTypeArgs(true));
  assertThat(parsed).isNull();
}
// With route-lookup enabled and a matching RLS plugin config registered, a
// cluster_specifier_plugin route parses successfully. The authority-rewrite experimental
// flag is not set here, so the parsed action keeps auto-host-rewrite off.
@Test
public void parseRouteAction_clusterSpecifier() {
  XdsRouteConfigureResource.enableRouteLookup = true;
  io.envoyproxy.envoy.config.route.v3.RouteAction rlsAction =
      io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
          .setClusterSpecifierPlugin(CLUSTER_SPECIFIER_PLUGIN.name())
          .build();
  StructOrError<RouteAction> parsed = XdsRouteConfigureResource.parseRouteAction(
      rlsAction, filterRegistry,
      ImmutableMap.of(CLUSTER_SPECIFIER_PLUGIN.name(), RlsPluginConfig.create(
          ImmutableMap.of("lookupService", "rls-cbt.googleapis.com"))),
      ImmutableSet.of(), getXdsResourceTypeArgs(true));
  assertThat(parsed.getStruct()).isNotNull();
  assertThat(parsed.getStruct().autoHostRewrite()).isFalse();
}
// When the experimental authority-rewrite flag is set AND the RouteAction proto requests
// auto_host_rewrite, the parsed action reports autoHostRewrite() == true. The system
// property is cleared in a finally block so the global flag never leaks to other tests.
@Test
public void parseRouteAction_clusterSpecifier_autoHostRewriteEnabled() {
  System.setProperty(GRPC_EXPERIMENTAL_XDS_AUTHORITY_REWRITE, "true");
  try {
    XdsRouteConfigureResource.enableRouteLookup = true;
    io.envoyproxy.envoy.config.route.v3.RouteAction rlsAction =
        io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
            .setClusterSpecifierPlugin(CLUSTER_SPECIFIER_PLUGIN.name())
            .setAutoHostRewrite(BoolValue.of(true))
            .build();
    StructOrError<RouteAction> parsed = XdsRouteConfigureResource.parseRouteAction(
        rlsAction, filterRegistry,
        ImmutableMap.of(CLUSTER_SPECIFIER_PLUGIN.name(), RlsPluginConfig.create(
            ImmutableMap.of("lookupService", "rls-cbt.googleapis.com"))),
        ImmutableSet.of(), getXdsResourceTypeArgs(true));
    assertThat(parsed.getStruct()).isNotNull();
    // Flag and proto both request the rewrite, so it must be on.
    assertThat(parsed.getStruct().autoHostRewrite()).isTrue();
  } finally {
    System.clearProperty(GRPC_EXPERIMENTAL_XDS_AUTHORITY_REWRITE);
  }
}
// The GRPC_EXPERIMENTAL_XDS_AUTHORITY_REWRITE system property is intentionally NOT set
// here, so even though the RouteAction proto requests auto_host_rewrite, the parsed config
// must leave it disabled. (Compare: the sibling test that sets the property asserts true,
// and the sibling with neither flag nor proto field asserts false.)
@Test
public void parseRouteAction_clusterSpecifier_flagDisabled_autoHostRewriteDisabled() {
  XdsRouteConfigureResource.enableRouteLookup = true;
  io.envoyproxy.envoy.config.route.v3.RouteAction proto =
      io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
          .setClusterSpecifierPlugin(CLUSTER_SPECIFIER_PLUGIN.name())
          .setAutoHostRewrite(BoolValue.of(true))
          .build();
  StructOrError<RouteAction> struct =
      XdsRouteConfigureResource.parseRouteAction(proto, filterRegistry,
          ImmutableMap.of(CLUSTER_SPECIFIER_PLUGIN.name(), RlsPluginConfig.create(
              ImmutableMap.of("lookupService", "rls-cbt.googleapis.com"))), ImmutableSet.of(),
          getXdsResourceTypeArgs(true));
  assertThat(struct.getStruct()).isNotNull();
  // Bug fix: the original asserted isTrue(), contradicting the test's name and scenario
  // (experimental flag disabled => rewrite must stay off).
  assertThat(struct.getStruct().autoHostRewrite()).isFalse();
}
// parseClusterWeight must carry the cluster name and its numeric weight through unchanged.
@Test
public void parseClusterWeight() {
  io.envoyproxy.envoy.config.route.v3.WeightedCluster.ClusterWeight weightProto =
      io.envoyproxy.envoy.config.route.v3.WeightedCluster.ClusterWeight.newBuilder()
          .setName("cluster-foo")
          .setWeight(UInt32Value.of(30))
          .build();
  ClusterWeight parsed =
      XdsRouteConfigureResource.parseClusterWeight(weightProto, filterRegistry).getStruct();
  assertThat(parsed.name()).isEqualTo("cluster-foo");
  assertThat(parsed.weight()).isEqualTo(30);
}
// A HEALTHY endpoint is parsed into an LbEndpoint with isHealthy=true, carrying the
// per-endpoint weight (20); the locality keeps its own weight (100) and priority (1).
@Test
public void parseLocalityLbEndpoints_withHealthyEndpoints() throws ResourceInvalidException {
  io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints proto =
      io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints.newBuilder()
          .setLocality(Locality.newBuilder()
              .setRegion("region-foo").setZone("zone-foo").setSubZone("subZone-foo"))
          .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(100)) // locality weight
          .setPriority(1)
          .addLbEndpoints(io.envoyproxy.envoy.config.endpoint.v3.LbEndpoint.newBuilder()
              .setEndpoint(Endpoint.newBuilder()
                  .setAddress(Address.newBuilder()
                      .setSocketAddress(
                          SocketAddress.newBuilder()
                              .setAddress("172.14.14.5").setPortValue(8888))))
              .setHealthStatus(io.envoyproxy.envoy.config.core.v3.HealthStatus.HEALTHY)
              .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(20))) // endpoint weight
          .build();
  StructOrError<LocalityLbEndpoints> struct = XdsEndpointResource.parseLocalityLbEndpoints(proto);
  assertThat(struct.getErrorDetail()).isNull();
  assertThat(struct.getStruct()).isEqualTo(
      LocalityLbEndpoints.create(
          Collections.singletonList(LbEndpoint.create("172.14.14.5", 8888,
              20, true, "", ImmutableMap.of())),
          100, 1, ImmutableMap.of()));
}
// Endpoint addresses must be literal IPs; a hostname such as "example.com" is rejected
// with a ResourceInvalidException whose message names the offending address.
@Test
public void parseLocalityLbEndpoints_onlyPermitIp() {
  Endpoint hostnameEndpoint = Endpoint.newBuilder()
      .setAddress(Address.newBuilder()
          .setSocketAddress(SocketAddress.newBuilder()
              .setAddress("example.com").setPortValue(8888)))
      .build();
  io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints endpointsProto =
      io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints.newBuilder()
          .setLocality(Locality.newBuilder()
              .setRegion("region-foo").setZone("zone-foo").setSubZone("subZone-foo"))
          .setLoadBalancingWeight(UInt32Value.of(100)) // locality weight
          .setPriority(1)
          .addLbEndpoints(io.envoyproxy.envoy.config.endpoint.v3.LbEndpoint.newBuilder()
              .setEndpoint(hostnameEndpoint)
              .setHealthStatus(io.envoyproxy.envoy.config.core.v3.HealthStatus.HEALTHY)
              .setLoadBalancingWeight(UInt32Value.of(20))) // endpoint weight
          .build();
  ResourceInvalidException thrown = assertThrows(
      ResourceInvalidException.class,
      () -> XdsEndpointResource.parseLocalityLbEndpoints(endpointsProto));
  assertThat(thrown.getMessage()).contains("IP");
  assertThat(thrown.getMessage()).contains("example.com");
}
// UNKNOWN health status is treated the same as HEALTHY: the endpoint is kept with
// isHealthy=true rather than being filtered out.
@Test
public void parseLocalityLbEndpoints_treatUnknownHealthAsHealthy()
    throws ResourceInvalidException {
  io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints proto =
      io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints.newBuilder()
          .setLocality(Locality.newBuilder()
              .setRegion("region-foo").setZone("zone-foo").setSubZone("subZone-foo"))
          .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(100)) // locality weight
          .setPriority(1)
          .addLbEndpoints(io.envoyproxy.envoy.config.endpoint.v3.LbEndpoint.newBuilder()
              .setEndpoint(Endpoint.newBuilder()
                  .setAddress(Address.newBuilder()
                      .setSocketAddress(
                          SocketAddress.newBuilder()
                              .setAddress("172.14.14.5").setPortValue(8888))))
              .setHealthStatus(io.envoyproxy.envoy.config.core.v3.HealthStatus.UNKNOWN)
              .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(20))) // endpoint weight
          .build();
  StructOrError<LocalityLbEndpoints> struct = XdsEndpointResource.parseLocalityLbEndpoints(proto);
  assertThat(struct.getErrorDetail()).isNull();
  assertThat(struct.getStruct()).isEqualTo(
      LocalityLbEndpoints.create(
          Collections.singletonList(LbEndpoint.create("172.14.14.5", 8888,
              20, true, "", ImmutableMap.of())),
          100, 1, ImmutableMap.of()));
}
// An UNHEALTHY endpoint is still parsed (not dropped), but marked isHealthy=false so
// load balancing can exclude it.
@Test
public void parseLocalityLbEndpoints_withUnHealthyEndpoints() throws ResourceInvalidException {
  io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints proto =
      io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints.newBuilder()
          .setLocality(Locality.newBuilder()
              .setRegion("region-foo").setZone("zone-foo").setSubZone("subZone-foo"))
          .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(100)) // locality weight
          .setPriority(1)
          .addLbEndpoints(io.envoyproxy.envoy.config.endpoint.v3.LbEndpoint.newBuilder()
              .setEndpoint(Endpoint.newBuilder()
                  .setAddress(Address.newBuilder()
                      .setSocketAddress(
                          SocketAddress.newBuilder()
                              .setAddress("172.14.14.5").setPortValue(8888))))
              .setHealthStatus(io.envoyproxy.envoy.config.core.v3.HealthStatus.UNHEALTHY)
              .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(20))) // endpoint weight
          .build();
  StructOrError<LocalityLbEndpoints> struct = XdsEndpointResource.parseLocalityLbEndpoints(proto);
  assertThat(struct.getErrorDetail()).isNull();
  assertThat(struct.getStruct()).isEqualTo(
      LocalityLbEndpoints.create(
          Collections.singletonList(LbEndpoint.create("172.14.14.5", 8888, 20,
              false, "", ImmutableMap.of())),
          100, 1, ImmutableMap.of()));
}
// A locality whose load_balancing_weight is 0 contributes no capacity and is dropped:
// the parser returns null for it rather than a struct or an error.
// NOTE(review): "ignor" in the method name looks like a typo for "ignore"; kept as-is to
// avoid churning test reports.
@Test
public void parseLocalityLbEndpoints_ignorZeroWeightLocality() throws ResourceInvalidException {
  io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints proto =
      io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints.newBuilder()
          .setLocality(Locality.newBuilder()
              .setRegion("region-foo").setZone("zone-foo").setSubZone("subZone-foo"))
          .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(0)) // locality weight
          .setPriority(1)
          .addLbEndpoints(io.envoyproxy.envoy.config.endpoint.v3.LbEndpoint.newBuilder()
              .setEndpoint(Endpoint.newBuilder()
                  .setAddress(Address.newBuilder()
                      .setSocketAddress(
                          SocketAddress.newBuilder()
                              .setAddress("172.14.14.5").setPortValue(8888))))
              .setHealthStatus(io.envoyproxy.envoy.config.core.v3.HealthStatus.UNKNOWN)
              .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(20))) // endpoint weight
          .build();
  assertThat(XdsEndpointResource.parseLocalityLbEndpoints(proto)).isNull();
}
@Test
public void parseLocalityLbEndpoints_withDualStackEndpoints() {
String originalDualStackProp =
System.setProperty(GRPC_EXPERIMENTAL_XDS_DUALSTACK_ENDPOINTS, "true");
String v4Address = "172.14.14.5";
String v6Address = "2001:db8::1";
int port = 8888;
try {
io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints proto =
io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints.newBuilder()
.setLocality(Locality.newBuilder()
.setRegion("region-foo").setZone("zone-foo").setSubZone("subZone-foo"))
.setLoadBalancingWeight(UInt32Value.newBuilder().setValue(100)) // locality weight
.setPriority(1)
.addLbEndpoints(io.envoyproxy.envoy.config.endpoint.v3.LbEndpoint.newBuilder()
.setEndpoint(Endpoint.newBuilder()
.setAddress(Address.newBuilder()
.setSocketAddress(
SocketAddress.newBuilder()
.setAddress(v4Address).setPortValue(port)))
.addAdditionalAddresses(Endpoint.AdditionalAddress.newBuilder()
.setAddress(Address.newBuilder()
.setSocketAddress(
SocketAddress.newBuilder()
.setAddress(v6Address).setPortValue(port)))))
.setHealthStatus(io.envoyproxy.envoy.config.core.v3.HealthStatus.HEALTHY)
.setLoadBalancingWeight(UInt32Value.newBuilder().setValue(20)))
.build();
StructOrError<LocalityLbEndpoints> struct =
XdsEndpointResource.parseLocalityLbEndpoints(proto);
assertThat(struct.getErrorDetail()).isNull();
List<java.net.SocketAddress> socketAddressList = Arrays.asList(
new InetSocketAddress(v4Address, port), new InetSocketAddress(v6Address, port));
EquivalentAddressGroup expectedEag = new EquivalentAddressGroup(socketAddressList);
assertThat(struct.getStruct()).isEqualTo(
LocalityLbEndpoints.create(
Collections.singletonList(LbEndpoint.create(
expectedEag, 20, true, "", ImmutableMap.of())), 100, 1, ImmutableMap.of()));
} catch (ResourceInvalidException e) {
throw new RuntimeException(e);
} finally {
if (originalDualStackProp != null) {
System.setProperty(GRPC_EXPERIMENTAL_XDS_DUALSTACK_ENDPOINTS, originalDualStackProp);
} else {
System.clearProperty(GRPC_EXPERIMENTAL_XDS_DUALSTACK_ENDPOINTS);
}
}
}
// A negative locality priority is invalid and must surface as an error detail rather than
// as a parsed struct.
@Test
public void parseLocalityLbEndpoints_invalidPriority() throws ResourceInvalidException {
  Endpoint endpoint = Endpoint.newBuilder()
      .setAddress(Address.newBuilder()
          .setSocketAddress(SocketAddress.newBuilder()
              .setAddress("172.14.14.5").setPortValue(8888)))
      .build();
  io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints endpointsProto =
      io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints.newBuilder()
          .setLocality(Locality.newBuilder()
              .setRegion("region-foo").setZone("zone-foo").setSubZone("subZone-foo"))
          .setLoadBalancingWeight(UInt32Value.of(100)) // locality weight
          .setPriority(-1)
          .addLbEndpoints(io.envoyproxy.envoy.config.endpoint.v3.LbEndpoint.newBuilder()
              .setEndpoint(endpoint)
              .setHealthStatus(io.envoyproxy.envoy.config.core.v3.HealthStatus.UNKNOWN)
              .setLoadBalancingWeight(UInt32Value.of(20))) // endpoint weight
          .build();
  StructOrError<LocalityLbEndpoints> result =
      XdsEndpointResource.parseLocalityLbEndpoints(endpointsProto);
  assertThat(result.getErrorDetail()).isEqualTo("negative priority");
}
// An HTTP filter with an unrecognized typed_config is tolerated — parseHttpFilter returns
// null (filter skipped) instead of failing — as long as the filter is marked optional.
@Test
public void parseHttpFilter_unsupportedButOptional() {
  Any unknownConfig = Any.pack(StringValue.of("unsupported"));
  HttpFilter optionalFilter = HttpFilter.newBuilder()
      .setIsOptional(true)
      .setTypedConfig(unknownConfig)
      .build();
  assertThat(XdsListenerResource.parseHttpFilter(optionalFilter, filterRegistry, true)).isNull();
}
private static | GrpcXdsClientImplDataTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/Crc32PerformanceTest.java | {
"start": 2465,
"end": 3358
} | class ____ implements Crc32 {
@Override
public void verifyChunked(ByteBuffer data, int bytesPerSum,
ByteBuffer sums, String fileName, long basePos)
throws ChecksumException {
if (data.isDirect()) {
NativeCrc32.verifyChunkedSums(bytesPerSum,
DataChecksum.Type.CRC32C.id, sums, data, fileName, basePos);
} else {
final int dataOffset = data.arrayOffset() + data.position();
final int crcsOffset = sums.arrayOffset() + sums.position();
NativeCrc32.verifyChunkedSumsByteArray(bytesPerSum,
DataChecksum.Type.CRC32C.id, sums.array(), crcsOffset,
data.array(), dataOffset, data.remaining(), fileName, basePos);
}
}
@Override
public DataChecksum.Type crcType() {
return DataChecksum.Type.CRC32C;
}
}
abstract | NativeC |
java | google__gson | gson/src/test/java/com/google/gson/internal/ConstructorConstructorTest.java | {
"start": 3248,
"end": 3440
} | class ____<E> extends HashSet<E> {
// Removes default no-args constructor
@SuppressWarnings("unused")
CustomSet(Void v) {}
}
@SuppressWarnings("serial")
private static | CustomSet |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/datasource/ShardingKeyProvider.java | {
"start": 1454,
"end": 2377
} | interface ____ {
/**
* Determine the sharding key. This method returns the sharding key relevant to the current
* context which will be used to obtain a direct shard connection.
* @return the sharding key, or {@code null} if it is not available or cannot be determined
* @throws SQLException if an error occurs while obtaining the sharding key
*/
@Nullable ShardingKey getShardingKey() throws SQLException;
/**
* Determine the super sharding key, if any. This method returns the super sharding key
* relevant to the current context which will be used to obtain a direct shard connection.
* @return the super sharding key, or {@code null} if it is not available or cannot be
* determined (the default)
* @throws SQLException if an error occurs while obtaining the super sharding key
*/
default @Nullable ShardingKey getSuperShardingKey() throws SQLException {
return null;
}
}
| ShardingKeyProvider |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/annotation/JsonSeeAlsoTest.java | {
"start": 1893,
"end": 2009
} | class ____ extends Animal {
public String catName;
}
@JSONType(typeName = "tidy")
public static | Cat |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/support/monitor/annotation/MField.java | {
"start": 904,
"end": 1097
} | interface ____ {
String name() default "";
boolean groupBy() default false;
AggregateType aggregate();
String hashFor() default "";
String hashForType() default "";
}
| MField |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/heartbeat/HeartbeatManagerSenderImpl.java | {
"start": 1335,
"end": 3804
} | class ____<I, O> extends HeartbeatManagerImpl<I, O> implements Runnable {
private final long heartbeatPeriod;
HeartbeatManagerSenderImpl(
long heartbeatPeriod,
long heartbeatTimeout,
int failedRpcRequestsUntilUnreachable,
ResourceID ownResourceID,
HeartbeatListener<I, O> heartbeatListener,
ScheduledExecutor mainThreadExecutor,
Logger log) {
this(
heartbeatPeriod,
heartbeatTimeout,
failedRpcRequestsUntilUnreachable,
ownResourceID,
heartbeatListener,
mainThreadExecutor,
log,
new DefaultHeartbeatMonitor.Factory<>());
}
HeartbeatManagerSenderImpl(
long heartbeatPeriod,
long heartbeatTimeout,
int failedRpcRequestsUntilUnreachable,
ResourceID ownResourceID,
HeartbeatListener<I, O> heartbeatListener,
ScheduledExecutor mainThreadExecutor,
Logger log,
HeartbeatMonitor.Factory<O> heartbeatMonitorFactory) {
super(
heartbeatTimeout,
failedRpcRequestsUntilUnreachable,
ownResourceID,
heartbeatListener,
mainThreadExecutor,
log,
heartbeatMonitorFactory);
this.heartbeatPeriod = heartbeatPeriod;
mainThreadExecutor.schedule(this, 0L, TimeUnit.MILLISECONDS);
}
@Override
public void run() {
if (!stopped) {
log.debug("Trigger heartbeat request.");
for (HeartbeatMonitor<O> heartbeatMonitor : getHeartbeatTargets().values()) {
requestHeartbeat(heartbeatMonitor);
}
getMainThreadExecutor().schedule(this, heartbeatPeriod, TimeUnit.MILLISECONDS);
}
}
private void requestHeartbeat(HeartbeatMonitor<O> heartbeatMonitor) {
O payload = getHeartbeatListener().retrievePayload(heartbeatMonitor.getHeartbeatTargetId());
final HeartbeatTarget<O> heartbeatTarget = heartbeatMonitor.getHeartbeatTarget();
heartbeatTarget
.requestHeartbeat(getOwnResourceID(), payload)
.whenCompleteAsync(
handleHeartbeatRpc(heartbeatMonitor.getHeartbeatTargetId()),
getMainThreadExecutor());
}
}
| HeartbeatManagerSenderImpl |
java | quarkusio__quarkus | extensions/reactive-routes/runtime/src/main/java/io/quarkus/vertx/web/runtime/VirtualThreadsRouteHandler.java | {
"start": 378,
"end": 1276
} | class ____ implements Handler<RoutingContext> {
private final Handler<RoutingContext> routeHandler;
public VirtualThreadsRouteHandler(Handler<RoutingContext> routeHandler) {
this.routeHandler = routeHandler;
}
@Override
public void handle(RoutingContext context) {
Context vertxContext = VertxContext.getOrCreateDuplicatedContext(VertxCoreRecorder.getVertx().get());
VertxContextSafetyToggle.setContextSafe(vertxContext, true);
vertxContext.runOnContext(new Handler<Void>() {
@Override
public void handle(Void event) {
VirtualThreadsRecorder.getCurrent().execute(new Runnable() {
@Override
public void run() {
routeHandler.handle(context);
}
});
}
});
}
}
| VirtualThreadsRouteHandler |
java | apache__maven | impl/maven-cli/src/main/java/org/apache/maven/cling/invoker/mvnenc/goals/GoalSupport.java | {
"start": 1141,
"end": 1634
} | class ____ implements Goal {
protected final MessageBuilderFactory messageBuilderFactory;
protected final SecDispatcher secDispatcher;
protected GoalSupport(MessageBuilderFactory messageBuilderFactory, SecDispatcher secDispatcher) {
this.messageBuilderFactory = messageBuilderFactory;
this.secDispatcher = secDispatcher;
}
protected boolean configExists() throws IOException {
return secDispatcher.readConfiguration(false) != null;
}
}
| GoalSupport |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-servlet/runtime/src/main/java/io/quarkus/resteasy/reactive/server/servlet/runtime/ServletRequestContextFactory.java | {
"start": 584,
"end": 1549
} | class ____ implements RequestContextFactory {
public static final ServletRequestContextFactory INSTANCE = new ServletRequestContextFactory();
@Override
public ResteasyReactiveRequestContext createContext(Deployment deployment,
Object context, ThreadSetupAction requestContext, ServerRestHandler[] handlerChain,
ServerRestHandler[] abortHandlerChain) {
io.undertow.servlet.handlers.ServletRequestContext src = (io.undertow.servlet.handlers.ServletRequestContext) context;
return new ServletRequestContext(deployment, (HttpServletRequest) src.getServletRequest(),
(HttpServletResponse) src.getServletResponse(), requestContext, handlerChain, abortHandlerChain,
(RoutingContext) ((VertxHttpExchange) src.getExchange().getDelegate()).getContext(), src.getExchange());
}
@Override
public boolean isDefaultBlocking() {
return true;
}
}
| ServletRequestContextFactory |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/type/contributor/usertype/StringWrapperTypeContributor.java | {
"start": 341,
"end": 587
} | class ____ implements TypeContributor {
@Override
public void contribute(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
typeContributions.contributeType( StringWrapperUserType.INSTANCE );
}
}
| StringWrapperTypeContributor |
java | apache__flink | flink-connectors/flink-connector-base/src/test/java/org/apache/flink/connector/base/source/reader/fetcher/SplitFetcherTest.java | {
"start": 2151,
"end": 14358
} | class ____ {
@Test
void testNewFetcherIsIdle() {
final SplitFetcher<Object, TestingSourceSplit> fetcher =
createFetcher(new TestingSplitReader<>());
assertThat(fetcher.isIdle()).isTrue();
}
@Test
void testFetcherNotIdleAfterSplitAdded() {
final SplitFetcher<Object, TestingSourceSplit> fetcher =
createFetcher(new TestingSplitReader<>());
final TestingSourceSplit split = new TestingSourceSplit("test-split");
fetcher.addSplits(Collections.singletonList(split));
assertThat(fetcher.isIdle()).isFalse();
// need to loop here because the internal wakeup flag handling means we need multiple loops
while (fetcher.assignedSplits().isEmpty()) {
fetcher.runOnce();
assertThat(fetcher.isIdle()).isFalse();
}
}
@Test
void testIdleAfterFinishedSplitsEnqueued() {
final SplitFetcher<Object, TestingSourceSplit> fetcher =
createFetcherWithSplit(
"test-split", new TestingSplitReader<>(finishedSplitFetch("test-split")));
fetcher.runOnce();
assertThat(fetcher.assignedSplits()).isEmpty();
assertThat(fetcher.isIdle()).isTrue();
}
@Test
void testNotifiesWhenGoingIdle() {
final FutureCompletingBlockingQueue<RecordsWithSplitIds<Object>> queue =
new FutureCompletingBlockingQueue<>();
final SplitFetcher<Object, TestingSourceSplit> fetcher =
createFetcherWithSplit(
"test-split",
queue,
new TestingSplitReader<>(finishedSplitFetch("test-split")));
fetcher.runOnce();
assertThat(fetcher.assignedSplits()).isEmpty();
assertThat(fetcher.isIdle()).isTrue();
assertThat(queue.getAvailabilityFuture().isDone()).isTrue();
}
@Test
void testNotifiesOlderFutureWhenGoingIdle() {
final FutureCompletingBlockingQueue<RecordsWithSplitIds<Object>> queue =
new FutureCompletingBlockingQueue<>();
final SplitFetcher<Object, TestingSourceSplit> fetcher =
createFetcherWithSplit(
"test-split",
queue,
new TestingSplitReader<>(finishedSplitFetch("test-split")));
final CompletableFuture<?> future = queue.getAvailabilityFuture();
fetcher.runOnce();
assertThat(fetcher.assignedSplits()).isEmpty();
assertThat(fetcher.isIdle()).isTrue();
assertThat(future.isDone()).isTrue();
}
@Test
void testNotifiesWhenGoingIdleConcurrent() throws Exception {
final FutureCompletingBlockingQueue<RecordsWithSplitIds<Object>> queue =
new FutureCompletingBlockingQueue<>();
final SplitFetcher<Object, TestingSourceSplit> fetcher =
createFetcherWithSplit(
"test-split",
queue,
new TestingSplitReader<>(finishedSplitFetch("test-split")));
final QueueDrainerThread queueDrainer = new QueueDrainerThread(queue, fetcher, 1);
queueDrainer.start();
fetcher.runOnce();
queueDrainer.sync();
// either we got the notification that the fetcher went idle after the queue was drained
// (thread finished)
// or the fetcher was already idle when the thread drained the queue (then we need no
// additional notification)
assertThat(queue.getAvailabilityFuture().isDone() || queueDrainer.wasIdleWhenFinished())
.isTrue();
}
@Test
void testNotifiesOlderFutureWhenGoingIdleConcurrent() throws Exception {
final FutureCompletingBlockingQueue<RecordsWithSplitIds<Object>> queue =
new FutureCompletingBlockingQueue<>();
final SplitFetcher<Object, TestingSourceSplit> fetcher =
createFetcherWithSplit(
"test-split",
queue,
new TestingSplitReader<>(finishedSplitFetch("test-split")));
final QueueDrainerThread queueDrainer = new QueueDrainerThread(queue, fetcher, 1);
queueDrainer.start();
final CompletableFuture<?> future = queue.getAvailabilityFuture();
fetcher.runOnce();
assertThat(future.isDone()).isTrue();
queueDrainer.sync();
}
    /**
     * Verifies that frequent wake-up calls do not corrupt the fetch loop: every record of every
     * split is still delivered exactly once, covering the full value range, while a separate
     * thread keeps issuing wake-ups throughout the run.
     */
    @Test
    void testWakeup() throws InterruptedException {
        final int numSplits = 3;
        final int numRecordsPerSplit = 10_000;
        final int wakeupRecordsInterval = 10;
        final int numTotalRecords = numRecordsPerSplit * numSplits;
        // Queue with capacity 1 — presumably so the fetcher frequently blocks on a full
        // queue, i.e. wake-ups often hit it while parked. TODO confirm intent.
        FutureCompletingBlockingQueue<RecordsWithSplitIds<int[]>> elementQueue =
                new FutureCompletingBlockingQueue<>(1);
        SplitFetcher<int[], MockSourceSplit> fetcher =
                new SplitFetcher<>(
                        0,
                        elementQueue,
                        MockSplitReader.newBuilder()
                                .setNumRecordsPerSplitPerFetch(2)
                                .setBlockingFetch(true)
                                .build(),
                        ExceptionUtils::rethrow,
                        () -> {},
                        (ignore) -> {},
                        false);
        // Prepare the splits. Split i carries the record values
        // [i * numRecordsPerSplit, (i + 1) * numRecordsPerSplit), so all values are distinct.
        List<MockSourceSplit> splits = new ArrayList<>();
        for (int i = 0; i < numSplits; i++) {
            splits.add(new MockSourceSplit(i, 0, numRecordsPerSplit));
            int base = i * numRecordsPerSplit;
            for (int j = base; j < base + numRecordsPerSplit; j++) {
                splits.get(splits.size() - 1).addRecord(j);
            }
        }
        // Add splits to the fetcher.
        fetcher.addSplits(splits);
        // A thread drives the fetcher.
        Thread fetcherThread = new Thread(fetcher, "FetcherThread");
        // Synchronized sorted set: written by the test thread, polled by the wake-up thread.
        SortedSet<Integer> recordsRead = Collections.synchronizedSortedSet(new TreeSet<>());
        // A thread waking up the split fetcher frequently.
        AtomicInteger wakeupTimes = new AtomicInteger(0);
        AtomicBoolean stop = new AtomicBoolean(false);
        Thread wakeUpCaller =
                new Thread("Wakeup Caller") {
                    @Override
                    public void run() {
                        int lastWakeup = 0;
                        // Busy-spin: issue a wake-up roughly every wakeupRecordsInterval
                        // records observed, until all records arrived or stop is signalled.
                        while (recordsRead.size() < numTotalRecords && !stop.get()) {
                            int numRecordsRead = recordsRead.size();
                            if (numRecordsRead >= lastWakeup + wakeupRecordsInterval) {
                                fetcher.wakeUp(false);
                                wakeupTimes.incrementAndGet();
                                lastWakeup = numRecordsRead;
                            }
                        }
                    }
                };
        try {
            fetcherThread.start();
            wakeUpCaller.start();
            // Drain the element queue on the test thread; the add() assertion ensures
            // no record value is ever delivered twice.
            while (recordsRead.size() < numSplits * numRecordsPerSplit) {
                final RecordsWithSplitIds<int[]> nextBatch = elementQueue.take();
                while (nextBatch.nextSplit() != null) {
                    int[] arr;
                    while ((arr = nextBatch.nextRecordFromSplit()) != null) {
                        assertThat(recordsRead.add(arr[0])).isTrue();
                    }
                }
            }
            // All records were received, spanning the full value range, and at least one
            // wake-up actually fired during the run.
            assertThat(recordsRead).hasSize(numTotalRecords);
            assertThat(recordsRead.first()).isEqualTo(0);
            assertThat(recordsRead.last()).isEqualTo(numTotalRecords - 1);
            assertThat(wakeupTimes.get()).isGreaterThan(0);
        } finally {
            // Stop the wake-up thread first, then shut the fetcher down and join both threads
            // so the test never leaks live threads, even on assertion failure.
            stop.set(true);
            fetcher.shutdown();
            fetcherThread.join();
            wakeUpCaller.join();
        }
    }
@Test
void testClose() {
TestingSplitReader<Object, TestingSourceSplit> splitReader = new TestingSplitReader<>();
final SplitFetcher<Object, TestingSourceSplit> fetcher = createFetcher(splitReader);
fetcher.shutdown();
fetcher.run();
assertThat(splitReader.isClosed()).isTrue();
}
@Test
void testCloseAfterPause() throws InterruptedException {
final FutureCompletingBlockingQueue<RecordsWithSplitIds<Object>> queue =
new FutureCompletingBlockingQueue<>();
final SplitFetcher<Object, TestingSourceSplit> fetcher =
createFetcherWithSplit(
"test-split",
queue,
new TestingSplitReader<>(finishedSplitFetch("test-split")));
fetcher.pause();
Thread fetcherThread = new Thread(fetcher::shutdown);
fetcherThread.start();
fetcherThread.join();
assertThat(fetcher.runOnce()).isFalse();
}
    /**
     * Verifies that {@code shutdown(true)} makes the fetcher wait for the queued records to be
     * processed (recycled) before it closes the split reader.
     */
    @Test
    void testShutdownWaitingForRecordsProcessing() throws Exception {
        TestingSplitReader<Object, TestingSourceSplit> splitReader = new TestingSplitReader<>();
        FutureCompletingBlockingQueue<RecordsWithSplitIds<Object>> queue =
                new FutureCompletingBlockingQueue<>();
        final SplitFetcher<Object, TestingSourceSplit> fetcher = createFetcher(splitReader, queue);
        // Request a shutdown that waits for record processing to complete.
        fetcher.shutdown(true);
        // Spawn a new fetcher thread to go through the shutdown sequence.
        CheckedThread fetcherThread =
                new CheckedThread() {
                    @Override
                    public void go() throws Exception {
                        fetcher.run();
                        // Once run() returns, the shutdown sequence must have closed the reader.
                        assertThat(splitReader.isClosed()).isTrue();
                    }
                };
        fetcherThread.start();
        // Wait until the fetcher thread to block on the shutdown latch.
        waitUntil(
                () -> fetcherThread.getState() == WAITING,
                Duration.ofSeconds(1),
                "The fetcher thread should be waiting for the shutdown latch");
        // While the fetcher is blocked waiting, the reader must not have been closed yet.
        assertThat(splitReader.isClosed())
                .as("The split reader should have not been closed.")
                .isFalse();
        // When a batch becomes available, recycle it immediately — this signals that record
        // processing finished, which presumably releases the shutdown latch (TODO confirm
        // against SplitFetcher's shutdown implementation).
        queue.getAvailabilityFuture().thenRun(() -> queue.poll().recycle());
        // Now pull the latch.
        fetcherThread.sync();
    }
// ------------------------------------------------------------------------
// testing utils
// ------------------------------------------------------------------------
private static <E> RecordsBySplits<E> finishedSplitFetch(String splitId) {
return new RecordsBySplits<>(Collections.emptyMap(), Collections.singleton(splitId));
}
private static <E> SplitFetcher<E, TestingSourceSplit> createFetcher(
final SplitReader<E, TestingSourceSplit> reader) {
return createFetcher(reader, new FutureCompletingBlockingQueue<>());
}
private static <E> SplitFetcher<E, TestingSourceSplit> createFetcher(
final SplitReader<E, TestingSourceSplit> reader,
final FutureCompletingBlockingQueue<RecordsWithSplitIds<E>> queue) {
return new SplitFetcher<>(
0, queue, reader, ExceptionUtils::rethrow, () -> {}, (ignore) -> {}, false);
}
private static <E> SplitFetcher<E, TestingSourceSplit> createFetcherWithSplit(
final String splitId, final SplitReader<E, TestingSourceSplit> reader) {
return createFetcherWithSplit(splitId, new FutureCompletingBlockingQueue<>(), reader);
}
private static <E> SplitFetcher<E, TestingSourceSplit> createFetcherWithSplit(
final String splitId,
final FutureCompletingBlockingQueue<RecordsWithSplitIds<E>> queue,
final SplitReader<E, TestingSourceSplit> reader) {
final SplitFetcher<E, TestingSourceSplit> fetcher = createFetcher(reader, queue);
fetcher.addSplits(Collections.singletonList(new TestingSourceSplit(splitId)));
while (fetcher.assignedSplits().isEmpty()) {
fetcher.runOnce();
}
return fetcher;
}
// ------------------------------------------------------------------------
private static final | SplitFetcherTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractRenameTest.java | {
"start": 1207,
"end": 13581
} | class ____ extends
AbstractFSContractTestBase {
@Test
public void testRenameNewFileSameDir() throws Throwable {
describe("rename a file into a new file in the same directory");
Path renameSrc = path("rename_src");
Path renameTarget = path("rename_dest");
byte[] data = dataset(256, 'a', 'z');
writeDataset(getFileSystem(), renameSrc,
data, data.length, 1024 * 1024, false);
boolean rename = rename(renameSrc, renameTarget);
assertTrue(rename,
"rename("+renameSrc+", "+ renameTarget+") returned false");
assertListStatusFinds(getFileSystem(),
renameTarget.getParent(), renameTarget);
verifyFileContents(getFileSystem(), renameTarget, data);
}
@Test
public void testRenameNonexistentFile() throws Throwable {
describe("rename a file into a new file in the same directory");
Path missing = path("testRenameNonexistentFileSrc");
Path target = path("testRenameNonexistentFileDest");
boolean renameReturnsFalseOnFailure =
isSupported(ContractOptions.RENAME_RETURNS_FALSE_IF_SOURCE_MISSING);
mkdirs(missing.getParent());
try {
boolean renamed = rename(missing, target);
//expected an exception
if (!renameReturnsFalseOnFailure) {
String destDirLS = generateAndLogErrorListing(missing, target);
fail("expected rename(" + missing + ", " + target + " ) to fail," +
" got a result of " + renamed
+ " and a destination directory of " + destDirLS);
} else {
// at least one FS only returns false here, if that is the case
// warn but continue
getLogger().warn("Rename returned {} renaming a nonexistent file", renamed);
assertFalse(renamed, "Renaming a missing file returned true");
}
} catch (FileNotFoundException e) {
if (renameReturnsFalseOnFailure) {
ContractTestUtils.fail(
"Renaming a missing file unexpectedly threw an exception", e);
}
handleExpectedException(e);
} catch (IOException e) {
handleRelaxedException("rename nonexistent file",
"FileNotFoundException",
e);
}
assertPathDoesNotExist("rename nonexistent file created a destination file",
target);
}
/**
* Rename test -handles filesystems that will overwrite the destination
* as well as those that do not (i.e. HDFS).
* @throws Throwable
*/
@Test
public void testRenameFileOverExistingFile() throws Throwable {
describe("Verify renaming a file onto an existing file matches expectations");
Path srcFile = path("source-256.txt");
byte[] srcData = dataset(256, 'a', 'z');
writeDataset(getFileSystem(), srcFile, srcData, srcData.length, 1024, false);
Path destFile = path("dest-512.txt");
byte[] destData = dataset(512, 'A', 'Z');
writeDataset(getFileSystem(), destFile, destData, destData.length, 1024, false);
assertIsFile(destFile);
boolean renameOverwritesDest = isSupported(RENAME_OVERWRITES_DEST);
boolean renameReturnsFalseOnRenameDestExists =
isSupported(RENAME_RETURNS_FALSE_IF_DEST_EXISTS);
assertFalse(renameOverwritesDest && renameReturnsFalseOnRenameDestExists,
RENAME_OVERWRITES_DEST + " and " +
RENAME_RETURNS_FALSE_IF_DEST_EXISTS + " cannot be both supported");
String expectedTo = "expected rename(" + srcFile + ", " + destFile + ") to ";
boolean destUnchanged = true;
try {
// rename is rejected by returning 'false' or throwing an exception
boolean renamed = rename(srcFile, destFile);
destUnchanged = !renamed;
if (renameOverwritesDest) {
assertTrue(renamed, expectedTo + "overwrite destination, but got false");
} else if (renameReturnsFalseOnRenameDestExists) {
assertFalse(renamed, expectedTo + "be rejected with false, but destination " +
"was overwritten");
} else if (renamed) {
String destDirLS = generateAndLogErrorListing(srcFile, destFile);
getLogger().error("dest dir {}", destDirLS);
fail(expectedTo + "be rejected with exception, but got overwritten");
} else {
fail(expectedTo + "be rejected with exception, but got false");
}
} catch (FileAlreadyExistsException e) {
// rename(file, file2) should throw exception iff
// it neither overwrites nor returns false
assertFalse(renameOverwritesDest,
expectedTo + "overwrite destination, but got exception");
assertFalse(renameReturnsFalseOnRenameDestExists,
expectedTo + "be rejected with false, but got exception");
handleExpectedException(e);
}
// verify that the destination file is as expected based on the expected
// outcome
verifyFileContents(getFileSystem(), destFile,
destUnchanged ? destData: srcData);
}
@Test
public void testRenameDirIntoExistingDir() throws Throwable {
describe("Verify renaming a dir into an existing dir puts it"
+ " underneath"
+" and leaves existing files alone");
FileSystem fs = getFileSystem();
String sourceSubdir = "source";
Path srcDir = path(sourceSubdir);
Path srcFilePath = new Path(srcDir, "source-256.txt");
byte[] srcDataset = dataset(256, 'a', 'z');
writeDataset(fs, srcFilePath, srcDataset, srcDataset.length, 1024, false);
Path destDir = path("dest");
Path destFilePath = new Path(destDir, "dest-512.txt");
byte[] destData = dataset(512, 'A', 'Z');
writeDataset(fs, destFilePath, destData, destData.length, 1024, false);
assertIsFile(destFilePath);
boolean rename = rename(srcDir, destDir);
Path renamedSrc = new Path(destDir, sourceSubdir);
assertIsFile(destFilePath);
assertIsDirectory(renamedSrc);
verifyFileContents(fs, destFilePath, destData);
assertTrue(rename, "rename returned false though the contents were copied");
}
@Test
public void testRenameFileNonexistentDir() throws Throwable {
describe("rename a file into a new file in the same directory");
Path renameSrc = path("testRenameSrc");
Path renameTarget = path("subdir/testRenameTarget");
byte[] data = dataset(256, 'a', 'z');
writeDataset(getFileSystem(), renameSrc, data, data.length, 1024 * 1024,
false);
boolean renameCreatesDestDirs = isSupported(RENAME_CREATES_DEST_DIRS);
try {
boolean rename = rename(renameSrc, renameTarget);
if (renameCreatesDestDirs) {
assertTrue(rename);
verifyFileContents(getFileSystem(), renameTarget, data);
} else {
assertFalse(rename);
verifyFileContents(getFileSystem(), renameSrc, data);
}
} catch (FileNotFoundException e) {
// allowed unless that rename flag is set
assertFalse(renameCreatesDestDirs);
}
}
@Test
public void testRenameWithNonEmptySubDir() throws Throwable {
final Path renameTestDir = path("testRenameWithNonEmptySubDir");
final Path srcDir = new Path(renameTestDir, "src1");
final Path srcSubDir = new Path(srcDir, "sub");
final Path finalDir = new Path(renameTestDir, "dest");
FileSystem fs = getFileSystem();
boolean renameRemoveEmptyDest = isSupported(RENAME_REMOVE_DEST_IF_EMPTY_DIR);
rm(fs, renameTestDir, true, false);
fs.mkdirs(srcDir);
fs.mkdirs(finalDir);
writeTextFile(fs, new Path(srcDir, "source.txt"),
"this is the file in src dir", false);
writeTextFile(fs, new Path(srcSubDir, "subfile.txt"),
"this is the file in src/sub dir", false);
assertPathExists("not created in src dir",
new Path(srcDir, "source.txt"));
assertPathExists("not created in src/sub dir",
new Path(srcSubDir, "subfile.txt"));
rename(srcDir, finalDir);
// Accept both POSIX rename behavior and CLI rename behavior
if (renameRemoveEmptyDest) {
// POSIX rename behavior
assertPathExists("not renamed into dest dir",
new Path(finalDir, "source.txt"));
assertPathExists("not renamed into dest/sub dir",
new Path(finalDir, "sub/subfile.txt"));
} else {
// CLI rename behavior
assertPathExists("not renamed into dest dir",
new Path(finalDir, "src1/source.txt"));
assertPathExists("not renamed into dest/sub dir",
new Path(finalDir, "src1/sub/subfile.txt"));
}
assertPathDoesNotExist("not deleted",
new Path(srcDir, "source.txt"));
}
/**
* Test that after renaming, the nested subdirectory is moved along with all
* its ancestors.
*/
@Test
public void testRenamePopulatesDirectoryAncestors() throws IOException {
final FileSystem fs = getFileSystem();
final Path src = path("testRenamePopulatesDirectoryAncestors/source");
fs.mkdirs(src);
final String nestedDir = "/dir1/dir2/dir3/dir4";
fs.mkdirs(path(src + nestedDir));
Path dst = path("testRenamePopulatesDirectoryAncestorsNew");
fs.rename(src, dst);
validateAncestorsMoved(src, dst, nestedDir);
}
/**
* Test that after renaming, the nested file is moved along with all its
* ancestors. It is similar to {@link #testRenamePopulatesDirectoryAncestors}.
*/
@Test
public void testRenamePopulatesFileAncestors() throws IOException {
final FileSystem fs = getFileSystem();
final Path src = path("testRenamePopulatesFileAncestors/source");
fs.mkdirs(src);
final String nestedFile = "/dir1/dir2/dir3/file4";
byte[] srcDataset = dataset(256, 'a', 'z');
writeDataset(fs, path(src + nestedFile), srcDataset, srcDataset.length,
1024, false);
Path dst = path("testRenamePopulatesFileAncestorsNew");
fs.rename(src, dst);
validateAncestorsMoved(src, dst, nestedFile);
}
/**
* Validate that the nested path and its ancestors should have been moved.
*
* @param src the source root to move
* @param dst the destination root to move
* @param nestedPath the nested path to move
*/
protected void validateAncestorsMoved(Path src, Path dst, String nestedPath)
throws IOException {
assertIsDirectory(dst);
assertPathDoesNotExist("src path should not exist", path(src + nestedPath));
assertPathExists("dst path should exist", path(dst + nestedPath));
Path path = new Path(nestedPath).getParent();
while (path != null && !path.isRoot()) {
final Path parentSrc = path(src + path.toString());
assertPathDoesNotExist(parentSrc + " is not deleted", parentSrc);
final Path parentDst = path(dst + path.toString());
assertPathExists(parentDst + " should exist after rename", parentDst);
assertIsDirectory(parentDst);
path = path.getParent();
}
}
@Test
public void testRenameFileUnderFile() throws Exception {
String action = "rename directly under file";
describe(action);
Path base = methodPath();
Path grandparent = new Path(base, "file");
expectRenameUnderFileFails(action,
grandparent,
new Path(base, "testRenameSrc"),
new Path(grandparent, "testRenameTarget"));
}
@Test
public void testRenameFileUnderFileSubdir() throws Exception {
String action = "rename directly under file/subdir";
describe(action);
Path base = methodPath();
Path grandparent = new Path(base, "file");
Path parent = new Path(grandparent, "parent");
expectRenameUnderFileFails(action,
grandparent,
new Path(base, "testRenameSrc"),
new Path(parent, "testRenameTarget"));
}
protected void expectRenameUnderFileFails(String action,
Path file, Path renameSrc, Path renameTarget)
throws Exception {
byte[] data = dataset(256, 'a', 'z');
FileSystem fs = getFileSystem();
writeDataset(fs, file, data, data.length, 1024 * 1024,
true);
writeDataset(fs, renameSrc, data, data.length, 1024 * 1024,
true);
String outcome;
boolean renamed;
try {
renamed = rename(renameSrc, renameTarget);
outcome = action + ": rename (" + renameSrc + ", " + renameTarget
+ ")= " + renamed;
} catch (IOException e) {
// raw local raises an exception here
renamed = false;
outcome = "rename raised an exception: " + e;
}
assertPathDoesNotExist("after " + outcome, renameTarget);
assertFalse(renamed, outcome);
assertPathExists(action, renameSrc);
}
}
| AbstractContractRenameTest |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/RangeUnitTests.java | {
"start": 395,
"end": 3361
} | class ____ {
@Test
void unbounded() {
Range<Object> unbounded = Range.unbounded();
assertThat(unbounded.getLower().isIncluding()).isTrue();
assertThat(unbounded.getLower().getValue()).isNull();
assertThat(unbounded.getLower().isUnbounded()).isTrue();
assertThat(unbounded.getUpper().isIncluding()).isTrue();
assertThat(unbounded.getUpper().getValue()).isNull();
assertThat(unbounded.getUpper().isUnbounded()).isTrue();
assertThat(unbounded.isUnbounded()).isTrue();
}
@Test
void createIncluded() {
Range<Object> range = Range.create("ze", "ro");
assertThat(range.getLower().isIncluding()).isTrue();
assertThat(range.getLower().getValue()).isEqualTo("ze");
assertThat(range.getLower().isBounded()).isTrue();
assertThat(range.getUpper().isIncluding()).isTrue();
assertThat(range.getUpper().getValue()).isEqualTo("ro");
assertThat(range.getUpper().isBounded()).isTrue();
assertThat(range.isUnbounded()).isFalse();
}
@Test
void fromBoundaries() {
Range<Object> range = Range.from(including("ze"), excluding("ro"));
assertThat(range.getLower().isIncluding()).isTrue();
assertThat(range.getLower().getValue()).isEqualTo("ze");
assertThat(range.getUpper().isIncluding()).isFalse();
assertThat(range.getUpper().getValue()).isEqualTo("ro");
}
@Test
void greater() {
Range<Object> gt = Range.unbounded().gt("zero");
assertThat(gt.getLower().isIncluding()).isFalse();
assertThat(gt.getLower().getValue()).isEqualTo("zero");
assertThat(gt.getUpper().isIncluding()).isTrue();
assertThat(gt.getUpper().getValue()).isNull();
}
@Test
void greaterOrEquals() {
Range<Object> gte = Range.unbounded().gte("zero");
assertThat(gte.getLower().isIncluding()).isTrue();
assertThat(gte.getLower().getValue()).isEqualTo("zero");
assertThat(gte.getUpper().isIncluding()).isTrue();
assertThat(gte.getUpper().getValue()).isNull();
}
@Test
void less() {
Range<Object> lt = Range.unbounded().lt("zero");
assertThat(lt.getLower().isIncluding()).isTrue();
assertThat(lt.getLower().getValue()).isNull();
assertThat(lt.getUpper().isIncluding()).isFalse();
assertThat(lt.getUpper().getValue()).isEqualTo("zero");
assertThat(lt.toString()).isEqualTo("Range [[unbounded] to (zero]");
}
@Test
void lessOrEquals() {
Range<Object> lte = Range.unbounded().lte("zero");
assertThat(lte.getLower().isIncluding()).isTrue();
assertThat(lte.getLower().getValue()).isNull();
assertThat(lte.getUpper().isIncluding()).isTrue();
assertThat(lte.getUpper().getValue()).isEqualTo("zero");
assertThat(lte.toString()).isEqualTo("Range [[unbounded] to [zero]");
}
}
| RangeUnitTests |
java | quarkusio__quarkus | independent-projects/bootstrap/app-model/src/main/java/io/quarkus/bootstrap/workspace/WorkspaceModule.java | {
"start": 310,
"end": 1685
} | interface ____ {
static Mutable builder() {
return DefaultWorkspaceModule.builder();
}
WorkspaceModuleId getId();
File getModuleDir();
File getBuildDir();
Collection<String> getSourceClassifiers();
boolean hasSources(String classifier);
ArtifactSources getSources(String classifier);
default boolean hasMainSources() {
return hasSources(ArtifactSources.MAIN);
}
default boolean hasTestSources() {
return hasSources(ArtifactSources.TEST);
}
default ArtifactSources getMainSources() {
return getSources(ArtifactSources.MAIN);
}
default ArtifactSources getTestSources() {
return getSources(ArtifactSources.TEST);
}
PathCollection getBuildFiles();
default PathTree getContentTree(String classifier) {
final ArtifactSources artifactSources = getSources(classifier);
return artifactSources == null || !artifactSources.isOutputAvailable() ? EmptyPathTree.getInstance()
: artifactSources.getOutputTree();
}
Collection<Dependency> getDirectDependencyConstraints();
Collection<Dependency> getDirectDependencies();
Collection<String> getTestClasspathDependencyExclusions();
Collection<String> getAdditionalTestClasspathElements();
WorkspaceModule getParent();
Mutable mutable();
| WorkspaceModule |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/mixins/MixinInheritanceTest.java | {
"start": 488,
"end": 608
} | class ____ extends BeanoMixinSuper {
@JSONField(name = "id")
public int ido;
}
static | BeanoMixinSub |
java | apache__dubbo | dubbo-metrics/dubbo-tracing/src/main/java/org/apache/dubbo/tracing/utils/ObservationSupportUtil.java | {
"start": 900,
"end": 2283
} | class ____ {
public static boolean isSupportObservation() {
return isClassPresent("io.micrometer.observation.Observation")
&& isClassPresent("io.micrometer.observation.ObservationRegistry")
&& isClassPresent("io.micrometer.observation.ObservationHandler");
}
public static boolean isSupportTracing() {
return isClassPresent("io.micrometer.tracing.Tracer")
&& isClassPresent("io.micrometer.tracing.propagation.Propagator");
}
public static boolean isSupportOTelTracer() {
return isClassPresent("io.micrometer.tracing.otel.bridge.OtelTracer")
&& isClassPresent("io.opentelemetry.sdk.trace.SdkTracerProvider")
&& isClassPresent("io.opentelemetry.api.OpenTelemetry");
}
public static boolean isSupportBraveTracer() {
return isClassPresent("io.micrometer.tracing.Tracer")
&& isClassPresent("io.micrometer.tracing.brave.bridge.BraveTracer")
&& isClassPresent("brave.Tracing");
}
public static boolean isSupportBraveURLSender() {
return isClassPresent("zipkin2.reporter.urlconnection.URLConnectionSender");
}
private static boolean isClassPresent(String className) {
return ClassUtils.isPresent(className, ObservationSupportUtil.class.getClassLoader());
}
}
| ObservationSupportUtil |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/jaxb/internal/stax/FilteringXMLEventReader.java | {
"start": 496,
"end": 1023
} | class ____ {@link XMLEventReader}s that want to modify or remove events from the reader stream.
* If a {@link StartElement} event is removed the subclass's {@link #filterEvent(XMLEvent, boolean)} will
* not see any events until after the matching {@link EndElement} event.
*
* Note, copied from the uPortal project by permission of author. See
* https://github.com/Jasig/uPortal/blob/master/uportal-war/src/main/java/org/jasig/portal/xml/stream/FilteringXMLEventReader.java
*
* @author Eric Dalquist
*/
public abstract | for |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/Long2DArraysBaseTest.java | {
"start": 888,
"end": 1106
} | class ____ testing <code>{@link Long2DArrays}</code>.
* <p>
* Is in <code>org.assertj.core.internal</code> package to be able to set {@link Long2DArrays#failures} appropriately.
*
* @author Maciej Wajcht
*/
public | for |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet2/Hamlet.java | {
"start": 429430,
"end": 430046
} | class ____<T extends __> extends EImp<T> implements HamletSpec.BR {
public BR(String name, T parent, EnumSet<EOpt> opts) {
super(name, parent, opts);
}
@Override
public BR<T> $id(String value) {
addAttr("id", value);
return this;
}
@Override
public BR<T> $class(String value) {
addAttr("class", value);
return this;
}
@Override
public BR<T> $title(String value) {
addAttr("title", value);
return this;
}
@Override
public BR<T> $style(String value) {
addAttr("style", value);
return this;
}
}
public | BR |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableEnumCheckerTest.java | {
"start": 3978,
"end": 4357
} | enum ____ implements MyInterface {
ONE {
public void bar() {}
},
TWO {
public void bar() {}
}
}
""")
.doTest();
}
@Test
public void mutableFieldType() {
compilationHelper
.addSourceLines(
"Foo.java",
"""
| Enum |
java | qos-ch__slf4j | jcl-over-slf4j/src/main/java/org/apache/commons/logging/LogFactory.java | {
"start": 8939,
"end": 9292
} | class ____ the key being the
* system property defined above.</li>
* <li>Fall back to a default implementation class (
* <code>org.apache.commons.logging.impl.SLF4FLogFactory</code>).</li>
* </ul>
*
* <p>
* <em>NOTE</em>- If the properties file method of identifying the
* <code>LogFactory</code> implementation | with |
java | bumptech__glide | library/test/src/test/java/com/bumptech/glide/manager/Issue117Activity.java | {
"start": 1531,
"end": 1787
} | class ____ extends Fragment {
@Override
public View onCreateView(
@NonNull LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
return new Issue117ImageView(getActivity());
}
}
public static | Issue117Fragment |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/endpoint/annotation/DiscoveredOperationMethod.java | {
"start": 1274,
"end": 2701
} | class ____ extends OperationMethod {
private final List<String> producesMediaTypes;
public DiscoveredOperationMethod(Method method, OperationType operationType,
AnnotationAttributes annotationAttributes) {
super(method, operationType);
Assert.notNull(annotationAttributes, "'annotationAttributes' must not be null");
List<String> producesMediaTypes = new ArrayList<>();
producesMediaTypes.addAll(Arrays.asList(annotationAttributes.getStringArray("produces")));
producesMediaTypes.addAll(getProducesFromProducible(annotationAttributes));
this.producesMediaTypes = Collections.unmodifiableList(producesMediaTypes);
}
private <E extends Enum<E> & Producible<E>> List<String> getProducesFromProducible(
AnnotationAttributes annotationAttributes) {
Class<?> type = getProducesFrom(annotationAttributes);
if (type == Producible.class) {
return Collections.emptyList();
}
List<String> produces = new ArrayList<>();
for (Object value : type.getEnumConstants()) {
produces.add(((Producible<?>) value).getProducedMimeType().toString());
}
return produces;
}
private Class<?> getProducesFrom(AnnotationAttributes annotationAttributes) {
try {
return annotationAttributes.getClass("producesFrom");
}
catch (IllegalArgumentException ex) {
return Producible.class;
}
}
public List<String> getProducesMediaTypes() {
return this.producesMediaTypes;
}
}
| DiscoveredOperationMethod |
java | spring-projects__spring-boot | module/spring-boot-jms/src/test/java/org/springframework/boot/jms/ConnectionFactoryUnwrapperTests.java | {
"start": 1329,
"end": 3001
} | class ____ {
@Test
void unwrapWithSingleConnectionFactory() {
ConnectionFactory connectionFactory = new SingleConnectionFactory();
assertThat(unwrapCaching(connectionFactory)).isSameAs(connectionFactory);
}
@Test
void unwrapWithConnectionFactory() {
ConnectionFactory connectionFactory = mock(ConnectionFactory.class);
assertThat(unwrapCaching(connectionFactory)).isSameAs(connectionFactory);
}
@Test
void unwrapWithCachingConnectionFactory() {
ConnectionFactory connectionFactory = mock(ConnectionFactory.class);
assertThat(unwrapCaching(new CachingConnectionFactory(connectionFactory))).isSameAs(connectionFactory);
}
@Test
void unwrapWithNestedCachingConnectionFactories() {
ConnectionFactory connectionFactory = mock(ConnectionFactory.class);
CachingConnectionFactory firstCachingConnectionFactory = new CachingConnectionFactory(connectionFactory);
CachingConnectionFactory secondCachingConnectionFactory = new CachingConnectionFactory(
firstCachingConnectionFactory);
assertThat(unwrapCaching(secondCachingConnectionFactory)).isSameAs(connectionFactory);
}
@Test
void unwrapWithJmsPoolConnectionFactory() {
ConnectionFactory connectionFactory = mock(ConnectionFactory.class);
JmsPoolConnectionFactory poolConnectionFactory = new JmsPoolConnectionFactory();
poolConnectionFactory.setConnectionFactory(connectionFactory);
assertThat(unwrapCaching(poolConnectionFactory)).isSameAs(poolConnectionFactory);
}
private ConnectionFactory unwrapCaching(ConnectionFactory connectionFactory) {
return ConnectionFactoryUnwrapper.unwrapCaching(connectionFactory);
}
}
@Nested
| UnwrapCaching |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/requests/AlterConfigsResponse.java | {
"start": 1176,
"end": 2523
} | class ____ extends AbstractResponse {
private final AlterConfigsResponseData data;
public AlterConfigsResponse(AlterConfigsResponseData data) {
super(ApiKeys.ALTER_CONFIGS);
this.data = data;
}
public Map<ConfigResource, ApiError> errors() {
return data.responses().stream().collect(Collectors.toMap(
response -> new ConfigResource(
ConfigResource.Type.forId(response.resourceType()),
response.resourceName()),
response -> new ApiError(Errors.forCode(response.errorCode()), response.errorMessage())
));
}
@Override
public Map<Errors, Integer> errorCounts() {
return apiErrorCounts(errors());
}
@Override
public int throttleTimeMs() {
return data.throttleTimeMs();
}
@Override
public void maybeSetThrottleTimeMs(int throttleTimeMs) {
data.setThrottleTimeMs(throttleTimeMs);
}
@Override
public AlterConfigsResponseData data() {
return data;
}
public static AlterConfigsResponse parse(Readable readable, short version) {
return new AlterConfigsResponse(new AlterConfigsResponseData(readable, version));
}
@Override
public boolean shouldClientThrottle(short version) {
return version >= 1;
}
}
| AlterConfigsResponse |
java | apache__hadoop | hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSSTAT3Response.java | {
"start": 1077,
"end": 4717
} | class ____ extends NFS3Response {
private Nfs3FileAttributes postOpAttr;
// The total size, in bytes, of the file system.
private final long tbytes;
// The amount of free space, in bytes, in the file system.
private final long fbytes;
/*
* The amount of free space, in bytes, available to the user identified by the
* authentication information in the RPC. (This reflects space that is
* reserved by the file system; it does not reflect any quota system
* implemented by the server.)
*/
private final long abytes;
/*
* The total number of file slots in the file system. (On a UNIX server, this
* often corresponds to the number of inodes configured.)
*/
private final long tfiles;
/* The number of free file slots in the file system. */
private final long ffiles;
/*
* The number of free file slots that are available to the user corresponding
* to the authentication information in the RPC. (This reflects slots that are
* reserved by the file system; it does not reflect any quota system
* implemented by the server.)
*/
private final long afiles;
/*
* A measure of file system volatility: this is the number of seconds for
* which the file system is not expected to change. For a volatile, frequently
* updated file system, this will be 0. For an immutable file system, such as
* a CD-ROM, this would be the largest unsigned integer. For file systems that
* are infrequently modified, for example, one containing local executable
* programs and on-line documentation, a value corresponding to a few hours or
* days might be used. The client may use this as a hint in tuning its cache
* management. Note however, this measure is assumed to be dynamic and may
* change at any time.
*/
private final int invarsec;
public FSSTAT3Response(int status) {
this(status, null, 0, 0, 0, 0, 0, 0, 0);
}
public FSSTAT3Response(int status, Nfs3FileAttributes postOpAttr,
long tbytes, long fbytes, long abytes, long tfiles, long ffiles,
long afiles, int invarsec) {
super(status);
this.postOpAttr = postOpAttr;
this.tbytes = tbytes;
this.fbytes = fbytes;
this.abytes = abytes;
this.tfiles = tfiles;
this.ffiles = ffiles;
this.afiles = afiles;
this.invarsec = invarsec;
}
public static FSSTAT3Response deserialize(XDR xdr) {
int status = xdr.readInt();
xdr.readBoolean();
Nfs3FileAttributes postOpAttr = Nfs3FileAttributes.deserialize(xdr);
long tbytes = 0;
long fbytes = 0;
long abytes = 0;
long tfiles = 0;
long ffiles = 0;
long afiles = 0;
int invarsec = 0;
if (status == Nfs3Status.NFS3_OK) {
tbytes = xdr.readHyper();
fbytes = xdr.readHyper();
abytes = xdr.readHyper();
tfiles = xdr.readHyper();
ffiles = xdr.readHyper();
afiles = xdr.readHyper();
invarsec = xdr.readInt();
}
return new FSSTAT3Response(status, postOpAttr, tbytes, fbytes, abytes,
tfiles, ffiles, afiles, invarsec);
}
@Override
public XDR serialize(XDR out, int xid, Verifier verifier) {
super.serialize(out, xid, verifier);
out.writeBoolean(true);
if (postOpAttr == null) {
postOpAttr = new Nfs3FileAttributes();
}
postOpAttr.serialize(out);
if (getStatus() == Nfs3Status.NFS3_OK) {
out.writeLongAsHyper(tbytes);
out.writeLongAsHyper(fbytes);
out.writeLongAsHyper(abytes);
out.writeLongAsHyper(tfiles);
out.writeLongAsHyper(ffiles);
out.writeLongAsHyper(afiles);
out.writeInt(invarsec);
}
return out;
}
}
| FSSTAT3Response |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/support/JpaMetamodelEntityInformation.java | {
"start": 3493,
"end": 9692
} | class ____ not contain an id attribute");
}
this.idMetadata = new IdMetadata<>(identifiableType, PersistenceProvider.fromMetamodel(metamodel));
this.versionAttribute = findVersionAttribute(identifiableType, metamodel);
Assert.notNull(persistenceUnitUtil, "PersistenceUnitUtil must not be null");
this.persistenceUnitUtil = persistenceUnitUtil;
}
/**
* Creates a new {@link JpaMetamodelEntityInformation} for the given {@link Metamodel}.
*
* @param entityType must not be {@literal null}.
* @param metamodel must not be {@literal null}.
* @param persistenceUnitUtil must not be {@literal null}.
* @since 4.0
*/
JpaMetamodelEntityInformation(EntityType<T> entityType, Metamodel metamodel,
PersistenceUnitUtil persistenceUnitUtil) {
super(new JpaMetamodelEntityMetadata<>(entityType));
this.metamodel = metamodel;
this.entityName = entityType.getName();
this.idMetadata = new IdMetadata<>(entityType, PersistenceProvider.fromMetamodel(metamodel));
this.versionAttribute = findVersionAttribute(entityType, metamodel);
Assert.notNull(persistenceUnitUtil, "PersistenceUnitUtil must not be null");
this.persistenceUnitUtil = persistenceUnitUtil;
}
@Override
public String getEntityName() {
return entityName != null ? entityName : super.getEntityName();
}
/**
* Returns the version attribute of the given {@link ManagedType} or {@literal null} if none available.
*
* @param type must not be {@literal null}.
* @param metamodel must not be {@literal null}.
*/
@SuppressWarnings("unchecked")
private static <T> Optional<SingularAttribute<? super T, ?>> findVersionAttribute(IdentifiableType<T> type,
Metamodel metamodel) {
try {
return Optional.ofNullable(type.getVersion(Object.class));
} catch (IllegalArgumentException o_O) {
// Needs workarounds as the method is implemented with a strict type check on e.g. Hibernate < 4.3
}
Set<SingularAttribute<? super T, ?>> attributes = type.getSingularAttributes();
for (SingularAttribute<? super T, ?> attribute : attributes) {
if (attribute.isVersion()) {
return Optional.of(attribute);
}
}
Class<?> superType = type.getJavaType().getSuperclass();
if (!JpaMetamodel.of(metamodel).isJpaManaged(superType)) {
return Optional.empty();
}
ManagedType<?> managedSuperType = metamodel.managedType(superType);
if (!(managedSuperType instanceof IdentifiableType)) {
return Optional.empty();
}
return findVersionAttribute((IdentifiableType<T>) managedSuperType, metamodel);
}
@Override
@SuppressWarnings("unchecked")
public @Nullable ID getId(T entity) {
// check if this is a proxy. If so use Proxy mechanics to access the id.
PersistenceProvider persistenceProvider = PersistenceProvider.fromMetamodel(metamodel);
if (persistenceProvider.shouldUseAccessorFor(entity)) {
return (ID) persistenceProvider.getIdentifierFrom(entity);
}
// If it's a simple type, then immediately delegate to the provider
if (idMetadata.hasSimpleId()) {
if (entity instanceof Tuple t) {
return (ID) t.get(idMetadata.getSimpleIdAttribute().getName());
}
if (getJavaType().isInstance(entity)) {
return (ID) persistenceUnitUtil.getIdentifier(entity);
}
}
// otherwise, check if the complex id type has any partially filled fields
BeanWrapper entityWrapper = new DirectFieldAccessFallbackBeanWrapper(entity);
boolean partialIdValueFound = false;
for (SingularAttribute<? super T, ?> attribute : idMetadata) {
Object propertyValue = entityWrapper.getPropertyValue(attribute.getName());
if (idMetadata.hasSimpleId()) {
return (ID) propertyValue;
}
if (propertyValue != null) {
partialIdValueFound = true;
}
}
return partialIdValueFound ? (ID) persistenceUnitUtil.getIdentifier(entity) : null;
}
@Override
@SuppressWarnings("unchecked")
public Class<ID> getIdType() {
return (Class<ID>) idMetadata.getType();
}
@Override
public SingularAttribute<? super T, ?> getIdAttribute() {
return idMetadata.getSimpleIdAttribute();
}
@Override
public boolean hasCompositeId() {
return !idMetadata.hasSimpleId();
}
@Override
public Collection<String> getIdAttributeNames() {
List<String> attributeNames = new ArrayList<>(idMetadata.attributes.size());
for (SingularAttribute<? super T, ?> attribute : idMetadata.attributes) {
attributeNames.add(attribute.getName());
}
return attributeNames;
}
@Override
public @Nullable Object getCompositeIdAttributeValue(Object id, String idAttribute) {
Assert.isTrue(hasCompositeId(), "Model must have a composite Id");
return new DirectFieldAccessFallbackBeanWrapper(id).getPropertyValue(idAttribute);
}
@Override
public boolean isNew(T entity) {
if (versionAttribute.isEmpty()
|| versionAttribute.map(Attribute::getJavaType).map(Class::isPrimitive).orElse(false)) {
return super.isNew(entity);
}
BeanWrapper wrapper = new DirectFieldAccessFallbackBeanWrapper(entity);
return versionAttribute.map(it -> wrapper.getPropertyValue(it.getName()) == null).orElse(true);
}
@Override
public Map<String, Object> getKeyset(Iterable<String> propertyPaths, T entity) {
Function<String, Object> getter = getPropertyValueFunction(entity);
Map<String, Object> keyset = new LinkedHashMap<>();
if (hasCompositeId()) {
for (String idAttributeName : getIdAttributeNames()) {
keyset.put(idAttributeName, getter.apply(idAttributeName));
}
} else {
keyset.put(getIdAttribute().getName(), getId(entity));
}
for (String propertyPath : propertyPaths) {
keyset.put(propertyPath, getter.apply(propertyPath));
}
return keyset;
}
private Function<String, Object> getPropertyValueFunction(Object entity) {
if (entity instanceof Tuple t) {
return t::get;
}
// TODO: Proxy handling requires more elaborate refactoring, see
// https://github.com/spring-projects/spring-data-jpa/issues/2784
BeanWrapper entityWrapper = new DirectFieldAccessFallbackBeanWrapper(entity);
return entityWrapper::getPropertyValue;
}
/**
* Simple value object to encapsulate id specific metadata.
*
* @author Oliver Gierke
* @author Thomas Darimont
*/
private static | does |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryDefaultInEnumSwitchTest.java | {
"start": 5726,
"end": 6370
} | enum ____ {
ONE,
TWO,
THREE,
UNRECOGNIZED
}
boolean m(Case c) {
switch (c) {
case ONE:
case TWO:
case THREE:
return true;
case UNRECOGNIZED:
// continue below
}
return false;
}
}
""")
.doTest();
}
@Test
public void defaultBreak() {
refactoringTestHelper
.addInputLines(
"in/Test.java",
"""
| Case |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockTokenWrappingQOP.java | {
"start": 2116,
"end": 7222
} | class ____ extends SaslDataTransferTestCase {
public static final Logger LOG = LoggerFactory.getLogger(TestPermission.class);
private HdfsConfiguration conf;
private MiniDFSCluster cluster;
private DistributedFileSystem dfs;
private String configKey;
private String qopValue;
public static Collection<Object[]> qopSettings() {
// if configured with privacy, the negotiated QOP should auth-conf
// similarly for the other two
return Arrays.asList(new Object[][] {
{"privacy", "auth-conf"},
{"integrity", "auth-int"},
{"authentication", "auth"}
});
}
public void initTestBlockTokenWrappingQOP(String pconfigKey, String pqopValue)
throws Exception {
this.configKey = pconfigKey;
this.qopValue = pqopValue;
setup();
}
public void setup() throws Exception {
conf = createSecureConfig(this.configKey);
conf.set(DFS_NAMENODE_RPC_ADDRESS_AUXILIARY_KEY, "12000");
// explicitly setting service rpc for datanode. This because
// DFSUtil.getNNServiceRpcAddressesForCluster looks up client facing port
// and service port at the same time, and if no setting for service
// rpc, it would return client port, in this case, it will be the
// auxiliary port for data node. Which is not what auxiliary is for.
// setting service rpc port to avoid this.
conf.set(DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, "localhost:9020");
conf.set(
CommonConfigurationKeys.HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS,
"org.apache.hadoop.security.IngressPortBasedResolver");
conf.set("ingress.port.sasl.configured.ports", "12000");
conf.set("ingress.port.sasl.prop.12000", this.configKey);
conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
conf.setBoolean(DFS_NAMENODE_SEND_QOP_ENABLED, true);
conf.set(HADOOP_RPC_PROTECTION, this.configKey);
cluster = null;
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
cluster.waitActive();
HdfsConfiguration clientConf = new HdfsConfiguration(conf);
clientConf.unset(
CommonConfigurationKeys.HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS);
URI currentURI = cluster.getURI();
URI uriAuxiliary = new URI(currentURI.getScheme() +
"://" + currentURI.getHost() + ":12000");
dfs = (DistributedFileSystem) FileSystem.get(uriAuxiliary, conf);
}
@AfterEach
public void tearDown() throws Exception {
if (cluster != null) {
cluster.shutdown();
}
}
@MethodSource("qopSettings")
@ParameterizedTest
public void testAddBlockWrappingQOP(String pconfigKey, String pqopValue) throws Exception {
initTestBlockTokenWrappingQOP(pconfigKey, pqopValue);
final String src = "/testAddBlockWrappingQOP";
final Path path = new Path(src);
dfs.create(path);
DFSClient client = dfs.getClient();
String clientName = client.getClientName();
LocatedBlock lb = client.namenode.addBlock(src, clientName, null, null,
HdfsConstants.GRANDFATHER_INODE_ID, null, null);
byte[] secret = lb.getBlockToken().decodeIdentifier().getHandshakeMsg();
assertEquals(this.qopValue, new String(secret));
}
@MethodSource("qopSettings")
@ParameterizedTest
public void testAppendWrappingQOP(String pconfigKey, String pqopValue) throws Exception {
initTestBlockTokenWrappingQOP(pconfigKey, pqopValue);
final String src = "/testAppendWrappingQOP";
final Path path = new Path(src);
FSDataOutputStream out = dfs.create(path);
// NameNode append call returns a last block instance. If there is nothing
// it returns as a null. So write something, so that lastBlock has
// something
out.write(0);
out.close();
DFSClient client = dfs.getClient();
String clientName = client.getClientName();
LastBlockWithStatus lastBlock = client.namenode.append(src, clientName,
new EnumSetWritable<>(EnumSet.of(CreateFlag.APPEND)));
byte[] secret = lastBlock.getLastBlock().getBlockToken()
.decodeIdentifier().getHandshakeMsg();
assertEquals(this.qopValue, new String(secret));
}
@MethodSource("qopSettings")
@ParameterizedTest
public void testGetBlockLocationWrappingQOP(String pconfigKey, String pqopValue)
throws Exception {
initTestBlockTokenWrappingQOP(pconfigKey, pqopValue);
final String src = "/testGetBlockLocationWrappingQOP";
final Path path = new Path(src);
FSDataOutputStream out = dfs.create(path);
// if the file is empty, there will be no blocks returned. Write something
// so that getBlockLocations actually returns some block.
out.write(0);
out.close();
FileStatus status = dfs.getFileStatus(path);
DFSClient client = dfs.getClient();
LocatedBlocks lbs = client.namenode.getBlockLocations(
src, 0, status.getLen());
assertTrue(lbs.getLocatedBlocks().size() > 0);
for (LocatedBlock lb : lbs.getLocatedBlocks()) {
byte[] secret = lb.getBlockToken()
.decodeIdentifier().getHandshakeMsg();
assertEquals(this.qopValue, new String(secret));
}
}
}
| TestBlockTokenWrappingQOP |
java | grpc__grpc-java | api/src/test/java/io/grpc/ManagedChannelRegistryTest.java | {
"start": 5809,
"end": 5865
} | class ____ extends SocketAddress {
}
| SocketAddress1 |
java | quarkusio__quarkus | extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/JsonLogConfig.java | {
"start": 3799,
"end": 4006
} | enum ____ {
DEFAULT,
ECS,
GCP
}
}
/**
* Post additional fields. E.g. `fieldName1=value1,fieldName2=value2`.
*/
@ConfigGroup
public | LogFormat |
java | playframework__playframework | documentation/manual/working/javaGuide/main/application/code/javaguide/application/httpfilters/RoutedLoggingFilter.java | {
"start": 528,
"end": 1565
} | class ____ extends Filter {
private static final Logger log = LoggerFactory.getLogger(RoutedLoggingFilter.class);
@Inject
public RoutedLoggingFilter(Materializer mat) {
super(mat);
}
@Override
public CompletionStage<Result> apply(
Function<Http.RequestHeader, CompletionStage<Result>> nextFilter,
Http.RequestHeader requestHeader) {
long startTime = System.currentTimeMillis();
return nextFilter
.apply(requestHeader)
.thenApply(
result -> {
HandlerDef handlerDef = requestHeader.attrs().get(Router.Attrs.HANDLER_DEF);
String actionMethod = handlerDef.controller() + "." + handlerDef.method();
long endTime = System.currentTimeMillis();
long requestTime = endTime - startTime;
log.info("{} took {}ms and returned {}", actionMethod, requestTime, result.status());
return result.withHeader("Request-Time", "" + requestTime);
});
}
}
// #routing-info-access
| RoutedLoggingFilter |
java | apache__avro | lang/java/tools/src/main/java/org/apache/avro/tool/Util.java | {
"start": 1931,
"end": 10982
} | class ____ {
/**
* Returns stdin if filename is "-", else opens the File in the owning
* filesystem and returns an InputStream for it. Relative paths will be opened
* in the default filesystem.
*
* @param filename The filename to be opened
* @throws IOException
*/
static BufferedInputStream fileOrStdin(String filename, InputStream stdin) throws IOException {
return new BufferedInputStream(filename.equals("-") ? stdin : openFromFS(filename));
}
/**
* Returns stdout if filename is "-", else opens the file from the owning
* filesystem and returns an OutputStream for it. Relative paths will be opened
* in the default filesystem.
*
* @param filename The filename to be opened
* @throws IOException
*/
static BufferedOutputStream fileOrStdout(String filename, OutputStream stdout) throws IOException {
return new BufferedOutputStream(filename.equals("-") ? stdout : createFromFS(filename));
}
/**
* Returns an InputStream for the file using the owning filesystem, or the
* default if none is given.
*
* @param filename The filename to be opened
* @throws IOException
*/
static InputStream openFromFS(String filename) throws IOException {
Path p = new Path(filename);
return p.getFileSystem(new Configuration()).open(p);
}
/**
* Returns an InputStream for the file using the owning filesystem, or the
* default if none is given.
*
* @param filename The filename to be opened
* @throws IOException
*/
static InputStream openFromFS(Path filename) throws IOException {
return filename.getFileSystem(new Configuration()).open(filename);
}
/**
* Returns a seekable FsInput using the owning filesystem, or the default if
* none is given.
*
* @param filename The filename to be opened
* @throws IOException
*/
static FsInput openSeekableFromFS(String filename) throws IOException {
return new FsInput(new Path(filename), new Configuration());
}
/**
* Opens the file for writing in the owning filesystem, or the default if none
* is given.
*
* @param filename The filename to be opened.
* @return An OutputStream to the specified file.
* @throws IOException
*/
static OutputStream createFromFS(String filename) throws IOException {
Path p = new Path(filename);
return new BufferedOutputStream(p.getFileSystem(new Configuration()).create(p));
}
/**
* Closes the inputstream created from {@link Util.fileOrStdin} unless it is
* System.in.
*
* @param in The inputstream to be closed.
*/
static void close(InputStream in) {
if (!System.in.equals(in)) {
try {
in.close();
} catch (IOException e) {
System.err.println("could not close InputStream " + in.toString());
}
}
}
/**
* Closes the outputstream created from {@link Util.fileOrStdout} unless it is
* System.out.
*
* @param out The outputStream to be closed.
*/
static void close(OutputStream out) {
if (!System.out.equals(out)) {
try {
out.close();
} catch (IOException e) {
System.err.println("could not close OutputStream " + out.toString());
}
}
}
/**
* Parses a schema from the specified file.
*
* @param filename The file name to parse
* @return The parsed schema
* @throws IOException
*/
static Schema parseSchemaFromFS(String filename) throws IOException {
InputStream stream = openFromFS(filename);
try {
return new Schema.Parser().parse(stream);
} finally {
close(stream);
}
}
/**
* If pathname is a file, this method returns a list with a single absolute Path
* to that file. If pathname is a directory, this method returns a list of
* Pathes to all the files within this directory. Only files inside that
* directory are included, no subdirectories or files in subdirectories will be
* added. If pathname is a glob pattern, all files matching the pattern are
* included.
*
* The List is sorted alphabetically.
*
* @param fileOrDirName filename, directoryname or a glob pattern
* @return A Path List
* @throws IOException
*/
static List<Path> getFiles(String fileOrDirName) throws IOException {
List<Path> pathList = new ArrayList<>();
Path path = new Path(fileOrDirName);
FileSystem fs = path.getFileSystem(new Configuration());
if (fs.isFile(path)) {
pathList.add(path);
} else if (fs.isDirectory(path)) {
for (FileStatus status : fs.listStatus(path)) {
if (!status.isDirectory()) {
pathList.add(status.getPath());
}
}
} else {
FileStatus[] fileStatuses = fs.globStatus(path);
if (fileStatuses != null) {
for (FileStatus status : fileStatuses) {
pathList.add(status.getPath());
}
} else {
throw new FileNotFoundException(fileOrDirName);
}
}
Collections.sort(pathList);
return pathList;
}
/**
* Concatenate the result of {@link #getFiles(String)} applied to all file or
* directory names. The list is sorted alphabetically and contains no
* subdirectories or files within those.
*
* The list is sorted alphabetically.
*
* @param fileOrDirNames A list of filenames, directorynames or glob patterns
* @return A list of Paths, one for each file
* @throws IOException
*/
static List<Path> getFiles(List<String> fileOrDirNames) throws IOException {
ArrayList<Path> pathList = new ArrayList<>(fileOrDirNames.size());
for (String name : fileOrDirNames) {
pathList.addAll(getFiles(name));
}
Collections.sort(pathList);
return pathList;
}
/**
* Converts a String JSON object into a generic datum.
*
* This is inefficient (creates extra objects), so should be used sparingly.
*/
static Object jsonToGenericDatum(Schema schema, String jsonData) throws IOException {
GenericDatumReader<Object> reader = new GenericDatumReader<>(schema);
Object datum = reader.read(null, DecoderFactory.get().jsonDecoder(schema, jsonData));
return datum;
}
/** Reads and returns the first datum in a data file. */
static Object datumFromFile(Schema schema, String file) throws IOException {
try (DataFileReader<Object> in = new DataFileReader<>(new File(file), new GenericDatumReader<>(schema))) {
return in.next();
}
}
static OptionSpec<String> compressionCodecOption(OptionParser optParser) {
return optParser.accepts("codec", "Compression codec").withRequiredArg().ofType(String.class)
.defaultsTo(DEFLATE_CODEC);
}
static OptionSpec<String> compressionCodecOptionWithDefault(OptionParser optParser, String s) {
return optParser.accepts("codec", "Compression codec").withRequiredArg().ofType(String.class).defaultsTo(s);
}
static OptionSpec<Integer> compressionLevelOption(OptionParser optParser) {
return optParser.accepts("level", "Compression level (only applies to deflate, xz, and zstandard)")
.withRequiredArg().ofType(Integer.class).defaultsTo(Deflater.DEFAULT_COMPRESSION);
}
static CodecFactory codecFactory(OptionSet opts, OptionSpec<String> codec, OptionSpec<Integer> level) {
return codecFactory(opts, codec, level, DEFLATE_CODEC);
}
static CodecFactory codecFactory(OptionSet opts, OptionSpec<String> codec, OptionSpec<Integer> level,
String defaultCodec) {
String codecName = opts.hasArgument(codec) ? codec.value(opts) : defaultCodec;
if (codecName.equals(DEFLATE_CODEC)) {
return CodecFactory.deflateCodec(level.value(opts));
} else if (codecName.equals(DataFileConstants.XZ_CODEC)) {
return CodecFactory.xzCodec(level.value(opts));
} else if (codecName.equals(DataFileConstants.ZSTANDARD_CODEC)) {
return CodecFactory.zstandardCodec(level.value(opts));
} else {
return CodecFactory.fromString(codec.value(opts));
}
}
// Below copied from Apache commons-codec version 1.9
// org.apache.commons.codec.binary.Hex, see NOTICE.
/**
* Used to build output as Hex
*/
private static final char[] DIGITS_LOWER = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd',
'e', 'f' };
/**
* Converts an array of bytes into an array of characters representing the
* hexadecimal values of each byte in order. The returned array will be double
* the length of the passed array, as it takes two characters to represent any
* given byte.
*
* @param data a byte[] to convert to Hex characters
* @param toDigits the output alphabet
* @return A char[] containing hexadecimal characters
*/
static String encodeHex(final byte[] data) {
final int l = data.length;
final char[] out = new char[l << 1];
// two characters form the hex value.
for (int i = 0, j = 0; i < l; i++) {
out[j++] = DIGITS_LOWER[(0xF0 & data[i]) >>> 4];
out[j++] = DIGITS_LOWER[0x0F & data[i]];
}
return new String(out);
}
// end copied from Apache commons-codec
}
| Util |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/profile/UnlessBuildProfileAllAnyTest.java | {
"start": 3383,
"end": 3700
} | class ____ implements UnlessBuildProfileBean {
@Override
public String profile() {
return "anyOf-build";
}
}
// Not Active, both "test" and "build" profiles are active
@ApplicationScoped
@UnlessBuildProfile(allOf = { "test", "build" })
public static | AnyOfBuildBean |
java | google__guice | core/test/com/google/inject/DefaultMethodInterceptionTest.java | {
"start": 5464,
"end": 6254
} | class ____ extends BaseClass2 implements Foo {}
@Test
public void testInterceptedDefaultMethod_whenParentClassDefinesInterceptedMethod() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bindInterceptor(
Matchers.any(), Matchers.annotatedWith(InterceptMe.class), interceptor);
bind(Foo.class).to(InheritingFoo2.class);
}
});
// the concrete implementation that wins is not annotated
Foo foo = injector.getInstance(Foo.class);
assertEquals("BaseClass2", foo.defaultMethod());
assertEquals(1, callCount.get());
assertEquals(1, interceptedCallCount.get());
}
public | InheritingFoo2 |
java | apache__camel | components/camel-test/camel-test-junit5/src/main/java/org/apache/camel/test/junit5/JunitPropertiesSource.java | {
"start": 946,
"end": 1371
} | class ____ implements PropertiesSource {
private final ExtensionContext.Store globalStore;
JunitPropertiesSource(ExtensionContext.Store globalStore) {
this.globalStore = globalStore;
}
@Override
public String getName() {
return "junit-store";
}
@Override
public String getProperty(String name) {
return globalStore.get(name, String.class);
}
}
| JunitPropertiesSource |
java | apache__flink | flink-formats/flink-parquet/src/main/java/org/apache/flink/formats/parquet/row/ParquetRowDataBuilder.java | {
"start": 4623,
"end": 6234
} | class ____ implements ParquetBuilder<RowData> {
private final RowType rowType;
private final SerializableConfiguration configuration;
private final boolean utcTimestamp;
public FlinkParquetBuilder(RowType rowType, Configuration conf, boolean utcTimestamp) {
this.rowType = rowType;
this.configuration = new SerializableConfiguration(conf);
this.utcTimestamp = utcTimestamp;
}
@Override
public ParquetWriter<RowData> createWriter(OutputFile out) throws IOException {
Configuration conf = configuration.conf();
return new ParquetRowDataBuilder(out, rowType, utcTimestamp)
.withCompressionCodec(
CompressionCodecName.fromConf(
conf.get(
ParquetOutputFormat.COMPRESSION,
CompressionCodecName.SNAPPY.name())))
.withRowGroupSize(getBlockSize(conf))
.withPageSize(getPageSize(conf))
.withDictionaryPageSize(getDictionaryPageSize(conf))
.withMaxPaddingSize(
conf.getInt(MAX_PADDING_BYTES, ParquetWriter.MAX_PADDING_SIZE_DEFAULT))
.withDictionaryEncoding(getEnableDictionary(conf))
.withValidation(getValidation(conf))
.withWriterVersion(getWriterVersion(conf))
.withConf(conf)
.build();
}
}
}
| FlinkParquetBuilder |
java | apache__camel | components/camel-salesforce/camel-salesforce-maven-plugin/src/test/resources/generated/QueryRecordsCase.java | {
"start": 355,
"end": 421
} | class ____ extends AbstractQueryRecordsBase<Case> {
}
| QueryRecordsCase |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineHealthReader.java | {
"start": 1569,
"end": 2220
} | class ____ implements MessageBodyReader<TimelineHealth> {
private ObjectMapper objectMapper = new ObjectMapper();
@Override
public boolean isReadable(Class<?> type, Type genericType,
Annotation[] annotations, MediaType mediaType) {
return type == TimelineHealth.class;
}
@Override
public TimelineHealth readFrom(Class<TimelineHealth> type, Type genericType,
Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, String> httpHeaders,
InputStream entityStream) throws IOException, WebApplicationException {
return objectMapper.readValue(entityStream, TimelineHealth.class);
}
}
| TimelineHealthReader |
java | google__guava | android/guava-tests/test/com/google/common/util/concurrent/WrappingExecutorServiceTest.java | {
"start": 5898,
"end": 6154
} | class ____ implements Runnable {
private final Runnable delegate;
WrappedRunnable(Runnable delegate) {
this.delegate = delegate;
}
@Override
public void run() {
delegate.run();
}
}
private static final | WrappedRunnable |
java | elastic__elasticsearch | x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageDeciderService.java | {
"start": 4773,
"end": 7629
} | class ____ implements AutoscalingDeciderResult.Reason {
private final String reason;
private final long unassigned;
private final long assigned;
private final long forecasted;
private final TimeValue forecastWindow;
public ProactiveReason(String reason, long unassigned, long assigned, long forecasted, TimeValue forecastWindow) {
this.reason = reason;
this.unassigned = unassigned;
this.assigned = assigned;
this.forecasted = forecasted;
this.forecastWindow = forecastWindow;
}
public ProactiveReason(StreamInput in) throws IOException {
this.reason = in.readString();
this.unassigned = in.readLong();
this.assigned = in.readLong();
this.forecasted = in.readLong();
this.forecastWindow = in.readTimeValue();
}
@Override
public String summary() {
return reason;
}
public long unassigned() {
return unassigned;
}
public long assigned() {
return assigned;
}
public long forecasted() {
return forecasted;
}
public TimeValue forecastWindow() {
return forecastWindow;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(reason);
out.writeLong(unassigned);
out.writeLong(assigned);
out.writeLong(forecasted);
out.writeTimeValue(forecastWindow);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("reason", reason);
builder.field("unassigned", unassigned);
builder.field("assigned", assigned);
builder.field("forecasted", forecasted);
builder.field("forecast_window", forecastWindow);
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ProactiveReason that = (ProactiveReason) o;
return unassigned == that.unassigned
&& assigned == that.assigned
&& forecasted == that.forecasted
&& reason.equals(that.reason)
&& forecastWindow.equals(that.forecastWindow);
}
@Override
public int hashCode() {
return Objects.hash(reason, unassigned, assigned, forecasted, forecastWindow);
}
}
}
| ProactiveReason |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/taobao/ItemUpdateDOTest.java | {
"start": 744,
"end": 1268
} | class ____ {
private long f0 = 1;
private long f1;
public long getF0() {
return f0;
}
public void setF0(long f0) {
this.f0 = f0;
}
public long getF1() {
return f1;
}
public void setF1(long f1) {
this.f1 = f1;
}
/** @deprecated */
@Deprecated
public long getUpdateFeatureCc() {
throw new IllegalArgumentException("updateFeatureCc不再使用");
}
}
}
| Model |
java | google__guava | guava/src/com/google/common/collect/ImmutableRangeMap.java | {
"start": 4432,
"end": 13943
} | class ____<K extends Comparable<?>, V> {
private final List<Entry<Range<K>, V>> entries;
public Builder() {
this.entries = new ArrayList<>();
}
/**
* Associates the specified range with the specified value.
*
* @throws IllegalArgumentException if {@code range} is empty
*/
@CanIgnoreReturnValue
public Builder<K, V> put(Range<K> range, V value) {
checkNotNull(range);
checkNotNull(value);
checkArgument(!range.isEmpty(), "Range must not be empty, but was %s", range);
entries.add(immutableEntry(range, value));
return this;
}
/** Copies all associations from the specified range map into this builder. */
@CanIgnoreReturnValue
public Builder<K, V> putAll(RangeMap<K, ? extends V> rangeMap) {
for (Entry<Range<K>, ? extends V> entry : rangeMap.asMapOfRanges().entrySet()) {
put(entry.getKey(), entry.getValue());
}
return this;
}
@CanIgnoreReturnValue
Builder<K, V> combine(Builder<K, V> builder) {
entries.addAll(builder.entries);
return this;
}
/**
* Returns an {@code ImmutableRangeMap} containing the associations previously added to this
* builder.
*
* @throws IllegalArgumentException if any two ranges inserted into this builder overlap
*/
public ImmutableRangeMap<K, V> build() {
sort(entries, Range.<K>rangeLexOrdering().onKeys());
ImmutableList.Builder<Range<K>> rangesBuilder = new ImmutableList.Builder<>(entries.size());
ImmutableList.Builder<V> valuesBuilder = new ImmutableList.Builder<>(entries.size());
for (int i = 0; i < entries.size(); i++) {
Range<K> range = entries.get(i).getKey();
if (i > 0) {
Range<K> prevRange = entries.get(i - 1).getKey();
if (range.isConnected(prevRange) && !range.intersection(prevRange).isEmpty()) {
throw new IllegalArgumentException(
"Overlapping ranges: range " + prevRange + " overlaps with entry " + range);
}
}
rangesBuilder.add(range);
valuesBuilder.add(entries.get(i).getValue());
}
return new ImmutableRangeMap<>(rangesBuilder.build(), valuesBuilder.build());
}
}
private final transient ImmutableList<Range<K>> ranges;
private final transient ImmutableList<V> values;
ImmutableRangeMap(ImmutableList<Range<K>> ranges, ImmutableList<V> values) {
this.ranges = ranges;
this.values = values;
}
@Override
public @Nullable V get(K key) {
int index =
SortedLists.binarySearch(
ranges,
Range::lowerBound,
Cut.belowValue(key),
KeyPresentBehavior.ANY_PRESENT,
KeyAbsentBehavior.NEXT_LOWER);
if (index == -1) {
return null;
} else {
Range<K> range = ranges.get(index);
return range.contains(key) ? values.get(index) : null;
}
}
@Override
public @Nullable Entry<Range<K>, V> getEntry(K key) {
int index =
SortedLists.binarySearch(
ranges,
Range::lowerBound,
Cut.belowValue(key),
KeyPresentBehavior.ANY_PRESENT,
KeyAbsentBehavior.NEXT_LOWER);
if (index == -1) {
return null;
} else {
Range<K> range = ranges.get(index);
return range.contains(key) ? immutableEntry(range, values.get(index)) : null;
}
}
@Override
public Range<K> span() {
if (ranges.isEmpty()) {
throw new NoSuchElementException();
}
Range<K> firstRange = ranges.get(0);
Range<K> lastRange = ranges.get(ranges.size() - 1);
return Range.create(firstRange.lowerBound, lastRange.upperBound);
}
/**
* Guaranteed to throw an exception and leave the {@code RangeMap} unmodified.
*
* @throws UnsupportedOperationException always
* @deprecated Unsupported operation.
*/
@Deprecated
@Override
@DoNotCall("Always throws UnsupportedOperationException")
public final void put(Range<K> range, V value) {
throw new UnsupportedOperationException();
}
/**
* Guaranteed to throw an exception and leave the {@code RangeMap} unmodified.
*
* @throws UnsupportedOperationException always
* @deprecated Unsupported operation.
*/
@Deprecated
@Override
@DoNotCall("Always throws UnsupportedOperationException")
public final void putCoalescing(Range<K> range, V value) {
throw new UnsupportedOperationException();
}
/**
* Guaranteed to throw an exception and leave the {@code RangeMap} unmodified.
*
* @throws UnsupportedOperationException always
* @deprecated Unsupported operation.
*/
@Deprecated
@Override
@DoNotCall("Always throws UnsupportedOperationException")
public final void putAll(RangeMap<K, ? extends V> rangeMap) {
throw new UnsupportedOperationException();
}
/**
* Guaranteed to throw an exception and leave the {@code RangeMap} unmodified.
*
* @throws UnsupportedOperationException always
* @deprecated Unsupported operation.
*/
@Deprecated
@Override
@DoNotCall("Always throws UnsupportedOperationException")
public final void clear() {
throw new UnsupportedOperationException();
}
/**
* Guaranteed to throw an exception and leave the {@code RangeMap} unmodified.
*
* @throws UnsupportedOperationException always
* @deprecated Unsupported operation.
*/
@Deprecated
@Override
@DoNotCall("Always throws UnsupportedOperationException")
public final void remove(Range<K> range) {
throw new UnsupportedOperationException();
}
/**
* Guaranteed to throw an exception and leave the {@code RangeMap} unmodified.
*
* @throws UnsupportedOperationException always
* @deprecated Unsupported operation.
* @since 28.1
*/
@Deprecated
@Override
@DoNotCall("Always throws UnsupportedOperationException")
public final void merge(
Range<K> range,
@Nullable V value,
BiFunction<? super V, ? super @Nullable V, ? extends @Nullable V> remappingFunction) {
throw new UnsupportedOperationException();
}
@Override
public ImmutableMap<Range<K>, V> asMapOfRanges() {
if (ranges.isEmpty()) {
return ImmutableMap.of();
}
RegularImmutableSortedSet<Range<K>> rangeSet =
new RegularImmutableSortedSet<>(ranges, rangeLexOrdering());
return new ImmutableSortedMap<>(rangeSet, values);
}
@Override
public ImmutableMap<Range<K>, V> asDescendingMapOfRanges() {
if (ranges.isEmpty()) {
return ImmutableMap.of();
}
RegularImmutableSortedSet<Range<K>> rangeSet =
new RegularImmutableSortedSet<>(ranges.reverse(), Range.<K>rangeLexOrdering().reverse());
return new ImmutableSortedMap<>(rangeSet, values.reverse());
}
@Override
public ImmutableRangeMap<K, V> subRangeMap(Range<K> range) {
if (checkNotNull(range).isEmpty()) {
return ImmutableRangeMap.of();
} else if (ranges.isEmpty() || range.encloses(span())) {
return this;
}
int lowerIndex =
SortedLists.binarySearch(
ranges,
Range::upperBound,
range.lowerBound,
KeyPresentBehavior.FIRST_AFTER,
KeyAbsentBehavior.NEXT_HIGHER);
int upperIndex =
SortedLists.binarySearch(
ranges,
Range::lowerBound,
range.upperBound,
KeyPresentBehavior.ANY_PRESENT,
KeyAbsentBehavior.NEXT_HIGHER);
if (lowerIndex >= upperIndex) {
return ImmutableRangeMap.of();
}
int off = lowerIndex;
int len = upperIndex - lowerIndex;
ImmutableList<Range<K>> subRanges =
new ImmutableList<Range<K>>() {
@Override
public int size() {
return len;
}
@Override
public Range<K> get(int index) {
checkElementIndex(index, len);
if (index == 0 || index == len - 1) {
return ranges.get(index + off).intersection(range);
} else {
return ranges.get(index + off);
}
}
@Override
boolean isPartialView() {
return true;
}
// redeclare to help optimizers with b/310253115
@SuppressWarnings("RedundantOverride")
@Override
@J2ktIncompatible // serialization
Object writeReplace() {
return super.writeReplace();
}
};
ImmutableRangeMap<K, V> outer = this;
return new ImmutableRangeMap<K, V>(subRanges, values.subList(lowerIndex, upperIndex)) {
@Override
public ImmutableRangeMap<K, V> subRangeMap(Range<K> subRange) {
if (range.isConnected(subRange)) {
return outer.subRangeMap(subRange.intersection(range));
} else {
return ImmutableRangeMap.of();
}
}
// redeclare to help optimizers with b/310253115
@SuppressWarnings("RedundantOverride")
@Override
@J2ktIncompatible // serialization
Object writeReplace() {
return super.writeReplace();
}
};
}
@Override
public int hashCode() {
return asMapOfRanges().hashCode();
}
@Override
public boolean equals(@Nullable Object o) {
if (o instanceof RangeMap) {
RangeMap<?, ?> rangeMap = (RangeMap<?, ?>) o;
return asMapOfRanges().equals(rangeMap.asMapOfRanges());
}
return false;
}
@Override
public String toString() {
return asMapOfRanges().toString();
}
/**
* This | Builder |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/function/FailableTest.java | {
"start": 82833,
"end": 83327
} | interface ____ properly defined to throw any exception using the top level generic types
* Object and Throwable.
*/
@Test
void testThrows_FailableDoubleSupplier_Throwable() {
assertThrows(IOException.class, () -> new FailableDoubleSupplier<Throwable>() {
@Override
public double getAsDouble() throws Throwable {
throw new IOException("test");
}
}.getAsDouble());
}
/**
* Tests that our failable | is |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java | {
"start": 23603,
"end": 24057
} | class ____ implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
return !( dialect instanceof MySQLDialect && !(dialect instanceof MariaDBDialect)
|| dialect instanceof SybaseDialect
|| dialect instanceof DerbyDialect
|| dialect instanceof FirebirdDialect
|| dialect instanceof InformixDialect
|| dialect instanceof DB2Dialect db2 && db2.getDB2Version().isBefore( 11 ) );
}
}
public static | SupportsMedian |
java | apache__kafka | clients/src/test/java/org/apache/kafka/clients/producer/KafkaProducerTest.java | {
"start": 138585,
"end": 139333
} | class ____ implements ProducerInterceptor<byte[], byte[]> {
@Override
public ProducerRecord<byte[], byte[]> onSend(ProducerRecord<byte[], byte[]> record) {
return record;
}
@Override
public void onAcknowledgement(RecordMetadata metadata, Exception exception, Headers headers) {
RecordHeaders recordHeaders = (RecordHeaders) headers;
// Ensure that the headers are read-only, no matter send success or send failure
assertTrue(recordHeaders.isReadOnly());
}
@Override
public void close() {
}
@Override
public void configure(Map<String, ?> configs) {
}
}
public static | ProducerInterceptorForHeaders |
java | junit-team__junit5 | junit-platform-engine/src/main/java/org/junit/platform/engine/discovery/NestedMethodSelector.java | {
"start": 5381,
"end": 5687
} | class ____ the selected method.
*/
public String getNestedClassName() {
return this.nestedClassSelector.getNestedClassName();
}
/**
* Get the nested {@link Class} containing the selected {@link Method}.
*
* <p>If the {@link Class} were not provided, but only the name of the
* nested | containing |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/network/KafkaChannelTest.java | {
"start": 1398,
"end": 4929
} | class ____ {
@Test
public void testSending() throws IOException {
Authenticator authenticator = Mockito.mock(Authenticator.class);
TransportLayer transport = Mockito.mock(TransportLayer.class);
MemoryPool pool = Mockito.mock(MemoryPool.class);
ChannelMetadataRegistry metadataRegistry = Mockito.mock(ChannelMetadataRegistry.class);
KafkaChannel channel = new KafkaChannel("0", transport, () -> authenticator,
1024, pool, metadataRegistry);
ByteBufferSend send = ByteBufferSend.sizePrefixed(ByteBuffer.wrap(TestUtils.randomBytes(128)));
NetworkSend networkSend = new NetworkSend("0", send);
channel.setSend(networkSend);
assertTrue(channel.hasSend());
assertThrows(IllegalStateException.class, () -> channel.setSend(networkSend));
Mockito.when(transport.write(Mockito.any(ByteBuffer[].class))).thenReturn(4L);
assertEquals(4L, channel.write());
assertEquals(128, send.remaining());
assertNull(channel.maybeCompleteSend());
Mockito.when(transport.write(Mockito.any(ByteBuffer[].class))).thenReturn(64L);
assertEquals(64, channel.write());
assertEquals(64, send.remaining());
assertNull(channel.maybeCompleteSend());
Mockito.when(transport.write(Mockito.any(ByteBuffer[].class))).thenReturn(64L);
assertEquals(64, channel.write());
assertEquals(0, send.remaining());
assertEquals(networkSend, channel.maybeCompleteSend());
}
@Test
public void testReceiving() throws IOException {
Authenticator authenticator = Mockito.mock(Authenticator.class);
TransportLayer transport = Mockito.mock(TransportLayer.class);
MemoryPool pool = Mockito.mock(MemoryPool.class);
ChannelMetadataRegistry metadataRegistry = Mockito.mock(ChannelMetadataRegistry.class);
ArgumentCaptor<Integer> sizeCaptor = ArgumentCaptor.forClass(Integer.class);
Mockito.when(pool.tryAllocate(sizeCaptor.capture())).thenAnswer(invocation ->
ByteBuffer.allocate(sizeCaptor.getValue())
);
KafkaChannel channel = new KafkaChannel("0", transport, () -> authenticator,
1024, pool, metadataRegistry);
ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
Mockito.when(transport.read(bufferCaptor.capture())).thenAnswer(invocation -> {
bufferCaptor.getValue().putInt(128);
return 4;
}).thenReturn(0);
assertEquals(4, channel.read());
assertEquals(4, channel.currentReceive().bytesRead());
assertNull(channel.maybeCompleteReceive());
Mockito.reset(transport);
Mockito.when(transport.read(bufferCaptor.capture())).thenAnswer(invocation -> {
bufferCaptor.getValue().put(TestUtils.randomBytes(64));
return 64;
});
assertEquals(64, channel.read());
assertEquals(68, channel.currentReceive().bytesRead());
assertNull(channel.maybeCompleteReceive());
Mockito.reset(transport);
Mockito.when(transport.read(bufferCaptor.capture())).thenAnswer(invocation -> {
bufferCaptor.getValue().put(TestUtils.randomBytes(64));
return 64;
});
assertEquals(64, channel.read());
assertEquals(132, channel.currentReceive().bytesRead());
assertNotNull(channel.maybeCompleteReceive());
assertNull(channel.currentReceive());
}
}
| KafkaChannelTest |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsClientRenameResult.java | {
"start": 866,
"end": 1006
} | class ____ store the Result of an AbfsClient rename operation, signifying the
* AbfsRestOperation result and the rename recovery.
*/
public | to |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java | {
"start": 4768,
"end": 19787
} | class ____ extends AggregateFunction
implements
TwoOptionalArguments,
ToAggregator,
SurrogateExpression,
PostOptimizationVerificationAware {
public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Top", Top::new);
private static final String ORDER_ASC = "ASC";
private static final String ORDER_DESC = "DESC";
@FunctionInfo(
returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword" },
description = "Collects the top values for a field. Includes repeated values.",
type = FunctionType.AGGREGATE,
examples = @Example(file = "stats_top", tag = "top")
)
public Top(
Source source,
@Param(
name = "field",
type = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "text" },
description = "The field to collect the top values for."
) Expression field,
@Param(name = "limit", type = { "integer" }, description = "The maximum number of values to collect.") Expression limit,
@Param(
optional = true,
name = "order",
type = { "keyword" },
description = "The order to calculate the top values. Either `asc` or `desc`, and defaults to `asc` if omitted."
) Expression order,
@Param(
optional = true,
name = "outputField",
type = { "double", "integer", "long", "date" },
description = "The extra field that, if present, will be the output of the TOP call instead of `field`."
+ "{applies_to}`stack: ga 9.3`"
) Expression outputField
) {
this(source, field, Literal.TRUE, NO_WINDOW, limit, order == null ? Literal.keyword(source, ORDER_ASC) : order, outputField);
}
public Top(
Source source,
Expression field,
Expression filter,
Expression window,
Expression limit,
Expression order,
@Nullable Expression outputField
) {
super(source, field, filter, window, outputField != null ? asList(limit, order, outputField) : asList(limit, order));
}
private Top(StreamInput in) throws IOException {
super(in);
}
@Override
public Top withFilter(Expression filter) {
return new Top(source(), field(), filter, window(), limitField(), orderField(), outputField());
}
@Override
public String getWriteableName() {
return ENTRY.name;
}
Expression limitField() {
return parameters().get(0);
}
Expression orderField() {
return parameters().get(1);
}
@Nullable
Expression outputField() {
return parameters().size() > 2 ? parameters().get(2) : null;
}
private Integer limitValue() {
return Foldables.limitValue(limitField(), sourceText());
}
private boolean orderValue() {
if (orderField() instanceof Literal literal) {
String order = BytesRefs.toString(literal.value());
if (ORDER_ASC.equalsIgnoreCase(order) || ORDER_DESC.equalsIgnoreCase(order)) {
return order.equalsIgnoreCase(ORDER_ASC);
}
}
throw new EsqlIllegalArgumentException("Order value must be a literal, found: " + orderField());
}
@Override
protected TypeResolution resolveType() {
if (childrenResolved() == false) {
return new TypeResolution("Unresolved children");
}
var typeResolution = isType(
field(),
dt -> dt == DataType.BOOLEAN
|| dt == DataType.DATETIME
|| dt == DataType.IP
|| DataType.isString(dt)
|| (dt.isNumeric() && dt != DataType.UNSIGNED_LONG),
sourceText(),
FIRST,
"boolean",
"date",
"ip",
"string",
"numeric except unsigned_long or counter types"
).and(isNotNull(limitField(), sourceText(), SECOND))
.and(isType(limitField(), dt -> dt == DataType.INTEGER, sourceText(), SECOND, "integer"))
.and(isNotNull(orderField(), sourceText(), THIRD))
.and(isString(orderField(), sourceText(), THIRD));
if (outputField() != null) {
typeResolution = typeResolution.and(
isType(
outputField(),
dt -> dt == DataType.DATETIME || (dt.isNumeric() && dt != DataType.UNSIGNED_LONG),
sourceText(),
FOURTH,
"date",
"numeric except unsigned_long or counter types"
)
)
.and(
isType(
field(),
dt -> dt == DataType.DATETIME || (dt.isNumeric() && dt != DataType.UNSIGNED_LONG),
"when fourth argument is set, ",
sourceText(),
FIRST,
false,
"date",
"numeric except unsigned_long or counter types"
)
);
}
if (typeResolution.unresolved()) {
return typeResolution;
}
TypeResolution result = resolveTypeLimit();
if (result.equals(TypeResolution.TYPE_RESOLVED) == false) {
return result;
}
result = resolveTypeOrder(forPreOptimizationValidation(orderField()));
if (result.equals(TypeResolution.TYPE_RESOLVED) == false) {
return result;
}
return TypeResolution.TYPE_RESOLVED;
}
/**
* We check that the limit is not null and that if it is a literal, it is a positive integer
* During postOptimizationVerification folding is already done, so we also verify that it is definitively a literal
*/
private TypeResolution resolveTypeLimit() {
return Foldables.resolveTypeLimit(limitField(), sourceText(), forPreOptimizationValidation(limitField()));
}
/**
* We check that the order is not null and that if it is a literal, it is one of the two valid values: "asc" or "desc".
* During postOptimizationVerification folding is already done, so we also verify that it is definitively a literal
*/
private Expression.TypeResolution resolveTypeOrder(TypeResolutionValidator validator) {
Expression order = orderField();
if (order == null) {
validator.invalid(new TypeResolution(format(null, "Order must be a valid string in [{}], found [{}]", sourceText(), order)));
} else if (order instanceof Literal literal) {
if (literal.value() == null) {
validator.invalid(
new TypeResolution(
format(
null,
"Invalid order value in [{}], expected [{}, {}] but got [{}]",
sourceText(),
ORDER_ASC,
ORDER_DESC,
order
)
)
);
} else {
String value = BytesRefs.toString(literal.value());
if (value == null || value.equalsIgnoreCase(ORDER_ASC) == false && value.equalsIgnoreCase(ORDER_DESC) == false) {
validator.invalid(
new TypeResolution(
format(
null,
"Invalid order value in [{}], expected [{}, {}] but got [{}]",
sourceText(),
ORDER_ASC,
ORDER_DESC,
order
)
)
);
}
}
} else {
// it is expected that the expression is a literal after folding
// we fail if it is not a literal
validator.invalidIfPostValidation(fail(order, "Order must be a valid string in [{}], found [{}]", sourceText(), order));
}
return validator.getResolvedType();
}
@Override
public void postOptimizationVerification(Failures failures) {
postOptimizationVerificationLimit(failures);
postOptimizationVerificationOrder(failures);
}
private void postOptimizationVerificationLimit(Failures failures) {
Foldables.resolveTypeLimit(limitField(), sourceText(), forPostOptimizationValidation(limitField(), failures));
}
private void postOptimizationVerificationOrder(Failures failures) {
resolveTypeOrder(forPostOptimizationValidation(orderField(), failures));
}
@Override
public DataType dataType() {
return outputField() == null ? field().dataType().noText() : outputField().dataType().noText();
}
@Override
protected NodeInfo<Top> info() {
return NodeInfo.create(this, Top::new, field(), filter(), window(), limitField(), orderField(), outputField());
}
@Override
public Top replaceChildren(List<Expression> newChildren) {
return new Top(
source(),
newChildren.get(0),
newChildren.get(1),
newChildren.get(2),
newChildren.get(3),
newChildren.get(4),
newChildren.size() > 5 ? newChildren.get(5) : null
);
}
private static final Map<DataType, BiFunction<Integer, Boolean, AggregatorFunctionSupplier>> SUPPLIERS = Map.ofEntries(
Map.entry(DataType.LONG, TopLongAggregatorFunctionSupplier::new),
Map.entry(DataType.DATETIME, TopLongAggregatorFunctionSupplier::new),
Map.entry(DataType.INTEGER, TopIntAggregatorFunctionSupplier::new),
Map.entry(DataType.DOUBLE, TopDoubleAggregatorFunctionSupplier::new),
Map.entry(DataType.BOOLEAN, TopBooleanAggregatorFunctionSupplier::new),
Map.entry(DataType.IP, TopIpAggregatorFunctionSupplier::new),
Map.entry(DataType.KEYWORD, TopBytesRefAggregatorFunctionSupplier::new),
Map.entry(DataType.TEXT, TopBytesRefAggregatorFunctionSupplier::new)
);
private static final Map<Tuple<DataType, DataType>, BiFunction<Integer, Boolean, AggregatorFunctionSupplier>> SUPPLIERS_WITH_EXTRA = Map
.ofEntries(
Map.entry(Tuple.tuple(DataType.LONG, DataType.DATETIME), TopLongLongAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.LONG, DataType.INTEGER), TopLongIntAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.LONG, DataType.LONG), TopLongLongAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.LONG, DataType.FLOAT), TopLongFloatAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.LONG, DataType.DOUBLE), TopLongDoubleAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.DATETIME, DataType.DATETIME), TopLongLongAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.DATETIME, DataType.INTEGER), TopLongIntAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.DATETIME, DataType.LONG), TopLongLongAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.DATETIME, DataType.FLOAT), TopLongFloatAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.DATETIME, DataType.DOUBLE), TopLongDoubleAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.INTEGER, DataType.DATETIME), TopIntLongAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.INTEGER, DataType.INTEGER), TopIntIntAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.INTEGER, DataType.LONG), TopIntLongAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.INTEGER, DataType.FLOAT), TopIntFloatAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.INTEGER, DataType.DOUBLE), TopIntDoubleAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.FLOAT, DataType.DATETIME), TopFloatLongAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.FLOAT, DataType.INTEGER), TopFloatIntAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.FLOAT, DataType.LONG), TopFloatLongAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.FLOAT, DataType.FLOAT), TopFloatFloatAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.FLOAT, DataType.DOUBLE), TopFloatDoubleAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.DOUBLE, DataType.DATETIME), TopDoubleLongAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.DOUBLE, DataType.INTEGER), TopDoubleIntAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.DOUBLE, DataType.LONG), TopDoubleLongAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.DOUBLE, DataType.FLOAT), TopDoubleFloatAggregatorFunctionSupplier::new),
Map.entry(Tuple.tuple(DataType.DOUBLE, DataType.DOUBLE), TopDoubleDoubleAggregatorFunctionSupplier::new)
);
@Override
public AggregatorFunctionSupplier supplier() {
DataType fieldType = field().dataType();
BiFunction<Integer, Boolean, AggregatorFunctionSupplier> supplierCtor;
if (outputField() == null) {
supplierCtor = SUPPLIERS.get(fieldType);
if (supplierCtor == null) {
throw EsqlIllegalArgumentException.illegalDataType(fieldType);
}
} else {
DataType outputFieldType = outputField().dataType();
supplierCtor = SUPPLIERS_WITH_EXTRA.get(Tuple.tuple(fieldType, outputFieldType));
if (supplierCtor == null) {
throw EsqlIllegalArgumentException.illegalDataTypeCombination(fieldType, outputFieldType);
}
}
return supplierCtor.apply(limitValue(), orderValue());
}
@Override
public Expression surrogate() {
var s = source();
// If the `outputField` is specified but its value is the same as `field` then we do not need to handle `outputField` separately.
if (outputField() != null && field().semanticEquals(outputField())) {
return new Top(s, field(), limitField(), orderField(), null);
}
// To replace Top by Min or Max, we cannot have an `outputField`
if (orderField() instanceof Literal && limitField() instanceof Literal && limitValue() == 1 && outputField() == null) {
if (orderValue()) {
return new Min(s, field(), filter(), window());
} else {
return new Max(s, field(), filter(), window());
}
}
return null;
}
}
| Top |
java | apache__flink | flink-core/src/main/java/org/apache/flink/core/execution/JobExecutionStatusEvent.java | {
"start": 1040,
"end": 1297
} | interface ____ extends JobStatusChangedEvent {
/** Old status for job. */
JobStatus oldStatus();
/** New status for job. */
JobStatus newStatus();
/** Exception for job. */
@Nullable
Throwable exception();
}
| JobExecutionStatusEvent |
java | apache__rocketmq | controller/src/main/java/org/apache/rocketmq/controller/helper/BrokerLifecycleListener.java | {
"start": 858,
"end": 1048
} | interface ____ {
/**
* Trigger when broker inactive.
*/
void onBrokerInactive(final String clusterName, final String brokerName, final Long brokerId);
}
| BrokerLifecycleListener |
java | apache__camel | components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/model/date/BindyDatePatternCsvUnmarshallTest.java | {
"start": 3223,
"end": 6083
} | class ____ {
@DataField(pos = 1)
private OrderNumber orderNr;
@DataField(pos = 2)
private String firstName;
@DataField(pos = 3)
private String lastName;
@DataField(pos = 4, pattern = "MM-dd-yyyy")
private Date orderDate;
@DataField(pos = 5, pattern = "MM-dd-yyyy")
private LocalDate deliveryDate;
@DataField(pos = 6, pattern = "MM-dd-yyyy HH:mm:ss")
private LocalDateTime returnedDateTime;
@DataField(pos = 7, pattern = "HH:mm:ss")
private LocalTime receivedTime;
@DataField(pos = 8, pattern = "MM-dd-yyyy HH:mm:ssX")
private ZonedDateTime deletedDateTime;
@DataField(pos = 9)
private ReturnReason returnReason;
public OrderNumber getOrderNr() {
return orderNr;
}
public void setOrderNr(OrderNumber orderNr) {
this.orderNr = orderNr;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
public Date getOrderDate() {
return orderDate;
}
public void setOrderDate(Date orderDate) {
this.orderDate = orderDate;
}
@Override
public String toString() {
return "Model : " + Order.class.getName() + " : " + this.orderNr + ", " + this.firstName + ", " + this.lastName
+ ", " + String.valueOf(this.orderDate);
}
public LocalDate getDeliveryDate() {
return deliveryDate;
}
public void setDeliveryDate(LocalDate deliveryDate) {
this.deliveryDate = deliveryDate;
}
public LocalDateTime getReturnedDateTime() {
return returnedDateTime;
}
public void setReturnedDateTime(LocalDateTime returnedDateTime) {
this.returnedDateTime = returnedDateTime;
}
public LocalTime getReceivedTime() {
return receivedTime;
}
public void setReceivedTime(LocalTime receivedTime) {
this.receivedTime = receivedTime;
}
public ZonedDateTime getDeletedDateTime() {
return deletedDateTime;
}
public void setDeletedDateTime(ZonedDateTime deletedDateTime) {
this.deletedDateTime = deletedDateTime;
}
public ReturnReason getReturnReason() {
return returnReason;
}
public void setReturnReason(ReturnReason returnReason) {
this.returnReason = returnReason;
}
}
public | Order |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java | {
"start": 15620,
"end": 19899
} | class ____
implements AdminHelper.Command {
@Override
public String getName() {
return "-listDirectives";
}
@Override
public String getShortUsage() {
return "[" + getName()
+ " [-stats] [-path <path>] [-pool <pool>] [-id <id>]"
+ "]\n";
}
@Override
public String getLongUsage() {
TableListing listing = AdminHelper.getOptionDescriptionListing();
listing.addRow("-stats", "List path-based cache directive statistics.");
listing.addRow("<path>", "List only " +
"cache directives with this path. " +
"Note that if there is a cache directive for <path> " +
"in a cache pool that we don't have read access for, it " +
"will not be listed.");
listing.addRow("<pool>", "List only path cache directives in that pool.");
listing.addRow("<id>", "List the cache directive with this id.");
return getShortUsage() + "\n" +
"List cache directives.\n\n" +
listing.toString();
}
@Override
public int run(Configuration conf, List<String> args) throws IOException {
CacheDirectiveInfo.Builder builder =
new CacheDirectiveInfo.Builder();
String pathFilter = StringUtils.popOptionWithArgument("-path", args);
if (pathFilter != null) {
builder.setPath(new Path(pathFilter));
}
String poolFilter = StringUtils.popOptionWithArgument("-pool", args);
if (poolFilter != null) {
builder.setPool(poolFilter);
}
boolean printStats = StringUtils.popOption("-stats", args);
String idFilter = StringUtils.popOptionWithArgument("-id", args);
if (idFilter != null) {
builder.setId(Long.parseLong(idFilter));
}
if (!args.isEmpty()) {
System.err.println("Can't understand argument: " + args.get(0));
return 1;
}
TableListing.Builder tableBuilder = new TableListing.Builder().
addField("ID", Justification.RIGHT).
addField("POOL", Justification.LEFT).
addField("REPL", Justification.RIGHT).
addField("EXPIRY", Justification.LEFT).
addField("PATH", Justification.LEFT);
if (printStats) {
tableBuilder.addField("BYTES_NEEDED", Justification.RIGHT).
addField("BYTES_CACHED", Justification.RIGHT).
addField("FILES_NEEDED", Justification.RIGHT).
addField("FILES_CACHED", Justification.RIGHT);
}
TableListing tableListing = tableBuilder.build();
try {
DistributedFileSystem dfs = AdminHelper.getDFS(conf);
RemoteIterator<CacheDirectiveEntry> iter =
dfs.listCacheDirectives(builder.build());
int numEntries = 0;
while (iter.hasNext()) {
CacheDirectiveEntry entry = iter.next();
CacheDirectiveInfo directive = entry.getInfo();
CacheDirectiveStats stats = entry.getStats();
List<String> row = new LinkedList<String>();
row.add("" + directive.getId());
row.add(directive.getPool());
row.add("" + directive.getReplication());
String expiry;
// This is effectively never, round for nice printing
if (directive.getExpiration().getMillis() >
Expiration.MAX_RELATIVE_EXPIRY_MS / 2) {
expiry = "never";
} else {
expiry = directive.getExpiration().toString();
}
row.add(expiry);
row.add(directive.getPath().toUri().getPath());
if (printStats) {
row.add("" + stats.getBytesNeeded());
row.add("" + stats.getBytesCached());
row.add("" + stats.getFilesNeeded());
row.add("" + stats.getFilesCached());
}
tableListing.addRow(row.toArray(new String[row.size()]));
numEntries++;
}
System.out.print(String.format("Found %d entr%s%n",
numEntries, numEntries == 1 ? "y" : "ies"));
if (numEntries > 0) {
System.out.print(tableListing);
}
} catch (IOException e) {
System.err.println(AdminHelper.prettifyException(e));
return 2;
}
return 0;
}
}
private static | ListCacheDirectiveInfoCommand |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/OptionalEqualityTest.java | {
"start": 3095,
"end": 3639
} | class ____ {
boolean f(Optional<Integer> a) {
Optional<Integer> b = Optional.of(42);
// BUG: Diagnostic contains: Did you mean 'return Objects.equal(a, b);' or 'return
// a.equals(b);'?
return a == b;
}
}
""")
.doTest();
}
@Test
public void definitelyNull() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.common.base.Optional;
| Test |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/UrlEncodeComponentErrorTests.java | {
"start": 800,
"end": 1416
} | class ____ extends ErrorsForCasesWithoutExamplesTestCase {
@Override
protected List<TestCaseSupplier> cases() {
return paramsToSuppliers(UrlEncodeComponentTests.parameters());
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new UrlEncodeComponent(source, args.get(0));
}
@Override
protected Matcher<String> expectedTypeErrorMatcher(List<Set<DataType>> validPerPosition, List<DataType> signature) {
return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "string"));
}
}
| UrlEncodeComponentErrorTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/CheckNotNullMultipleTimesTest.java | {
"start": 1248,
"end": 1688
} | class ____ {
Test(Integer a, Integer b) {
checkNotNull(a);
// BUG: Diagnostic contains:
checkNotNull(a);
}
}
""")
.doTest();
}
@Test
public void negative() {
helper
.addSourceLines(
"Test.java",
"""
import static com.google.common.base.Preconditions.checkNotNull;
| Test |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/LambdaExtractionTest.java | {
"start": 11768,
"end": 12634
} | class ____ {
private int key;
public int getKey() {
return key;
}
public void setKey(int key) {
this.key = key;
}
protected int getKey2() {
return 0;
}
}
@Test
void testInstanceMethodRefSameType() {
MapFunction<MyType, Integer> f = MyType::getKey;
TypeInformation<?> ti =
TypeExtractor.getMapReturnTypes(f, TypeExtractor.createTypeInfo(MyType.class));
assertThat(ti).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
}
@Test
void testInstanceMethodRefSuperType() {
MapFunction<Integer, String> f = Object::toString;
TypeInformation<?> ti = TypeExtractor.getMapReturnTypes(f, BasicTypeInfo.INT_TYPE_INFO);
assertThat(ti).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
}
private static | MyType |
java | google__auto | value/src/test/java/com/google/auto/value/processor/ExtensionTest.java | {
"start": 3062,
"end": 4144
} | class ____ extends $AutoValue_Baz {",
" public AutoValue_Baz(String foo) {",
" super(foo);",
" }",
" @Override public String foo() {",
" return \"foo\";",
" }",
" public String dizzle() {\n",
" return \"dizzle\";\n",
" }",
"}");
Compilation compilation =
javac()
.withProcessors(new AutoValueProcessor(ImmutableList.of(new FooExtension())))
.compile(javaFileObject);
assertThat(compilation).succeededWithoutWarnings();
assertThat(compilation)
.generatedSourceFile("foo.bar.AutoValue_Baz")
.hasSourceEquivalentTo(expectedExtensionOutput);
}
@Test
public void testExtensionConsumesProperties() {
JavaFileObject javaFileObject =
JavaFileObjects.forSourceLines(
"foo.bar.Baz",
"package foo.bar;",
"",
"import com.google.auto.value.AutoValue;",
"",
"@AutoValue",
"public abstract | AutoValue_Baz |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java | {
"start": 1508,
"end": 1837
} | class ____<B extends Bucket<B>> extends InternalMultiBucketAggregation.InternalBucketWritable
implements
RareTerms.Bucket,
KeyComparable<B>,
Writeable {
/**
* Reads a bucket. Should be a constructor reference.
*/
@FunctionalInterface
public | Bucket |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/reflection/Availability.java | {
"start": 194,
"end": 238
} | enum ____ {
ON_DUTY,
NO_SERVICE
}
| Availability |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/JoinedInheritanceTreatedJoinNullnessTest.java | {
"start": 5929,
"end": 6267
} | class ____ extends AbstractDcCompany {
@ManyToOne
private RcCompanyUser invitedBy;
public DcCompanySeed() {
}
public DcCompanySeed(String displayName, RcCompanyUser invitedBy) {
super( displayName );
this.invitedBy = invitedBy;
}
}
@SuppressWarnings("unused")
@Entity( name = "RcCompany" )
public static | DcCompanySeed |
java | apache__dubbo | dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/router/mesh/rule/virtualservice/VirtualServiceSpec.java | {
"start": 904,
"end": 1441
} | class ____ {
private List<String> hosts;
private List<DubboRoute> dubbo;
public List<String> getHosts() {
return hosts;
}
public void setHosts(List<String> hosts) {
this.hosts = hosts;
}
public List<DubboRoute> getDubbo() {
return dubbo;
}
public void setDubbo(List<DubboRoute> dubbo) {
this.dubbo = dubbo;
}
@Override
public String toString() {
return "VirtualServiceSpec{" + "hosts=" + hosts + ", dubbo=" + dubbo + '}';
}
}
| VirtualServiceSpec |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableFlatMapTest.java | {
"start": 1484,
"end": 43792
} | class ____ extends RxJavaTest {
@Test
public void normal() {
Subscriber<Object> subscriber = TestHelper.mockSubscriber();
final List<Integer> list = Arrays.asList(1, 2, 3);
Function<Integer, List<Integer>> func = new Function<Integer, List<Integer>>() {
@Override
public List<Integer> apply(Integer t1) {
return list;
}
};
BiFunction<Integer, Integer, Integer> resFunc = new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return t1 | t2;
}
};
List<Integer> source = Arrays.asList(16, 32, 64);
Flowable.fromIterable(source).flatMapIterable(func, resFunc).subscribe(subscriber);
for (Integer s : source) {
for (Integer v : list) {
verify(subscriber).onNext(s | v);
}
}
verify(subscriber).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void collectionFunctionThrows() {
Subscriber<Object> subscriber = TestHelper.mockSubscriber();
Function<Integer, List<Integer>> func = new Function<Integer, List<Integer>>() {
@Override
public List<Integer> apply(Integer t1) {
throw new TestException();
}
};
BiFunction<Integer, Integer, Integer> resFunc = new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return t1 | t2;
}
};
List<Integer> source = Arrays.asList(16, 32, 64);
Flowable.fromIterable(source).flatMapIterable(func, resFunc).subscribe(subscriber);
verify(subscriber, never()).onComplete();
verify(subscriber, never()).onNext(any());
verify(subscriber).onError(any(TestException.class));
}
@Test
public void resultFunctionThrows() {
Subscriber<Object> subscriber = TestHelper.mockSubscriber();
final List<Integer> list = Arrays.asList(1, 2, 3);
Function<Integer, List<Integer>> func = new Function<Integer, List<Integer>>() {
@Override
public List<Integer> apply(Integer t1) {
return list;
}
};
BiFunction<Integer, Integer, Integer> resFunc = new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
throw new TestException();
}
};
List<Integer> source = Arrays.asList(16, 32, 64);
Flowable.fromIterable(source).flatMapIterable(func, resFunc).subscribe(subscriber);
verify(subscriber, never()).onComplete();
verify(subscriber, never()).onNext(any());
verify(subscriber).onError(any(TestException.class));
}
@Test
public void mergeError() {
Subscriber<Object> subscriber = TestHelper.mockSubscriber();
Function<Integer, Flowable<Integer>> func = new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer t1) {
return Flowable.error(new TestException());
}
};
BiFunction<Integer, Integer, Integer> resFunc = new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return t1 | t2;
}
};
List<Integer> source = Arrays.asList(16, 32, 64);
Flowable.fromIterable(source).flatMap(func, resFunc).subscribe(subscriber);
verify(subscriber, never()).onComplete();
verify(subscriber, never()).onNext(any());
verify(subscriber).onError(any(TestException.class));
}
<T, R> Function<T, R> just(final R value) {
return new Function<T, R>() {
@Override
public R apply(T t1) {
return value;
}
};
}
<R> Supplier<R> just0(final R value) {
return new Supplier<R>() {
@Override
public R get() {
return value;
}
};
}
@Test
public void flatMapTransformsNormal() {
Flowable<Integer> onNext = Flowable.fromIterable(Arrays.asList(1, 2, 3));
Flowable<Integer> onComplete = Flowable.fromIterable(Arrays.asList(4));
Flowable<Integer> onError = Flowable.fromIterable(Arrays.asList(5));
Flowable<Integer> source = Flowable.fromIterable(Arrays.asList(10, 20, 30));
Subscriber<Object> subscriber = TestHelper.mockSubscriber();
source.flatMap(just(onNext), just(onError), just0(onComplete)).subscribe(subscriber);
verify(subscriber, times(3)).onNext(1);
verify(subscriber, times(3)).onNext(2);
verify(subscriber, times(3)).onNext(3);
verify(subscriber).onNext(4);
verify(subscriber).onComplete();
verify(subscriber, never()).onNext(5);
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void flatMapTransformsException() {
Flowable<Integer> onNext = Flowable.fromIterable(Arrays.asList(1, 2, 3));
Flowable<Integer> onComplete = Flowable.fromIterable(Arrays.asList(4));
Flowable<Integer> onError = Flowable.fromIterable(Arrays.asList(5));
Flowable<Integer> source = Flowable.concat(
Flowable.fromIterable(Arrays.asList(10, 20, 30)),
Flowable.<Integer> error(new RuntimeException("Forced failure!"))
);
Subscriber<Object> subscriber = TestHelper.mockSubscriber();
source.flatMap(just(onNext), just(onError), just0(onComplete)).subscribe(subscriber);
verify(subscriber, times(3)).onNext(1);
verify(subscriber, times(3)).onNext(2);
verify(subscriber, times(3)).onNext(3);
verify(subscriber).onNext(5);
verify(subscriber).onComplete();
verify(subscriber, never()).onNext(4);
verify(subscriber, never()).onError(any(Throwable.class));
}
<R> Supplier<R> funcThrow0(R r) {
return new Supplier<R>() {
@Override
public R get() {
throw new TestException();
}
};
}
<T, R> Function<T, R> funcThrow(T t, R r) {
return new Function<T, R>() {
@Override
public R apply(T t) {
throw new TestException();
}
};
}
@Test
public void flatMapTransformsOnNextFuncThrows() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
Flowable<Integer> onComplete = Flowable.fromIterable(Arrays.asList(4));
Flowable<Integer> onError = Flowable.fromIterable(Arrays.asList(5));
Flowable<Integer> source = Flowable.fromIterable(Arrays.asList(10, 20, 30));
Subscriber<Object> subscriber = TestHelper.mockSubscriber();
source.flatMap(funcThrow(1, onError), just(onError), just0(onComplete)).subscribe(subscriber);
verify(subscriber).onError(any(TestException.class));
verify(subscriber, never()).onNext(any());
verify(subscriber, never()).onComplete();
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void flatMapTransformsOnErrorFuncThrows() {
Flowable<Integer> onNext = Flowable.fromIterable(Arrays.asList(1, 2, 3));
Flowable<Integer> onComplete = Flowable.fromIterable(Arrays.asList(4));
Flowable<Integer> onError = Flowable.fromIterable(Arrays.asList(5));
Flowable<Integer> source = Flowable.error(new TestException());
Subscriber<Object> subscriber = TestHelper.mockSubscriber();
source.flatMap(just(onNext), funcThrow((Throwable) null, onError), just0(onComplete)).subscribe(subscriber);
verify(subscriber).onError(any(CompositeException.class));
verify(subscriber, never()).onNext(any());
verify(subscriber, never()).onComplete();
}
@Test
public void flatMapTransformsOnCompletedFuncThrows() {
Flowable<Integer> onNext = Flowable.fromIterable(Arrays.asList(1, 2, 3));
Flowable<Integer> onComplete = Flowable.fromIterable(Arrays.asList(4));
Flowable<Integer> onError = Flowable.fromIterable(Arrays.asList(5));
Flowable<Integer> source = Flowable.fromIterable(Arrays.<Integer> asList());
Subscriber<Object> subscriber = TestHelper.mockSubscriber();
source.flatMap(just(onNext), just(onError), funcThrow0(onComplete)).subscribe(subscriber);
verify(subscriber).onError(any(TestException.class));
verify(subscriber, never()).onNext(any());
verify(subscriber, never()).onComplete();
}
@Test
public void flatMapTransformsMergeException() {
Flowable<Integer> onNext = Flowable.error(new TestException());
Flowable<Integer> onComplete = Flowable.fromIterable(Arrays.asList(4));
Flowable<Integer> onError = Flowable.fromIterable(Arrays.asList(5));
Flowable<Integer> source = Flowable.fromIterable(Arrays.asList(10, 20, 30));
Subscriber<Object> subscriber = TestHelper.mockSubscriber();
source.flatMap(just(onNext), just(onError), funcThrow0(onComplete)).subscribe(subscriber);
verify(subscriber).onError(any(TestException.class));
verify(subscriber, never()).onNext(any());
verify(subscriber, never()).onComplete();
}
private static <T> Flowable<T> composer(Flowable<T> source, final AtomicInteger subscriptionCount, final int m) {
return source.doOnSubscribe(new Consumer<Subscription>() {
@Override
public void accept(Subscription s) {
int n = subscriptionCount.getAndIncrement();
if (n >= m) {
Assert.fail("Too many subscriptions! " + (n + 1));
}
}
}).doOnComplete(new Action() {
@Override
public void run() {
int n = subscriptionCount.decrementAndGet();
if (n < 0) {
Assert.fail("Too many unsubscriptions! " + (n - 1));
}
}
});
}
@Test
public void flatMapMaxConcurrent() {
final int m = 4;
final AtomicInteger subscriptionCount = new AtomicInteger();
Flowable<Integer> source = Flowable.range(1, 10)
.flatMap(new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer t1) {
return composer(Flowable.range(t1 * 10, 2), subscriptionCount, m)
.subscribeOn(Schedulers.computation());
}
}, m);
TestSubscriber<Integer> ts = new TestSubscriber<>();
source.subscribe(ts);
ts.awaitDone(5, TimeUnit.SECONDS);
ts.assertNoErrors();
Set<Integer> expected = new HashSet<>(Arrays.asList(
10, 11, 20, 21, 30, 31, 40, 41, 50, 51, 60, 61, 70, 71, 80, 81, 90, 91, 100, 101
));
Assert.assertEquals(expected.size(), ts.values().size());
Assert.assertTrue(expected.containsAll(ts.values()));
}
@Test
public void flatMapSelectorMaxConcurrent() {
final int m = 4;
final AtomicInteger subscriptionCount = new AtomicInteger();
Flowable<Integer> source = Flowable.range(1, 10)
.flatMap(new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer t1) {
return composer(Flowable.range(t1 * 10, 2), subscriptionCount, m)
.subscribeOn(Schedulers.computation());
}
}, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return t1 * 1000 + t2;
}
}, m);
TestSubscriber<Integer> ts = new TestSubscriber<>();
source.subscribe(ts);
ts.awaitDone(5, TimeUnit.SECONDS);
ts.assertNoErrors();
Set<Integer> expected = new HashSet<>(Arrays.asList(
1010, 1011, 2020, 2021, 3030, 3031, 4040, 4041, 5050, 5051,
6060, 6061, 7070, 7071, 8080, 8081, 9090, 9091, 10100, 10101
));
Assert.assertEquals(expected.size(), ts.values().size());
System.out.println("--> testFlatMapSelectorMaxConcurrent: " + ts.values());
Assert.assertTrue(expected.containsAll(ts.values()));
}
@Test
public void flatMapTransformsMaxConcurrentNormalLoop() {
for (int i = 0; i < 1000; i++) {
if (i % 100 == 0) {
System.out.println("testFlatMapTransformsMaxConcurrentNormalLoop => " + i);
}
flatMapTransformsMaxConcurrentNormal();
}
}
@Test
public void flatMapTransformsMaxConcurrentNormal() {
final int m = 2;
final AtomicInteger subscriptionCount = new AtomicInteger();
Flowable<Integer> onNext =
composer(
Flowable.fromIterable(Arrays.asList(1, 2, 3))
.observeOn(Schedulers.computation())
,
subscriptionCount, m)
.subscribeOn(Schedulers.computation())
;
Flowable<Integer> onComplete = composer(Flowable.fromIterable(Arrays.asList(4)), subscriptionCount, m)
.subscribeOn(Schedulers.computation());
Flowable<Integer> onError = Flowable.fromIterable(Arrays.asList(5));
Flowable<Integer> source = Flowable.fromIterable(Arrays.asList(10, 20, 30));
Subscriber<Object> subscriber = TestHelper.mockSubscriber();
TestSubscriberEx<Object> ts = new TestSubscriberEx<>(subscriber);
Function<Integer, Flowable<Integer>> just = just(onNext);
Function<Throwable, Flowable<Integer>> just2 = just(onError);
Supplier<Flowable<Integer>> just0 = just0(onComplete);
source.flatMap(just, just2, just0, m).subscribe(ts);
ts.awaitDone(1, TimeUnit.SECONDS);
ts.assertNoErrors();
ts.assertTerminated();
verify(subscriber, times(3)).onNext(1);
verify(subscriber, times(3)).onNext(2);
verify(subscriber, times(3)).onNext(3);
verify(subscriber).onNext(4);
verify(subscriber).onComplete();
verify(subscriber, never()).onNext(5);
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void flatMapRangeMixedAsyncLoop() {
for (int i = 0; i < 2000; i++) {
if (i % 10 == 0) {
System.out.println("flatMapRangeAsyncLoop > " + i);
}
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
Flowable.range(0, 1000)
.flatMap(new Function<Integer, Flowable<Integer>>() {
final Random rnd = new Random();
@Override
public Flowable<Integer> apply(Integer t) {
Flowable<Integer> r = Flowable.just(t);
if (rnd.nextBoolean()) {
r = r.hide();
}
return r;
}
})
.observeOn(Schedulers.computation())
.subscribe(ts);
ts.awaitDone(2500, TimeUnit.MILLISECONDS);
if (ts.completions() == 0) {
System.out.println(ts.values().size());
}
ts.assertTerminated();
ts.assertNoErrors();
List<Integer> list = ts.values();
if (list.size() < 1000) {
Set<Integer> set = new HashSet<>(list);
for (int j = 0; j < 1000; j++) {
if (!set.contains(j)) {
System.out.println(j + " missing");
}
}
}
assertEquals(1000, list.size());
}
}
@Test
public void flatMapIntPassthruAsync() {
for (int i = 0; i < 1000; i++) {
TestSubscriber<Integer> ts = new TestSubscriber<>();
Flowable.range(1, 1000).flatMap(new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer t) {
return Flowable.just(1).subscribeOn(Schedulers.computation());
}
}).subscribe(ts);
ts.awaitDone(5, TimeUnit.SECONDS);
ts.assertNoErrors();
ts.assertComplete();
ts.assertValueCount(1000);
}
}
@Test
public void flatMapTwoNestedSync() {
for (final int n : new int[] { 1, 1000, 1000000 }) {
TestSubscriber<Integer> ts = new TestSubscriber<>();
Flowable.just(1, 2).flatMap(new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer t) {
return Flowable.range(1, n);
}
}).subscribe(ts);
System.out.println("flatMapTwoNestedSync >> @ " + n);
ts.assertNoErrors();
ts.assertComplete();
ts.assertValueCount(n * 2);
}
}
@Test
public void justEmptyMixture() {
TestSubscriber<Integer> ts = TestSubscriber.create();
Flowable.range(0, 4 * Flowable.bufferSize())
.flatMap(new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer v) {
return (v & 1) == 0 ? Flowable.<Integer>empty() : Flowable.just(v);
}
})
.subscribe(ts);
ts.assertValueCount(2 * Flowable.bufferSize());
ts.assertNoErrors();
ts.assertComplete();
int j = 1;
for (Integer v : ts.values()) {
Assert.assertEquals(j, v.intValue());
j += 2;
}
}
@Test
public void rangeEmptyMixture() {
TestSubscriber<Integer> ts = TestSubscriber.create();
Flowable.range(0, 4 * Flowable.bufferSize())
.flatMap(new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer v) {
return (v & 1) == 0 ? Flowable.<Integer>empty() : Flowable.range(v, 2);
}
})
.subscribe(ts);
ts.assertValueCount(4 * Flowable.bufferSize());
ts.assertNoErrors();
ts.assertComplete();
int j = 1;
List<Integer> list = ts.values();
for (int i = 0; i < list.size(); i += 2) {
Assert.assertEquals(j, list.get(i).intValue());
Assert.assertEquals(j + 1, list.get(i + 1).intValue());
j += 2;
}
}
@Test
public void justEmptyMixtureMaxConcurrent() {
TestSubscriber<Integer> ts = TestSubscriber.create();
Flowable.range(0, 4 * Flowable.bufferSize())
.flatMap(new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer v) {
return (v & 1) == 0 ? Flowable.<Integer>empty() : Flowable.just(v);
}
}, 16)
.subscribe(ts);
ts.assertValueCount(2 * Flowable.bufferSize());
ts.assertNoErrors();
ts.assertComplete();
int j = 1;
for (Integer v : ts.values()) {
Assert.assertEquals(j, v.intValue());
j += 2;
}
}
@Test
public void rangeEmptyMixtureMaxConcurrent() {
TestSubscriber<Integer> ts = TestSubscriber.create();
Flowable.range(0, 4 * Flowable.bufferSize())
.flatMap(new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer v) {
return (v & 1) == 0 ? Flowable.<Integer>empty() : Flowable.range(v, 2);
}
}, 16)
.subscribe(ts);
ts.assertValueCount(4 * Flowable.bufferSize());
ts.assertNoErrors();
ts.assertComplete();
int j = 1;
List<Integer> list = ts.values();
for (int i = 0; i < list.size(); i += 2) {
Assert.assertEquals(j, list.get(i).intValue());
Assert.assertEquals(j + 1, list.get(i + 1).intValue());
j += 2;
}
}
@Test
public void castCrashUnsubscribes() {
PublishProcessor<Integer> pp = PublishProcessor.create();
TestSubscriber<Integer> ts = TestSubscriber.create();
pp.flatMap(new Function<Integer, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Integer t) {
throw new TestException();
}
}, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return t1;
}
}).subscribe(ts);
Assert.assertTrue("Not subscribed?", pp.hasSubscribers());
pp.onNext(1);
Assert.assertFalse("Subscribed?", pp.hasSubscribers());
ts.assertError(TestException.class);
}
@Test
public void flatMapBiMapper() {
Flowable.just(1)
.flatMap(new Function<Integer, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Integer v) throws Exception {
return Flowable.just(v * 10);
}
}, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer a, Integer b) throws Exception {
return a + b;
}
}, true)
.test()
.assertResult(11);
}
@Test
public void flatMapBiMapperWithError() {
Flowable.just(1)
.flatMap(new Function<Integer, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Integer v) throws Exception {
return Flowable.just(v * 10).concatWith(Flowable.<Integer>error(new TestException()));
}
}, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer a, Integer b) throws Exception {
return a + b;
}
}, true)
.test()
.assertFailure(TestException.class, 11);
}
@Test
public void flatMapBiMapperMaxConcurrency() {
Flowable.just(1, 2)
.flatMap(new Function<Integer, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Integer v) throws Exception {
return Flowable.just(v * 10);
}
}, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer a, Integer b) throws Exception {
return a + b;
}
}, true, 1)
.test()
.assertResult(11, 22);
}
@Test
public void flatMapEmpty() {
assertSame(Flowable.empty(), Flowable.empty().flatMap(new Function<Object, Publisher<Object>>() {
@Override
public Publisher<Object> apply(Object v) throws Exception {
return Flowable.just(v);
}
}));
}
@Test
public void mergeScalar() {
Flowable.merge(Flowable.just(Flowable.just(1)))
.test()
.assertResult(1);
}
@Test
public void mergeScalar2() {
Flowable.merge(Flowable.just(Flowable.just(1)).hide())
.test()
.assertResult(1);
}
@Test
public void mergeScalarEmpty() {
Flowable.merge(Flowable.just(Flowable.empty()).hide())
.test()
.assertResult();
}
@Test
public void mergeScalarError() {
Flowable.merge(Flowable.just(Flowable.fromCallable(new Callable<Object>() {
@Override
public Object call() throws Exception {
throw new TestException();
}
})).hide())
.test()
.assertFailure(TestException.class);
}
@Test
public void scalarReentrant() {
final PublishProcessor<Flowable<Integer>> pp = PublishProcessor.create();
TestSubscriber<Integer> ts = new TestSubscriber<Integer>() {
@Override
public void onNext(Integer t) {
super.onNext(t);
if (t == 1) {
pp.onNext(Flowable.just(2));
}
}
};
Flowable.merge(pp)
.subscribe(ts);
pp.onNext(Flowable.just(1));
pp.onComplete();
ts.assertResult(1, 2);
}
@Test
public void scalarReentrant2() {
final PublishProcessor<Flowable<Integer>> pp = PublishProcessor.create();
TestSubscriber<Integer> ts = new TestSubscriber<Integer>() {
@Override
public void onNext(Integer t) {
super.onNext(t);
if (t == 1) {
pp.onNext(Flowable.just(2));
}
}
};
Flowable.merge(pp, 2)
.subscribe(ts);
pp.onNext(Flowable.just(1));
pp.onComplete();
ts.assertResult(1, 2);
}
@Test
public void innerCompleteCancelRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final PublishProcessor<Integer> pp = PublishProcessor.create();
final TestSubscriber<Integer> ts = Flowable.merge(Flowable.just(pp)).test();
Runnable r1 = new Runnable() {
@Override
public void run() {
pp.onComplete();
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
ts.cancel();
}
};
TestHelper.race(r1, r2);
}
}
@Test
public void fusedInnerThrows() {
Flowable.just(1).hide()
.flatMap(new Function<Integer, Flowable<Object>>() {
@Override
public Flowable<Object> apply(Integer v) throws Exception {
return Flowable.range(1, 2).map(new Function<Integer, Object>() {
@Override
public Object apply(Integer w) throws Exception {
throw new TestException();
}
});
}
})
.test()
.assertFailure(TestException.class);
}
@Test
public void fusedInnerThrows2() {
TestSubscriberEx<Integer> ts = Flowable.range(1, 2).hide()
.flatMap(new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer v) throws Exception {
return Flowable.range(1, 2).map(new Function<Integer, Integer>() {
@Override
public Integer apply(Integer w) throws Exception {
throw new TestException();
}
});
}
}, true)
.to(TestHelper.<Integer>testConsumer())
.assertFailure(CompositeException.class);
List<Throwable> errors = TestHelper.errorList(ts);
TestHelper.assertError(errors, 0, TestException.class);
TestHelper.assertError(errors, 1, TestException.class);
}
@Test
public void scalarXMap() {
Flowable.fromCallable(Functions.justCallable(1))
.flatMap(Functions.justFunction(Flowable.fromCallable(Functions.justCallable(2))))
.test()
.assertResult(2);
}
@Test
public void noCrossBoundaryFusion() {
for (int i = 0; i < 500; i++) {
TestSubscriber<Object> ts = Flowable.merge(
Flowable.just(1).observeOn(Schedulers.single()).map(new Function<Integer, Object>() {
@Override
public Object apply(Integer v) throws Exception {
return Thread.currentThread().getName().substring(0, 4);
}
}),
Flowable.just(1).observeOn(Schedulers.computation()).map(new Function<Integer, Object>() {
@Override
public Object apply(Integer v) throws Exception {
return Thread.currentThread().getName().substring(0, 4);
}
})
)
.test()
.awaitDone(5, TimeUnit.SECONDS)
.assertValueCount(2);
List<Object> list = ts.values();
assertTrue(list.toString(), list.contains("RxSi"));
assertTrue(list.toString(), list.contains("RxCo"));
}
}
@Test
public void cancelScalarDrainRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final PublishProcessor<Flowable<Integer>> pp = PublishProcessor.create();
final TestSubscriber<Integer> ts = pp.flatMap(Functions.<Flowable<Integer>>identity()).test(0);
Runnable r1 = new Runnable() {
@Override
public void run() {
ts.cancel();
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
pp.onComplete();
}
};
TestHelper.race(r1, r2);
assertTrue(errors.toString(), errors.isEmpty());
} finally {
RxJavaPlugins.reset();
}
}
}
@Test
public void cancelDrainRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
for (int j = 1; j < 50; j += 5) {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
final PublishProcessor<Flowable<Integer>> pp = PublishProcessor.create();
final TestSubscriber<Integer> ts = pp.flatMap(Functions.<Flowable<Integer>>identity()).test(0);
final PublishProcessor<Integer> just = PublishProcessor.create();
pp.onNext(just);
Runnable r1 = new Runnable() {
@Override
public void run() {
ts.request(1);
ts.cancel();
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
just.onNext(1);
}
};
TestHelper.race(r1, r2);
assertTrue(errors.toString(), errors.isEmpty());
} finally {
RxJavaPlugins.reset();
}
}
}
}
@Test
public void iterableMapperFunctionReturnsNull() {
Flowable.just(1)
.flatMapIterable(new Function<Integer, Iterable<Object>>() {
@Override
public Iterable<Object> apply(Integer v) throws Exception {
return null;
}
}, new BiFunction<Integer, Object, Object>() {
@Override
public Object apply(Integer v, Object w) throws Exception {
return v;
}
})
.to(TestHelper.<Object>testConsumer())
.assertFailureAndMessage(NullPointerException.class, "The mapper returned a null Iterable");
}
@Test
public void combinerMapperFunctionReturnsNull() {
Flowable.just(1)
.flatMap(new Function<Integer, Publisher<Object>>() {
@Override
public Publisher<Object> apply(Integer v) throws Exception {
return null;
}
}, new BiFunction<Integer, Object, Object>() {
@Override
public Object apply(Integer v, Object w) throws Exception {
return v;
}
})
.to(TestHelper.<Object>testConsumer())
.assertFailureAndMessage(NullPointerException.class, "The mapper returned a null Publisher");
}
@Test
public void failingFusedInnerCancelsSource() {
final AtomicInteger counter = new AtomicInteger();
Flowable.range(1, 5)
.doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer v) throws Exception {
counter.getAndIncrement();
}
})
.flatMap(new Function<Integer, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Integer v)
throws Exception {
return Flowable.<Integer>fromIterable(new Iterable<Integer>() {
@Override
public Iterator<Integer> iterator() {
return new Iterator<Integer>() {
@Override
public boolean hasNext() {
return true;
}
@Override
public Integer next() {
throw new TestException();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
});
}
})
.test()
.assertFailure(TestException.class);
assertEquals(1, counter.get());
}
@Test
public void maxConcurrencySustained() {
final PublishProcessor<Integer> pp1 = PublishProcessor.create();
final PublishProcessor<Integer> pp2 = PublishProcessor.create();
PublishProcessor<Integer> pp3 = PublishProcessor.create();
PublishProcessor<Integer> pp4 = PublishProcessor.create();
TestSubscriber<Integer> ts = Flowable.just(pp1, pp2, pp3, pp4)
.flatMap(new Function<PublishProcessor<Integer>, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(PublishProcessor<Integer> v) throws Exception {
return v;
}
}, 2)
.doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer v) throws Exception {
if (v == 1) {
// this will make sure the drain loop detects two completed
// inner sources and replaces them with fresh ones
pp1.onComplete();
pp2.onComplete();
}
}
})
.test();
pp1.onNext(1);
assertFalse(pp1.hasSubscribers());
assertFalse(pp2.hasSubscribers());
assertTrue(pp3.hasSubscribers());
assertTrue(pp4.hasSubscribers());
ts.cancel();
assertFalse(pp3.hasSubscribers());
assertFalse(pp4.hasSubscribers());
}
@Test
public void undeliverableUponCancel() {
TestHelper.checkUndeliverableUponCancel(new FlowableConverter<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Flowable<Integer> upstream) {
return upstream.flatMap(new Function<Integer, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Integer v) throws Throwable {
return Flowable.just(v).hide();
}
});
}
});
}
@Test
public void undeliverableUponCancelDelayError() {
TestHelper.checkUndeliverableUponCancel(new FlowableConverter<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Flowable<Integer> upstream) {
return upstream.flatMap(new Function<Integer, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Integer v) throws Throwable {
return Flowable.just(v).hide();
}
}, true);
}
});
}
@Test
public void mainErrorsInnerCancelled() {
PublishProcessor<Integer> pp1 = PublishProcessor.create();
PublishProcessor<Integer> pp2 = PublishProcessor.create();
pp1
.flatMap(v -> pp2)
.test();
pp1.onNext(1);
assertTrue("No subscribers?", pp2.hasSubscribers());
pp1.onError(new TestException());
assertFalse("Has subscribers?", pp2.hasSubscribers());
}
@Test
public void innerErrorsMainCancelled() {
PublishProcessor<Integer> pp1 = PublishProcessor.create();
PublishProcessor<Integer> pp2 = PublishProcessor.create();
pp1
.flatMap(v -> pp2)
.test();
pp1.onNext(1);
assertTrue("No subscribers?", pp2.hasSubscribers());
pp2.onError(new TestException());
assertFalse("Has subscribers?", pp1.hasSubscribers());
}
@Test
public void innerIsDisposed() {
FlowableFlatMap.InnerSubscriber<Integer, Integer> inner = new FlowableFlatMap.InnerSubscriber<>(null, 10, 0L);
assertFalse(inner.isDisposed());
inner.dispose();
assertTrue(inner.isDisposed());
}
@Test
public void badRequest() {
TestHelper.assertBadRequestReported(Flowable.never().flatMap(v -> Flowable.never()));
}
@Test
public void signalsAfterMapperCrash() throws Throwable {
TestHelper.withErrorTracking(errors -> {
new Flowable<Integer>() {
@Override
protected void subscribeActual(@NonNull Subscriber<? super @NonNull Integer> s) {
s.onSubscribe(new BooleanSubscription());
s.onNext(1);
s.onNext(2);
s.onComplete();
s.onError(new IOException());
}
}
.flatMap(v -> {
throw new TestException();
})
.test()
.assertFailure(TestException.class);
TestHelper.assertUndeliverable(errors, 0, IOException.class);
});
}
@Test
public void scalarQueueTerminate() {
PublishProcessor<Integer> pp = PublishProcessor.create();
TestSubscriber<Integer> ts = new TestSubscriber<>();
pp
.flatMap(v -> Flowable.just(v))
.doOnNext(v -> {
if (v == 1) {
pp.onNext(2);
pp.onNext(3);
}
})
.take(2)
.subscribe(ts);
pp.onNext(1);
ts.assertResult(1, 2);
}
@Test
public void scalarQueueCompleteMain() throws Exception {
PublishProcessor<Integer> pp = PublishProcessor.create();
TestSubscriber<Integer> ts = new TestSubscriber<>();
CountDownLatch cdl = new CountDownLatch(1);
pp
.flatMap(v -> Flowable.just(v))
.doOnNext(v -> {
if (v == 1) {
pp.onNext(2);
TestHelper.raceOther(() -> pp.onComplete(), cdl);
}
})
.subscribe(ts);
pp.onNext(1);
cdl.await();
ts.assertResult(1, 2);
}
@Test
public void fusedInnerCrash() {
UnicastProcessor<Integer> up = UnicastProcessor.create();
PublishProcessor<Integer> pp = PublishProcessor.create();
TestSubscriber<Integer> ts = Flowable.just(
pp,
up.map(v -> {
if (v == 10) {
throw new TestException();
}
return v;
})
.compose(TestHelper.flowableStripBoundary())
)
.flatMap(v -> v, true)
.doOnNext(v -> {
if (v == 1) {
pp.onNext(2);
up.onNext(10);
}
})
.test();
pp.onNext(1);
pp.onComplete();
ts.assertFailure(TestException.class, 1, 2);
}
@Test
public void fusedInnerCrash2() {
UnicastProcessor<Integer> up = UnicastProcessor.create();
PublishProcessor<Integer> pp = PublishProcessor.create();
TestSubscriber<Integer> ts = Flowable.just(
up.map(v -> {
if (v == 10) {
throw new TestException();
}
return v;
})
.compose(TestHelper.flowableStripBoundary())
, pp
)
.flatMap(v -> v, true)
.doOnNext(v -> {
if (v == 1) {
pp.onNext(2);
up.onNext(10);
}
})
.test();
pp.onNext(1);
pp.onComplete();
ts.assertFailure(TestException.class, 1, 2);
}
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeFlowable(f -> f.flatMap(v -> Flowable.never()));
}
@Test
public void allConcurrency() {
Flowable.just(1)
.hide()
.flatMap(v -> Flowable.just(2).hide(), Integer.MAX_VALUE)
.test()
.assertResult(2);
}
@Test
public void allConcurrencyScalarInner() {
Flowable.just(1)
.hide()
.flatMap(v -> Flowable.just(2), Integer.MAX_VALUE)
.test()
.assertResult(2);
}
@Test
public void allConcurrencyScalarInnerEmpty() {
Flowable.just(1)
.hide()
.flatMap(v -> Flowable.empty(), Integer.MAX_VALUE)
.test()
.assertResult();
}
static final | FlowableFlatMapTest |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/calcite/sql2rel/StandardConvertletTable.java | {
"start": 81437,
"end": 89364
} | class ____ implements SqlRexConvertlet {
private final SqlLibrary library;
SubstrConvertlet(SqlLibrary library) {
this.library = library;
Preconditions.checkArgument(
library == SqlLibrary.ORACLE
|| library == SqlLibrary.MYSQL
|| library == SqlLibrary.BIG_QUERY
|| library == SqlLibrary.POSTGRESQL);
}
@Override
public RexNode convertCall(SqlRexContext cx, SqlCall call) {
// Translate
// SUBSTR(value, start, length)
//
// to the following if we want PostgreSQL semantics:
// SUBSTRING(value, start, length)
//
// to the following if we want Oracle semantics:
// SUBSTRING(
// value
// FROM CASE
// WHEN start = 0
// THEN 1
// WHEN start + (length(value) + 1) < 1
// THEN length(value) + 1
// WHEN start < 0
// THEN start + (length(value) + 1)
// ELSE start)
// FOR CASE WHEN length < 0 THEN 0 ELSE length END)
//
// to the following in MySQL:
// SUBSTRING(
// value
// FROM CASE
// WHEN start = 0
// THEN length(value) + 1 -- different from Oracle
// WHEN start + (length(value) + 1) < 1
// THEN length(value) + 1
// WHEN start < 0
// THEN start + length(value) + 1
// ELSE start)
// FOR CASE WHEN length < 0 THEN 0 ELSE length END)
//
// to the following if we want BigQuery semantics:
// CASE
// WHEN start + (length(value) + 1) < 1
// THEN value
// ELSE SUBSTRING(
// value
// FROM CASE
// WHEN start = 0
// THEN 1
// WHEN start < 0
// THEN start + length(value) + 1
// ELSE start)
// FOR CASE WHEN length < 0 THEN 0 ELSE length END)
final RexBuilder rexBuilder = cx.getRexBuilder();
final List<RexNode> exprs =
convertOperands(cx, call, SqlOperandTypeChecker.Consistency.NONE);
final RexNode value = exprs.get(0);
final RexNode start = exprs.get(1);
final RelDataType startType = start.getType();
final RexLiteral zeroLiteral = rexBuilder.makeLiteral(0, startType);
final RexLiteral oneLiteral = rexBuilder.makeLiteral(1, startType);
final RexNode valueLength =
SqlTypeUtil.isBinary(value.getType())
? rexBuilder.makeCall(SqlStdOperatorTable.OCTET_LENGTH, value)
: rexBuilder.makeCall(SqlStdOperatorTable.CHAR_LENGTH, value);
final RexNode valueLengthPlusOne =
rexBuilder.makeCall(SqlStdOperatorTable.PLUS, valueLength, oneLiteral);
final RexNode newStart;
switch (library) {
case POSTGRESQL:
if (call.operandCount() == 2) {
newStart =
rexBuilder.makeCall(
SqlStdOperatorTable.CASE,
rexBuilder.makeCall(
SqlStdOperatorTable.LESS_THAN, start, oneLiteral),
oneLiteral,
start);
} else {
newStart = start;
}
break;
case BIG_QUERY:
newStart =
rexBuilder.makeCall(
SqlStdOperatorTable.CASE,
rexBuilder.makeCall(
SqlStdOperatorTable.EQUALS, start, zeroLiteral),
oneLiteral,
rexBuilder.makeCall(
SqlStdOperatorTable.LESS_THAN, start, zeroLiteral),
rexBuilder.makeCall(
SqlStdOperatorTable.PLUS, start, valueLengthPlusOne),
start);
break;
default:
newStart =
rexBuilder.makeCall(
SqlStdOperatorTable.CASE,
rexBuilder.makeCall(
SqlStdOperatorTable.EQUALS, start, zeroLiteral),
library == SqlLibrary.MYSQL ? valueLengthPlusOne : oneLiteral,
rexBuilder.makeCall(
SqlStdOperatorTable.LESS_THAN,
rexBuilder.makeCall(
SqlStdOperatorTable.PLUS,
start,
valueLengthPlusOne),
oneLiteral),
valueLengthPlusOne,
rexBuilder.makeCall(
SqlStdOperatorTable.LESS_THAN, start, zeroLiteral),
rexBuilder.makeCall(
SqlStdOperatorTable.PLUS, start, valueLengthPlusOne),
start);
break;
}
if (call.operandCount() == 2) {
return rexBuilder.makeCall(SqlStdOperatorTable.SUBSTRING, value, newStart);
}
assert call.operandCount() == 3;
final RexNode length = exprs.get(2);
final RexNode newLength;
switch (library) {
case POSTGRESQL:
newLength = length;
break;
default:
newLength =
rexBuilder.makeCall(
SqlStdOperatorTable.CASE,
rexBuilder.makeCall(
SqlStdOperatorTable.LESS_THAN, length, zeroLiteral),
zeroLiteral,
length);
}
final RexNode substringCall =
rexBuilder.makeCall(SqlStdOperatorTable.SUBSTRING, value, newStart, newLength);
switch (library) {
case BIG_QUERY:
return rexBuilder.makeCall(
SqlStdOperatorTable.CASE,
rexBuilder.makeCall(
SqlStdOperatorTable.LESS_THAN,
rexBuilder.makeCall(
SqlStdOperatorTable.PLUS, start, valueLengthPlusOne),
oneLiteral),
value,
substringCall);
default:
return substringCall;
}
}
}
/**
* Convertlet that handles the 3-argument {@code TIMESTAMPADD} function and the 2-argument
* BigQuery-style {@code TIMESTAMP_ADD} function.
*/
private static | SubstrConvertlet |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/compatible/jsonlib/CompatibleTest0.java | {
"start": 486,
"end": 4374
} | class ____ extends TestCase {
protected void setUp() throws Exception {
System.out.println();
}
public void test_0() throws Exception {
Map<String, Object> obj = new HashMap<String, Object>();
assertEquals(toCompatibleJSONString(obj), toJSONLibString(obj));
}
public void test_1() throws Exception {
VO vo = new VO();
assertEquals(toCompatibleJSONString(vo), toJSONLibString(vo));
}
public void test_2() throws Exception {
V1 vo = new V1();
assertEquals(toCompatibleJSONString(vo), toJSONLibString(vo));
}
// {"media":{"size":58982400,"format":"video/mpg4","uri":"http://javaone.com/keynote.mpg","title":"Javaone Keynote","width":640,"height":480,"duration":18000000,"bitrate":262144,"persons":["Bill Gates","Steve Jobs"],"player":"JAVA"}{"images":[{"size":"LARGE","uri":"http://javaone.com/keynote_large.jpg","title":"Javaone Keynote","width":1024,"height":768},{"size":"SMALL","uri":"http://javaone.com/keynote_small.jpg","title":"Javaone Keynote","width":320,"height":240}]}
public void test_3() throws Exception {
V1 vo = new V1();
vo.setDate(new Date());
assertEquals(toCompatibleJSONString(vo), toJSONLibString(vo));
}
public void test_4() throws Exception {
V1 vo = new V1();
vo.setF2('中');
assertEquals(toCompatibleJSONString(vo), toJSONLibString(vo));
}
public void test_5() throws Exception {
V2 vo = new V2();
vo.setF1(0.2f);
vo.setF2(33.3);
assertEquals(toCompatibleJSONString(vo), toJSONLibString(vo));
}
public void test_6() throws Exception {
V2 vo = new V2();
vo.setF1(0.1f);
vo.setF2(33.3);
assertEquals(toCompatibleJSONString(vo), toJSONLibString(vo));
}
public void test_7() throws Exception {
V2 vo = new V2();
vo.setF2(0.1D);
vo.setF1(33.3f);
assertEquals(toCompatibleJSONString(vo), toJSONLibString(vo));
}
public void test_8() throws Exception {
V3 vo = new V3();
assertEquals(toCompatibleJSONString(vo), toJSONLibString(vo));
}
public void test_9() throws Exception {
V4 vo = new V4();
assertEquals(toCompatibleJSONString(vo), toJSONLibString(vo));
}
public void test_10() throws Exception {
Object vo = null;
assertEquals(toCompatibleJSONString(vo), toJSONLibString(vo));
}
public void test_11() throws Exception {
Object vo = new HashMap();
assertEquals(toCompatibleJSONString(vo), toJSONLibString(vo));
}
public static void assertEquals(String fastJSON, String jsonLib) {
System.out.println("fastjson: " + fastJSON);
System.out.println("json-lib: " + jsonLib);
Assert.assertEquals(JSON.parse(fastJSON), JSON.parse(jsonLib));
}
private static final SerializeConfig mapping;
static {
mapping = new SerializeConfig();
mapping.put(Date.class, new JSONLibDataFormatSerializer()); // 使用和json-lib兼容的日期输出格式
}
private static final SerializerFeature[] features = { SerializerFeature.WriteMapNullValue, // 输出空置字段
SerializerFeature.WriteNullListAsEmpty, // list字段如果为null,输出为[],而不是null
SerializerFeature.WriteNullNumberAsZero, // 数值字段如果为null,输出为0,而不是null
SerializerFeature.WriteNullBooleanAsFalse, // Boolean字段如果为null,输出为false,而不是null
SerializerFeature.WriteNullStringAsEmpty // 字符类型字段如果为null,输出为"",而不是null
};
// 序列化为和JSON-LIB兼容的字符串
public static String toCompatibleJSONString(Object object) {
return JSON.toJSONString(object, mapping, features);
}
public static String toJSONLibString(Object object) {
net.sf.json.JSONObject obj = net.sf.json.JSONObject.fromObject(object);
return obj.toString();
}
public static | CompatibleTest0 |
java | apache__kafka | server-common/src/main/java/org/apache/kafka/server/share/persister/PartitionData.java | {
"start": 959,
"end": 1115
} | interface ____ {@link Persister}. The various interfaces
* reflect the ways in which a subset of the data can be accessed for different purposes.
*/
public | to |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/RolloverActionTests.java | {
"start": 825,
"end": 8630
} | class ____ extends AbstractActionTestCase<RolloverAction> {
@Override
protected RolloverAction doParseInstance(XContentParser parser) throws IOException {
return RolloverAction.parse(parser);
}
@Override
protected RolloverAction createTestInstance() {
return randomInstance();
}
public static RolloverAction randomInstance() {
// Ensure that at least one max* condition will be defined to produce a valid instance
int useCondition = randomIntBetween(0, 4);
ByteSizeValue maxSize = (useCondition == 0 || randomBoolean()) ? randomByteSizeValue() : null;
ByteSizeValue maxPrimaryShardSize = (useCondition == 1 || randomBoolean()) ? randomByteSizeValue() : null;
Long maxDocs = (useCondition == 2 || randomBoolean()) ? randomNonNegativeLong() : null;
TimeValue maxAge = (useCondition == 3 || randomBoolean()) ? TimeValue.timeValueMillis(randomMillisUpToYear9999()) : null;
Long maxPrimaryShardDocs = (useCondition == 4 || randomBoolean()) ? randomNonNegativeLong() : null;
ByteSizeValue minSize = randomBoolean() ? randomByteSizeValue() : null;
ByteSizeValue minPrimaryShardSize = randomBoolean() ? randomByteSizeValue() : null;
Long minDocs = randomBoolean() ? randomNonNegativeLong() : null;
TimeValue minAge = randomBoolean() ? randomPositiveTimeValue() : null;
Long minPrimaryShardDocs = randomBoolean() ? randomNonNegativeLong() : null;
return new RolloverAction(
maxSize,
maxPrimaryShardSize,
maxAge,
maxDocs,
maxPrimaryShardDocs,
minSize,
minPrimaryShardSize,
minAge,
minDocs,
minPrimaryShardDocs
);
}
@Override
protected Reader<RolloverAction> instanceReader() {
return RolloverAction::read;
}
@Override
protected RolloverAction mutateInstance(RolloverAction instance) {
RolloverConditions conditions = instance.getConditions();
ByteSizeValue maxSize = conditions.getMaxSize();
ByteSizeValue maxPrimaryShardSize = conditions.getMaxPrimaryShardSize();
TimeValue maxAge = conditions.getMaxAge();
Long maxDocs = conditions.getMaxDocs();
Long maxPrimaryShardDocs = conditions.getMaxPrimaryShardDocs();
ByteSizeValue minSize = conditions.getMinSize();
ByteSizeValue minPrimaryShardSize = conditions.getMinPrimaryShardSize();
TimeValue minAge = conditions.getMinAge();
Long minDocs = conditions.getMinDocs();
Long minPrimaryShardDocs = conditions.getMinPrimaryShardDocs();
switch (between(0, 9)) {
case 0 -> maxSize = randomValueOtherThan(maxSize, RolloverActionTests::randomByteSizeValue);
case 1 -> maxPrimaryShardSize = randomValueOtherThan(maxPrimaryShardSize, RolloverActionTests::randomByteSizeValue);
case 2 -> maxAge = randomValueOtherThan(maxAge, () -> TimeValue.timeValueMillis(randomMillisUpToYear9999()));
case 3 -> maxDocs = maxDocs == null ? randomNonNegativeLong() : maxDocs + 1;
case 4 -> maxPrimaryShardDocs = maxPrimaryShardDocs == null ? randomNonNegativeLong() : maxPrimaryShardDocs + 1;
case 5 -> minSize = randomValueOtherThan(minSize, RolloverActionTests::randomByteSizeValue);
case 6 -> minPrimaryShardSize = randomValueOtherThan(minPrimaryShardSize, RolloverActionTests::randomByteSizeValue);
case 7 -> minAge = randomValueOtherThan(minAge, () -> TimeValue.timeValueMillis(randomMillisUpToYear9999()));
case 8 -> minDocs = minDocs == null ? randomNonNegativeLong() : minDocs + 1;
case 9 -> minPrimaryShardDocs = minPrimaryShardDocs == null ? randomNonNegativeLong() : minPrimaryShardDocs + 1;
default -> throw new AssertionError("Illegal randomisation branch");
}
return new RolloverAction(
maxSize,
maxPrimaryShardSize,
maxAge,
maxDocs,
maxPrimaryShardDocs,
minSize,
minPrimaryShardSize,
minAge,
minDocs,
minPrimaryShardDocs
);
}
public void testNoConditions() {
IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
() -> new RolloverAction(null, null, null, null, null, null, null, null, null, null)
);
assertEquals("At least one max_* rollover condition must be set.", exception.getMessage());
}
public void testToSteps() {
RolloverAction action = createTestInstance();
RolloverConditions conditions = action.getConditions();
String phase = randomAlphaOfLengthBetween(1, 10);
StepKey nextStepKey = new StepKey(
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10)
);
List<Step> steps = action.toSteps(null, phase, nextStepKey);
assertNotNull(steps);
assertEquals(5, steps.size());
StepKey expectedFirstStepKey = new StepKey(phase, RolloverAction.NAME, WaitForRolloverReadyStep.NAME);
StepKey expectedSecondStepKey = new StepKey(phase, RolloverAction.NAME, RolloverStep.NAME);
StepKey expectedThirdStepKey = new StepKey(phase, RolloverAction.NAME, WaitForActiveShardsStep.NAME);
StepKey expectedFourthStepKey = new StepKey(phase, RolloverAction.NAME, UpdateRolloverLifecycleDateStep.NAME);
StepKey expectedFifthStepKey = new StepKey(phase, RolloverAction.NAME, RolloverAction.INDEXING_COMPLETE_STEP_NAME);
WaitForRolloverReadyStep firstStep = (WaitForRolloverReadyStep) steps.get(0);
RolloverStep secondStep = (RolloverStep) steps.get(1);
WaitForActiveShardsStep thirdStep = (WaitForActiveShardsStep) steps.get(2);
UpdateRolloverLifecycleDateStep fourthStep = (UpdateRolloverLifecycleDateStep) steps.get(3);
UpdateSettingsStep fifthStep = (UpdateSettingsStep) steps.get(4);
assertEquals(expectedFirstStepKey, firstStep.getKey());
assertEquals(expectedSecondStepKey, secondStep.getKey());
assertEquals(expectedThirdStepKey, thirdStep.getKey());
assertEquals(expectedFourthStepKey, fourthStep.getKey());
assertEquals(expectedFifthStepKey, fifthStep.getKey());
assertEquals(secondStep.getKey(), firstStep.getNextStepKey());
assertEquals(thirdStep.getKey(), secondStep.getNextStepKey());
assertEquals(fourthStep.getKey(), thirdStep.getNextStepKey());
assertEquals(fifthStep.getKey(), fourthStep.getNextStepKey());
assertEquals(conditions, firstStep.getConditions());
assertEquals(nextStepKey, fifthStep.getNextStepKey());
}
public void testBwcSerializationWithMaxPrimaryShardDocs() throws Exception {
// In case of serializing to node with older version, replace maxPrimaryShardDocs with maxDocs.
RolloverAction instance = new RolloverAction(null, null, null, null, 1L, null, null, null, null, null);
RolloverAction deserializedInstance = copyInstance(instance, TransportVersions.V_8_1_0);
assertThat(deserializedInstance.getConditions().getMaxPrimaryShardDocs(), nullValue());
// But not if maxDocs is also specified:
instance = new RolloverAction(null, null, null, 2L, 1L, null, null, null, null, null);
deserializedInstance = copyInstance(instance, TransportVersions.V_8_1_0);
assertThat(deserializedInstance.getConditions().getMaxPrimaryShardDocs(), nullValue());
assertThat(deserializedInstance.getConditions().getMaxDocs(), equalTo(instance.getConditions().getMaxDocs()));
}
}
| RolloverActionTests |
java | micronaut-projects__micronaut-core | http-client-core/src/main/java/io/micronaut/http/client/annotation/Client.java | {
"start": 1562,
"end": 1880
} | interface ____ {
/**
* @return The URL or service ID of the remote service
*/
@AliasFor(member = "id") // <1>
String value() default "";
/**
* @return The ID of the client
*/
@AliasFor(member = "value") // <2>
String id() default "";
// end::value[]
/**
* The | Client |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/Banner.java | {
"start": 800,
"end": 975
} | class ____ writing a banner programmatically.
*
* @author Phillip Webb
* @author Michael Stummvoll
* @author Jeremy Rickard
* @since 1.2.0
*/
@FunctionalInterface
public | for |
java | quarkusio__quarkus | integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftWithDeploymentResourceAndLocalLookupTest.java | {
"start": 1124,
"end": 4686
} | class ____ {
private static final String NAME = "openshift-with-deployment-resource-and-local-lookup";
@RegisterExtension
static final QuarkusProdModeTest config = new QuarkusProdModeTest()
.withApplicationRoot((jar) -> jar.addClasses(GreetingResource.class))
.setApplicationName(NAME)
.setApplicationVersion("0.1-SNAPSHOT")
.overrideConfigKey("quarkus.openshift.deployment-kind", "Deployment")
.setLogFileName("k8s.log")
.setForcedDependencies(List.of(Dependency.of("io.quarkus", "quarkus-openshift", Version.getVersion())));
@ProdBuildResults
private ProdModeTestResults prodModeTestResults;
@Test
public void assertGeneratedResources() throws IOException {
final Path kubernetesDir = prodModeTestResults.getBuildDir().resolve("kubernetes");
assertThat(kubernetesDir)
.isDirectoryContaining(p -> p.getFileName().endsWith("openshift.json"))
.isDirectoryContaining(p -> p.getFileName().endsWith("openshift.yml"));
List<HasMetadata> kubernetesList = DeserializationUtil
.deserializeAsList(kubernetesDir.resolve("openshift.yml"));
assertThat(kubernetesList).filteredOn(h -> "BuildConfig".equals(h.getKind())).hasSize(1);
assertThat(kubernetesList).filteredOn(h -> "ImageStream".equals(h.getKind())).hasSize(2);
assertThat(kubernetesList).filteredOn(h -> "ImageStream".equals(h.getKind())
&& h.getMetadata().getName().equals(NAME)).hasSize(1);
assertThat(kubernetesList).filteredOn(i -> i instanceof Deployment).singleElement().satisfies(i -> {
assertThat(i).isInstanceOfSatisfying(Deployment.class, d -> {
assertThat(d.getMetadata()).satisfies(m -> {
assertThat(m.getName()).isEqualTo(NAME);
});
assertThat(d.getSpec()).satisfies(deploymentSpec -> {
assertThat(deploymentSpec.getTemplate()).satisfies(t -> {
assertThat(t.getMetadata()).satisfies(metadata -> assertThat(metadata.getAnnotations()).contains(
entry("alpha.image.policy.openshift.io/resolve-names", "*")));
assertThat(t.getMetadata()).satisfies(metadata -> assertThat(metadata.getLabels()).containsAnyOf(
entry("app.kubernetes.io/name", NAME),
entry("app.kubernetes.io/version", "0.1-SNAPSHOT")));
assertThat(t.getSpec()).satisfies(podSpec -> {
assertThat(podSpec.getContainers()).singleElement().satisfies(container -> {
assertThat(container.getImage())
.isEqualTo("openshift-with-deployment-resource-and-local-lookup:0.1-SNAPSHOT");
});
});
});
});
});
});
assertThat(kubernetesList).filteredOn(r -> r instanceof ImageStream && r.getMetadata().getName().equals(NAME))
.singleElement().satisfies(r -> {
assertThat(r).isInstanceOfSatisfying(ImageStream.class, i -> {
assertThat(i.getSpec()).satisfies(spec -> {
assertThat(spec.getLookupPolicy().getLocal()).isEqualTo(true);
});
});
});
}
}
| OpenshiftWithDeploymentResourceAndLocalLookupTest |
java | apache__camel | core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedEndpointServiceRegistry.java | {
"start": 1674,
"end": 4332
} | class ____ extends ManagedService implements ManagedEndpointServiceRegistryMBean {
private final EndpointServiceRegistry registry;
private boolean sanitize;
public ManagedEndpointServiceRegistry(CamelContext context, EndpointServiceRegistry registry) {
super(context, registry);
this.registry = registry;
}
@Override
public void init(ManagementStrategy strategy) {
super.init(strategy);
sanitize = strategy.getManagementAgent().getMask() != null ? strategy.getManagementAgent().getMask() : true;
}
public EndpointServiceRegistry getRegistry() {
return registry;
}
@Override
public int getNumberOfEndpointServices() {
return registry.size();
}
@Override
public TabularData listEndpointServices() {
try {
TabularData answer = new TabularDataSupport(CamelOpenMBeanTypes.listEndpointServicesTabularType());
List<EndpointServiceRegistry.EndpointService> services = registry.listAllEndpointServices();
for (EndpointServiceRegistry.EndpointService entry : services) {
CompositeType ct = CamelOpenMBeanTypes.listEndpointServicesCompositeType();
String component = entry.getComponent();
String dir = entry.getDirection();
String protocol = entry.getServiceProtocol();
String serviceUrl = entry.getServiceUrl();
String metadata = null;
String endpointUri = entry.getEndpointUri();
if (sanitize) {
endpointUri = URISupport.sanitizeUri(endpointUri);
}
long hits = entry.getHits();
String routeId = entry.getRouteId();
var m = entry.getServiceMetadata();
if (m != null) {
StringJoiner sj = new StringJoiner(" ");
m.forEach((k, v) -> sj.add(k + "=" + v));
metadata = sj.toString();
}
CompositeData data = new CompositeDataSupport(
ct,
new String[] {
"component", "dir", "protocol", "serviceUrl", "metadata", "endpointUri", "routeId", "hits" },
new Object[] {
component, dir, protocol, serviceUrl, metadata, endpointUri, routeId, hits });
answer.put(data);
}
return answer;
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
}
}
| ManagedEndpointServiceRegistry |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/IndexType.java | {
"start": 882,
"end": 6542
} | class ____ {
/**
* An IndexType with no index structures or doc values
*/
public static final IndexType NONE = new IndexType(false, false, false, false, false, false);
private final boolean hasTerms;
private final boolean hasPoints;
private final boolean hasPointsMetadata;
private final boolean hasVectors;
private final boolean hasDocValues;
private final boolean hasDocValuesSkipper;
private IndexType(
boolean hasTerms,
boolean hasPoints,
boolean hasPointsMetadata,
boolean hasVectors,
boolean hasDocValues,
boolean hasDocValuesSkipper
) {
this.hasTerms = hasTerms;
this.hasPoints = hasPoints;
this.hasPointsMetadata = hasPointsMetadata;
this.hasVectors = hasVectors;
this.hasDocValues = hasDocValues;
this.hasDocValuesSkipper = hasDocValuesSkipper;
}
/**
* @return {@code true} if this IndexType has a Points index
*/
public boolean hasPoints() {
return hasPoints;
}
/**
* @return {@code true} if this IndexType has Points metadata
*/
public boolean hasPointsMetadata() {
return hasPointsMetadata;
}
/**
* @return {@code true} if this IndexType has an inverted index
*/
public boolean hasTerms() {
return hasTerms;
}
/**
* @return {@code true} if this IndexType has a vector index
*/
public boolean hasVectors() {
return hasVectors;
}
/**
* @return {@code true} if this IndexType has doc values
*/
public boolean hasDocValues() {
return hasDocValues;
}
/**
* @return {@code true} if this IndexType has a doc values skipper
*/
public boolean hasDocValuesSkipper() {
return hasDocValuesSkipper;
}
/**
* @return {@code true} if this IndexType has doc values but no index
*/
public boolean hasOnlyDocValues() {
return hasDocValues && hasDenseIndex() == false;
}
/**
* @return {@code true} if this IndexType has a dense index structure
*/
public boolean hasDenseIndex() {
return hasPoints || hasTerms || hasVectors;
}
/**
* @return {@code true} if this IndexType has index structures that support sort-based early termination
*/
public boolean supportsSortShortcuts() {
return hasTerms || hasPoints;
}
/**
* @return an inverted-index based IndexType
*/
public static IndexType terms(boolean isIndexed, boolean hasDocValues) {
if (isIndexed == false && hasDocValues == false) {
return NONE;
}
return new IndexType(isIndexed, false, false, false, hasDocValues, false);
}
/**
* @return a terms-based IndexType from a lucene FieldType
*/
public static IndexType terms(FieldType fieldType) {
if (fieldType.indexOptions() == IndexOptions.NONE) {
if (fieldType.docValuesType() == DocValuesType.NONE) {
return NONE;
}
if (fieldType.docValuesSkipIndexType() == DocValuesSkipIndexType.NONE) {
return docValuesOnly();
}
return skippers();
}
if (fieldType.docValuesType() == DocValuesType.NONE) {
return terms(true, false);
}
return terms(true, true);
}
/**
* @return an IndexType with docValuesSkippers
*/
public static IndexType skippers() {
return new IndexType(false, false, false, false, true, true);
}
/**
* @return a point-based IndexType
*/
public static IndexType points(boolean isIndexed, boolean hasDocValues) {
if (isIndexed == false && hasDocValues == false) {
return IndexType.NONE;
}
return new IndexType(false, isIndexed, isIndexed, false, hasDocValues, false);
}
/**
* @return an IndexType representing archive data, with points metadata extracted from doc values
*/
public static IndexType archivedPoints() {
return new IndexType(false, false, true, false, true, false);
}
/**
* @return an IndexType with doc values but no index
*/
public static IndexType docValuesOnly() {
return new IndexType(false, false, false, false, true, false);
}
/**
* @return an IndexType with a vector index
*/
public static IndexType vectors() {
return new IndexType(false, false, false, true, false, false);
}
@Override
public String toString() {
return "IndexType{"
+ "hasTerms="
+ hasTerms
+ ", hasPoints="
+ hasPoints
+ ", hasPointsMetadata="
+ hasPointsMetadata
+ ", hasVectors="
+ hasVectors
+ ", hasDocValues="
+ hasDocValues
+ ", hasDocValuesSkipper="
+ hasDocValuesSkipper
+ '}';
}
@Override
public boolean equals(Object o) {
if (o instanceof IndexType indexType) {
return hasTerms == indexType.hasTerms
&& hasPoints == indexType.hasPoints
&& hasPointsMetadata == indexType.hasPointsMetadata
&& hasVectors == indexType.hasVectors
&& hasDocValues == indexType.hasDocValues
&& hasDocValuesSkipper == indexType.hasDocValuesSkipper;
}
return false;
}
@Override
public int hashCode() {
return Objects.hash(hasTerms, hasPoints, hasPointsMetadata, hasVectors, hasDocValues, hasDocValuesSkipper);
}
}
| IndexType |
java | google__guice | core/test/com/google/inject/ProvisionExceptionTest.java | {
"start": 14090,
"end": 14314
} | class ____ {
@Inject @Green String green;
@Inject
@Green
String green() {
return green;
}
}
@Retention(RUNTIME)
@Target({FIELD, PARAMETER, CONSTRUCTOR, METHOD})
@BindingAnnotation
@ | LikeScala |
java | apache__maven | impl/maven-impl/src/main/java/org/apache/maven/impl/DefaultDependencyResolverResult.java | {
"start": 7184,
"end": 7405
} | class ____ too.</li>
* </ul>
* </li>
* </ul>
*
* This method must be invoked before {@link #addDependency(Node, Dependency, Predicate, Path)}
* if output directories are desired on the | path |
java | junit-team__junit5 | junit-platform-engine/src/main/java/org/junit/platform/engine/discovery/ClassNameFilter.java | {
"start": 1760,
"end": 1883
} | class ____ be included in the result set.
*
* @param patterns regular expressions to match against fully qualified
* | will |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/JUnit4EmptyMethodsTest.java | {
"start": 880,
"end": 1403
} | class ____ {
private final BugCheckerRefactoringTestHelper refactoringHelper =
BugCheckerRefactoringTestHelper.newInstance(JUnit4EmptyMethods.class, getClass());
@Test
public void emptyMethods() {
refactoringHelper
.addInputLines(
"FooTest.java",
"""
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
| JUnit4EmptyMethodsTest |
java | apache__kafka | connect/runtime/src/test/java/org/apache/kafka/connect/runtime/WorkerTest.java | {
"start": 174978,
"end": 175386
} | class ____ extends SinkTask {
public TestSinkTask() {
}
@Override
public String version() {
return "1.0";
}
@Override
public void start(Map<String, String> props) {
}
@Override
public void put(Collection<SinkRecord> records) {
}
@Override
public void stop() {
}
}
}
| TestSinkTask |
java | quarkusio__quarkus | extensions/infinispan-client/runtime-dev/src/main/java/io/quarkus/infinispan/client/runtime/dev/ui/InfinispanClientsContainer.java | {
"start": 635,
"end": 2075
} | class ____ {
/**
* Used in Dev UI
*
* @return info about Infinispan clients
*/
public List<InfinispanClientInfo> clientsInfo() {
List<InstanceHandle<RemoteCacheManager>> instanceHandles = Arc.container().listAll(RemoteCacheManager.class);
List<InfinispanClientInfo> infinispanClientInfos = new ArrayList<>();
for (InstanceHandle<RemoteCacheManager> ih : instanceHandles) {
InjectableBean<RemoteCacheManager> bean = ih.getBean();
Set<Annotation> annotationSet = bean.getQualifiers();
String identifier = InfinispanClientUtil.DEFAULT_INFINISPAN_CLIENT_NAME;
for (Annotation annotation : annotationSet) {
if (annotation instanceof io.quarkus.infinispan.client.InfinispanClientName) {
// client name found is found
identifier = ((io.quarkus.infinispan.client.InfinispanClientName) annotation).value();
}
}
List<ServerConfiguration> servers = ih.get().getConfiguration().servers();
if (!servers.isEmpty()) {
ServerConfiguration firstServer = servers.get(0);
infinispanClientInfos.add(
new InfinispanClientInfo(identifier, firstServer.host() + ":" + firstServer.port()));
}
}
return infinispanClientInfos;
}
public static | InfinispanClientsContainer |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/JUnit4SetUpNotRunTest.java | {
"start": 4712,
"end": 5373
} | class ____ {
@Before
public void initMocks() {}
@Before
public void badVisibility() {}
}
""")
.doTest();
}
@Test
public void positiveCase_customBeforeDifferentName() {
compilationHelper
.addSourceLines(
"JUnit4SetUpNotRunPositiveCaseCustomBefore2.java",
"""
package com.google.errorprone.bugpatterns.testdata;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Test case with a custom Before annotation. */
@RunWith(JUnit4.class)
public | Foo |
java | apache__flink | flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/ConfigurableRocksDBOptionsFactory.java | {
"start": 930,
"end": 1131
} | class ____ been moved to {@link
* org.apache.flink.state.rocksdb.ConfigurableRocksDBOptionsFactory}. Please use the one under
* the new package instead.
*/
@Deprecated
@PublicEvolving
public | has |
java | apache__camel | components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/NettyHttpBindingPreservePostFormUrlEncodedBodyTest.java | {
"start": 1140,
"end": 3667
} | class ____ extends BaseNettyTest {
@Test
public void testSendToNetty() {
Exchange exchange
= template.request("netty-http:http://localhost:{{port}}/myapp/myservice?query1=a&query2=b", exchange1 -> {
exchange1.getIn().setBody("b1=x&b2=y");
exchange1.getIn().setHeader("content-type", "application/x-www-form-urlencoded");
exchange1.getIn().setHeader(Exchange.HTTP_METHOD, HttpMethods.POST);
});
// convert the response to a String
String body = exchange.getMessage().getBody(String.class);
assertEquals("Request message is OK", body);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("netty-http:http://localhost:{{port}}/myapp/myservice").process(exchange -> {
String body = exchange.getIn().getBody(String.class);
// for unit testing make sure we got right message
assertEquals("b1=x&b2=y", body, "The body message is wrong");
assertEquals("a", exchange.getIn().getHeader("query1"),
"Get a wrong query parameter from the message header");
assertEquals("b", exchange.getIn().getHeader("query2"),
"Get a wrong query parameter from the message header");
assertEquals("x", exchange.getIn().getHeader("b1"), "Get a wrong form parameter from the message header");
assertEquals("y", exchange.getIn().getHeader("b2"), "Get a wrong form parameter from the message header");
assertEquals("localhost:" + getPort(), exchange.getIn().getHeader("host"),
"Get a wrong form parameter from the message header");
NettyHttpMessage in = (NettyHttpMessage) exchange.getIn();
FullHttpRequest request = in.getHttpRequest();
assertEquals("/myapp/myservice?query1=a&query2=b", request.uri(), "Relative path should be used");
// send a response
exchange.getMessage().getHeaders().clear();
exchange.getMessage().setHeader(Exchange.CONTENT_TYPE, "text/plain");
exchange.getMessage().setBody("Request message is OK");
});
}
};
}
}
| NettyHttpBindingPreservePostFormUrlEncodedBodyTest |
java | apache__camel | components/camel-jpa/src/test/java/org/apache/camel/processor/jpa/JpaProducerWithQueryParametersHeaderTest.java | {
"start": 1335,
"end": 3201
} | class ____ {
protected DefaultCamelContext camelContext;
protected ProducerTemplate template;
@Test
@SuppressWarnings("rawtypes")
public void testProducerWithNamedQuery() {
template.sendBody("direct:deleteCustomers", "");
Customer c1 = new Customer();
c1.setName("Willem");
template.sendBody("direct:addCustomer", c1);
Customer c2 = new Customer();
c2.setName("Dummy");
template.sendBody("direct:addCustomer", c2);
Map<String, Object> params = new HashMap<>();
params.put("custName", "${body}");
List list = template.requestBodyAndHeader("direct:namedQuery", "Willem", JpaConstants.JPA_PARAMETERS_HEADER, params,
List.class);
assertEquals(1, list.size());
assertEquals("Willem", ((Customer) list.get(0)).getName());
int integer = template.requestBody("direct:deleteCustomers", null, int.class);
assertEquals(2, integer);
}
@BeforeEach
public void setUp() throws Exception {
camelContext = new DefaultCamelContext();
camelContext.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:namedQuery")
.to("jpa://" + Customer.class.getName() + "?namedQuery=findAllCustomersWithName");
from("direct:addCustomer")
.to("jpa://" + Customer.class.getName());
from("direct:deleteCustomers")
.to("jpa://" + Customer.class.getName() + "?query=delete from " + Customer.class.getName());
}
});
camelContext.start();
template = camelContext.createProducerTemplate();
}
@AfterEach
public void tearDown() {
camelContext.stop();
}
}
| JpaProducerWithQueryParametersHeaderTest |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/response/ElasticInferenceServiceSparseEmbeddingsResponseEntityTests.java | {
"start": 860,
"end": 8358
} | class ____ extends ESTestCase {
public void testSparseEmbeddingsResponse_SingleEmbeddingInData_NoMeta_NoTruncation() throws Exception {
String responseJson = """
{
"data": [
{
"a": 1.23,
"is": 4.56,
"it": 7.89
}
]
}
""";
SparseEmbeddingResults parsedResults = ElasticInferenceServiceSparseEmbeddingsResponseEntity.fromResponse(
mock(Request.class),
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
);
assertThat(
parsedResults.embeddings(),
is(
List.of(
SparseEmbeddingResults.Embedding.create(
List.of(new WeightedToken("a", 1.23F), new WeightedToken("is", 4.56F), new WeightedToken("it", 7.89F)),
false
)
)
)
);
}
public void testSparseEmbeddingsResponse_MultipleEmbeddingsInData_NoMeta_NoTruncation() throws Exception {
String responseJson = """
{
"data": [
{
"a": 1.23,
"is": 4.56,
"it": 7.89
},
{
"b": 1.23,
"it": 4.56,
"is": 7.89
}
]
}
""";
SparseEmbeddingResults parsedResults = ElasticInferenceServiceSparseEmbeddingsResponseEntity.fromResponse(
mock(Request.class),
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
);
assertThat(
parsedResults.embeddings(),
is(
List.of(
SparseEmbeddingResults.Embedding.create(
List.of(new WeightedToken("a", 1.23F), new WeightedToken("is", 4.56F), new WeightedToken("it", 7.89F)),
false
),
SparseEmbeddingResults.Embedding.create(
List.of(new WeightedToken("b", 1.23F), new WeightedToken("it", 4.56F), new WeightedToken("is", 7.89F)),
false
)
)
)
);
}
public void testSparseEmbeddingsResponse_SingleEmbeddingInData_NoMeta_Truncated() throws Exception {
String responseJson = """
{
"data": [
{
"a": 1.23,
"is": 4.56,
"it": 7.89
}
]
}
""";
var request = mock(Request.class);
when(request.getTruncationInfo()).thenReturn(new boolean[] { true });
SparseEmbeddingResults parsedResults = ElasticInferenceServiceSparseEmbeddingsResponseEntity.fromResponse(
request,
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
);
assertThat(
parsedResults.embeddings(),
is(
List.of(
SparseEmbeddingResults.Embedding.create(
List.of(new WeightedToken("a", 1.23F), new WeightedToken("is", 4.56F), new WeightedToken("it", 7.89F)),
true
)
)
)
);
}
public void testSparseEmbeddingsResponse_MultipleEmbeddingsInData_NoMeta_Truncated() throws Exception {
String responseJson = """
{
"data": [
{
"a": 1.23,
"is": 4.56,
"it": 7.89
},
{
"b": 1.23,
"it": 4.56,
"is": 7.89
}
]
}
""";
var request = mock(Request.class);
when(request.getTruncationInfo()).thenReturn(new boolean[] { true, false });
SparseEmbeddingResults parsedResults = ElasticInferenceServiceSparseEmbeddingsResponseEntity.fromResponse(
request,
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
);
assertThat(
parsedResults.embeddings(),
is(
List.of(
SparseEmbeddingResults.Embedding.create(
List.of(new WeightedToken("a", 1.23F), new WeightedToken("is", 4.56F), new WeightedToken("it", 7.89F)),
true
),
SparseEmbeddingResults.Embedding.create(
List.of(new WeightedToken("b", 1.23F), new WeightedToken("it", 4.56F), new WeightedToken("is", 7.89F)),
false
)
)
)
);
}
public void testSparseEmbeddingsResponse_SingleEmbeddingInData_IgnoresMetaBeforeData_NoTruncation() throws Exception {
String responseJson = """
{
"meta": {
"processing_latency": 1.23
},
"data": [
{
"a": 1.23,
"is": 4.56,
"it": 7.89
}
]
}
""";
SparseEmbeddingResults parsedResults = ElasticInferenceServiceSparseEmbeddingsResponseEntity.fromResponse(
mock(Request.class),
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
);
assertThat(
parsedResults.embeddings(),
is(
List.of(
SparseEmbeddingResults.Embedding.create(
List.of(new WeightedToken("a", 1.23F), new WeightedToken("is", 4.56F), new WeightedToken("it", 7.89F)),
false
)
)
)
);
}
public void testSparseEmbeddingsResponse_SingleEmbeddingInData_IgnoresMetaAfterData_NoTruncation() throws Exception {
String responseJson = """
{
"data": [
{
"a": 1.23,
"is": 4.56,
"it": 7.89
}
],
"meta": {
"processing_latency": 1.23
}
}
""";
SparseEmbeddingResults parsedResults = ElasticInferenceServiceSparseEmbeddingsResponseEntity.fromResponse(
mock(Request.class),
new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
);
assertThat(
parsedResults.embeddings(),
is(
List.of(
SparseEmbeddingResults.Embedding.create(
List.of(new WeightedToken("a", 1.23F), new WeightedToken("is", 4.56F), new WeightedToken("it", 7.89F)),
false
)
)
)
);
}
}
| ElasticInferenceServiceSparseEmbeddingsResponseEntityTests |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/index/query/LegacySemanticSparseVectorQueryRewriteInterceptorTests.java | {
"start": 1422,
"end": 7271
} | class ____ extends ESTestCase {
private TestThreadPool threadPool;
private NoOpClient client;
private Index index;
private static final String FIELD_NAME = "fieldName";
private static final String INFERENCE_ID = "inferenceId";
private static final String QUERY = "query";
@Before
public void setup() {
threadPool = createThreadPool();
client = new NoOpClient(threadPool);
index = new Index(randomAlphaOfLength(10), randomAlphaOfLength(10));
}
@After
public void cleanup() {
threadPool.close();
}
public void testSparseVectorQueryOnInferenceFieldIsInterceptedAndRewritten() throws IOException {
Map<String, InferenceFieldMetadata> inferenceFields = Map.of(
FIELD_NAME,
new InferenceFieldMetadata(index.getName(), "inferenceId", new String[] { FIELD_NAME }, null)
);
QueryRewriteContext context = createQueryRewriteContext(inferenceFields);
QueryBuilder original = new SparseVectorQueryBuilder(FIELD_NAME, INFERENCE_ID, QUERY);
if (randomBoolean()) {
float boost = randomFloatBetween(1, 10, randomBoolean());
original.boost(boost);
}
if (randomBoolean()) {
String queryName = randomAlphaOfLength(5);
original.queryName(queryName);
}
testRewrittenInferenceQuery(context, original);
}
public void testSparseVectorQueryOnInferenceFieldWithoutInferenceIdIsInterceptedAndRewritten() throws IOException {
Map<String, InferenceFieldMetadata> inferenceFields = Map.of(
FIELD_NAME,
new InferenceFieldMetadata(index.getName(), "inferenceId", new String[] { FIELD_NAME }, null)
);
QueryRewriteContext context = createQueryRewriteContext(inferenceFields);
QueryBuilder original = new SparseVectorQueryBuilder(FIELD_NAME, null, QUERY);
if (randomBoolean()) {
float boost = randomFloatBetween(1, 10, randomBoolean());
original.boost(boost);
}
if (randomBoolean()) {
String queryName = randomAlphaOfLength(5);
original.queryName(queryName);
}
testRewrittenInferenceQuery(context, original);
}
public void testSparseVectorQueryOnNonInferenceFieldRemainsUnchanged() throws IOException {
QueryRewriteContext context = createQueryRewriteContext(Map.of()); // No inference fields
QueryBuilder original = new SparseVectorQueryBuilder(FIELD_NAME, INFERENCE_ID, QUERY);
QueryBuilder rewritten = original.rewrite(context);
assertTrue(
"Expected query to remain sparse_vector but was [" + rewritten.getClass().getName() + "]",
rewritten instanceof SparseVectorQueryBuilder
);
assertEquals(original, rewritten);
}
private void testRewrittenInferenceQuery(QueryRewriteContext context, QueryBuilder original) throws IOException {
QueryBuilder rewritten = original.rewrite(context);
assertTrue(
"Expected query to be intercepted, but was [" + rewritten.getClass().getName() + "]",
rewritten instanceof InterceptedQueryBuilderWrapper
);
InterceptedQueryBuilderWrapper intercepted = (InterceptedQueryBuilderWrapper) rewritten;
assertEquals(original.boost(), intercepted.boost(), 0.0f);
assertEquals(original.queryName(), intercepted.queryName());
assertTrue(intercepted.queryBuilder instanceof NestedQueryBuilder);
NestedQueryBuilder nestedQueryBuilder = (NestedQueryBuilder) intercepted.queryBuilder;
assertEquals(SemanticTextField.getChunksFieldName(FIELD_NAME), nestedQueryBuilder.path());
assertEquals(original.boost(), nestedQueryBuilder.boost(), 0.0f);
assertEquals(original.queryName(), nestedQueryBuilder.queryName());
QueryBuilder innerQuery = nestedQueryBuilder.query();
assertTrue(innerQuery instanceof SparseVectorQueryBuilder);
SparseVectorQueryBuilder sparseVectorQueryBuilder = (SparseVectorQueryBuilder) innerQuery;
assertEquals(SemanticTextField.getEmbeddingsFieldName(FIELD_NAME), sparseVectorQueryBuilder.getFieldName());
assertEquals(INFERENCE_ID, sparseVectorQueryBuilder.getInferenceId());
assertEquals(QUERY, sparseVectorQueryBuilder.getQuery());
assertEquals(1.0f, sparseVectorQueryBuilder.boost(), 0.0f);
assertNull(sparseVectorQueryBuilder.queryName());
}
private QueryRewriteContext createQueryRewriteContext(Map<String, InferenceFieldMetadata> inferenceFields) {
IndexMetadata indexMetadata = IndexMetadata.builder(index.getName())
.settings(
Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID())
)
.numberOfShards(1)
.numberOfReplicas(0)
.putInferenceFields(inferenceFields)
.build();
ResolvedIndices resolvedIndices = new MockResolvedIndices(
Map.of(),
new OriginalIndices(new String[] { index.getName() }, IndicesOptions.DEFAULT),
Map.of(index, indexMetadata)
);
return new QueryRewriteContext(
null,
client,
null,
TransportVersion.current(),
RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY,
resolvedIndices,
null,
createRewriteInterceptor(),
null
);
}
@SuppressWarnings("deprecation")
private QueryRewriteInterceptor createRewriteInterceptor() {
return new LegacySemanticSparseVectorQueryRewriteInterceptor();
}
}
| LegacySemanticSparseVectorQueryRewriteInterceptorTests |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/session/MockitoSessionLoggerAdapter.java | {
"start": 258,
"end": 551
} | class ____ implements MockitoSessionLogger {
private final MockitoLogger logger;
public MockitoSessionLoggerAdapter(MockitoLogger logger) {
this.logger = logger;
}
@Override
public void log(String hint) {
logger.log(hint);
}
}
| MockitoSessionLoggerAdapter |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/authentication/jaas/event/JaasAuthenticationSuccessEvent.java | {
"start": 1069,
"end": 1302
} | class ____ extends JaasAuthenticationEvent {
@Serial
private static final long serialVersionUID = 2236826715750256181L;
public JaasAuthenticationSuccessEvent(Authentication auth) {
super(auth);
}
}
| JaasAuthenticationSuccessEvent |
java | apache__flink | flink-connectors/flink-connector-base/src/test/java/org/apache/flink/connector/base/source/hybrid/HybridSourceSplitEnumeratorTest.java | {
"start": 1867,
"end": 17197
} | class ____ {
private static final int SUBTASK0 = 0;
private static final int SUBTASK1 = 1;
private static final MockBaseSource MOCK_SOURCE = new MockBaseSource(1, 1, Boundedness.BOUNDED);
private HybridSource<Integer> source;
private MockSplitEnumeratorContext<HybridSourceSplit> context;
private HybridSourceSplitEnumerator enumerator;
private HybridSourceSplit splitFromSource0;
private HybridSourceSplit splitFromSource1;
private void setupEnumeratorAndTriggerSourceSwitch() {
context = new MockSplitEnumeratorContext<>(2);
source = HybridSource.builder(MOCK_SOURCE).addSource(MOCK_SOURCE).build();
enumerator = (HybridSourceSplitEnumerator) source.createEnumerator(context);
enumerator.start();
// mock enumerator assigns splits once all readers are registered
registerReader(context, enumerator, SUBTASK0);
assertThat(context.getSplitsAssignmentSequence()).isEmpty();
registerReader(context, enumerator, SUBTASK1);
assertThat(context.getSplitsAssignmentSequence()).isEmpty();
enumerator.handleSourceEvent(SUBTASK0, new SourceReaderFinishedEvent(-1));
assertThat(context.getSplitsAssignmentSequence()).isEmpty();
enumerator.handleSourceEvent(SUBTASK1, new SourceReaderFinishedEvent(-1));
assertThat(context.getSplitsAssignmentSequence()).hasSize(1);
splitFromSource0 =
context.getSplitsAssignmentSequence().get(0).assignment().get(SUBTASK0).get(0);
assertThat(splitFromSource0.sourceIndex()).isEqualTo(0);
assertThat(getCurrentSourceIndex(enumerator)).isEqualTo(0);
// trigger source switch
enumerator.handleSourceEvent(SUBTASK0, new SourceReaderFinishedEvent(0));
assertThat(getCurrentSourceIndex(enumerator)).as("one reader finished").isEqualTo(0);
enumerator.handleSourceEvent(SUBTASK1, new SourceReaderFinishedEvent(0));
assertThat(getCurrentSourceIndex(enumerator)).as("both readers finished").isEqualTo(1);
assertThat(context.getSplitsAssignmentSequence())
.as("switch triggers split assignment")
.hasSize(2);
splitFromSource1 =
context.getSplitsAssignmentSequence().get(1).assignment().get(SUBTASK0).get(0);
assertThat(splitFromSource1.sourceIndex()).isEqualTo(1);
enumerator.handleSourceEvent(SUBTASK1, new SourceReaderFinishedEvent(SUBTASK1));
assertThat(getCurrentSourceIndex(enumerator)).as("reader without assignment").isEqualTo(1);
}
@Test
void testHighCardinalitySources() {
context = new MockSplitEnumeratorContext<>(2);
HybridSource.HybridSourceBuilder<Integer, MockSplitEnumerator> hybridSourceBuilder =
HybridSource.builder(MOCK_SOURCE);
final int maxSources = 130;
for (int i = 1; i < maxSources; i++) {
hybridSourceBuilder = hybridSourceBuilder.addSource(MOCK_SOURCE);
}
source = hybridSourceBuilder.build();
enumerator = (HybridSourceSplitEnumerator) source.createEnumerator(context);
enumerator.start();
// mock enumerator assigns splits once all readers are registered
registerReader(context, enumerator, SUBTASK0);
assertThat(context.getSplitsAssignmentSequence()).isEmpty();
registerReader(context, enumerator, SUBTASK1);
assertThat(context.getSplitsAssignmentSequence()).isEmpty();
enumerator.handleSourceEvent(SUBTASK0, new SourceReaderFinishedEvent(-1));
assertThat(context.getSplitsAssignmentSequence()).isEmpty();
enumerator.handleSourceEvent(SUBTASK1, new SourceReaderFinishedEvent(-1));
assertThat(context.getSplitsAssignmentSequence()).hasSize(1);
splitFromSource0 =
context.getSplitsAssignmentSequence().get(0).assignment().get(SUBTASK0).get(0);
assertThat(splitFromSource0.sourceIndex()).isEqualTo(0);
assertThat(getCurrentSourceIndex(enumerator)).isEqualTo(0);
for (int i = 0; i < maxSources; i++) {
enumerator.handleSourceEvent(SUBTASK0, new SourceReaderFinishedEvent(i));
assertThat(getCurrentSourceIndex(enumerator)).as("one reader finished").isEqualTo(i);
enumerator.handleSourceEvent(SUBTASK1, new SourceReaderFinishedEvent(i));
if (i < maxSources - 1) {
assertThat(getCurrentSourceIndex(enumerator))
.as("both readers finished")
.isEqualTo(i + 1);
assertThat(context.getSplitsAssignmentSequence())
.as("switch triggers split assignment")
.hasSize(i + 2);
splitFromSource1 =
context.getSplitsAssignmentSequence()
.get(i)
.assignment()
.get(SUBTASK0)
.get(0);
assertThat(splitFromSource1.sourceIndex()).isEqualTo(i);
} else {
assertThat(getCurrentSourceIndex(enumerator))
.as("both readers finished")
.isEqualTo(maxSources - 1);
}
}
enumerator.handleSourceEvent(SUBTASK1, new SourceReaderFinishedEvent(SUBTASK1));
assertThat(getCurrentSourceIndex(enumerator))
.as("reader without assignment")
.isEqualTo(maxSources - 1);
}
@Test
void testRegisterReaderAfterSwitchAndReaderReset() {
setupEnumeratorAndTriggerSourceSwitch();
// add split of previous source back (simulates reader reset during recovery)
context.getSplitsAssignmentSequence().clear();
enumerator.addReader(SUBTASK0);
enumerator.addSplitsBack(Collections.singletonList(splitFromSource0), SUBTASK0);
assertThat(context.getSplitsAssignmentSequence()).isEmpty();
enumerator.handleSourceEvent(SUBTASK0, new SourceReaderFinishedEvent(-1));
assertSplitAssignment(
"addSplitsBack triggers assignment when reader registered",
context,
1,
splitFromSource0,
SUBTASK0);
// remove reader from context
context.getSplitsAssignmentSequence().clear();
context.unregisterReader(SUBTASK0);
enumerator.addSplitsBack(Collections.singletonList(splitFromSource0), SUBTASK0);
assertThat(context.getSplitsAssignmentSequence())
.as("addSplitsBack doesn't trigger assignment when reader not registered")
.isEmpty();
registerReader(context, enumerator, SUBTASK0);
assertThat(context.getSplitsAssignmentSequence()).isEmpty();
enumerator.handleSourceEvent(SUBTASK0, new SourceReaderFinishedEvent(-1));
assertSplitAssignment(
"registerReader triggers assignment", context, 1, splitFromSource0, SUBTASK0);
}
@Test
void testHandleSplitRequestAfterSwitchAndReaderReset() {
setupEnumeratorAndTriggerSourceSwitch();
UnderlyingEnumeratorWrapper underlyingEnumeratorWrapper =
new UnderlyingEnumeratorWrapper(getCurrentEnumerator(enumerator));
Whitebox.setInternalState(enumerator, "currentEnumerator", underlyingEnumeratorWrapper);
List<MockSourceSplit> mockSourceSplits =
Whitebox.getInternalState(underlyingEnumeratorWrapper.enumerator, "splits");
assertThat(mockSourceSplits).isEmpty();
// simulate reader reset to before switch by adding split of previous source back
context.getSplitsAssignmentSequence().clear();
assertThat(getCurrentSourceIndex(enumerator)).as("current enumerator").isEqualTo(1);
assertThat(underlyingEnumeratorWrapper.handleSplitRequests).isEmpty();
enumerator.handleSplitRequest(SUBTASK0, "fakehostname");
SwitchedSources switchedSources = new SwitchedSources();
switchedSources.put(1, MOCK_SOURCE);
assertSplitAssignment(
"handleSplitRequest triggers assignment of split by underlying enumerator",
context,
1,
HybridSourceSplit.wrapSplit(
UnderlyingEnumeratorWrapper.SPLIT_1, 1, switchedSources),
SUBTASK0);
// handleSplitRequest invalid during reset
enumerator.addSplitsBack(Collections.singletonList(splitFromSource0), SUBTASK0);
assertThatThrownBy(() -> enumerator.handleSplitRequest(SUBTASK0, "fakehostname"))
.isInstanceOf(IllegalStateException.class);
}
@Test
void testRestoreEnumerator() throws Exception {
setupEnumeratorAndTriggerSourceSwitch();
enumerator = (HybridSourceSplitEnumerator) source.createEnumerator(context);
enumerator.start();
HybridSourceEnumeratorState enumeratorState = enumerator.snapshotState(0);
MockSplitEnumerator underlyingEnumerator = getCurrentEnumerator(enumerator);
assertThat(
(List<MockSourceSplit>)
Whitebox.getInternalState(underlyingEnumerator, "splits"))
.hasSize(1);
enumerator =
(HybridSourceSplitEnumerator) source.restoreEnumerator(context, enumeratorState);
enumerator.start();
underlyingEnumerator = getCurrentEnumerator(enumerator);
assertThat(
(List<MockSourceSplit>)
Whitebox.getInternalState(underlyingEnumerator, "splits"))
.hasSize(1);
}
@Test
void testRestoreEnumeratorAfterFirstSourceWithoutRestoredSplits() throws Exception {
setupEnumeratorAndTriggerSourceSwitch();
HybridSourceEnumeratorState enumeratorState = enumerator.snapshotState(0);
MockSplitEnumerator underlyingEnumerator = getCurrentEnumerator(enumerator);
assertThat(
(List<MockSourceSplit>)
Whitebox.getInternalState(underlyingEnumerator, "splits"))
.hasSize(0);
enumerator =
(HybridSourceSplitEnumerator) source.restoreEnumerator(context, enumeratorState);
enumerator.start();
// subtask starts at -1 since it has no splits after restore
enumerator.handleSourceEvent(SUBTASK0, new SourceReaderFinishedEvent(-1));
underlyingEnumerator = getCurrentEnumerator(enumerator);
assertThat(
(List<MockSourceSplit>)
Whitebox.getInternalState(underlyingEnumerator, "splits"))
.hasSize(0);
}
@Test
void testDefaultMethodDelegation() throws Exception {
setupEnumeratorAndTriggerSourceSwitch();
SplitEnumerator<MockSourceSplit, Object> underlyingEnumeratorSpy =
Mockito.spy((SplitEnumerator) getCurrentEnumerator(enumerator));
Whitebox.setInternalState(enumerator, "currentEnumerator", underlyingEnumeratorSpy);
enumerator.notifyCheckpointComplete(1);
Mockito.verify(underlyingEnumeratorSpy).notifyCheckpointComplete(1);
enumerator.notifyCheckpointAborted(2);
Mockito.verify(underlyingEnumeratorSpy).notifyCheckpointAborted(2);
SwitchSourceEvent se = new SwitchSourceEvent(0, null, false);
enumerator.handleSourceEvent(0, se);
Mockito.verify(underlyingEnumeratorSpy).handleSourceEvent(0, se);
}
@Test
void testInterceptNoMoreSplitEvent() {
context = new MockSplitEnumeratorContext<>(2);
source = HybridSource.builder(MOCK_SOURCE).addSource(MOCK_SOURCE).build();
enumerator = (HybridSourceSplitEnumerator) source.createEnumerator(context);
enumerator.start();
// mock enumerator assigns splits once all readers are registered
// At this time, hasNoMoreSplit check will call context.signalIntermediateNoMoreSplits
registerReader(context, enumerator, SUBTASK0);
registerReader(context, enumerator, SUBTASK1);
enumerator.handleSourceEvent(SUBTASK0, new SourceReaderFinishedEvent(-1));
enumerator.handleSourceEvent(SUBTASK1, new SourceReaderFinishedEvent(-1));
assertThat(context.hasNoMoreSplits(0)).isFalse();
assertThat(context.hasNoMoreSplits(1)).isFalse();
splitFromSource0 =
context.getSplitsAssignmentSequence().get(0).assignment().get(SUBTASK0).get(0);
// task read finished, hasNoMoreSplit check will call context.signalNoMoreSplits, this is
// final finished event
enumerator.handleSourceEvent(SUBTASK0, new SourceReaderFinishedEvent(0));
enumerator.handleSourceEvent(SUBTASK1, new SourceReaderFinishedEvent(0));
assertThat(context.hasNoMoreSplits(0)).isTrue();
assertThat(context.hasNoMoreSplits(1)).isTrue();
// test add splits back, then SUBTASK0 restore splitFromSource0 split
// reset splits assignment & previous subtaskHasNoMoreSplits flag.
context.getSplitsAssignmentSequence().clear();
context.resetNoMoreSplits(0);
enumerator.addReader(SUBTASK0);
enumerator.addSplitsBack(Collections.singletonList(splitFromSource0), SUBTASK0);
enumerator.handleSourceEvent(SUBTASK0, new SourceReaderFinishedEvent(-1));
assertThat(context.hasNoMoreSplits(0)).isFalse();
enumerator.handleSourceEvent(SUBTASK0, new SourceReaderFinishedEvent(0));
assertThat(context.hasNoMoreSplits(0)).isTrue();
}
@Test
void testMultiSubtaskSwitchEnumerator() {
context = new MockSplitEnumeratorContext<>(2);
source =
HybridSource.builder(MOCK_SOURCE)
.addSource(MOCK_SOURCE)
.addSource(MOCK_SOURCE)
.build();
enumerator = (HybridSourceSplitEnumerator) source.createEnumerator(context);
enumerator.start();
registerReader(context, enumerator, SUBTASK0);
registerReader(context, enumerator, SUBTASK1);
enumerator.handleSourceEvent(SUBTASK0, new SourceReaderFinishedEvent(-1));
enumerator.handleSourceEvent(SUBTASK1, new SourceReaderFinishedEvent(-1));
assertThat(getCurrentSourceIndex(enumerator)).isEqualTo(0);
enumerator.handleSourceEvent(SUBTASK0, new SourceReaderFinishedEvent(0));
assertThat(getCurrentSourceIndex(enumerator)).isEqualTo(0);
enumerator.handleSourceEvent(SUBTASK1, new SourceReaderFinishedEvent(0));
assertThat(getCurrentSourceIndex(enumerator))
.as("all reader finished source-0")
.isEqualTo(1);
enumerator.handleSourceEvent(SUBTASK0, new SourceReaderFinishedEvent(1));
assertThat(getCurrentSourceIndex(enumerator))
.as(
"only reader-0 has finished reading, reader-1 is not yet done, so do not switch to the next source")
.isEqualTo(1);
enumerator.handleSourceEvent(SUBTASK1, new SourceReaderFinishedEvent(1));
assertThat(getCurrentSourceIndex(enumerator))
.as("all reader finished source-1")
.isEqualTo(2);
}
private static | HybridSourceSplitEnumeratorTest |
java | apache__camel | components/camel-jte/src/test/java/org/apache/camel/component/jte/JteTemplateFromHeaderTest.java | {
"start": 1118,
"end": 2448
} | class ____ extends CamelTestSupport {
private final String tmp = """
@import org.apache.camel.component.jte.Model
@param Model model
Hello ${model.header("name")}.
""";
@Test
public void testHeader() throws Exception {
Exchange exchange = template.request("direct:a", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(JteConstants.JTE_TEMPLATE, tmp);
exchange.getIn().setHeader("name", "Scott");
}
});
if (exchange.isFailed()) {
throw exchange.getException();
}
assertEquals("Hello Scott.", exchange.getMessage().getBody());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
JteComponent jte = context.getComponent("jte", JteComponent.class);
jte.setWorkDir("target/jte-classes");
// START SNIPPET: example
from("direct:a").to(
"jte:org/apache/camel/component/jte/dummy.jte?allowTemplateFromHeader=true");
// END SNIPPET: example
}
};
}
}
| JteTemplateFromHeaderTest |
java | apache__camel | components/camel-http/src/main/java/org/apache/camel/component/http/HttpConstants.java | {
"start": 1021,
"end": 4892
} | class ____ {
public static final String CONTENT_TYPE_JAVA_SERIALIZED_OBJECT = "application/x-java-serialized-object";
public static final ContentType JAVA_SERIALIZED_OBJECT = ContentType.create(CONTENT_TYPE_JAVA_SERIALIZED_OBJECT);
public static final String CONTENT_TYPE_WWW_FORM_URLENCODED = "application/x-www-form-urlencoded";
@Metadata(description = "The HTTP content encoding. Is set on both the IN and OUT message to\n" +
"provide a content encoding, such as `gzip`.",
javaType = "String")
public static final String CONTENT_ENCODING = Exchange.CONTENT_ENCODING;
@Metadata(description = "The HTTP response code from the external server. Is 200 for OK.", javaType = "int",
important = true)
public static final String HTTP_RESPONSE_CODE = Exchange.HTTP_RESPONSE_CODE;
@Metadata(description = "The HTTP response text from the external server.", javaType = "String", important = true)
public static final String HTTP_RESPONSE_TEXT = Exchange.HTTP_RESPONSE_TEXT;
@Metadata(label = "producer", description = "URI parameters. Will override existing URI parameters set directly on\n" +
"the endpoint.",
javaType = "String")
public static final String HTTP_QUERY = Exchange.HTTP_QUERY;
@Metadata(label = "producer", description = "The version of the http protocol used.", javaType = "String")
public static final String HTTP_PROTOCOL_VERSION = Exchange.HTTP_PROTOCOL_VERSION;
@Metadata(label = "producer", description = "The target host.", javaType = "String")
public static final String HTTP_HEADER_HOST = HttpHeaders.HOST;
@Metadata(label = "producer", description = "The rest http URI.", javaType = "String")
public static final String REST_HTTP_URI = Exchange.REST_HTTP_URI;
@Metadata(label = "producer", description = "URI to call. Will override existing URI set directly on the endpoint.\n" +
"This uri is the uri of the http server to call. Its not the same as the\n" +
"Camel endpoint uri, where you can configure endpoint options such as\n" +
"security etc. This header does not support that, its only the uri of the\n" +
"http server.",
javaType = "String")
public static final String HTTP_URI = Exchange.HTTP_URI;
@Metadata(label = "producer", description = "Request URI's path, the header will be used to build the request URI\n" +
"with the HTTP_URI.",
javaType = "String")
public static final String HTTP_PATH = Exchange.HTTP_PATH;
@Metadata(label = "producer", description = "The rest http query.", javaType = "String")
public static final String REST_HTTP_QUERY = Exchange.REST_HTTP_QUERY;
@Metadata(label = "producer", description = "The http raw query.", javaType = "String")
public static final String HTTP_RAW_QUERY = "CamelHttpRawQuery";
@Metadata(label = "producer", description = "The http method to use.",
javaType = "org.apache.camel.component.http.HttpMethods")
public static final String HTTP_METHOD = Exchange.HTTP_METHOD;
@Metadata(description = "The character encoding.", javaType = "String")
public static final String HTTP_CHARACTER_ENCODING = Exchange.HTTP_CHARACTER_ENCODING;
@Metadata(description = "The HTTP content type. Is set on both the IN and OUT message to provide\n" +
"a content type, such as `text/html`.",
javaType = "String", important = true)
public static final String CONTENT_TYPE = Exchange.CONTENT_TYPE;
private HttpConstants() {
}
}
| HttpConstants |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/codec/xml/Jaxb2XmlEncoderTests.java | {
"start": 4838,
"end": 4869
} | class ____ {}
public static | Model |
java | spring-projects__spring-security | test/src/test/java/org/springframework/security/test/web/reactive/server/SecurityMockServerConfigurerOpaqueTokenTests.java | {
"start": 1972,
"end": 6752
} | class ____ extends AbstractMockServerConfigurersTests {
private GrantedAuthority authority1 = new SimpleGrantedAuthority("one");
private GrantedAuthority authority2 = new SimpleGrantedAuthority("two");
private WebTestClient client = WebTestClient.bindToController(this.securityContextController)
.webFilter(new SecurityContextServerWebExchangeWebFilter())
.argumentResolvers((resolvers) -> resolvers
.addCustomResolver(new CurrentSecurityContextArgumentResolver(new ReactiveAdapterRegistry())))
.apply(SecurityMockServerConfigurers.springSecurity())
.configureClient()
.defaultHeader(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON_VALUE)
.build();
@Test
public void mockOpaqueTokenWhenUsingDefaultsThenBearerTokenAuthentication() {
this.client.mutateWith(SecurityMockServerConfigurers.mockOpaqueToken()).get().exchange().expectStatus().isOk();
SecurityContext context = this.securityContextController.removeSecurityContext();
assertThat(context.getAuthentication()).isInstanceOf(BearerTokenAuthentication.class);
BearerTokenAuthentication token = (BearerTokenAuthentication) context.getAuthentication();
assertThat(token.getAuthorities()).isNotEmpty();
assertThat(token.getToken()).isNotNull();
assertThat(token.getTokenAttributes()).containsEntry(OAuth2TokenIntrospectionClaimNames.SUB, "user");
}
@Test
public void mockOpaqueTokenWhenAuthoritiesThenBearerTokenAuthentication() {
this.client
.mutateWith(SecurityMockServerConfigurers.mockOpaqueToken().authorities(this.authority1, this.authority2))
.get()
.exchange()
.expectStatus()
.isOk();
SecurityContext context = this.securityContextController.removeSecurityContext();
assertThat((List<GrantedAuthority>) context.getAuthentication().getAuthorities()).containsOnly(this.authority1,
this.authority2);
}
@Test
public void mockOpaqueTokenWhenAttributesThenBearerTokenAuthentication() {
String sub = new String("my-subject");
this.client
.mutateWith(SecurityMockServerConfigurers.mockOpaqueToken()
.attributes((attributes) -> attributes.put(OAuth2TokenIntrospectionClaimNames.SUB, sub)))
.get()
.exchange()
.expectStatus()
.isOk();
SecurityContext context = this.securityContextController.removeSecurityContext();
assertThat(context.getAuthentication()).isInstanceOf(BearerTokenAuthentication.class);
BearerTokenAuthentication token = (BearerTokenAuthentication) context.getAuthentication();
assertThat(token.getTokenAttributes().get(OAuth2TokenIntrospectionClaimNames.SUB)).isSameAs(sub);
}
@Test
public void mockOpaqueTokenWhenPrincipalThenBearerTokenAuthentication() {
OAuth2AuthenticatedPrincipal principal = TestOAuth2AuthenticatedPrincipals.active();
this.client.mutateWith(SecurityMockServerConfigurers.mockOpaqueToken().principal(principal))
.get()
.exchange()
.expectStatus()
.isOk();
SecurityContext context = this.securityContextController.removeSecurityContext();
assertThat(context.getAuthentication()).isInstanceOf(BearerTokenAuthentication.class);
BearerTokenAuthentication token = (BearerTokenAuthentication) context.getAuthentication();
assertThat(token.getPrincipal()).isSameAs(principal);
}
@Test
public void mockOpaqueTokenWhenPrincipalSpecifiedThenLastCalledTakesPrecedence() {
OAuth2AuthenticatedPrincipal principal = TestOAuth2AuthenticatedPrincipals
.active((a) -> a.put("scope", "user"));
this.client
.mutateWith(SecurityMockServerConfigurers.mockOpaqueToken()
.attributes((a) -> a.put(OAuth2TokenIntrospectionClaimNames.SUB, "foo"))
.principal(principal))
.get()
.exchange()
.expectStatus()
.isOk();
SecurityContext context = this.securityContextController.removeSecurityContext();
assertThat(context.getAuthentication()).isInstanceOf(BearerTokenAuthentication.class);
BearerTokenAuthentication token = (BearerTokenAuthentication) context.getAuthentication();
assertThat((String) ((OAuth2AuthenticatedPrincipal) token.getPrincipal())
.getAttribute(OAuth2TokenIntrospectionClaimNames.SUB))
.isEqualTo(principal.getAttribute(OAuth2TokenIntrospectionClaimNames.SUB));
this.client
.mutateWith(SecurityMockServerConfigurers.mockOpaqueToken()
.principal(principal)
.attributes((a) -> a.put(OAuth2TokenIntrospectionClaimNames.SUB, "bar")))
.get()
.exchange()
.expectStatus()
.isOk();
context = this.securityContextController.removeSecurityContext();
assertThat(context.getAuthentication()).isInstanceOf(BearerTokenAuthentication.class);
token = (BearerTokenAuthentication) context.getAuthentication();
assertThat((String) ((OAuth2AuthenticatedPrincipal) token.getPrincipal())
.getAttribute(OAuth2TokenIntrospectionClaimNames.SUB)).isEqualTo("bar");
}
}
| SecurityMockServerConfigurerOpaqueTokenTests |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.