language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/superbuilder/Vehicle.java | {
"start": 200,
"end": 560
} | class ____ {
private final int amountOfTires;
protected Vehicle(VehicleBuilder<?, ?> b) {
this.amountOfTires = b.amountOfTires;
}
public static VehicleBuilder<?, ?> builder() {
return new VehicleBuilderImpl();
}
public int getAmountOfTires() {
return this.amountOfTires;
}
public abstract static | Vehicle |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webmvc/src/test/java/org/springframework/cloud/gateway/server/mvc/ServerMvcIntegrationTests.java | {
"start": 59395,
"end": 60631
} | class ____ implements Filter, Ordered {
@Override
public int getOrder() {
return FormFilter.FORM_FILTER_ORDER - 1;
}
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain)
throws IOException, ServletException {
if (isFormPost((HttpServletRequest) request)) {
// test for formUrlencodedWorks and
// https://github.com/spring-cloud/spring-cloud-gateway/issues/3244
assertThat(request.getParameter("foo")).isEqualTo("fooquery");
assertThat(request.getParameter("foo")).isEqualTo("fooquery");
}
filterChain.doFilter(request, response);
if (isFormPost((HttpServletRequest) request)) {
assertThat(request.getParameter("foo")).isEqualTo("fooquery");
assertThat(request.getParameter("foo")).isEqualTo("fooquery");
}
}
static boolean isFormPost(HttpServletRequest request) {
String contentType = request.getContentType();
return (contentType != null && contentType.contains(MediaType.APPLICATION_FORM_URLENCODED_VALUE)
&& HttpMethod.POST.matches(request.getMethod()));
}
}
protected record Hello(String message) {
}
protected record Event(String foo, String bar) {
}
@RestController
protected static | MyFilter |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/NamespaceHttpFormLoginTests.java | {
"start": 6674,
"end": 7634
} | class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
SavedRequestAwareAuthenticationSuccessHandler successHandler = new SavedRequestAwareAuthenticationSuccessHandler();
successHandler.setDefaultTargetUrl("/custom/targetUrl");
// @formatter:off
http
.authorizeHttpRequests((requests) -> requests
.anyRequest().hasRole("USER"))
.formLogin((login) -> login
.loginPage("/login")
.failureHandler(new SimpleUrlAuthenticationFailureHandler("/custom/failure")) // form-login@authentication-failure-handler-ref
.successHandler(successHandler) // form-login@authentication-success-handler-ref
.authenticationDetailsSource(authenticationDetailsSource()));
return http.build();
// @formatter:on
}
@Bean
WebAuthenticationDetailsSource authenticationDetailsSource() {
return spy(WebAuthenticationDetailsSource.class);
}
}
@Configuration
static | FormLoginCustomRefsConfig |
java | micronaut-projects__micronaut-core | router/src/main/java/io/micronaut/web/router/RouteBuilder.java | {
"start": 26806,
"end": 30885
} | class ____ ID. The URI route is built by the configured {@link UriNamingStrategy}.</p>
*
* @param type The class
* @param id The route id
* @return The route
*/
default UriRoute DELETE(Class<?> type, PropertyConvention id) {
return DELETE(getUriNamingStrategy().resolveUri(type, id), type, MethodConvention.DELETE.methodName(), Object.class);
}
/**
* <p>Route the specified URI template to the specified target.</p>
*
* <p>The number of variables in the template should match the number of method arguments</p>
*
* @param uri The URI
* @param method The method
* @return The route
*/
default UriRoute DELETE(String uri, ExecutableMethod<?, ?> method) {
return DELETE(uri, method.getDeclaringType(), method.getMethodName(), method.getArgumentTypes());
}
/**
* <p>Route the specified URI template to the specified target.</p>
*
* <p>The number of variables in the template should match the number of method arguments</p>
*
* @param beanDefinition The bean definition
* @param uri The URI
* @param method The method
* @return The route
*/
default UriRoute DELETE(String uri, BeanDefinition<?> beanDefinition, ExecutableMethod<?, ?> method) {
return DELETE(uri, beanDefinition.getBeanType(), method.getMethodName(), method.getArgumentTypes());
}
/**
* <p>Route the specified URI template to the specified target.</p>
*
* <p>The number of variables in the template should match the number of method arguments</p>
*
* @param uri The URI
* @param target The target
* @param method The method
* @param parameterTypes The parameter types for the target method
* @return The route
*/
UriRoute DELETE(String uri, Object target, String method, Class<?>... parameterTypes);
/**
* <p>Route the specified URI template to the specified target.</p>
*
* <p>The number of variables in the template should match the number of method arguments</p>
*
* @param uri The URI
* @param type The type
* @param method The method
* @param parameterTypes The parameter types for the target method
* @return The route
*/
UriRoute DELETE(String uri, Class<?> type, String method, Class<?>... parameterTypes);
/**
* Route the specified URI to the specified target for an HTTP OPTIONS. Since the method to execute is not
* specified "index" is used by default.
*
* @param uri The URI
* @param target The target object
* @return The route
*/
default UriRoute OPTIONS(String uri, Object target) {
return OPTIONS(uri, target, MethodConvention.OPTIONS.methodName());
}
/**
* <p>Route to the specified object. The URI route is built by the configured {@link UriNamingStrategy}.</p>
*
* @param target The object
* @return The route
*/
default UriRoute OPTIONS(Object target) {
Class<?> type = target.getClass();
return OPTIONS(getUriNamingStrategy().resolveUri(type), target);
}
/**
* <p>Route to the specified object and ID. The URI route is built by the configured {@link UriNamingStrategy}.</p>
*
* @param target The object
* @param id The route id
* @return The route
*/
default UriRoute OPTIONS(Object target, PropertyConvention id) {
Class<?> type = target.getClass();
return OPTIONS(getUriNamingStrategy().resolveUri(type, id), target, MethodConvention.OPTIONS.methodName());
}
/**
* <p>Route to the specified class. The URI route is built by the configured {@link UriNamingStrategy}.</p>
*
* @param type The class
* @return The route
*/
default UriRoute OPTIONS(Class<?> type) {
return OPTIONS(getUriNamingStrategy().resolveUri(type), type, MethodConvention.OPTIONS.methodName());
}
/**
* <p>Route to the specified | and |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/file/FileConsumerFilterDirectoryTest.java | {
"start": 1113,
"end": 2467
} | class ____ extends ContextTestSupport {
@Test
public void testFilterFiles() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(0);
template.sendBodyAndHeader(fileUri("foo"), "This is a file to be filtered", Exchange.FILE_NAME,
"skipme.txt");
mock.setResultWaitTime(100);
mock.assertIsSatisfied();
}
@Test
public void testFilterFilesWithARegularFile() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
mock.expectedBodiesReceived("Hello World");
template.sendBodyAndHeader(fileUri("foo"), "This is a file to be filtered", Exchange.FILE_NAME,
"skipme.txt");
template.sendBodyAndHeader(fileUri("barbar"), "Hello World", Exchange.FILE_NAME, "hello.txt");
mock.assertIsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(fileUri(
"?initialDelay=0&delay=10&recursive=true&filterDirectory=${header.CamelFileNameOnly.length()} > 4"))
.convertBodyTo(String.class).to("mock:result");
}
};
}
}
| FileConsumerFilterDirectoryTest |
java | apache__camel | components/camel-lra/src/main/java/org/apache/camel/service/lra/LRAClient.java | {
"start": 1925,
"end": 8650
} | class ____ implements Closeable {
public static final String CONTENT_TYPE = "Content-Type";
public static final String TEXT_PLAIN_CONTENT = "text/plain";
private final LRASagaService sagaService;
private final HttpClient client;
private final String lraUrl;
private static final Logger LOG = LoggerFactory.getLogger(LRAClient.class);
public LRAClient(LRASagaService sagaService) {
this(sagaService, HttpClient.newHttpClient());
}
public LRAClient(LRASagaService sagaService, HttpClient client) {
if (client == null) {
throw new IllegalArgumentException("HttpClient must not be null");
}
this.sagaService = sagaService;
this.client = client;
lraUrl = new LRAUrlBuilder()
.host(sagaService.getCoordinatorUrl())
.path(sagaService.getCoordinatorContextPath())
.build();
}
public CompletableFuture<URL> newLRA(Exchange exchange) {
HttpRequest request = prepareRequest(URI.create(lraUrl + COORDINATOR_PATH_START), exchange)
.POST(HttpRequest.BodyPublishers.ofString(""))
.build();
CompletableFuture<HttpResponse<String>> future = client.sendAsync(request, HttpResponse.BodyHandlers.ofString());
return future.thenApply(res -> {
if (res.statusCode() >= HttpURLConnection.HTTP_BAD_REQUEST) {
LOG.debug("LRA coordinator responded with error code {}. Message: {}", res.statusCode(), res.body());
throw new IllegalStateException(
"Cannot obtain LRA id from LRA coordinator due to response status code " + res.statusCode());
}
// See if there's a location header containing the LRA URL
List<String> location = res.headers().map().get("Location");
if (ObjectHelper.isNotEmpty(location)) {
return toURL(location.get(0));
}
// If there's no location header try the Long-Running-Action header, assuming there's only one present in the response
List<String> lraHeaders = res.headers().map().get(Exchange.SAGA_LONG_RUNNING_ACTION);
if (ObjectHelper.isNotEmpty(lraHeaders) && lraHeaders.size() == 1) {
return toURL(lraHeaders.get(0));
}
// Fallback to reading the URL from the response body
String responseBody = res.body();
if (ObjectHelper.isNotEmpty(responseBody)) {
return toURL(responseBody);
}
throw new IllegalStateException("Cannot obtain LRA id from LRA coordinator");
});
}
public CompletableFuture<Void> join(final URL lra, LRASagaStep step, Exchange exchange) {
return CompletableFuture.supplyAsync(() -> {
LRAUrlBuilder participantBaseUrl = new LRAUrlBuilder()
.host(sagaService.getLocalParticipantUrl())
.path(sagaService.getLocalParticipantContextPath())
.options(step.getOptions())
.compensation(step.getCompensation())
.completion(step.getCompletion());
String compensationURL = participantBaseUrl.path(PARTICIPANT_PATH_COMPENSATE).build();
String completionURL = participantBaseUrl.path(PARTICIPANT_PATH_COMPLETE).build();
StringBuilder link = new StringBuilder();
link.append('<').append(compensationURL).append('>').append("; rel=compensate");
link.append(',');
link.append('<').append(completionURL).append('>').append("; rel=complete");
String lraEndpoint = lra.toString();
if (step.getTimeoutInMilliseconds().isPresent()) {
lraEndpoint = lraEndpoint + "?" + HEADER_TIME_LIMIT + "=" + step.getTimeoutInMilliseconds().get();
}
HttpRequest request = prepareRequest(URI.create(lraEndpoint), exchange)
.setHeader(HEADER_LINK, link.toString())
.setHeader(Exchange.SAGA_LONG_RUNNING_ACTION, lra.toString())
.setHeader(CONTENT_TYPE, TEXT_PLAIN_CONTENT)
.PUT(HttpRequest.BodyPublishers.ofString(link.toString()))
.build();
return client.sendAsync(request, HttpResponse.BodyHandlers.ofString());
}, sagaService.getExecutorService())
.thenCompose(Function.identity())
.thenApply(response -> {
if (response.statusCode() != HttpURLConnection.HTTP_OK) {
throw new RuntimeCamelException("Cannot join LRA");
}
return null;
});
}
public CompletableFuture<Void> complete(URL lra, Exchange exchange) {
HttpRequest request = prepareRequest(URI.create(lra.toString() + COORDINATOR_PATH_CLOSE), exchange)
.setHeader(CONTENT_TYPE, TEXT_PLAIN_CONTENT)
.PUT(HttpRequest.BodyPublishers.ofString(""))
.build();
CompletableFuture<HttpResponse<String>> future = client.sendAsync(request, HttpResponse.BodyHandlers.ofString());
return future.thenApply(response -> {
if (response.statusCode() != HttpURLConnection.HTTP_OK) {
throw new RuntimeCamelException("Cannot complete LRA");
}
return null;
});
}
public CompletableFuture<Void> compensate(URL lra, Exchange exchange) {
HttpRequest request = prepareRequest(URI.create(lra.toString() + COORDINATOR_PATH_CANCEL), exchange)
.setHeader(CONTENT_TYPE, TEXT_PLAIN_CONTENT)
.PUT(HttpRequest.BodyPublishers.ofString(""))
.build();
CompletableFuture<HttpResponse<String>> future = client.sendAsync(request, HttpResponse.BodyHandlers.ofString());
return future.thenApply(response -> {
if (response.statusCode() != HttpURLConnection.HTTP_OK) {
throw new RuntimeCamelException("Cannot compensate LRA");
}
return null;
});
}
protected HttpRequest.Builder prepareRequest(URI uri, Exchange exchange) {
return HttpRequest.newBuilder().uri(uri);
}
private URL toURL(Object url) {
if (url == null) {
return null;
}
if (url instanceof URL) {
return URL.class.cast(url);
}
try {
return URI.create(url.toString()).toURL();
} catch (Exception ex) {
throw new RuntimeCamelException(ex);
}
}
@Override
public void close() throws IOException {
}
}
| LRAClient |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/processor/RestBindingConfiguration.java | {
"start": 956,
"end": 6012
} | class ____ {
private String consumes;
private String produces;
private String bindingMode;
private String bindingPackageScan;
private boolean skipBindingOnErrorCode;
private boolean clientRequestValidation;
private boolean clientResponseValidation;
private boolean enableCORS;
private boolean enableNoContentResponse;
private Map<String, String> corsHeaders;
private Map<String, String> queryDefaultValues;
private Map<String, String> queryAllowedValues;
private boolean requiredBody;
private Set<String> requiredQueryParameters;
private Set<String> requiredHeaders;
private String type;
private Class<?> typeClass;
private String outType;
private Class<?> outTypeClass;
private Map<String, String> responseCodes;
private Set<String> responseHeaders;
public String getConsumes() {
return consumes;
}
public void setConsumes(String consumes) {
this.consumes = consumes;
}
public String getProduces() {
return produces;
}
public void setProduces(String produces) {
this.produces = produces;
}
public String getBindingMode() {
return bindingMode;
}
public void setBindingMode(String bindingMode) {
this.bindingMode = bindingMode;
}
public String getBindingPackageScan() {
return bindingPackageScan;
}
public void setBindingPackageScan(String bindingPackageScan) {
this.bindingPackageScan = bindingPackageScan;
}
public boolean isSkipBindingOnErrorCode() {
return skipBindingOnErrorCode;
}
public void setSkipBindingOnErrorCode(boolean skipBindingOnErrorCode) {
this.skipBindingOnErrorCode = skipBindingOnErrorCode;
}
public boolean isClientRequestValidation() {
return clientRequestValidation;
}
public void setClientRequestValidation(boolean clientRequestValidation) {
this.clientRequestValidation = clientRequestValidation;
}
public boolean isClientResponseValidation() {
return clientResponseValidation;
}
public void setClientResponseValidation(boolean clientResponseValidation) {
this.clientResponseValidation = clientResponseValidation;
}
public boolean isEnableCORS() {
return enableCORS;
}
public void setEnableCORS(boolean enableCORS) {
this.enableCORS = enableCORS;
}
public boolean isEnableNoContentResponse() {
return enableNoContentResponse;
}
public void setEnableNoContentResponse(boolean enableNoContentResponse) {
this.enableNoContentResponse = enableNoContentResponse;
}
public Map<String, String> getCorsHeaders() {
return corsHeaders;
}
public void setCorsHeaders(Map<String, String> corsHeaders) {
this.corsHeaders = corsHeaders;
}
public Map<String, String> getQueryDefaultValues() {
return queryDefaultValues;
}
public void setQueryDefaultValues(Map<String, String> queryDefaultValues) {
this.queryDefaultValues = queryDefaultValues;
}
public Map<String, String> getQueryAllowedValues() {
return queryAllowedValues;
}
public void setQueryAllowedValues(Map<String, String> queryAllowedValues) {
this.queryAllowedValues = queryAllowedValues;
}
public boolean isRequiredBody() {
return requiredBody;
}
public void setRequiredBody(boolean requiredBody) {
this.requiredBody = requiredBody;
}
public Set<String> getRequiredQueryParameters() {
return requiredQueryParameters;
}
public void setRequiredQueryParameters(Set<String> requiredQueryParameters) {
this.requiredQueryParameters = requiredQueryParameters;
}
public Set<String> getRequiredHeaders() {
return requiredHeaders;
}
public void setRequiredHeaders(Set<String> requiredHeaders) {
this.requiredHeaders = requiredHeaders;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public Class<?> getTypeClass() {
return typeClass;
}
public void setTypeClass(Class<?> typeClass) {
this.typeClass = typeClass;
}
public String getOutType() {
return outType;
}
public void setOutType(String outType) {
this.outType = outType;
}
public Class<?> getOutTypeClass() {
return outTypeClass;
}
public void setOutTypeClass(Class<?> outTypeClass) {
this.outTypeClass = outTypeClass;
}
public Map<String, String> getResponseCodes() {
return responseCodes;
}
public void setResponseCodes(Map<String, String> responseCodes) {
this.responseCodes = responseCodes;
}
public Set<String> getResponseHeaders() {
return responseHeaders;
}
public void setResponseHeaders(Set<String> responseHeaders) {
this.responseHeaders = responseHeaders;
}
}
| RestBindingConfiguration |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/annotatewith/AnnotateValueMappingMethodMapper.java | {
"start": 413,
"end": 744
} | interface ____ {
@ValueMappings({
@ValueMapping(target = "EXISTING", source = "EXISTING"),
@ValueMapping( source = MappingConstants.ANY_REMAINING, target = "OTHER_EXISTING" )
})
@AnnotateWith(CustomMethodOnlyAnnotation.class)
AnnotateWithEnum map(String str);
}
| AnnotateValueMappingMethodMapper |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/histogram/HistogramPercentileTests.java | {
"start": 1413,
"end": 4787
} | class ____ extends AbstractScalarFunctionTestCase {
@Before
public void setup() {
assumeTrue(
"Only when esql_exponential_histogram feature flag is enabled",
EsqlCorePlugin.EXPONENTIAL_HISTOGRAM_FEATURE_FLAG.isEnabled()
);
}
public HistogramPercentileTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
this.testCase = testCaseSupplier.get();
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
List<TestCaseSupplier> suppliers = new ArrayList<>();
List<TestCaseSupplier.TypedDataSupplier> validPercentileSuppliers = Stream.of(
DataType.DOUBLE,
DataType.INTEGER,
DataType.LONG,
DataType.UNSIGNED_LONG
).filter(DataType::isNumeric).flatMap(type -> getSuppliersForNumericType(type, 0.0, 100.0, true).stream()).toList();
List<Double> invalidPercentileValues = List.of(-0.01, 100.05, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
List<TestCaseSupplier.TypedDataSupplier> invalidPercentileSuppliers = invalidPercentileValues.stream()
.map(value -> new TestCaseSupplier.TypedDataSupplier("<" + value + " double>", () -> value, DataType.DOUBLE))
.toList();
List<TestCaseSupplier.TypedDataSupplier> allPercentiles = Stream.concat(
validPercentileSuppliers.stream(),
invalidPercentileSuppliers.stream()
).toList();
TestCaseSupplier.casesCrossProduct((histogramObj, percentileObj) -> {
ExponentialHistogram histogram = (ExponentialHistogram) histogramObj;
Number percentile = (Number) percentileObj;
double percVal = percentile.doubleValue();
if (percVal < 0 || percVal > 100 || Double.isNaN(percVal)) {
return null;
}
double result = ExponentialHistogramQuantile.getQuantile(histogram, percVal / 100.0);
return Double.isNaN(result) ? null : result;
},
TestCaseSupplier.exponentialHistogramCases(),
allPercentiles,
(histoType, percentileType) -> equalTo(
"HistogramPercentileEvaluator[value=Attribute[channel=0], percentile="
+ getCastEvaluator("Attribute[channel=1]", percentileType, DataType.DOUBLE)
+ "]"
),
(typedHistoData, typedPercentileData) -> {
Object percentile = typedPercentileData.getValue();
if (invalidPercentileValues.contains(percentile)) {
return List.of(
"Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.",
"Line 1:1: java.lang.ArithmeticException: Percentile value must be in the range [0, 100], got: " + percentile
);
} else {
return List.of();
}
},
suppliers,
DataType.DOUBLE,
false
);
return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers);
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new HistogramPercentile(source, args.get(0), args.get(1));
}
}
| HistogramPercentileTests |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/index/query/LegacySemanticMatchQueryRewriteInterceptorTests.java | {
"start": 1347,
"end": 6368
} | class ____ extends ESTestCase {
private TestThreadPool threadPool;
private NoOpClient client;
private Index index;
private static final String FIELD_NAME = "fieldName";
private static final String VALUE = "value";
private static final String QUERY_NAME = "match_query";
private static final float BOOST = 5.0f;
@Before
public void setup() {
threadPool = createThreadPool();
client = new NoOpClient(threadPool);
index = new Index(randomAlphaOfLength(10), randomAlphaOfLength(10));
}
@After
public void cleanup() {
threadPool.close();
}
public void testMatchQueryOnInferenceFieldIsInterceptedAndRewrittenToSemanticQuery() throws IOException {
Map<String, InferenceFieldMetadata> inferenceFields = Map.of(
FIELD_NAME,
new InferenceFieldMetadata(index.getName(), "inferenceId", new String[] { FIELD_NAME }, null)
);
QueryRewriteContext context = createQueryRewriteContext(inferenceFields);
QueryBuilder original = createTestQueryBuilder();
QueryBuilder rewritten = original.rewrite(context);
assertTrue(
"Expected query to be intercepted, but was [" + rewritten.getClass().getName() + "]",
rewritten instanceof InterceptedQueryBuilderWrapper
);
InterceptedQueryBuilderWrapper intercepted = (InterceptedQueryBuilderWrapper) rewritten;
assertTrue(intercepted.queryBuilder instanceof SemanticQueryBuilder);
SemanticQueryBuilder semanticQueryBuilder = (SemanticQueryBuilder) intercepted.queryBuilder;
assertEquals(FIELD_NAME, semanticQueryBuilder.getFieldName());
assertEquals(VALUE, semanticQueryBuilder.getQuery());
}
public void testMatchQueryOnNonInferenceFieldRemainsMatchQuery() throws IOException {
QueryRewriteContext context = createQueryRewriteContext(Map.of()); // No inference fields
QueryBuilder original = createTestQueryBuilder();
QueryBuilder rewritten = original.rewrite(context);
assertTrue(
"Expected query to remain match but was [" + rewritten.getClass().getName() + "]",
rewritten instanceof MatchQueryBuilder
);
assertEquals(original, rewritten);
}
public void testBoostAndQueryNameInMatchQueryRewrite() throws IOException {
Map<String, InferenceFieldMetadata> inferenceFields = Map.of(
FIELD_NAME,
new InferenceFieldMetadata(index.getName(), "inferenceId", new String[] { FIELD_NAME }, null)
);
QueryRewriteContext context = createQueryRewriteContext(inferenceFields);
QueryBuilder original = createTestQueryBuilder();
original.boost(BOOST);
original.queryName(QUERY_NAME);
QueryBuilder rewritten = original.rewrite(context);
assertTrue(
"Expected query to be intercepted, but was [" + rewritten.getClass().getName() + "]",
rewritten instanceof InterceptedQueryBuilderWrapper
);
InterceptedQueryBuilderWrapper intercepted = (InterceptedQueryBuilderWrapper) rewritten;
assertEquals(BOOST, intercepted.boost(), 0.0f);
assertEquals(QUERY_NAME, intercepted.queryName());
assertTrue(intercepted.queryBuilder instanceof SemanticQueryBuilder);
SemanticQueryBuilder semanticQueryBuilder = (SemanticQueryBuilder) intercepted.queryBuilder;
assertEquals(FIELD_NAME, semanticQueryBuilder.getFieldName());
assertEquals(VALUE, semanticQueryBuilder.getQuery());
}
private MatchQueryBuilder createTestQueryBuilder() {
return new MatchQueryBuilder(FIELD_NAME, VALUE);
}
private QueryRewriteContext createQueryRewriteContext(Map<String, InferenceFieldMetadata> inferenceFields) {
IndexMetadata indexMetadata = IndexMetadata.builder(index.getName())
.settings(
Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID())
)
.numberOfShards(1)
.numberOfReplicas(0)
.putInferenceFields(inferenceFields)
.build();
ResolvedIndices resolvedIndices = new MockResolvedIndices(
Map.of(),
new OriginalIndices(new String[] { index.getName() }, IndicesOptions.DEFAULT),
Map.of(index, indexMetadata)
);
return new QueryRewriteContext(
null,
client,
null,
TransportVersion.current(),
RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY,
resolvedIndices,
null,
createRewriteInterceptor(),
null
);
}
@SuppressWarnings("deprecation")
private QueryRewriteInterceptor createRewriteInterceptor() {
return new LegacySemanticMatchQueryRewriteInterceptor();
}
}
| LegacySemanticMatchQueryRewriteInterceptorTests |
java | elastic__elasticsearch | test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java | {
"start": 3225,
"end": 3611
} | interface ____ extends Predicate<ClientYamlTestExecutionContext> {}
private static Optional<Boolean> checkCapabilities(ClientYamlTestExecutionContext context, CapabilitiesCheck check, boolean any) {
Optional<Boolean> b = context.clusterHasCapabilities(check.method(), check.path(), check.parameters(), check.capabilities(), any);
return b;
}
}
| CapabilitiesPredicate |
java | apache__dubbo | dubbo-config/dubbo-config-spring6/src/test/java/org/apache/dubbo/config/spring6/utils/CircularDependencyDemoService.java | {
"start": 857,
"end": 931
} | interface ____ {
String sayHello(DemoA a);
}
| CircularDependencyDemoService |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_issue_469.java | {
"start": 487,
"end": 726
} | class ____ {
private String sPhotoUrl;
public String getsPhotoUrl() {
return sPhotoUrl;
}
public void setsPhotoUrl(String sPhotoUrl) {
this.sPhotoUrl = sPhotoUrl;
}
}
}
| VO |
java | google__guava | android/guava/src/com/google/common/base/CharMatcher.java | {
"start": 52305,
"end": 53037
} | class ____ extends FastMatcher {
private final char match1;
private final char match2;
IsEither(char match1, char match2) {
this.match1 = match1;
this.match2 = match2;
}
@Override
public boolean matches(char c) {
return c == match1 || c == match2;
}
@GwtIncompatible // used only from other GwtIncompatible code
@Override
void setBits(BitSet table) {
table.set(match1);
table.set(match2);
}
@Override
public String toString() {
return "CharMatcher.anyOf(\"" + showCharacter(match1) + showCharacter(match2) + "\")";
}
}
/** Implementation of {@link #anyOf(CharSequence)} for three or more characters. */
private static final | IsEither |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/hql/TruncConvertedDatetimeAttributeTest.java | {
"start": 1372,
"end": 2980
} | class ____ {
private static final Date DATE = new GregorianCalendar( 2017, Calendar.JANUARY, 24 ).getTime();
private static final Instant INSTANT = ZonedDateTime.of( 2020, 10, 15, 20, 34, 45, 0, ZoneOffset.UTC ).toInstant();
@BeforeAll
public void setUp(SessionFactoryScope scope) {
scope.inTransaction( session -> session.persist( new TestEntity( 1L, DATE.getTime(), INSTANT ) ) );
}
@AfterAll
public void tearDown(SessionFactoryScope scope) {
scope.inTransaction( session -> session.createMutationQuery( "delete from TestEntity" ).executeUpdate() );
}
@Test
public void testTruncSelection(SessionFactoryScope scope) {
scope.inSession( session -> {
assertThat( session.createQuery(
"select trunc(instantCol, minute) from TestEntity",
Instant.class
).getSingleResult() ).isEqualTo( INSTANT.truncatedTo( ChronoUnit.MINUTES ) );
assertThat( session.createQuery(
"select trunc(dateCol, month) from TestEntity",
Long.class
).getSingleResult() ).isEqualTo( new GregorianCalendar( 2017, Calendar.JANUARY, 1 ).getTime().getTime() );
} );
}
@Test
public void testTruncComparison(SessionFactoryScope scope) {
scope.inSession( session -> {
assertThat( session.createQuery(
"from TestEntity where trunc(instantCol, hour) < current_date",
TestEntity.class
).getResultList() ).hasSize( 1 );
assertThat( session.createQuery(
"from TestEntity where trunc(dateCol, year) < current_timestamp",
TestEntity.class
).getResultList() ).hasSize( 1 );
} );
}
@Entity( name = "TestEntity" )
public static | TruncConvertedDatetimeAttributeTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java | {
"start": 1202,
"end": 1258
} | class ____ lz4 compressors/decompressors.
*/
public | creates |
java | dropwizard__dropwizard | dropwizard-configuration/src/test/java/io/dropwizard/configuration/Issue3796Test.java | {
"start": 655,
"end": 1307
} | class ____ {
@Test
void configurationWithCustomDeserializerCanBeRead() throws IOException, ConfigurationException {
final ConfigurationFactory<CustomConfiguration> factory = new YamlConfigurationFactory<>(CustomConfiguration.class, BaseValidator.newValidator(), Jackson.newObjectMapper(), "dw");
final CustomConfiguration testObject = factory.build(new ResourceConfigurationSourceProvider(), "issue-3796.yml");
assertThat(testObject).isNotNull();
assertThat(testObject.customProperty).isNotNull();
assertThat(testObject.customProperty.customString).isEqualTo("hello, world");
}
static | Issue3796Test |
java | quarkusio__quarkus | core/deployment/src/test/java/io/quarkus/deployment/dev/ClassComparisonUtilTest.java | {
"start": 553,
"end": 3933
} | class ____ {
@Test
public void annotationsEqual() {
AnnotationInstance instance1 = methodParameterAnnotation(AnnotationForTest1.class);
AnnotationInstance instance2 = methodParameterAnnotation(AnnotationForTest1.class);
List<AnnotationInstance> instances1 = List.of(instance1);
List<AnnotationInstance> instances2 = List.of(instance2);
Assertions.assertTrue(ClassComparisonUtil.compareMethodAnnotations(instances1, instances2));
}
@Test
public void annotationsNotEqual() {
AnnotationInstance instance1 = methodParameterAnnotation(AnnotationForTest1.class);
AnnotationInstance instance2 = methodParameterAnnotation(AnnotationForTest2.class);
List<AnnotationInstance> instances1 = List.of(instance1);
List<AnnotationInstance> instances2 = List.of(instance2);
Assertions.assertFalse(ClassComparisonUtil.compareMethodAnnotations(instances1, instances2));
}
@Test
public void compareMethodAnnotationsSizeDiffer() {
AnnotationInstance instance = methodParameterAnnotation(AnnotationForTest1.class);
List<AnnotationInstance> instances = List.of(instance);
Assertions.assertFalse(ClassComparisonUtil.compareMethodAnnotations(instances, List.of()));
Assertions.assertFalse(ClassComparisonUtil.compareMethodAnnotations(List.of(), instances));
}
@Test
public void multipleAnnotationsAtSamePosition() {
List<AnnotationInstance> instances1 = List.of(
methodParameterAnnotation(AnnotationForTest1.class),
methodParameterAnnotation(AnnotationForTest2.class));
List<AnnotationInstance> instances2 = List.of(
methodParameterAnnotation(AnnotationForTest2.class),
methodParameterAnnotation(AnnotationForTest1.class));
Assertions.assertTrue(ClassComparisonUtil.compareMethodAnnotations(instances1, instances2));
}
@Test
public void multipleAnnotations() {
List<AnnotationInstance> instances1 = List.of(
methodParameterAnnotation(AnnotationForTest1.class, 1),
methodParameterAnnotation(AnnotationForTest2.class, 2));
List<AnnotationInstance> instances2 = List.of(
methodParameterAnnotation(AnnotationForTest1.class, 2),
methodParameterAnnotation(AnnotationForTest2.class, 1));
Assertions.assertFalse(ClassComparisonUtil.compareMethodAnnotations(instances1, instances2));
}
private static AnnotationInstance methodParameterAnnotation(
Class<? extends Annotation> annotation) {
return methodParameterAnnotation(annotation, 1);
}
private static AnnotationInstance methodParameterAnnotation(
Class<? extends Annotation> annotation, int position) {
MethodParameterInfo target = MethodParameterInfo.create(null, (short) position);
return AnnotationInstance.builder(annotation).buildWithTarget(target);
}
@Target({ ElementType.PARAMETER, ElementType.TYPE_USE })
@Retention(RetentionPolicy.RUNTIME)
@Documented
private @ | CompareMethodAnnotations |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestGetDataFrameAnalyticsStatsAction.java | {
"start": 1191,
"end": 3099
} | class ____ extends BaseRestHandler {
@Override
public List<Route> routes() {
return List.of(
new Route(GET, BASE_PATH + "data_frame/analytics/_stats"),
new Route(GET, BASE_PATH + "data_frame/analytics/{" + DataFrameAnalyticsConfig.ID + "}/_stats")
);
}
@Override
public String getName() {
return "xpack_ml_get_data_frame_analytics_stats_action";
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
String id = restRequest.param(DataFrameAnalyticsConfig.ID.getPreferredName());
GetDataFrameAnalyticsStatsAction.Request request = new GetDataFrameAnalyticsStatsAction.Request();
if (Strings.isNullOrEmpty(id) == false) {
request.setId(id);
}
if (restRequest.hasParam(PageParams.FROM.getPreferredName()) || restRequest.hasParam(PageParams.SIZE.getPreferredName())) {
request.setPageParams(
new PageParams(
restRequest.paramAsInt(PageParams.FROM.getPreferredName(), PageParams.DEFAULT_FROM),
restRequest.paramAsInt(PageParams.SIZE.getPreferredName(), PageParams.DEFAULT_SIZE)
)
);
}
request.setAllowNoMatch(
restRequest.paramAsBoolean(GetDataFrameAnalyticsStatsAction.Request.ALLOW_NO_MATCH.getPreferredName(), request.isAllowNoMatch())
);
return channel -> new RestCancellableNodeClient(client, restRequest.getHttpChannel()).execute(
GetDataFrameAnalyticsStatsAction.INSTANCE,
request,
new RestToXContentListener<>(channel)
);
}
@Override
protected Set<String> responseParams() {
return Collections.singleton(GetDataFrameAnalyticsStatsAction.Response.VERBOSE);
}
}
| RestGetDataFrameAnalyticsStatsAction |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/handle_by_jdbc_type/BooleanCharTypeHandler.java | {
"start": 1010,
"end": 1847
} | class ____ extends BaseTypeHandler<Boolean> {
private static final String TRUE = "T";
private static final String FALSE = "F";
@Override
public void setNonNullParameter(PreparedStatement ps, int i, Boolean parameter, JdbcType jdbcType)
throws SQLException {
ps.setString(i, parameter.booleanValue() ? TRUE : FALSE);
}
@Override
public Boolean getNullableResult(ResultSet rs, String columnName) throws SQLException {
return TRUE.equals(rs.getString(columnName));
}
@Override
public Boolean getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
return TRUE.equals(rs.getString(columnIndex));
}
@Override
public Boolean getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
return TRUE.equals(cs.getString(columnIndex));
}
}
| BooleanCharTypeHandler |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/internal/TypeMessages.java | {
"start": 1569,
"end": 2212
} | class ____ which to find an error message
* @return the most relevant error message, or {@code null} if no message could be found
*/
public String getMessageForType(Class<?> clazz) {
return super.get(clazz);
}
/**
* Checks, whether an any custom error message is associated with the giving type.
*
* @param type the type for which to check a error message
* @return is the giving type associated with any custom error message
*/
public boolean hasMessageForType(Class<?> type) {
return super.hasEntity(type);
}
/**
* Puts the {@code message} for the given {@code clazz}.
*
* @param clazz the | for |
java | apache__spark | core/src/main/java/org/apache/spark/util/collection/TimSort.java | {
"start": 11561,
"end": 34654
} | class ____ {
/**
* The Buffer being sorted.
*/
private final Buffer a;
/**
* Length of the sort Buffer.
*/
private final int aLength;
/**
* The comparator for this sort.
*/
private final Comparator<? super K> c;
/**
* When we get into galloping mode, we stay there until both runs win less
* often than MIN_GALLOP consecutive times.
*/
private static final int MIN_GALLOP = 7;
/**
* This controls when we get *into* galloping mode. It is initialized
* to MIN_GALLOP. The mergeLo and mergeHi methods nudge it higher for
* random data, and lower for highly structured data.
*/
private int minGallop = MIN_GALLOP;
/**
* Maximum initial size of tmp array, which is used for merging. The array
* can grow to accommodate demand.
*
* Unlike Tim's original C version, we do not allocate this much storage
* when sorting smaller arrays. This change was required for performance.
*/
private static final int INITIAL_TMP_STORAGE_LENGTH = 256;
/**
* Temp storage for merges.
*/
private Buffer tmp; // Actual runtime type will be Object[], regardless of T
/**
* Length of the temp storage.
*/
private int tmpLength = 0;
/**
* A stack of pending runs yet to be merged. Run i starts at
* address base[i] and extends for len[i] elements. It's always
* true (so long as the indices are in bounds) that:
*
* runBase[i] + runLen[i] == runBase[i + 1]
*
* so we could cut the storage for this, but it's a minor amount,
* and keeping all the info explicit simplifies the code.
*/
private int stackSize = 0; // Number of pending runs on stack
private final int[] runBase;
private final int[] runLen;
/**
* Creates a TimSort instance to maintain the state of an ongoing sort.
*
* @param a the array to be sorted
* @param c the comparator to determine the order of the sort
*/
private SortState(Buffer a, Comparator<? super K> c, int len) {
this.aLength = len;
this.a = a;
this.c = c;
// Allocate temp storage (which may be increased later if necessary)
tmpLength = len < 2 * INITIAL_TMP_STORAGE_LENGTH ? len >>> 1 : INITIAL_TMP_STORAGE_LENGTH;
tmp = s.allocate(tmpLength);
/*
* Allocate runs-to-be-merged stack (which cannot be expanded). The
* stack length requirements are described in listsort.txt. The C
* version always uses the same stack length (85), but this was
* measured to be too expensive when sorting "mid-sized" arrays (e.g.,
* 100 elements) in Java. Therefore, we use smaller (but sufficiently
* large) stack lengths for smaller arrays. The "magic numbers" in the
* computation below must be changed if MIN_MERGE is decreased. See
* the MIN_MERGE declaration above for more information.
* The maximum value of 49 allows for an array up to length
* Integer.MAX_VALUE-4, if array is filled by the worst case stack size
* increasing scenario. More explanations are given in section 4 of:
* http://envisage-project.eu/wp-content/uploads/2015/02/sorting.pdf
*/
int stackLen = (len < 120 ? 5 :
len < 1542 ? 10 :
len < 119151 ? 24 : 49);
runBase = new int[stackLen];
runLen = new int[stackLen];
}
/**
* Pushes the specified run onto the pending-run stack.
*
* @param runBase index of the first element in the run
* @param runLen the number of elements in the run
*/
private void pushRun(int runBase, int runLen) {
this.runBase[stackSize] = runBase;
this.runLen[stackSize] = runLen;
stackSize++;
}
/**
* Examines the stack of runs waiting to be merged and merges adjacent runs
* until the stack invariants are reestablished:
*
* 1. runLen[i - 3] > runLen[i - 2] + runLen[i - 1]
* 2. runLen[i - 2] > runLen[i - 1]
*
* This method is called each time a new run is pushed onto the stack,
* so the invariants are guaranteed to hold for i < stackSize upon
* entry to the method.
*
* Thanks to Stijn de Gouw, Jurriaan Rot, Frank S. de Boer,
* Richard Bubel and Reiner Hahnle, this is fixed with respect to
* the analysis in "On the Worst-Case Complexity of TimSort" by
* Nicolas Auger, Vincent Jug, Cyril Nicaud, and Carine Pivoteau.
*/
private void mergeCollapse() {
while (stackSize > 1) {
int n = stackSize - 2;
if (n > 0 && runLen[n-1] <= runLen[n] + runLen[n+1] ||
n > 1 && runLen[n-2] <= runLen[n] + runLen[n-1]) {
if (runLen[n - 1] < runLen[n + 1])
n--;
} else if (n < 0 || runLen[n] > runLen[n + 1]) {
break; // Invariant is established
}
mergeAt(n);
}
}
/**
* Merges all runs on the stack until only one remains. This method is
* called once, to complete the sort.
*/
private void mergeForceCollapse() {
while (stackSize > 1) {
int n = stackSize - 2;
if (n > 0 && runLen[n - 1] < runLen[n + 1])
n--;
mergeAt(n);
}
}
/**
* Merges the two runs at stack indices i and i+1. Run i must be
* the penultimate or antepenultimate run on the stack. In other words,
* i must be equal to stackSize-2 or stackSize-3.
*
* @param i stack index of the first of the two runs to merge
*/
private void mergeAt(int i) {
assert stackSize >= 2;
assert i >= 0;
assert i == stackSize - 2 || i == stackSize - 3;
int base1 = runBase[i];
int len1 = runLen[i];
int base2 = runBase[i + 1];
int len2 = runLen[i + 1];
assert len1 > 0 && len2 > 0;
assert base1 + len1 == base2;
/*
* Record the length of the combined runs; if i is the 3rd-last
* run now, also slide over the last run (which isn't involved
* in this merge). The current run (i+1) goes away in any case.
*/
runLen[i] = len1 + len2;
if (i == stackSize - 3) {
runBase[i + 1] = runBase[i + 2];
runLen[i + 1] = runLen[i + 2];
}
stackSize--;
K key0 = s.newKey();
/*
* Find where the first element of run2 goes in run1. Prior elements
* in run1 can be ignored (because they're already in place).
*/
int k = gallopRight(s.getKey(a, base2, key0), a, base1, len1, 0, c);
assert k >= 0;
base1 += k;
len1 -= k;
if (len1 == 0)
return;
/*
* Find where the last element of run1 goes in run2. Subsequent elements
* in run2 can be ignored (because they're already in place).
*/
len2 = gallopLeft(s.getKey(a, base1 + len1 - 1, key0), a, base2, len2, len2 - 1, c);
assert len2 >= 0;
if (len2 == 0)
return;
// Merge remaining runs, using tmp array with min(len1, len2) elements
if (len1 <= len2)
mergeLo(base1, len1, base2, len2);
else
mergeHi(base1, len1, base2, len2);
}
/**
* Locates the position at which to insert the specified key into the
* specified sorted range; if the range contains an element equal to key,
* returns the index of the leftmost equal element.
*
* @param key the key whose insertion point to search for
* @param a the array in which to search
* @param base the index of the first element in the range
* @param len the length of the range; must be > 0
* @param hint the index at which to begin the search, 0 <= hint < n.
* The closer hint is to the result, the faster this method will run.
* @param c the comparator used to order the range, and to search
* @return the int k, 0 <= k <= n such that a[b + k - 1] < key <= a[b + k],
* pretending that a[b - 1] is minus infinity and a[b + n] is infinity.
* In other words, key belongs at index b + k; or in other words,
* the first k elements of a should precede key, and the last n - k
* should follow it.
*/
private int gallopLeft(K key, Buffer a, int base, int len, int hint, Comparator<? super K> c) {
assert len > 0 && hint >= 0 && hint < len;
int lastOfs = 0;
int ofs = 1;
K key0 = s.newKey();
if (c.compare(key, s.getKey(a, base + hint, key0)) > 0) {
// Gallop right until a[base+hint+lastOfs] < key <= a[base+hint+ofs]
int maxOfs = len - hint;
while (ofs < maxOfs && c.compare(key, s.getKey(a, base + hint + ofs, key0)) > 0) {
lastOfs = ofs;
ofs = (ofs << 1) + 1;
if (ofs <= 0) // int overflow
ofs = maxOfs;
}
if (ofs > maxOfs)
ofs = maxOfs;
// Make offsets relative to base
lastOfs += hint;
ofs += hint;
} else { // key <= a[base + hint]
// Gallop left until a[base+hint-ofs] < key <= a[base+hint-lastOfs]
final int maxOfs = hint + 1;
while (ofs < maxOfs && c.compare(key, s.getKey(a, base + hint - ofs, key0)) <= 0) {
lastOfs = ofs;
ofs = (ofs << 1) + 1;
if (ofs <= 0) // int overflow
ofs = maxOfs;
}
if (ofs > maxOfs)
ofs = maxOfs;
// Make offsets relative to base
int tmp = lastOfs;
lastOfs = hint - ofs;
ofs = hint - tmp;
}
assert -1 <= lastOfs && lastOfs < ofs && ofs <= len;
/*
* Now a[base+lastOfs] < key <= a[base+ofs], so key belongs somewhere
* to the right of lastOfs but no farther right than ofs. Do a binary
* search, with invariant a[base + lastOfs - 1] < key <= a[base + ofs].
*/
lastOfs++;
while (lastOfs < ofs) {
int m = lastOfs + ((ofs - lastOfs) >>> 1);
if (c.compare(key, s.getKey(a, base + m, key0)) > 0)
lastOfs = m + 1; // a[base + m] < key
else
ofs = m; // key <= a[base + m]
}
assert lastOfs == ofs; // so a[base + ofs - 1] < key <= a[base + ofs]
return ofs;
}
/**
* Like gallopLeft, except that if the range contains an element equal to
* key, gallopRight returns the index after the rightmost equal element.
*
* @param key the key whose insertion point to search for
* @param a the array in which to search
* @param base the index of the first element in the range
* @param len the length of the range; must be > 0
* @param hint the index at which to begin the search, 0 <= hint < n.
* The closer hint is to the result, the faster this method will run.
* @param c the comparator used to order the range, and to search
* @return the int k, 0 <= k <= n such that a[b + k - 1] <= key < a[b + k]
*/
private int gallopRight(K key, Buffer a, int base, int len, int hint, Comparator<? super K> c) {
assert len > 0 && hint >= 0 && hint < len;
int ofs = 1;
int lastOfs = 0;
K key1 = s.newKey();
if (c.compare(key, s.getKey(a, base + hint, key1)) < 0) {
// Gallop left until a[b+hint - ofs] <= key < a[b+hint - lastOfs]
int maxOfs = hint + 1;
while (ofs < maxOfs && c.compare(key, s.getKey(a, base + hint - ofs, key1)) < 0) {
lastOfs = ofs;
ofs = (ofs << 1) + 1;
if (ofs <= 0) // int overflow
ofs = maxOfs;
}
if (ofs > maxOfs)
ofs = maxOfs;
// Make offsets relative to b
int tmp = lastOfs;
lastOfs = hint - ofs;
ofs = hint - tmp;
} else { // a[b + hint] <= key
// Gallop right until a[b+hint + lastOfs] <= key < a[b+hint + ofs]
int maxOfs = len - hint;
while (ofs < maxOfs && c.compare(key, s.getKey(a, base + hint + ofs, key1)) >= 0) {
lastOfs = ofs;
ofs = (ofs << 1) + 1;
if (ofs <= 0) // int overflow
ofs = maxOfs;
}
if (ofs > maxOfs)
ofs = maxOfs;
// Make offsets relative to b
lastOfs += hint;
ofs += hint;
}
assert -1 <= lastOfs && lastOfs < ofs && ofs <= len;
/*
* Now a[b + lastOfs] <= key < a[b + ofs], so key belongs somewhere to
* the right of lastOfs but no farther right than ofs. Do a binary
* search, with invariant a[b + lastOfs - 1] <= key < a[b + ofs].
*/
lastOfs++;
while (lastOfs < ofs) {
int m = lastOfs + ((ofs - lastOfs) >>> 1);
if (c.compare(key, s.getKey(a, base + m, key1)) < 0)
ofs = m; // key < a[b + m]
else
lastOfs = m + 1; // a[b + m] <= key
}
assert lastOfs == ofs; // so a[b + ofs - 1] <= key < a[b + ofs]
return ofs;
}
/**
* Merges two adjacent runs in place, in a stable fashion. The first
* element of the first run must be greater than the first element of the
* second run (a[base1] > a[base2]), and the last element of the first run
* (a[base1 + len1-1]) must be greater than all elements of the second run.
*
* For performance, this method should be called only when len1 <= len2;
* its twin, mergeHi should be called if len1 >= len2. (Either method
* may be called if len1 == len2.)
*
* @param base1 index of first element in first run to be merged
* @param len1 length of first run to be merged (must be > 0)
* @param base2 index of first element in second run to be merged
* (must be aBase + aLen)
* @param len2 length of second run to be merged (must be > 0)
*/
private void mergeLo(int base1, int len1, int base2, int len2) {
assert len1 > 0 && len2 > 0 && base1 + len1 == base2;
// Copy first run into temp array
Buffer a = this.a; // For performance
Buffer tmp = ensureCapacity(len1);
s.copyRange(a, base1, tmp, 0, len1);
int cursor1 = 0; // Indexes into tmp array
int cursor2 = base2; // Indexes int a
int dest = base1; // Indexes int a
// Move first element of second run and deal with degenerate cases
s.copyElement(a, cursor2++, a, dest++);
if (--len2 == 0) {
s.copyRange(tmp, cursor1, a, dest, len1);
return;
}
if (len1 == 1) {
s.copyRange(a, cursor2, a, dest, len2);
s.copyElement(tmp, cursor1, a, dest + len2); // Last elt of run 1 to end of merge
return;
}
K key0 = s.newKey();
K key1 = s.newKey();
Comparator<? super K> c = this.c; // Use local variable for performance
int minGallop = this.minGallop; // " " " " "
outer:
while (true) {
int count1 = 0; // Number of times in a row that first run won
int count2 = 0; // Number of times in a row that second run won
/*
* Do the straightforward thing until (if ever) one run starts
* winning consistently.
*/
do {
assert len1 > 1 && len2 > 0;
if (c.compare(s.getKey(a, cursor2, key0), s.getKey(tmp, cursor1, key1)) < 0) {
s.copyElement(a, cursor2++, a, dest++);
count2++;
count1 = 0;
if (--len2 == 0)
break outer;
} else {
s.copyElement(tmp, cursor1++, a, dest++);
count1++;
count2 = 0;
if (--len1 == 1)
break outer;
}
} while ((count1 | count2) < minGallop);
/*
* One run is winning so consistently that galloping may be a
* huge win. So try that, and continue galloping until (if ever)
* neither run appears to be winning consistently anymore.
*/
do {
assert len1 > 1 && len2 > 0;
count1 = gallopRight(s.getKey(a, cursor2, key0), tmp, cursor1, len1, 0, c);
if (count1 != 0) {
s.copyRange(tmp, cursor1, a, dest, count1);
dest += count1;
cursor1 += count1;
len1 -= count1;
if (len1 <= 1) // len1 == 1 || len1 == 0
break outer;
}
s.copyElement(a, cursor2++, a, dest++);
if (--len2 == 0)
break outer;
count2 = gallopLeft(s.getKey(tmp, cursor1, key0), a, cursor2, len2, 0, c);
if (count2 != 0) {
s.copyRange(a, cursor2, a, dest, count2);
dest += count2;
cursor2 += count2;
len2 -= count2;
if (len2 == 0)
break outer;
}
s.copyElement(tmp, cursor1++, a, dest++);
if (--len1 == 1)
break outer;
minGallop--;
} while (count1 >= MIN_GALLOP | count2 >= MIN_GALLOP);
if (minGallop < 0)
minGallop = 0;
minGallop += 2; // Penalize for leaving gallop mode
} // End of "outer" loop
this.minGallop = minGallop < 1 ? 1 : minGallop; // Write back to field
if (len1 == 1) {
assert len2 > 0;
s.copyRange(a, cursor2, a, dest, len2);
s.copyElement(tmp, cursor1, a, dest + len2); // Last elt of run 1 to end of merge
} else if (len1 == 0) {
throw new IllegalArgumentException(
"Comparison method violates its general contract!");
} else {
assert len2 == 0;
assert len1 > 1;
s.copyRange(tmp, cursor1, a, dest, len1);
}
}
/**
* Like mergeLo, except that this method should be called only if
* len1 >= len2; mergeLo should be called if len1 <= len2. (Either method
* may be called if len1 == len2.)
*
* @param base1 index of first element in first run to be merged
* @param len1 length of first run to be merged (must be > 0)
* @param base2 index of first element in second run to be merged
* (must be aBase + aLen)
* @param len2 length of second run to be merged (must be > 0)
*/
private void mergeHi(int base1, int len1, int base2, int len2) {
assert len1 > 0 && len2 > 0 && base1 + len1 == base2;
// Copy second run into temp array
Buffer a = this.a; // For performance
Buffer tmp = ensureCapacity(len2);
s.copyRange(a, base2, tmp, 0, len2);
int cursor1 = base1 + len1 - 1; // Indexes into a
int cursor2 = len2 - 1; // Indexes into tmp array
int dest = base2 + len2 - 1; // Indexes into a
K key0 = s.newKey();
K key1 = s.newKey();
// Move last element of first run and deal with degenerate cases
s.copyElement(a, cursor1--, a, dest--);
if (--len1 == 0) {
s.copyRange(tmp, 0, a, dest - (len2 - 1), len2);
return;
}
if (len2 == 1) {
dest -= len1;
cursor1 -= len1;
s.copyRange(a, cursor1 + 1, a, dest + 1, len1);
s.copyElement(tmp, cursor2, a, dest);
return;
}
Comparator<? super K> c = this.c; // Use local variable for performance
int minGallop = this.minGallop; // " " " " "
outer:
while (true) {
int count1 = 0; // Number of times in a row that first run won
int count2 = 0; // Number of times in a row that second run won
/*
* Do the straightforward thing until (if ever) one run
* appears to win consistently.
*/
do {
assert len1 > 0 && len2 > 1;
if (c.compare(s.getKey(tmp, cursor2, key0), s.getKey(a, cursor1, key1)) < 0) {
s.copyElement(a, cursor1--, a, dest--);
count1++;
count2 = 0;
if (--len1 == 0)
break outer;
} else {
s.copyElement(tmp, cursor2--, a, dest--);
count2++;
count1 = 0;
if (--len2 == 1)
break outer;
}
} while ((count1 | count2) < minGallop);
/*
* One run is winning so consistently that galloping may be a
* huge win. So try that, and continue galloping until (if ever)
* neither run appears to be winning consistently anymore.
*/
do {
assert len1 > 0 && len2 > 1;
count1 = len1 - gallopRight(s.getKey(tmp, cursor2, key0), a, base1, len1, len1 - 1, c);
if (count1 != 0) {
dest -= count1;
cursor1 -= count1;
len1 -= count1;
s.copyRange(a, cursor1 + 1, a, dest + 1, count1);
if (len1 == 0)
break outer;
}
s.copyElement(tmp, cursor2--, a, dest--);
if (--len2 == 1)
break outer;
count2 = len2 - gallopLeft(s.getKey(a, cursor1, key0), tmp, 0, len2, len2 - 1, c);
if (count2 != 0) {
dest -= count2;
cursor2 -= count2;
len2 -= count2;
s.copyRange(tmp, cursor2 + 1, a, dest + 1, count2);
if (len2 <= 1) // len2 == 1 || len2 == 0
break outer;
}
s.copyElement(a, cursor1--, a, dest--);
if (--len1 == 0)
break outer;
minGallop--;
} while (count1 >= MIN_GALLOP | count2 >= MIN_GALLOP);
if (minGallop < 0)
minGallop = 0;
minGallop += 2; // Penalize for leaving gallop mode
} // End of "outer" loop
this.minGallop = minGallop < 1 ? 1 : minGallop; // Write back to field
if (len2 == 1) {
assert len1 > 0;
dest -= len1;
cursor1 -= len1;
s.copyRange(a, cursor1 + 1, a, dest + 1, len1);
s.copyElement(tmp, cursor2, a, dest); // Move first elt of run2 to front of merge
} else if (len2 == 0) {
throw new IllegalArgumentException(
"Comparison method violates its general contract!");
} else {
assert len1 == 0;
assert len2 > 0;
s.copyRange(tmp, 0, a, dest - (len2 - 1), len2);
}
}
/**
* Ensures that the external array tmp has at least the specified
* number of elements, increasing its size if necessary. The size
* increases exponentially to ensure amortized linear time complexity.
*
* @param minCapacity the minimum required capacity of the tmp array
* @return tmp, whether or not it grew
*/
private Buffer ensureCapacity(int minCapacity) {
if (tmpLength < minCapacity) {
// Compute smallest power of 2 > minCapacity
int newSize = minCapacity;
newSize |= newSize >> 1;
newSize |= newSize >> 2;
newSize |= newSize >> 4;
newSize |= newSize >> 8;
newSize |= newSize >> 16;
newSize++;
if (newSize < 0) // Not bloody likely!
newSize = minCapacity;
else
newSize = Math.min(newSize, aLength >>> 1);
tmp = s.allocate(newSize);
tmpLength = newSize;
}
return tmp;
}
}
}
| SortState |
java | quarkusio__quarkus | integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftWithEnvFromConfigMapWithPrefixTest.java | {
"start": 594,
"end": 3006
} | class ____ {
@RegisterExtension
static final QuarkusProdModeTest config = new QuarkusProdModeTest()
.withApplicationRoot((jar) -> jar.addClasses(GreetingResource.class))
.setApplicationName("env-from-config-map-with-prefix")
.setApplicationVersion("0.1-SNAPSHOT")
.withConfigurationResource("openshift-with-env-from-configmap-with-prefix.properties")
.overrideConfigKey("quarkus.openshift.deployment-kind", "Deployment")
.setForcedDependencies(List.of(Dependency.of("io.quarkus", "quarkus-openshift", Version.getVersion())));
@ProdBuildResults
private ProdModeTestResults prodModeTestResults;
@Test
public void assertGeneratedResources() throws IOException {
Path kubernetesDir = prodModeTestResults.getBuildDir().resolve("kubernetes");
assertThat(kubernetesDir)
.isDirectoryContaining(p -> p.getFileName().endsWith("openshift.json"))
.isDirectoryContaining(p -> p.getFileName().endsWith("openshift.yml"));
List<HasMetadata> kubernetesList = DeserializationUtil
.deserializeAsList(kubernetesDir.resolve("openshift.yml"));
assertThat(kubernetesList).filteredOn(i -> i instanceof Deployment).singleElement()
.isInstanceOfSatisfying(Deployment.class, d -> {
assertThat(d.getMetadata()).satisfies(m -> {
assertThat(m.getName()).isEqualTo("env-from-config-map-with-prefix");
});
assertThat(d.getSpec()).satisfies(deploymentSpec -> {
assertThat(deploymentSpec.getTemplate()).satisfies(t -> {
assertThat(t.getSpec()).satisfies(podSpec -> {
assertThat(podSpec.getContainers()).singleElement().satisfies(container -> {
assertThat(container.getEnvFrom()).satisfies(env -> {
assertThat(env).anyMatch(item -> item.getPrefix().equals("QUARKUS") &&
item.getConfigMapRef().getName().equals("my-config-map"));
});
});
});
});
});
});
}
}
| OpenshiftWithEnvFromConfigMapWithPrefixTest |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ConfigurationClassWithConditionTests.java | {
"start": 8102,
"end": 8233
} | class ____ {
@Bean
public ExampleBean bean1() {
return new ExampleBean();
}
}
@Configuration
static | NonConfigurationClass |
java | alibaba__nacos | config/src/test/java/com/alibaba/nacos/config/server/utils/ConfigExecutorTest.java | {
"start": 911,
"end": 3966
} | class ____ {
@Test
void testScheduleConfigTask() throws InterruptedException {
AtomicInteger atomicInteger = new AtomicInteger();
Runnable runnable = atomicInteger::incrementAndGet;
ConfigExecutor.scheduleConfigTask(runnable, 0, 10, TimeUnit.MILLISECONDS);
TimeUnit.MILLISECONDS.sleep(10);
assertTrue(atomicInteger.get() >= 1);
}
@Test
void testScheduleCorrectUsageTask() throws InterruptedException {
AtomicInteger atomicInteger = new AtomicInteger();
Runnable runnable = atomicInteger::incrementAndGet;
ConfigExecutor.scheduleCorrectUsageTask(runnable, 0, 10, TimeUnit.MILLISECONDS);
TimeUnit.MILLISECONDS.sleep(10);
assertTrue(atomicInteger.get() >= 1);
}
@Test
void testExecuteAsyncNotify() throws InterruptedException {
AtomicInteger atomicInteger = new AtomicInteger();
Runnable runnable = atomicInteger::incrementAndGet;
ConfigExecutor.executeAsyncNotify(runnable);
TimeUnit.MILLISECONDS.sleep(20);
assertEquals(1, atomicInteger.get());
}
@Test
void testScheduleAsyncNotify() throws InterruptedException {
AtomicInteger atomicInteger = new AtomicInteger();
Runnable runnable = atomicInteger::incrementAndGet;
ConfigExecutor.scheduleAsyncNotify(runnable, 20, TimeUnit.MILLISECONDS);
assertEquals(0, atomicInteger.get());
TimeUnit.MILLISECONDS.sleep(40);
assertEquals(1, atomicInteger.get());
}
@Test
void testScheduleLongPollingV1() throws InterruptedException {
AtomicInteger atomicInteger = new AtomicInteger();
Runnable runnable = atomicInteger::incrementAndGet;
ConfigExecutor.scheduleLongPolling(runnable, 0, 10, TimeUnit.MILLISECONDS);
TimeUnit.MILLISECONDS.sleep(10);
assertTrue(atomicInteger.get() >= 1);
}
@Test
void testScheduleLongPollingV2() throws InterruptedException {
AtomicInteger atomicInteger = new AtomicInteger();
Runnable runnable = atomicInteger::incrementAndGet;
ConfigExecutor.scheduleLongPolling(runnable, 20, TimeUnit.MILLISECONDS);
assertEquals(0, atomicInteger.get());
TimeUnit.MILLISECONDS.sleep(40);
assertEquals(1, atomicInteger.get());
}
@Test
void testExecuteLongPolling() throws InterruptedException {
AtomicInteger atomicInteger = new AtomicInteger();
Runnable runnable = atomicInteger::incrementAndGet;
ConfigExecutor.executeLongPolling(runnable);
TimeUnit.MILLISECONDS.sleep(20);
assertEquals(1, atomicInteger.get());
}
}
| ConfigExecutorTest |
java | google__truth | core/src/main/java/com/google/common/truth/MultimapSubject.java | {
"start": 25266,
"end": 40660
} | class ____<
A extends @Nullable Object, E extends @Nullable Object> {
private final MultimapSubject subject;
private final Correspondence<? super A, ? super E> correspondence;
private final @Nullable Multimap<?, ?> actual;
private UsingCorrespondence(
MultimapSubject subject, Correspondence<? super A, ? super E> correspondence) {
this.subject = subject;
this.correspondence = checkNotNull(correspondence);
this.actual = subject.actual;
}
/**
* Checks that the actual multimap contains an entry with the given key and a value that
* corresponds to the given value.
*/
@SuppressWarnings("nullness") // TODO: b/423853632 - Remove after checker is fixed.
public void containsEntry(@Nullable Object key, E value) {
Entry<Object, E> entry = immutableEntry(key, value);
if (actual == null) {
failWithActual("expected a multimap that contains entry", entry);
return;
}
Collection<A> actualValues = castActual(actual).asMap().get(key);
if (actualValues != null) {
// Found matching key.
Correspondence.ExceptionStore exceptions = Correspondence.ExceptionStore.forMapValues();
for (A actualValue : actualValues) {
if (correspondence.safeCompare(actualValue, value, exceptions)) {
// Found matching key and value, but we still need to fail if we hit an exception along
// the way.
if (exceptions.hasCompareException()) {
failWithoutActual(
factsBuilder()
.addAll(exceptions.describeAsMainCause())
.add(fact("expected to contain entry", entry))
.addAll(correspondence.describeForMapValues())
.add(
fact(
"found match (but failing because of exception)",
immutableEntry(key, actualValue)))
.add(
fact(
"full contents",
actualCustomStringRepresentationForPackageMembersToCall()))
.build());
}
return;
}
}
// Found matching key with non-matching values.
failWithoutActual(
factsBuilder()
.add(fact("expected to contain entry", entry))
.addAll(correspondence.describeForMapValues())
.add(simpleFact("but did not"))
.add(fact("though it did contain values for that key", actualValues))
.add(
fact(
"full contents", actualCustomStringRepresentationForPackageMembersToCall()))
.addAll(exceptions.describeAsAdditionalInfo())
.build());
} else {
// Did not find matching key.
Set<Entry<?, ?>> entries = new LinkedHashSet<>();
Correspondence.ExceptionStore exceptions = Correspondence.ExceptionStore.forMapValues();
for (Entry<?, A> actualEntry : castActual(actual).entries()) {
if (correspondence.safeCompare(actualEntry.getValue(), value, exceptions)) {
entries.add(actualEntry);
}
}
if (!entries.isEmpty()) {
// Found matching values with non-matching keys.
failWithoutActual(
factsBuilder()
.add(fact("expected to contain entry", entry))
.addAll(correspondence.describeForMapValues())
.add(simpleFact("but did not"))
// The corresponding failure in the non-Correspondence case reports the keys
// mapping to the expected value. Here, we show the full entries, because for some
// Correspondences it may not be obvious which of the actual values it was that
// corresponded to the expected value.
.add(fact("though it did contain entries with matching values", entries))
.add(
fact(
"full contents",
actualCustomStringRepresentationForPackageMembersToCall()))
.addAll(exceptions.describeAsAdditionalInfo())
.build());
} else {
// Did not find matching key or value.
failWithoutActual(
factsBuilder()
.add(fact("expected to contain entry", entry))
.addAll(correspondence.describeForMapValues())
.add(simpleFact("but did not"))
.add(
fact(
"full contents",
actualCustomStringRepresentationForPackageMembersToCall()))
.addAll(exceptions.describeAsAdditionalInfo())
.build());
}
}
}
    /**
     * Checks that the actual multimap does not contain an entry with the given key and a value that
     * corresponds to the given value.
     */
    public void doesNotContainEntry(@Nullable Object key, E value) {
      Entry<?, E> entry = immutableEntry(key, value);
      if (actual == null) {
        failWithActual("expected a multimap that does not contain entry", entry);
        return;
      }
      // Only the values mapped to the given key can violate this assertion.
      Collection<A> actualValues = castActual(actual).asMap().get(key);
      if (actualValues != null) {
        List<A> matchingValues = new ArrayList<>();
        // Records any exceptions thrown by the user-supplied correspondence while comparing.
        Correspondence.ExceptionStore exceptions = Correspondence.ExceptionStore.forMapValues();
        for (A actualValue : actualValues) {
          if (correspondence.safeCompare(actualValue, value, exceptions)) {
            matchingValues.add(actualValue);
          }
        }
        // Fail if we found a matching value for the key.
        if (!matchingValues.isEmpty()) {
          failWithoutActual(
              factsBuilder()
                  .add(fact("expected not to contain entry", entry))
                  .addAll(correspondence.describeForMapValues())
                  .add(fact("but contained that key with matching values", matchingValues))
                  .add(
                      fact(
                          "full contents",
                          actualCustomStringRepresentationForPackageMembersToCall()))
                  .addAll(exceptions.describeAsAdditionalInfo())
                  .build());
        } else {
          // No value matched, but we still need to fail if we hit an exception along the way.
          if (exceptions.hasCompareException()) {
            failWithoutActual(
                factsBuilder()
                    .addAll(exceptions.describeAsMainCause())
                    .add(fact("expected not to contain entry", entry))
                    .addAll(correspondence.describeForMapValues())
                    .add(simpleFact("found no match (but failing because of exception)"))
                    .add(
                        fact(
                            "full contents",
                            actualCustomStringRepresentationForPackageMembersToCall()))
                    .build());
          }
        }
      }
    }
    /**
     * Checks that the actual multimap contains exactly the keys in the given multimap, mapping to
     * values that correspond to the values of the given multimap.
     *
     * <p>A subsequent call to {@link Ordered#inOrder} may be made if the caller wishes to verify
     * that the two Multimaps iterate fully in the same order. That is, their key sets iterate in
     * the same order, and the corresponding value collections for each key iterate in the same
     * order.
     */
    @CanIgnoreReturnValue
    public Ordered containsExactlyEntriesIn(@Nullable Multimap<?, ? extends E> expected) {
      // A null expected multimap is a usage error; report it rather than throwing NPE.
      if (expected == null) {
        failWithoutActual(
            simpleFact("could not perform containment check because expected multimap was null"),
            actualContents());
        return ALREADY_FAILED;
      } else if (actual == null) {
        failWithActual("expected a multimap that contains exactly", expected);
        return ALREADY_FAILED;
      }
      // ALREADY_FAILED above is a no-op Ordered, so a chained inOrder() call stays safe.
      return internalContainsExactlyEntriesIn(actual, expected);
    }
    /*
     * This helper exists so that we can declare the simpler, type-parameter-free signature for the
     * public containsExactlyEntriesIn method. This is recommended by Effective Java item 31 (3rd
     * edition).
     */
    private <K extends @Nullable Object, V extends E> Ordered internalContainsExactlyEntriesIn(
        Multimap<?, ?> actual, Multimap<K, V> expected) {
      // Note: The non-fuzzy MultimapSubject.containsExactlyEntriesIn has a custom implementation
      // and produces somewhat better failure messages simply asserting about the iterables of
      // entries would: it formats the expected values as k=[v1, v2] rather than k=v1, k=v2; and in
      // the case where inOrder() fails it says the keys and/or the values for some keys are out of
      // order. We don't bother with that here. It would be nice, but it would be a lot of added
      // complexity for little gain.
      // Delegate to an iterable check over the entry sets, pairing entries with a correspondence
      // that requires equal keys and correspondence-matching values.
      return subject
          .substituteCheck()
          .about(subject.iterableEntries())
          .that(actual.entries())
          .comparingElementsUsing(MultimapSubject.<K, A, V>entryCorrespondence(correspondence))
          .containsExactlyElementsIn(expected.entries());
    }
    /**
     * Checks that the actual multimap contains at least the keys in the given multimap, mapping to
     * values that correspond to the values of the given multimap.
     *
     * <p>A subsequent call to {@link Ordered#inOrder} may be made if the caller wishes to verify
     * that the two Multimaps iterate fully in the same order. That is, their key sets iterate in
     * the same order, and the corresponding value collections for each key iterate in the same
     * order.
     */
    @CanIgnoreReturnValue
    public Ordered containsAtLeastEntriesIn(@Nullable Multimap<?, ? extends E> expected) {
      // A null expected multimap is a usage error; report it rather than throwing NPE.
      if (expected == null) {
        failWithoutActual(
            simpleFact("could not perform containment check because expected multimap was null"),
            actualContents());
        return ALREADY_FAILED;
      } else if (actual == null) {
        failWithActual("expected a multimap that contains at least", expected);
        return ALREADY_FAILED;
      }
      // ALREADY_FAILED above is a no-op Ordered, so a chained inOrder() call stays safe.
      return internalContainsAtLeastEntriesIn(actual, expected);
    }
    /*
     * This helper exists so that we can declare the simpler, type-parameter-free signature for the
     * public containsAtLeastEntriesIn method. This is recommended by Effective Java item 31 (3rd
     * edition).
     */
    private <K extends @Nullable Object, V extends E> Ordered internalContainsAtLeastEntriesIn(
        Multimap<?, ?> actual, Multimap<K, V> expected) {
      // Note: The non-fuzzy MultimapSubject.containsAtLeastEntriesIn has a custom implementation
      // and produces somewhat better failure messages simply asserting about the iterables of
      // entries would: it formats the expected values as k=[v1, v2] rather than k=v1, k=v2; and in
      // the case where inOrder() fails it says the keys and/or the values for some keys are out of
      // order. We don't bother with that here. It would be nice, but it would be a lot of added
      // complexity for little gain.
      // Delegate to an iterable check over the entry sets, pairing entries with a correspondence
      // that requires equal keys and correspondence-matching values.
      return subject
          .substituteCheck()
          .about(subject.iterableEntries())
          .that(actual.entries())
          .comparingElementsUsing(MultimapSubject.<K, A, V>entryCorrespondence(correspondence))
          .containsAtLeastElementsIn(expected.entries());
    }
    /**
     * Checks that the actual multimap contains exactly the given set of key/value pairs.
     *
     * <p><b>Warning:</b> the use of varargs means that we cannot guarantee an equal number of
     * key/value pairs at compile time. Please make sure you provide varargs in key/value pairs!
     */
    @CanIgnoreReturnValue
    public Ordered containsExactly(@Nullable Object k0, @Nullable E v0, @Nullable Object... rest) {
      // Fold the alternating key/value varargs into a multimap, then reuse the multimap overload.
      @SuppressWarnings("unchecked")
      Multimap<?, E> expectedMultimap = (Multimap<?, E>) accumulateMultimap(k0, v0, rest);
      return containsExactlyEntriesIn(expectedMultimap);
    }
    /** Checks that the actual multimap is empty. */
    @CanIgnoreReturnValue
    public Ordered containsExactly() {
      // With nothing expected there are no values to compare, so the plain subject's check is used.
      return subject.containsExactly();
    }
    /**
     * Checks that the actual multimap contains at least the given key/value pairs.
     *
     * <p><b>Warning:</b> the use of varargs means that we cannot guarantee an equal number of
     * key/value pairs at compile time. Please make sure you provide varargs in key/value pairs!
     */
    @CanIgnoreReturnValue
    public Ordered containsAtLeast(@Nullable Object k0, @Nullable E v0, @Nullable Object... rest) {
      // Fold the alternating key/value varargs into a multimap, then reuse the multimap overload.
      @SuppressWarnings("unchecked")
      Multimap<?, E> expectedMultimap = (Multimap<?, E>) accumulateMultimap(k0, v0, rest);
      return containsAtLeastEntriesIn(expectedMultimap);
    }
    @SuppressWarnings("unchecked") // throwing ClassCastException is the correct behaviour
    private Multimap<?, A> castActual(Multimap<?, ?> actual) {
      // Deliberately unchecked: if the actual values are not of type A, later comparisons surface
      // a ClassCastException, which (per the suppression note above) is the intended behaviour.
      return (Multimap<?, A>) actual;
    }
    // The following private helpers simply delegate to the enclosing MultimapSubject so the
    // assertion methods above can be written without qualifying every call with `subject.`.
    private String actualCustomStringRepresentationForPackageMembersToCall() {
      return subject.actualCustomStringRepresentationForPackageMembersToCall();
    }
    private Fact actualContents() {
      return subject.actualContents();
    }
    private void failWithActual(String key, @Nullable Object value) {
      subject.failWithActual(key, value);
    }
    private void failWithoutActual(Iterable<Fact> facts) {
      subject.failWithoutActual(facts);
    }
    private void failWithoutActual(Fact first, Fact... rest) {
      subject.failWithoutActual(first, rest);
    }
    /** Static factory for a value-correspondence view over the given {@code MultimapSubject}. */
    static <E extends @Nullable Object, A extends @Nullable Object>
        UsingCorrespondence<A, E> create(
            MultimapSubject subject, Correspondence<? super A, ? super E> correspondence) {
      return new UsingCorrespondence<>(subject, correspondence);
    }
}
  /**
   * Returns a correspondence between map entries that holds when the keys are equal and the values
   * correspond under {@code valueCorrespondence}. The formatted description is used in failure
   * messages produced by the iterable checks above.
   */
  private static <
          K extends @Nullable Object, A extends @Nullable Object, E extends @Nullable Object>
      Correspondence<Entry<K, A>, Entry<K, E>> entryCorrespondence(
          Correspondence<? super A, ? super E> valueCorrespondence) {
    return Correspondence.from(
        (actual, expected) ->
            Objects.equals(actual.getKey(), expected.getKey())
                && valueCorrespondence.compare(actual.getValue(), expected.getValue()),
        lenientFormat(
            "has a key that is equal to and a value that %s the value of", valueCorrespondence));
  }
  /** Returns a fact labelled "full contents" built from the actual value. */
  private Fact fullContents() {
    return actualValue("full contents");
  }
  /** Returns a fact labelled "actual contents" built from the actual value. */
  private Fact actualContents() {
    return actualValue("actual contents");
  }
  /** Ordered implementation that does nothing because it's already known to be true. */
  private static final Ordered IN_ORDER = () -> {};
  /** Ordered implementation that does nothing because an earlier check already caused a failure. */
  private static final Ordered ALREADY_FAILED = () -> {};
  /** Subject factory for multimaps, for use with assertion entry points taking a factory. */
  static Factory<MultimapSubject, Multimap<?, ?>> multimaps() {
    return MultimapSubject::new;
  }
}
| UsingCorrespondence |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MinAggFunction.java | {
"start": 3644,
"end": 3874
} | class ____ extends MinAggFunction {
@Override
public DataType getResultType() {
return DataTypes.INT();
}
}
/** Built-in Byte Min aggregate function. */
public static | IntMinAggFunction |
java | apache__hadoop | hadoop-tools/hadoop-federation-balance/src/main/java/org/apache/hadoop/tools/fedbalance/FedBalanceContext.java | {
"start": 1291,
"end": 1371
} | class ____ the basic information needed when Federation Balance.
*/
public | contains |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/internal/client/DefaultCompositeApiClient.java | {
"start": 2594,
"end": 10355
} | class ____ extends AbstractClientBase implements CompositeApiClient {
    // Jackson mapper used for all (de)serialization; user-supplied or a component default.
    private ObjectMapper mapper;
    public DefaultCompositeApiClient(
            final SalesforceEndpointConfig configuration,
            final String version, final SalesforceSession session,
            final SalesforceHttpClient httpClient, final SalesforceLoginConfig loginConfig)
            throws SalesforceException {
        super(version, session, httpClient, loginConfig);
        // Prefer the mapper configured on the endpoint so user customisations are honoured;
        // otherwise fall back to the component's standard mapper.
        if (configuration.getObjectMapper() != null) {
            mapper = configuration.getObjectMapper();
        } else {
            mapper = JsonUtils.createObjectMapper();
        }
    }
    /**
     * Submits a caller-prepared (raw) payload to the composite endpoint without interpreting it.
     * Defaults to POST when no HTTP method is given; the raw response stream is handed to the
     * callback wrapped in an Optional (empty when there was no body).
     */
    public void submitCompositeRaw(
            final InputStream raw, final Map<String, List<String>> headers,
            final ResponseCallback<InputStream> callback, String compositeMethod)
            throws SalesforceException {
        final String url = versionUrl() + "composite";
        // POST is the default for the composite endpoint when no method was supplied.
        if (compositeMethod == null) {
            compositeMethod = HttpMethod.POST.asString();
        }
        Request request = createRequest(compositeMethod, url, headers);
        final Request.Content content = new InputStreamRequestContent(raw);
        request.body(content);
        doHttpRequest(request, new ClientResponseCallback() {
            @Override
            public void onResponse(InputStream response, Map<String, String> headers, SalesforceException ex) {
                // Adapt the client-level callback to the Optional-based composite API callback.
                callback.onResponse(Optional.ofNullable(response), headers, ex);
            }
        });
    }
    @Override
    public void submitComposite(
            final SObjectComposite composite, final Map<String, List<String>> headers,
            final ResponseCallback<SObjectCompositeResponse> callback)
            throws SalesforceException {
        final String url = versionUrl() + "composite";
        final Request post = createRequest(HttpMethod.POST, url, headers);
        // Serialize the composite payload to JSON and send it as the POST body.
        final Request.Content content = serialize(composite, composite.objectTypes());
        post.body(content);
        // Deserialize the response (if any) into SObjectCompositeResponse before calling back.
        doHttpRequest(post,
                (response, responseHeaders, exception) -> callback.onResponse(
                        tryToReadResponse(SObjectCompositeResponse.class, response), responseHeaders,
                        exception));
    }
    @Override
    public void submitCompositeBatch(
            final SObjectBatch batch, final Map<String, List<String>> headers,
            final ResponseCallback<SObjectBatchResponse> callback)
            throws SalesforceException {
        // The batch payload may demand a newer API version than the component is configured with.
        checkCompositeBatchVersion(version, batch.getVersion());
        final String url = versionUrl() + "composite/batch";
        final Request post = createRequest(HttpMethod.POST, url, headers);
        final Request.Content content = serialize(batch, batch.objectTypes());
        post.body(content);
        doHttpRequest(post,
                (response, responseHeaders, exception) -> callback.onResponse(
                        tryToReadResponse(SObjectBatchResponse.class, response),
                        responseHeaders, exception));
    }
    @Override
    public void submitCompositeTree(
            final SObjectTree tree, final Map<String, List<String>> headers,
            final ResponseCallback<SObjectTreeResponse> callback)
            throws SalesforceException {
        // The tree endpoint is per-object-type: composite/tree/<objectType>.
        final String url = versionUrl() + "composite/tree/" + tree.getObjectType();
        final Request post = createRequest(HttpMethod.POST, url, headers);
        final Request.Content content = serialize(tree, tree.objectTypes());
        post.body(content);
        doHttpRequest(post,
                (response, responseHeaders, exception) -> callback.onResponse(
                        tryToReadResponse(SObjectTreeResponse.class, response),
                        responseHeaders, exception));
    }
    // Builds a request for the given HTTP method/URL/headers and applies the common composite-API
    // headers and authorization (see populateRequest). The two overloads differ only in how the
    // HTTP method is expressed.
    Request createRequest(final String method, final String url, final Map<String, List<String>> headers) {
        final Request request = getRequest(method, url, headers);
        return populateRequest(request);
    }
    Request createRequest(final HttpMethod method, final String url, final Map<String, List<String>> headers) {
        final Request request = getRequest(method, url, headers);
        return populateRequest(request);
    }
    private Request populateRequest(Request request) {
        // setup authorization
        setAccessToken(request);
        // Exchange UTF-8 JSON with this API: declare it for both the body and accepted responses.
        request.headers(h -> h.add(HttpHeader.CONTENT_TYPE, APPLICATION_JSON_UTF8));
        request.headers(h -> h.add(HttpHeader.ACCEPT, APPLICATION_JSON_UTF8));
        request.headers(h -> h.add(HttpHeader.ACCEPT_CHARSET, StandardCharsets.UTF_8.name()));
        return request;
    }
    // Deserializes a single value of the expected type from the given stream.
    <T> T fromJson(final Class<T> expectedType, final InputStream responseStream) throws IOException {
        return jsonReaderFor(expectedType).readValue(responseStream);
    }
    // Deserializes a JSON array from the stream into a List of the expected element type.
    <T> List<T> fromJsonList(final Class<T> expectedType, final InputStream responseStream) throws IOException {
        final CollectionType collectionType = mapper.getTypeFactory().constructCollectionType(List.class, expectedType);
        return mapper.readValue(responseStream, collectionType);
    }
    // Reader/writer factories bound to the configured mapper.
    ObjectReader jsonReaderFor(final Class<?> type) {
        return mapper.readerFor(type);
    }
    ObjectWriter jsonWriterFor(final Object obj) {
        final Class<?> type = obj.getClass();
        return mapper.writerFor(type);
    }
    // Serializes the body to JSON wrapped in an InputStream-backed request content.
    // NOTE(review): additionalTypes is currently unused here; kept for signature compatibility.
    Request.Content serialize(final Object body, final Class<?>... additionalTypes)
            throws SalesforceException {
        // input stream as entity content is needed for authentication retries
        return new InputStreamRequestContent(toJson(body));
    }
    // Base URL of the Salesforce REST data services: {instanceUrl}/services/data/.
    String servicesDataUrl() {
        return instanceUrl + "/services/data/";
    }
InputStream toJson(final Object obj) throws SalesforceException {
byte[] jsonBytes;
try {
jsonBytes = jsonWriterFor(obj).writeValueAsBytes(obj);
} catch (final JsonProcessingException e) {
throw new SalesforceException("Unable to serialize given SObjectTree to JSON", e);
}
return new ByteArrayInputStream(jsonBytes);
}
    // Attempts to deserialize the response body; returns Optional.empty() for a null stream or
    // unreadable JSON (logged as a warning). Always closes a non-null stream.
    <T> Optional<T> tryToReadResponse(final Class<T> expectedType, final InputStream responseStream) {
        if (responseStream == null) {
            return Optional.empty();
        }
        try {
            return Optional.of(fromJson(expectedType, responseStream));
        } catch (IOException e) {
            log.warn("Unable to read response from the Composite API", e);
            return Optional.empty();
        } finally {
            IOHelper.close(responseStream);
        }
    }
    // Versioned API root: {instanceUrl}/services/data/v{version}/.
    String versionUrl() {
        ObjectHelper.notNull(version, "version");
        return servicesDataUrl() + "v" + version + "/";
    }
    @Override
    protected void setAccessToken(final Request request) {
        // Standard OAuth bearer-token authorization header.
        request.headers(h -> h.add("Authorization", "Bearer " + accessToken));
    }
static void checkCompositeBatchVersion(final String configuredVersion, final Version batchVersion)
throws SalesforceException {
if (Version.create(configuredVersion).compareTo(batchVersion) < 0) {
throw new SalesforceException(
"Component is configured with Salesforce API version " + configuredVersion
+ ", but the payload of the Composite API batch operation requires at least "
+ batchVersion,
0);
}
}
}
| DefaultCompositeApiClient |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FileNameGenerator.java | {
"start": 1299,
"end": 2984
} | class ____ {
  private static final int DEFAULT_FILES_PER_DIRECTORY = 32;
  // Odometer-style per-level directory indexes; -1 marks an unused (inactive) level.
  private final int[] pathIndecies = new int[20]; // this will support up to 32**20 = 2**100 = 10**30 files
  private final String baseDir;
  // Directory that newly generated file names are currently placed in.
  private String currentDir;
  private final int filesPerDirectory;
  // Total number of file names handed out so far.
  private long fileCount;
  /** Creates a generator rooted at {@code baseDir} with the default fan-out of 32 per directory. */
  FileNameGenerator(String baseDir) {
    this(baseDir, DEFAULT_FILES_PER_DIRECTORY);
  }
  /** Creates a generator rooted at {@code baseDir} placing {@code filesPerDir} files per directory. */
  FileNameGenerator(String baseDir, int filesPerDir) {
    this.baseDir = baseDir;
    this.filesPerDirectory = filesPerDir;
    reset();
  }
  /**
   * Advances the per-level directory indexes like an odometer and returns the resulting
   * directory path {@code baseDir/prefix<i0>/prefix<i1>/...}. When every active level is at its
   * maximum, a new deeper level is opened. Not synchronized itself; the only caller here
   * (getNextFileName) is synchronized.
   */
  String getNextDirName(String prefix) {
    // depth = number of currently active levels (the first -1 marks the end).
    int depth = 0;
    while(pathIndecies[depth] >= 0)
      depth++;
    int level;
    // Carry: walk upward past maxed-out levels, resetting each to 0.
    for(level = depth-1;
        level >= 0 && pathIndecies[level] == filesPerDirectory-1; level--)
      pathIndecies[level] = 0;
    if(level < 0)
      // Every level carried over: open a new, deeper level starting at 0.
      pathIndecies[depth] = 0;
    else
      pathIndecies[level]++;
    // Rebuild the path string from the active indexes.
    level = 0;
    String next = baseDir;
    while(pathIndecies[level] >= 0)
      next = next + "/" + prefix + pathIndecies[level++];
    return next;
  }
  /**
   * Returns the next generated file name, moving to a fresh directory every
   * {@code filesPerDirectory} names.
   */
  synchronized String getNextFileName(String fileNamePrefix) {
    long fNum = fileCount % filesPerDirectory;
    if(fNum == 0) {
      // Current directory is full (or this is the first call): advance to the next directory.
      currentDir = getNextDirName(fileNamePrefix + "Dir");
    }
    String fn = currentDir + "/" + fileNamePrefix + fileCount;
    fileCount++;
    return fn;
  }
  /** Returns the total number of file names generated so far. */
  public long getFileCount() {
    return fileCount;
  }
  /** Restores the initial state: no active directory levels, zero files generated. */
  private synchronized void reset() {
    Arrays.fill(pathIndecies, -1);
    fileCount = 0L;
    currentDir = "";
  }
  synchronized int getFilesPerDirectory() {
    return filesPerDirectory;
  }
  synchronized String getCurrentDir() {
    return currentDir;
  }
}
| FileNameGenerator |
java | google__guava | android/guava/src/com/google/common/collect/CompactHashSet.java | {
"start": 3484,
"end": 24423
} | class ____<E extends @Nullable Object> extends AbstractSet<E> implements Serializable {
// TODO(user): cache all field accesses in local vars
  /** Creates an empty {@code CompactHashSet} instance. */
  public static <E extends @Nullable Object> CompactHashSet<E> create() {
    return new CompactHashSet<>();
  }
  /**
   * Creates a <i>mutable</i> {@code CompactHashSet} instance containing the elements of the given
   * collection in unspecified order.
   *
   * @param collection the elements that the set should contain
   * @return a new {@code CompactHashSet} containing those elements (minus duplicates)
   */
  public static <E extends @Nullable Object> CompactHashSet<E> create(
      Collection<? extends E> collection) {
    // Presize to the collection's size so no resizing is needed while copying.
    CompactHashSet<E> set = createWithExpectedSize(collection.size());
    set.addAll(collection);
    return set;
  }
  /**
   * Creates a <i>mutable</i> {@code CompactHashSet} instance containing the given elements in
   * unspecified order.
   *
   * @param elements the elements that the set should contain
   * @return a new {@code CompactHashSet} containing those elements (minus duplicates)
   */
  @SafeVarargs
  public static <E extends @Nullable Object> CompactHashSet<E> create(E... elements) {
    CompactHashSet<E> set = createWithExpectedSize(elements.length);
    Collections.addAll(set, elements);
    return set;
  }
  /**
   * Creates a {@code CompactHashSet} instance, with a high enough "initial capacity" that it
   * <i>should</i> hold {@code expectedSize} elements without growth.
   *
   * @param expectedSize the number of elements you expect to add to the returned set
   * @return a new, empty {@code CompactHashSet} with enough capacity to hold {@code expectedSize}
   * elements without resizing
   * @throws IllegalArgumentException if {@code expectedSize} is negative
   */
  public static <E extends @Nullable Object> CompactHashSet<E> createWithExpectedSize(
      int expectedSize) {
    return new CompactHashSet<>(expectedSize);
  }
/**
* Maximum allowed false positive probability of detecting a hash flooding attack given random
* input.
*/
@VisibleForTesting(
)
static final double HASH_FLOODING_FPP = 0.001;
/**
* Maximum allowed length of a hash table bucket before falling back to a j.u.LinkedHashSet based
* implementation. Experimentally determined.
*/
private static final int MAX_HASH_BUCKET_LENGTH = 9;
// See CompactHashMap for a detailed description of how the following fields work. That
// description talks about `keys`, `values`, and `entries`; here the `keys` and `values` arrays
// are replaced by a single `elements` array but everything else works similarly.
/**
* The hashtable object. This can be either:
*
* <ul>
* <li>a byte[], short[], or int[], with size a power of two, created by
* CompactHashing.createTable, whose values are either
* <ul>
* <li>UNSET, meaning "null pointer"
* <li>one plus an index into the entries and elements array
* </ul>
* <li>another java.util.Set delegate implementation. In most modern JDKs, normal java.util hash
* collections intelligently fall back to a binary search tree if hash table collisions are
* detected. Rather than going to all the trouble of reimplementing this ourselves, we
* simply switch over to use the JDK implementation wholesale if probable hash flooding is
* detected, sacrificing the compactness guarantee in very rare cases in exchange for much
* more reliable worst-case behavior.
* <li>null, if no entries have yet been added to the map
* </ul>
*/
private transient @Nullable Object table;
/**
* Contains the logical entries, in the range of [0, size()). The high bits of each int are the
* part of the smeared hash of the element not covered by the hashtable mask, whereas the low bits
* are the "next" pointer (pointing to the next entry in the bucket chain), which will always be
* less than or equal to the hashtable mask.
*
* <pre>
* hash = aaaaaaaa
* mask = 00000fff
* next = 00000bbb
* entry = aaaaabbb
* </pre>
*
* <p>The pointers in [size(), entries.length) are all "null" (UNSET).
*/
private transient int @Nullable [] entries;
/**
* The elements contained in the set, in the range of [0, size()). The elements in [size(),
* elements.length) are all {@code null}.
*/
@VisibleForTesting transient @Nullable Object @Nullable [] elements;
/**
* Keeps track of metadata like the number of hash table bits and modifications of this data
* structure (to make it possible to throw ConcurrentModificationException in the iterator). Note
* that we choose not to make this volatile, so we do less of a "best effort" to track such
* errors, for better performance.
*/
private transient int metadata;
/** The number of elements contained in the set. */
private transient int size;
  /** Constructs a new empty instance of {@code CompactHashSet}. */
  CompactHashSet() {
    init(CompactHashing.DEFAULT_SIZE);
  }
  /**
   * Constructs a new instance of {@code CompactHashSet} with the specified capacity.
   *
   * @param expectedSize the initial capacity of this {@code CompactHashSet}.
   */
  CompactHashSet(int expectedSize) {
    init(expectedSize);
  }
  /** Pseudoconstructor for serialization support. */
  void init(int expectedSize) {
    Preconditions.checkArgument(expectedSize >= 0, "Expected size must be >= 0");
    // Save expectedSize for use in allocArrays()
    // (metadata doubles as the expected size until the arrays are lazily allocated).
    this.metadata = Ints.constrainToRange(expectedSize, 1, CompactHashing.MAX_SIZE);
  }
  /** Returns whether arrays need to be allocated. */
  boolean needsAllocArrays() {
    return table == null;
  }
  /** Handle lazy allocation of arrays. */
  @CanIgnoreReturnValue
  int allocArrays() {
    Preconditions.checkState(needsAllocArrays(), "Arrays already allocated");
    // Until this point, metadata held the expected size saved by init().
    int expectedSize = metadata;
    int buckets = CompactHashing.tableSize(expectedSize);
    this.table = CompactHashing.createTable(buckets);
    setHashTableMask(buckets - 1);
    this.entries = new int[expectedSize];
    this.elements = new Object[expectedSize];
    return expectedSize;
  }
  @SuppressWarnings("unchecked")
  @VisibleForTesting
  @Nullable Set<E> delegateOrNull() {
    // Non-null only after the hash-flooding fallback replaced the compact table with a j.u. Set.
    if (table instanceof Set) {
      return (Set<E>) table;
    }
    return null;
  }
  private Set<E> createHashFloodingResistantDelegate(int tableSize) {
    // LinkedHashSet preserves insertion order; load factor 1.0 keeps the footprint comparable.
    return new LinkedHashSet<>(tableSize, 1.0f);
  }
  @CanIgnoreReturnValue
  Set<E> convertToHashFloodingResistantImplementation() {
    Set<E> newDelegate = createHashFloodingResistantDelegate(hashTableMask() + 1);
    for (int i = firstEntryIndex(); i >= 0; i = getSuccessor(i)) {
      newDelegate.add(element(i));
    }
    // The delegate replaces the compact representation entirely; drop the parallel arrays.
    this.table = newDelegate;
    this.entries = null;
    this.elements = null;
    incrementModCount();
    return newDelegate;
  }
  @VisibleForTesting
  boolean isUsingHashFloodingResistance() {
    return delegateOrNull() != null;
  }
  /** Stores the hash table mask as the number of bits needed to represent an index. */
  private void setHashTableMask(int mask) {
    int hashTableBits = Integer.SIZE - Integer.numberOfLeadingZeros(mask);
    metadata =
        CompactHashing.maskCombine(metadata, hashTableBits, CompactHashing.HASH_TABLE_BITS_MASK);
  }
  /** Gets the hash table mask using the stored number of hash table bits. */
  private int hashTableMask() {
    return (1 << (metadata & CompactHashing.HASH_TABLE_BITS_MASK)) - 1;
  }
  // Bumps the modification counter packed into metadata (checked by the iterator to detect
  // concurrent modification).
  void incrementModCount() {
    metadata += CompactHashing.MODIFICATION_COUNT_INCREMENT;
  }
  @CanIgnoreReturnValue
  @Override
  public boolean add(@ParametricNullness E object) {
    if (needsAllocArrays()) {
      allocArrays();
    }
    Set<E> delegate = delegateOrNull();
    if (delegate != null) {
      return delegate.add(object);
    }
    int[] entries = requireEntries();
    @Nullable Object[] elements = requireElements();
    int newEntryIndex = this.size; // current size, and pointer to the entry to be appended
    int newSize = newEntryIndex + 1;
    int hash = smearedHash(object);
    int mask = hashTableMask();
    int tableIndex = hash & mask;
    int next = CompactHashing.tableGet(requireTable(), tableIndex);
    if (next == UNSET) { // uninitialized bucket
      if (newSize > mask) {
        // Resize and add new entry
        mask = resizeTable(mask, CompactHashing.newCapacity(mask), hash, newEntryIndex);
      } else {
        CompactHashing.tableSet(requireTable(), tableIndex, newEntryIndex + 1);
      }
    } else {
      int entryIndex;
      int entry;
      int hashPrefix = CompactHashing.getHashPrefix(hash, mask);
      int bucketLength = 0;
      // Walk the bucket chain looking for a duplicate; track its length for flooding detection.
      do {
        entryIndex = next - 1;
        entry = entries[entryIndex];
        if (CompactHashing.getHashPrefix(entry, mask) == hashPrefix
            && Objects.equals(object, elements[entryIndex])) {
          return false;
        }
        next = CompactHashing.getNext(entry, mask);
        bucketLength++;
      } while (next != UNSET);
      if (bucketLength >= MAX_HASH_BUCKET_LENGTH) {
        // Suspiciously long chain: switch to the flooding-resistant j.u. Set implementation.
        return convertToHashFloodingResistantImplementation().add(object);
      }
      if (newSize > mask) {
        // Resize and add new entry
        mask = resizeTable(mask, CompactHashing.newCapacity(mask), hash, newEntryIndex);
      } else {
        entries[entryIndex] = CompactHashing.maskCombine(entry, newEntryIndex + 1, mask);
      }
    }
    resizeMeMaybe(newSize);
    insertEntry(newEntryIndex, object, hash, mask);
    this.size = newSize;
    incrementModCount();
    return true;
  }
  /**
   * Creates a fresh entry with the specified object at the specified position in the entry arrays.
   */
  void insertEntry(int entryIndex, @ParametricNullness E object, int hash, int mask) {
    // The new entry's "next" pointer is UNSET; the caller has already linked it into the chain.
    setEntry(entryIndex, CompactHashing.maskCombine(hash, UNSET, mask));
    setElement(entryIndex, object);
  }
  /** Resizes the entries storage if necessary. */
  private void resizeMeMaybe(int newSize) {
    int entriesSize = requireEntries().length;
    if (newSize > entriesSize) {
      // 1.5x but round up to nearest odd (this is optimal for memory consumption on Android)
      int newCapacity = min(CompactHashing.MAX_SIZE, (entriesSize + max(1, entriesSize >>> 1)) | 1);
      if (newCapacity != entriesSize) {
        resizeEntries(newCapacity);
      }
    }
  }
  /**
   * Resizes the internal entries array to the specified capacity, which may be greater or less than
   * the current capacity.
   */
  void resizeEntries(int newCapacity) {
    this.entries = Arrays.copyOf(requireEntries(), newCapacity);
    this.elements = Arrays.copyOf(requireElements(), newCapacity);
  }
  @CanIgnoreReturnValue
  private int resizeTable(int oldMask, int newCapacity, int targetHash, int targetEntryIndex) {
    Object newTable = CompactHashing.createTable(newCapacity);
    int newMask = newCapacity - 1;
    if (targetEntryIndex != UNSET) {
      // Add target first; it must be last in the chain because its entry hasn't yet been created
      CompactHashing.tableSet(newTable, targetHash & newMask, targetEntryIndex + 1);
    }
    Object oldTable = requireTable();
    int[] entries = requireEntries();
    // Loop over current hashtable
    for (int oldTableIndex = 0; oldTableIndex <= oldMask; oldTableIndex++) {
      int oldNext = CompactHashing.tableGet(oldTable, oldTableIndex);
      while (oldNext != UNSET) {
        int entryIndex = oldNext - 1;
        int oldEntry = entries[entryIndex];
        // Rebuild hash using entry hashPrefix and tableIndex ("hashSuffix")
        int hash = CompactHashing.getHashPrefix(oldEntry, oldMask) | oldTableIndex;
        int newTableIndex = hash & newMask;
        int newNext = CompactHashing.tableGet(newTable, newTableIndex);
        // Prepend the entry to its new bucket's chain.
        CompactHashing.tableSet(newTable, newTableIndex, oldNext);
        entries[entryIndex] = CompactHashing.maskCombine(hash, newNext, newMask);
        oldNext = CompactHashing.getNext(oldEntry, oldMask);
      }
    }
    this.table = newTable;
    setHashTableMask(newMask);
    return newMask;
  }
  @Override
  public boolean contains(@Nullable Object object) {
    // No arrays allocated means nothing has ever been added.
    if (needsAllocArrays()) {
      return false;
    }
    Set<E> delegate = delegateOrNull();
    if (delegate != null) {
      return delegate.contains(object);
    }
    int hash = smearedHash(object);
    int mask = hashTableMask();
    int next = CompactHashing.tableGet(requireTable(), hash & mask);
    if (next == UNSET) {
      return false;
    }
    // Walk the bucket chain, comparing the stored hash prefix first to avoid equals() calls.
    int hashPrefix = CompactHashing.getHashPrefix(hash, mask);
    do {
      int entryIndex = next - 1;
      int entry = entry(entryIndex);
      if (CompactHashing.getHashPrefix(entry, mask) == hashPrefix
          && Objects.equals(object, element(entryIndex))) {
        return true;
      }
      next = CompactHashing.getNext(entry, mask);
    } while (next != UNSET);
    return false;
  }
  @CanIgnoreReturnValue
  @Override
  public boolean remove(@Nullable Object object) {
    if (needsAllocArrays()) {
      return false;
    }
    Set<E> delegate = delegateOrNull();
    if (delegate != null) {
      return delegate.remove(object);
    }
    int mask = hashTableMask();
    // CompactHashing.remove (defined elsewhere) unlinks the matching entry from its bucket and
    // returns its index, or -1 if the object was not present.
    int index =
        CompactHashing.remove(
            object,
            /* value= */ null,
            mask,
            requireTable(),
            requireEntries(),
            requireElements(),
            /* values= */ null);
    if (index == -1) {
      return false;
    }
    // Keep the entry arrays dense by moving the last entry into the freed slot.
    moveLastEntry(index, mask);
    size--;
    incrementModCount();
    return true;
  }
  /**
   * Moves the last entry in the entry array into {@code dstIndex}, and nulls out its old position.
   */
  void moveLastEntry(int dstIndex, int mask) {
    Object table = requireTable();
    int[] entries = requireEntries();
    @Nullable Object[] elements = requireElements();
    int srcIndex = size() - 1;
    if (dstIndex < srcIndex) {
      // move last entry to deleted spot
      Object object = elements[srcIndex];
      elements[dstIndex] = object;
      elements[srcIndex] = null;
      // move the last entry to the removed spot, just like we moved the element
      entries[dstIndex] = entries[srcIndex];
      entries[srcIndex] = 0;
      // also need to update whoever's "next" pointer was pointing to the last entry place
      int tableIndex = smearedHash(object) & mask;
      int next = CompactHashing.tableGet(table, tableIndex);
      int srcNext = srcIndex + 1;
      if (next == srcNext) {
        // we need to update the root pointer
        CompactHashing.tableSet(table, tableIndex, dstIndex + 1);
      } else {
        // we need to update a pointer in an entry
        int entryIndex;
        int entry;
        do {
          entryIndex = next - 1;
          entry = entries[entryIndex];
          next = CompactHashing.getNext(entry, mask);
        } while (next != srcNext);
        // here, entries[entryIndex] points to the old entry location; update it
        entries[entryIndex] = CompactHashing.maskCombine(entry, dstIndex + 1, mask);
      }
    } else {
      // The removed entry was already the last one: just clear the slot.
      elements[dstIndex] = null;
      entries[dstIndex] = 0;
    }
  }
  // Index of the first entry in iteration order, or -1 if the set is empty.
  int firstEntryIndex() {
    return isEmpty() ? -1 : 0;
  }
  // Index of the entry after entryIndex in iteration order, or -1 at the end.
  int getSuccessor(int entryIndex) {
    return (entryIndex + 1 < size) ? entryIndex + 1 : -1;
  }
  /**
   * Updates the index an iterator is pointing to after a call to remove: returns the index of the
   * entry that should be looked at after a removal on indexRemoved, with indexBeforeRemove as the
   * index that *was* the next entry that would be looked at.
   */
  int adjustAfterRemove(int indexBeforeRemove, @SuppressWarnings("unused") int indexRemoved) {
    // Removal moves the last entry into the removed slot, so step back one position.
    return indexBeforeRemove - 1;
  }
  @Override
  public Iterator<E> iterator() {
    Set<E> delegate = delegateOrNull();
    if (delegate != null) {
      return delegate.iterator();
    }
    // Iterates entries in entry-array order; fails fast on concurrent modification by comparing
    // against the metadata snapshot taken when the iterator was created.
    return new Iterator<E>() {
      int expectedMetadata = metadata;
      int currentIndex = firstEntryIndex();
      int indexToRemove = -1;
      @Override
      public boolean hasNext() {
        return currentIndex >= 0;
      }
      @Override
      @ParametricNullness
      public E next() {
        checkForConcurrentModification();
        if (!hasNext()) {
          throw new NoSuchElementException();
        }
        indexToRemove = currentIndex;
        E result = element(currentIndex);
        currentIndex = getSuccessor(currentIndex);
        return result;
      }
      @Override
      public void remove() {
        checkForConcurrentModification();
        checkRemove(indexToRemove >= 0);
        // Our own removal is expected: advance the snapshot before mutating.
        incrementExpectedModCount();
        CompactHashSet.this.remove(element(indexToRemove));
        currentIndex = adjustAfterRemove(currentIndex, indexToRemove);
        indexToRemove = -1;
      }
      void incrementExpectedModCount() {
        expectedMetadata += CompactHashing.MODIFICATION_COUNT_INCREMENT;
      }
      private void checkForConcurrentModification() {
        if (metadata != expectedMetadata) {
          throw new ConcurrentModificationException();
        }
      }
    };
  }
@Override
public int size() {
Set<E> delegate = delegateOrNull();
return (delegate != null) ? delegate.size() : size;
}
@Override
public boolean isEmpty() {
return size() == 0;
}
@Override
public @Nullable Object[] toArray() {
if (needsAllocArrays()) {
return new Object[0];
}
Set<E> delegate = delegateOrNull();
return (delegate != null) ? delegate.toArray() : Arrays.copyOf(requireElements(), size);
}
@CanIgnoreReturnValue
@Override
@SuppressWarnings("nullness") // b/192354773 in our checker affects toArray declarations
public <T extends @Nullable Object> T[] toArray(T[] a) {
if (needsAllocArrays()) {
if (a.length > 0) {
a[0] = null;
}
return a;
}
Set<E> delegate = delegateOrNull();
return (delegate != null)
? delegate.toArray(a)
: ObjectArrays.toArrayImpl(requireElements(), 0, size, a);
}
/**
* Ensures that this {@code CompactHashSet} has the smallest representation in memory, given its
* current size.
*/
public void trimToSize() {
if (needsAllocArrays()) {
return;
}
Set<E> delegate = delegateOrNull();
if (delegate != null) {
Set<E> newDelegate = createHashFloodingResistantDelegate(size());
newDelegate.addAll(delegate);
this.table = newDelegate;
return;
}
int size = this.size;
if (size < requireEntries().length) {
resizeEntries(size);
}
int minimumTableSize = CompactHashing.tableSize(size);
int mask = hashTableMask();
if (minimumTableSize < mask) { // smaller table size will always be less than current mask
resizeTable(mask, minimumTableSize, UNSET, UNSET);
}
}
@Override
public void clear() {
if (needsAllocArrays()) {
return;
}
incrementModCount();
Set<E> delegate = delegateOrNull();
if (delegate != null) {
metadata =
Ints.constrainToRange(size(), CompactHashing.DEFAULT_SIZE, CompactHashing.MAX_SIZE);
delegate.clear(); // invalidate any iterators left over!
table = null;
size = 0;
} else {
Arrays.fill(requireElements(), 0, size, null);
CompactHashing.tableClear(requireTable());
Arrays.fill(requireEntries(), 0, size, 0);
this.size = 0;
}
}
@J2ktIncompatible
private void writeObject(ObjectOutputStream stream) throws IOException {
stream.defaultWriteObject();
stream.writeInt(size());
for (E e : this) {
stream.writeObject(e);
}
}
@SuppressWarnings("unchecked")
@J2ktIncompatible
private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
stream.defaultReadObject();
int elementCount = stream.readInt();
if (elementCount < 0) {
throw new InvalidObjectException("Invalid size: " + elementCount);
}
init(elementCount);
for (int i = 0; i < elementCount; i++) {
E element = (E) stream.readObject();
add(element);
}
}
/*
* For discussion of the safety of the following methods, see the comments near the end of
* CompactHashMap.
*/
private Object requireTable() {
return requireNonNull(table);
}
private int[] requireEntries() {
return requireNonNull(entries);
}
private @Nullable Object[] requireElements() {
return requireNonNull(elements);
}
@SuppressWarnings("unchecked")
private E element(int i) {
return (E) requireElements()[i];
}
private int entry(int i) {
return requireEntries()[i];
}
private void setElement(int i, E value) {
requireElements()[i] = value;
}
private void setEntry(int i, int value) {
requireEntries()[i] = value;
}
}
| CompactHashSet |
java | spring-projects__spring-boot | module/spring-boot-graphql/src/main/java/org/springframework/boot/graphql/autoconfigure/GraphQlProperties.java | {
"start": 4548,
"end": 5016
} | class ____ {
/**
* Path to the GraphiQL UI endpoint.
*/
private String path = "/graphiql";
/**
* Whether the default GraphiQL UI is enabled.
*/
private boolean enabled;
public String getPath() {
return this.path;
}
public void setPath(String path) {
this.path = path;
}
public boolean isEnabled() {
return this.enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
}
public static | Graphiql |
java | apache__logging-log4j2 | log4j-api-test/src/test/java/org/apache/logging/log4j/util/CharsetForNameMain.java | {
"start": 884,
"end": 1484
} | class ____ {
/**
* Checks that the given Charset names can be loaded.
*/
public static void main(final String[] args) {
for (final String value : args) {
final String charsetName = value.trim();
if (Charset.isSupported(charsetName)) {
final Charset cs = Charset.forName(charsetName);
System.out.println(String.format("%s -> %s aliases: %s", charsetName, cs.name(), cs.aliases()));
} else {
System.err.println("Not supported:" + charsetName);
}
}
}
}
| CharsetForNameMain |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSourceOperator.java | {
"start": 890,
"end": 1082
} | class ____ extends LuceneSourceOperator {
private static final int MAX_TARGET_PAGE_SIZE = 2048;
private static final int CHUNK_SIZE = 128;
public static final | TimeSeriesSourceOperator |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/FloatFieldTest.java | {
"start": 132,
"end": 513
} | class ____ extends TestCase {
public void test_codec() throws Exception {
User user = new User();
user.setValue(1001F);
String text = JSON.toJSONString(user);
System.out.println(text);
User user1 = JSON.parseObject(text, User.class);
Assert.assertTrue(user1.getValue() == user.getValue());
}
public static | FloatFieldTest |
java | google__dagger | hilt-compiler/main/java/dagger/hilt/android/processor/internal/androidentrypoint/AndroidEntryPointMetadata.java | {
"start": 2721,
"end": 2799
} | class ____ @AndroidEntryPoint annotated classes. */
@AutoValue
public abstract | for |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/timer/TimerRestartTest.java | {
"start": 1032,
"end": 1783
} | class ____ extends ContextTestSupport {
@Test
public void testTimerRestart() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMinimumMessageCount(1);
assertMockEndpointsSatisfied();
mock.reset();
mock.expectedMessageCount(0);
context.stop();
mock.reset();
context.start();
mock.expectedMinimumMessageCount(1);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("timer://foo?fixedRate=true&delay=0&period=10").to("mock:result");
}
};
}
}
| TimerRestartTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/integer_/IntegerAssert_isNotEqualTo_int_Test.java | {
"start": 895,
"end": 1221
} | class ____ extends IntegerAssertBaseTest {
@Override
protected IntegerAssert invoke_api_method() {
return assertions.isNotEqualTo(8);
}
@Override
protected void verify_internal_effects() {
verify(integers).assertNotEqual(getInfo(assertions), getActual(assertions), 8);
}
}
| IntegerAssert_isNotEqualTo_int_Test |
java | apache__maven | compat/maven-embedder/src/main/java/org/apache/maven/cli/transfer/BatchModeMavenTransferListener.java | {
"start": 938,
"end": 1104
} | class ____ extends AbstractMavenTransferListener {
public BatchModeMavenTransferListener(PrintStream out) {
super(out);
}
}
| BatchModeMavenTransferListener |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_3668/Parent.java | {
"start": 608,
"end": 657
} | class ____ extends Parent<Child.ChildB> { }
}
| ParentB |
java | quarkusio__quarkus | integration-tests/reactive-messaging-pulsar/src/main/java/io/quarkus/it/pulsar/HelloServiceImpl.java | {
"start": 210,
"end": 474
} | class ____ extends MutinyHelloGrpc.HelloImplBase {
@Override
public Uni<HelloReply> sayHello(HelloRequest request) {
return Uni.createFrom().item(HelloReply.newBuilder().setMessage("Hello World, " + request.getName()).build());
}
}
| HelloServiceImpl |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/RangeSetAssert_doesNotContainAll_Test.java | {
"start": 1508,
"end": 3264
} | class ____ {
@Test
void should_fail_if_actual_is_null() {
// GIVEN
RangeSet<Integer> actual = null;
// WHEN
var error = expectAssertionError(() -> assertThat(actual).doesNotContainAll(asList(1, 2)));
// THEN
then(error).hasMessage(actualIsNull());
}
@Test
void should_fail_if_values_is_null() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of();
Iterable<Integer> values = null;
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(actual).doesNotContainAll(values));
// THEN
then(thrown).isInstanceOf(NullPointerException.class)
.hasMessage(shouldNotBeNull("values").create());
}
@Test
void should_fail_if_values_is_empty() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of();
Iterable<Integer> values = emptySet();
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(actual).doesNotContainAll(values));
// THEN
assertThat(thrown).isInstanceOf(IllegalArgumentException.class)
.hasMessage("Expecting values not to be empty");
}
@Test
void should_fail_if_actual_contains_values() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of(closed(1, 10));
Iterable<Integer> values = List.of(0, 2, 3, 4);
// WHEN
var error = expectAssertionError(() -> assertThat(actual).doesNotContainAll(values));
// THEN
then(error).hasMessage(shouldNotContain(actual, values, List.of(2, 3, 4)).create());
}
@Test
void should_pass_if_actual_does_not_contain_values() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of(closed(0, 3));
// WHEN/THEN
assertThat(actual).doesNotContainAll(List.of(4, 5));
}
}
| RangeSetAssert_doesNotContainAll_Test |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/array/ScienceMapper.java | {
"start": 524,
"end": 2662
} | interface ____ {
ScienceMapper INSTANCE = Mappers.getMapper( ScienceMapper.class );
ScientistDto scientistToDto(Scientist scientist);
ScientistDto[] scientistsToDtos(Scientist[] scientists);
ScientistDto[] scientistsToDtos(List<Scientist> scientists);
List<ScientistDto> scientistsToDtosAsList(Scientist[] scientists);
ScientistDto[] scientistsToDtos(Scientist[] scientists, @MappingTarget ScientistDto[] target);
@IterableMapping(nullValueMappingStrategy = NullValueMappingStrategy.RETURN_DEFAULT)
boolean[] nvmMapping(boolean[] source);
@IterableMapping( nullValueMappingStrategy = NullValueMappingStrategy.RETURN_DEFAULT )
boolean[] nvmMapping(boolean[] source, @MappingTarget boolean[] target);
@IterableMapping( nullValueMappingStrategy = NullValueMappingStrategy.RETURN_DEFAULT )
short[] nvmMapping(int[] source, @MappingTarget short[] target);
@IterableMapping( nullValueMappingStrategy = NullValueMappingStrategy.RETURN_DEFAULT )
char[] nvmMapping(String[] source, @MappingTarget char[] target);
@IterableMapping( nullValueMappingStrategy = NullValueMappingStrategy.RETURN_DEFAULT )
int[] nvmMapping(int[] source, @MappingTarget int[] target);
@IterableMapping( nullValueMappingStrategy = NullValueMappingStrategy.RETURN_DEFAULT )
long[] nvmMapping(int[] source, @MappingTarget long[] target);
@IterableMapping( nullValueMappingStrategy = NullValueMappingStrategy.RETURN_DEFAULT )
float[] nvmMapping(int[] source, @MappingTarget float[] target);
@IterableMapping( nullValueMappingStrategy = NullValueMappingStrategy.RETURN_DEFAULT )
double[] nvmMapping(int[] source, @MappingTarget double[] target);
@IterableMapping( nullValueMappingStrategy = NullValueMappingStrategy.RETURN_DEFAULT )
String[] nvmMapping(int[] source, @MappingTarget String[] target);
void nvmMappingVoidReturnNull(int[] source, @MappingTarget long[] target);
@IterableMapping( nullValueMappingStrategy = NullValueMappingStrategy.RETURN_DEFAULT )
void nvmMappingVoidReturnDefault(int[] source, @MappingTarget long[] target);
}
| ScienceMapper |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/HeldLockAnalyzerTest.java | {
"start": 5932,
"end": 6747
} | class ____ {
final Lock mu = null;
final Lock lock = null;
@GuardedBy("lock")
int x;
void m() {
mu.lock();
// BUG: Diagnostic contains: []
x++;
try {
// BUG: Diagnostic contains:
// [(SELECT (THIS) mu)]
x++;
} finally {
mu.unlock();
}
// BUG: Diagnostic contains: []
x++;
}
}
""")
.doTest();
}
/** A customized {@link GuardedByChecker} that prints more test-friendly diagnostics. */
@BugPattern(name = "GuardedByLockSet", summary = "", explanation = "", severity = ERROR)
public static | Test |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/function/FailableTest.java | {
"start": 71913,
"end": 72484
} | interface ____ properly defined to throw any exception using the top level generic types
* Object and Throwable.
*/
@Test
void testThrows_FailableBiFunction_Object_Throwable() {
assertThrows(IOException.class, () -> new FailableBiFunction<Object, Object, Object, Throwable>() {
@Override
public Object apply(final Object input1, final Object input2) throws Throwable {
throw new IOException("test");
}
}.apply(new Object(), new Object()));
}
/**
* Tests that our failable | is |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/Boat.java | {
"start": 546,
"end": 1031
} | class ____ implements Serializable {
private Integer id;
private int size;
private int weight;
public Boat() {
super();
}
@Id
@GeneratedValue
public Integer getId() {
return id;
}
@Column(name = "boat_size")
public int getSize() {
return size;
}
public void setId(Integer integer) {
id = integer;
}
public void setSize(int i) {
size = i;
}
public int getWeight() {
return weight;
}
public void setWeight(int weight) {
this.weight = weight;
}
}
| Boat |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/authentication/Http403ForbiddenEntryPoint.java | {
"start": 1353,
"end": 1876
} | class ____'t actually responsible for the commencement of authentication,
* as it is in the case of other providers. It will be called if the user is rejected by
* the AbstractPreAuthenticatedProcessingFilter, resulting in a null authentication.
* <p>
* The <code>commence</code> method will always return an
* <code>HttpServletResponse.SC_FORBIDDEN</code> (403 error).
*
* @author Luke Taylor
* @author Ruud Senden
* @since 2.0
* @see org.springframework.security.web.access.ExceptionTranslationFilter
*/
public | isn |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/producer/illegal/ProducerFieldInInterceptorTest.java | {
"start": 1449,
"end": 1795
} | class ____ {
@AroundInvoke
public Object aroundInvoke(InvocationContext ic) throws Exception {
return ic.proceed();
}
// declaring a producer inside an interceptor should raise DefinitionException
@Produces
String val = "42";
}
@Dependent
@MyBinding
static | BadInterceptor |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/promql/PromqlVerifierTests.java | {
"start": 835,
"end": 5536
} | class ____ extends ESTestCase {
private final Analyzer tsdb = AnalyzerTestUtils.analyzer(AnalyzerTestUtils.tsdbIndexResolution());
@BeforeClass
public static void checkPromqlEnabled() {
assumeTrue("requires snapshot build with promql feature enabled", PromqlFeatures.isEnabled());
}
public void testPromqlMissingAcrossSeriesAggregation() {
assertThat(
error("""
TS test | PROMQL step 5m (
rate(network.bytes_in[5m])
)""", tsdb),
equalTo("2:3: only aggregations across timeseries are supported at this time (found [rate(network.bytes_in[5m])])")
);
}
public void testPromqlStepAndRangeMisaligned() {
assertThat(
error("""
TS test | PROMQL step 1m (
avg(rate(network.bytes_in[5m]))
)""", tsdb),
equalTo("2:29: the duration for range vector selector [5m] must be equal to the query's step for range queries at this time")
);
}
public void testPromqlIllegalNameLabelMatcher() {
assertThat(
error("TS test | PROMQL step 5m (avg({__name__=~\"*.foo.*\"}))", tsdb),
equalTo("1:31: regex label selectors on __name__ are not supported at this time [{__name__=~\"*.foo.*\"}]")
);
}
public void testPromqlSubquery() {
assertThat(
error("TS test | PROMQL step 5m (avg(rate(network.bytes_in[5m:])))", tsdb),
equalTo("1:36: subqueries are not supported at this time [network.bytes_in[5m:]]")
);
assertThat(
error("TS test | PROMQL step 5m (avg(rate(network.bytes_in[5m:1m])))", tsdb),
equalTo("1:36: subqueries are not supported at this time [network.bytes_in[5m:1m]]")
);
}
@AwaitsFix(
bugUrl = "Doesn't parse: line 1:27: Invalid query '1+1'[ArithmeticBinaryContext] given; "
+ "expected LogicalPlan but found VectorBinaryArithmetic"
)
public void testPromqlArithmetricOperators() {
assertThat(
error("TS test | PROMQL step 5m (1+1)", tsdb),
equalTo("1:27: arithmetic operators are not supported at this time [foo]")
);
assertThat(
error("TS test | PROMQL step 5m (foo+1)", tsdb),
equalTo("1:27: arithmetic operators are not supported at this time [foo]")
);
assertThat(
error("TS test | PROMQL step 5m (1+foo)", tsdb),
equalTo("1:27: arithmetic operators are not supported at this time [foo]")
);
assertThat(
error("TS test | PROMQL step 5m (foo+bar)", tsdb),
equalTo("1:27: arithmetic operators are not supported at this time [foo]")
);
}
@AwaitsFix(
bugUrl = "Doesn't parse: line 1:27: Invalid query 'method_code_http_errors_rate5m{code=\"500\"}'"
+ "[ValueExpressionContext] given; expected Expression but found InstantSelector"
)
public void testPromqlVectorMatching() {
assertThat(
error(
"TS test | PROMQL step 5m (method_code_http_errors_rate5m{code=\"500\"} / ignoring(code) method_http_requests_rate5m)",
tsdb
),
equalTo("")
);
assertThat(
error(
"TS test | PROMQL step 5m (method_code_http_errors_rate5m / ignoring(code) group_left method_http_requests_rate5m)",
tsdb
),
equalTo("")
);
}
public void testPromqlModifier() {
assertThat(
error("TS test | PROMQL step 5m (avg(rate(network.bytes_in[5m] offset 5m)))", tsdb),
equalTo("1:36: offset modifiers are not supported at this time [network.bytes_in[5m] offset 5m]")
);
/* TODO
assertThat(
error("TS test | PROMQL step 5m (foo @ start())", tsdb),
equalTo("1:27: @ modifiers are not supported at this time [foo @ start()]")
);*/
}
@AwaitsFix(
bugUrl = "Doesn't parse: line 1:27: Invalid query 'foo and bar'[LogicalBinaryContext] given; "
+ "expected Expression but found InstantSelector"
)
public void testLogicalSetBinaryOperators() {
assertThat(error("TS test | PROMQL step 5m (foo and bar)", tsdb), equalTo(""));
assertThat(error("TS test | PROMQL step 5m (foo or bar)", tsdb), equalTo(""));
assertThat(error("TS test | PROMQL step 5m (foo unless bar)", tsdb), equalTo(""));
}
@Override
protected List<String> filteredWarnings() {
return withDefaultLimitWarning(super.filteredWarnings());
}
}
| PromqlVerifierTests |
java | apache__flink | flink-table/flink-table-code-splitter/src/test/resources/member/expected/TestRewriteGenericType.java | {
"start": 7,
"end": 499
} | class ____ {
java.util.List<Integer>[] rewrite$0 = new java.util.List[1];
java.util.List<String>[] rewrite$1 = new java.util.List[2];
{
rewrite$1[0] = new java.util.ArrayList<>();
rewrite$0[0] = new java.util.ArrayList<>();
rewrite$1[1] = new java.util.ArrayList<>();
}
public String myFun() {
String aa = rewrite$1[0].get(0);
long bb = rewrite$0[0].get(1);
String cc = rewrite$1[1].get(2);
return cc + bb + aa;
}
}
| TestRewriteGenericType |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/serialization/entity/BuildRecordId.java | {
"start": 262,
"end": 804
} | class ____ implements Serializable {
private long id;
public BuildRecordId() {
}
public BuildRecordId(long id) {
this.id = id;
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
BuildRecordId longId = (BuildRecordId) o;
return id == longId.id;
}
@Override
public int hashCode() {
return Objects.hash( id );
}
}
| BuildRecordId |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/StringParam.java | {
"start": 921,
"end": 1247
} | class ____ extends Param<String, StringParam.Domain> {
StringParam(final Domain domain, String str) {
super(domain, domain.parse(str));
}
/** @return the parameter value as a string */
@Override
public String getValueString() {
return value;
}
/** The domain of the parameter. */
static final | StringParam |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/bugs/injection/InjectMocksShouldTryPropertySettersFirstBeforeFieldAccessTest.java | {
"start": 586,
"end": 1066
} | class ____ {
@Mock List<?> fieldAccess;
@Mock List<?> propertySetterAccess;
@InjectMocks BeanAwaitingInjection awaitingInjection;
@Test
public void shouldInjectUsingPropertySetterIfAvailable() {
assertTrue(awaitingInjection.propertySetterUsed);
}
@Test
public void shouldInjectFieldIfNoSetter() {
assertEquals(fieldAccess, awaitingInjection.fieldAccess);
}
static | InjectMocksShouldTryPropertySettersFirstBeforeFieldAccessTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/select/OracleSelectTest38.java | {
"start": 1017,
"end": 9350
} | class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = //
"select * from " + "(with vw_kreis_statics_t as"
+ " (select substr(xzqh,1,6) xzqh,swrslx,sum(swrs_count) acd_totle from"
+ " (select xzqh,sglx,case when (swrs7 < 3) then '1'"
+ " when (swrs7 < 5) then '2' when (swrs7 <= 9) then '3' else '4' end swrslx,1 swrs_count"
+ " from acduser.vw_acd_info where sglx='1' " + " "
+ " and sgfssj >= ?" + " " + " "
+ " )" + " group by substr(xzqh,1,6),swrslx)" + ""
+ " select e.\"XZQH\",e.\"LESS3\",e.\"F3TO5\",e.\"F5TO9\",e.\"MORE9\",kreis_code, kreis_name,px1,py1,px2,py2 from"
+ " ( select" + " xzqh," + " nvl(max(decode(swrslx,'1',acd_totle)),0) less3,"
+ " nvl(max(decode(swrslx,'2',acd_totle)),0) f3to5,"
+ " nvl(max(decode(swrslx,'3',acd_totle)),0) f5to9,"
+ " nvl(max(decode(swrslx,'4',acd_totle)),0) more9"
+ " from( select * from acduser.vw_kreis_statics_t) group by xzqh " + " ) e" + ""
+ " left join" + " acduser.vw_sc_kreis_code_lv2 f on e.xzqh = f.short_kreis_code) "
+ " where kreis_code in" + "(select * from "
+ " (select tbek_code from acduser.vw_kreis_code start with tbek_code = ? connect by prior tbek_pk=tbek_parent ) "
+ "where tbek_code != ?)";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
// System.out.println(stmt.toString());
{
String result = SQLUtils.toOracleString(stmt);
assertEquals("SELECT *\n" +
"FROM (\n" +
"\tWITH vw_kreis_statics_t AS (\n" +
"\t\t\tSELECT substr(xzqh, 1, 6) AS xzqh, swrslx\n" +
"\t\t\t\t, sum(swrs_count) AS acd_totle\n" +
"\t\t\tFROM (\n" +
"\t\t\t\tSELECT xzqh, sglx\n" +
"\t\t\t\t\t, CASE\n" +
"\t\t\t\t\t\tWHEN (swrs7 < 3) THEN '1'\n" +
"\t\t\t\t\t\tWHEN (swrs7 < 5) THEN '2'\n" +
"\t\t\t\t\t\tWHEN (swrs7 <= 9) THEN '3'\n" +
"\t\t\t\t\t\tELSE '4'\n" +
"\t\t\t\t\tEND AS swrslx, 1 AS swrs_count\n" +
"\t\t\t\tFROM acduser.vw_acd_info\n" +
"\t\t\t\tWHERE sglx = '1'\n" +
"\t\t\t\t\tAND sgfssj >= ?\n" +
"\t\t\t)\n" +
"\t\t\tGROUP BY substr(xzqh, 1, 6), swrslx\n" +
"\t\t)\n" +
"\tSELECT e.\"XZQH\", e.\"LESS3\", e.\"F3TO5\", e.\"F5TO9\", e.\"MORE9\"\n" +
"\t\t, kreis_code, kreis_name, px1, py1, px2\n" +
"\t\t, py2\n" +
"\tFROM (\n" +
"\t\tSELECT xzqh\n" +
"\t\t\t, nvl(max(decode(swrslx, '1', acd_totle)), 0) AS less3\n" +
"\t\t\t, nvl(max(decode(swrslx, '2', acd_totle)), 0) AS f3to5\n" +
"\t\t\t, nvl(max(decode(swrslx, '3', acd_totle)), 0) AS f5to9\n" +
"\t\t\t, nvl(max(decode(swrslx, '4', acd_totle)), 0) AS more9\n" +
"\t\tFROM (\n" +
"\t\t\tSELECT *\n" +
"\t\t\tFROM acduser.vw_kreis_statics_t\n" +
"\t\t)\n" +
"\t\tGROUP BY xzqh\n" +
"\t) e\n" +
"\t\tLEFT JOIN acduser.vw_sc_kreis_code_lv2 f ON e.xzqh = f.short_kreis_code \n" +
")\n" +
"WHERE kreis_code IN (\n" +
"\tSELECT *\n" +
"\tFROM (\n" +
"\t\tSELECT tbek_code\n" +
"\t\tFROM acduser.vw_kreis_code\n" +
"\t\tSTART WITH tbek_code = ?\n" +
"\t\tCONNECT BY PRIOR tbek_pk = tbek_parent\n" +
"\t)\n" +
"\tWHERE tbek_code != ?\n" +
")", result);
}
{
String result = SQLUtils.toOracleString(stmt, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION);
assertEquals("select *\n" +
"from (\n" +
"\twith vw_kreis_statics_t as (\n" +
"\t\t\tselect substr(xzqh, 1, 6) as xzqh, swrslx\n" +
"\t\t\t\t, sum(swrs_count) as acd_totle\n" +
"\t\t\tfrom (\n" +
"\t\t\t\tselect xzqh, sglx\n" +
"\t\t\t\t\t, case\n" +
"\t\t\t\t\t\twhen (swrs7 < 3) then '1'\n" +
"\t\t\t\t\t\twhen (swrs7 < 5) then '2'\n" +
"\t\t\t\t\t\twhen (swrs7 <= 9) then '3'\n" +
"\t\t\t\t\t\telse '4'\n" +
"\t\t\t\t\tend as swrslx, 1 as swrs_count\n" +
"\t\t\t\tfrom acduser.vw_acd_info\n" +
"\t\t\t\twhere sglx = '1'\n" +
"\t\t\t\t\tand sgfssj >= ?\n" +
"\t\t\t)\n" +
"\t\t\tgroup by substr(xzqh, 1, 6), swrslx\n" +
"\t\t)\n" +
"\tselect e.\"XZQH\", e.\"LESS3\", e.\"F3TO5\", e.\"F5TO9\", e.\"MORE9\"\n" +
"\t\t, kreis_code, kreis_name, px1, py1, px2\n" +
"\t\t, py2\n" +
"\tfrom (\n" +
"\t\tselect xzqh\n" +
"\t\t\t, nvl(max(decode(swrslx, '1', acd_totle)), 0) as less3\n" +
"\t\t\t, nvl(max(decode(swrslx, '2', acd_totle)), 0) as f3to5\n" +
"\t\t\t, nvl(max(decode(swrslx, '3', acd_totle)), 0) as f5to9\n" +
"\t\t\t, nvl(max(decode(swrslx, '4', acd_totle)), 0) as more9\n" +
"\t\tfrom (\n" +
"\t\t\tselect *\n" +
"\t\t\tfrom acduser.vw_kreis_statics_t\n" +
"\t\t)\n" +
"\t\tgroup by xzqh\n" +
"\t) e\n" +
"\t\tleft join acduser.vw_sc_kreis_code_lv2 f on e.xzqh = f.short_kreis_code \n" +
")\n" +
"where kreis_code in (\n" +
"\tselect *\n" +
"\tfrom (\n" +
"\t\tselect tbek_code\n" +
"\t\tfrom acduser.vw_kreis_code\n" +
"\t\tstart with tbek_code = ?\n" +
"\t\tconnect by prior tbek_pk = tbek_parent\n" +
"\t)\n" +
"\twhere tbek_code != ?\n" +
")", result);
}
assertEquals(1, statementList.size());
System.out.println(stmt);
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(4, visitor.getTables().size());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("acduser.vw_acd_info")));
assertEquals(18, visitor.getColumns().size());
assertTrue(visitor.getColumns().contains(new TableStat.Column("acduser.vw_acd_info", "xzqh")));
assertTrue(visitor.getColumns().contains(new TableStat.Column("acduser.vw_acd_info", "sglx")));
assertTrue(visitor.getColumns().contains(new TableStat.Column("acduser.vw_sc_kreis_code_lv2", "kreis_code")));
// assertTrue(visitor.getOrderByColumns().contains(new TableStat.Column("employees", "last_name")));
}
}
| OracleSelectTest38 |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/common/logger/FluentLogger.java | {
"start": 887,
"end": 3136
} | interface ____ {
FluentLogger cause(String cause);
FluentLogger more(String extendedInformation);
FluentLogger msg(String msg);
FluentLogger msg(String msg, Object... args);
FluentLogger msg(Supplier<String> supplier);
void trace();
void trace(Throwable t);
void trace(String msg);
void trace(String msg, Object... args);
void trace(String msg, Throwable t);
void debug();
void debug(Throwable t);
void debug(String msg);
void debug(String msg, Object... args);
void debug(String msg, Throwable t);
void info();
void info(Throwable t);
void info(String msg, Object... args);
void info(String msg);
void info(String msg, Throwable t);
void internalWarn();
void internalWarn(Throwable t);
void internalWarn(String msg);
void internalWarn(String msg, Object... args);
void internalWarn(String msg, Throwable t);
void warn(String code);
void warn(String code, Throwable t);
void warn(String code, String msg, Object... args);
void warn(String code, String msg, Throwable t);
void internalError();
void internalError(Throwable t);
void internalError(String msg);
void internalError(String msg, Object... args);
void internalError(String msg, Throwable t);
void error(String code);
void error(String code, Throwable t);
void error(String code, String msg, Object... args);
void error(String code, String msg, Throwable t);
void log(Level level);
void log(Level level, Throwable t);
void log(Level level, String msg);
void log(Level level, String msg, Object... args);
void log(Level level, String msg, Throwable t);
void log(String code, Level level);
void log(String code, Level level, String msg, Object... args);
void log(String code, Level level, String msg, Throwable t);
boolean isTraceEnabled();
boolean isDebugEnabled();
boolean isInfoEnabled();
boolean isWarnEnabled();
boolean isErrorEnabled();
static FluentLogger of(Class<?> key) {
return new FluentLoggerImpl(key);
}
static FluentLogger of(String key) {
return new FluentLoggerImpl(key);
}
| FluentLogger |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/operators/testutils/UniformIntPairGenerator.java | {
"start": 989,
"end": 2111
} | class ____ implements MutableObjectIterator<IntPair> {
final int numKeys;
final int numVals;
int keyCnt = 0;
int valCnt = 0;
boolean repeatKey;
public UniformIntPairGenerator(int numKeys, int numVals, boolean repeatKey) {
this.numKeys = numKeys;
this.numVals = numVals;
this.repeatKey = repeatKey;
}
@Override
public IntPair next(IntPair target) {
if (!repeatKey) {
if (valCnt >= numVals) {
return null;
}
target.setKey(keyCnt++);
target.setValue(valCnt);
if (keyCnt == numKeys) {
keyCnt = 0;
valCnt++;
}
} else {
if (keyCnt >= numKeys) {
return null;
}
target.setKey(keyCnt);
target.setValue(valCnt++);
if (valCnt == numVals) {
valCnt = 0;
keyCnt++;
}
}
return target;
}
@Override
public IntPair next() {
return next(new IntPair());
}
}
| UniformIntPairGenerator |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/runtime/operators/lifecycle/graph/MultiInputTestOperatorFactory.java | {
"start": 1213,
"end": 2370
} | class ____ implements StreamOperatorFactory<TestDataElement> {
private final int numInputs;
private final TestEventQueue eventQueue;
private ChainingStrategy strategy;
private final String operatorId;
public MultiInputTestOperatorFactory(
int numInputs, TestEventQueue eventQueue, String operatorId) {
this.numInputs = numInputs;
this.eventQueue = eventQueue;
this.operatorId = operatorId;
}
@Override
@SuppressWarnings("unchecked")
public <T extends StreamOperator<TestDataElement>> T createStreamOperator(
StreamOperatorParameters<TestDataElement> parameters) {
return (T) new MultiInputTestOperator(numInputs, parameters, eventQueue, operatorId);
}
@Override
public void setChainingStrategy(ChainingStrategy strategy) {
this.strategy = strategy;
}
@Override
public ChainingStrategy getChainingStrategy() {
return strategy;
}
@Override
public Class<? extends StreamOperator<?>> getStreamOperatorClass(ClassLoader classLoader) {
return MultiInputTestOperator.class;
}
}
| MultiInputTestOperatorFactory |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/internal/reader/AuditReaderImpl.java | {
"start": 1584,
"end": 5339
} | class ____ implements AuditReaderImplementor {
private final EnversService enversService;
private final SessionImplementor sessionImplementor;
private final Session session;
private final FirstLevelCache firstLevelCache;
private final CrossTypeRevisionChangesReader crossTypeRevisionChangesReader;
public AuditReaderImpl(
EnversService enversService,
Session session,
SessionImplementor sessionImplementor) {
this.enversService = enversService;
this.sessionImplementor = sessionImplementor;
this.session = session;
firstLevelCache = new FirstLevelCache();
crossTypeRevisionChangesReader = new CrossTypeRevisionChangesReaderImpl( this, enversService );
}
private void checkSession() {
if ( !session.isOpen() ) {
throw new IllegalStateException( "The associated entity manager is closed!" );
}
}
@Override
public SessionImplementor getSessionImplementor() {
return sessionImplementor;
}
@Override
public Session getSession() {
return session;
}
@Override
public FirstLevelCache getFirstLevelCache() {
return firstLevelCache;
}
@Override
public <T> T find(Class<T> cls, Object primaryKey, Number revision) throws
IllegalArgumentException, NotAuditedException, IllegalStateException {
cls = getTargetClassIfProxied( cls );
return this.find( cls, cls.getName(), primaryKey, revision );
}
@Override
public <T> T find(Class<T> cls, String entityName, Object primaryKey, Number revision)
throws IllegalArgumentException, NotAuditedException, IllegalStateException {
return this.find( cls, entityName, primaryKey, revision, false );
}
@Override
@SuppressWarnings("unchecked")
public <T> T find(
Class<T> cls,
String entityName,
Object primaryKey,
Number revision,
boolean includeDeletions) throws IllegalArgumentException, NotAuditedException, IllegalStateException {
cls = getTargetClassIfProxied( cls );
checkNotNull( cls, "Entity class" );
checkNotNull( entityName, "Entity name" );
checkNotNull( primaryKey, "Primary key" );
checkNotNull( revision, "Entity revision" );
checkPositive( revision, "Entity revision" );
checkSession();
if ( firstLevelCache.contains( entityName, revision, primaryKey ) ) {
return (T) firstLevelCache.get( entityName, revision, primaryKey );
}
Object result;
try {
// The result is put into the cache by the entity instantiator called from the query
result = createQuery().forEntitiesAtRevision( cls, entityName, revision, includeDeletions )
.add( AuditEntity.id().eq( primaryKey ) ).getSingleResult();
}
catch (NoResultException e) {
result = null;
}
catch (NonUniqueResultException e) {
throw new AuditException( e );
}
return (T) result;
}
@Override
public List<Number> getRevisions(Class<?> cls, Object primaryKey)
throws IllegalArgumentException, NotAuditedException, IllegalStateException {
cls = getTargetClassIfProxied( cls );
return this.getRevisions( cls, cls.getName(), primaryKey );
}
@Override
public <T> T find(Class<T> cls, Object primaryKey, Date date)
throws IllegalArgumentException, NotAuditedException, RevisionDoesNotExistException, IllegalStateException {
return find( cls, primaryKey, getRevisionNumberForDate( date ) );
}
@Override
public <T> T find(Class<T> cls, Object primaryKey, LocalDateTime datetime)
throws IllegalArgumentException, NotAuditedException, RevisionDoesNotExistException, IllegalStateException {
return find( cls, primaryKey, getRevisionNumberForDate( datetime ) );
}
@Override
@SuppressWarnings("unchecked")
public List<Number> getRevisions(Class<?> cls, String entityName, Object primaryKey)
throws IllegalArgumentException, NotAuditedException, IllegalStateException {
// todo: if a | AuditReaderImpl |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/test/java/org/apache/camel/component/salesforce/api/dto/composite/SObjectCompositeResponseTest.java | {
"start": 1243,
"end": 7458
} | class ____ {
@Test
public void shouldDeserializeFailedJsonResponse() throws IOException {
final String json = IOUtils.toString(
this.getClass().getResourceAsStream(
"/org/apache/camel/component/salesforce/api/dto/composite_response_example_failure.json"),
StandardCharsets.UTF_8);
final ObjectMapper mapper = JsonUtils.createObjectMapper();
final SObjectCompositeResponse response = mapper.readerFor(SObjectCompositeResponse.class).readValue(json);
assertFailedResponse(response);
}
@Test
public void shouldDeserializeSuccessfulJsonResponse() throws IOException {
final String json = IOUtils.toString(
this.getClass().getResourceAsStream(
"/org/apache/camel/component/salesforce/api/dto/composite_response_example_success.json"),
StandardCharsets.UTF_8);
final ObjectMapper mapper = JsonUtils.createObjectMapper();
final SObjectCompositeResponse response = mapper.readerFor(SObjectCompositeResponse.class).readValue(json);
assertSuccessfulResponse(response);
}
static void assertFailedResponse(final SObjectCompositeResponse response) {
final List<SObjectCompositeResult> compositeResponse = response.getCompositeResponse();
final List<SObjectCompositeResult> results = compositeResponse;
assertThat(results).as("It should contain 2 results").hasSize(2);
// upsert
final SObjectCompositeResult upsertResponse = compositeResponse.get(0);
assertThat(upsertResponse.getReferenceId()).as("ReferenceId of first operation should be NewPayment1")
.isEqualTo("NewPayment1");
assertThat(upsertResponse.getHttpStatusCode()).as("httpStatusCode of first operation should be 400").isEqualTo(400);
assertThat(upsertResponse.getBody()).isInstanceOf(List.class);
@SuppressWarnings("unchecked")
final List<Map<String, Object>> upsertBody = (List<Map<String, Object>>) upsertResponse.getBody();
assertThat(upsertBody).hasSize(1);
final Map<String, Object> upsertBodyContent = upsertBody.get(0);
assertThat(upsertBodyContent).as("message of the create operation should be populated properly")
.containsEntry("message",
"The transaction was rolled back since another operation in the same transaction failed.");
assertThat(upsertBodyContent).as("errorCode of the create operation should be PROCESSING_HALTED")
.containsEntry("errorCode", "PROCESSING_HALTED");
// create
final SObjectCompositeResult createResponse = compositeResponse.get(1);
assertThat(createResponse.getReferenceId()).as("ReferenceId of first operation should be NewPayment2")
.isEqualTo("NewPayment2");
assertThat(createResponse.getHttpStatusCode()).as("httpStatusCode of first operation should be 400").isEqualTo(400);
@SuppressWarnings("unchecked")
final List<Map<String, Object>> createBody = (List<Map<String, Object>>) createResponse.getBody();
assertThat(createBody).hasSize(1);
final Map<String, Object> createBodyContent = createBody.get(0);
assertThat(createBodyContent).as("message of the create operation should be populated properly")
.containsEntry("message",
"Foreign key external ID: 0116 not found for field Invoice_External_Id__c in entity blng__Invoice__c");
assertThat(createBodyContent).as("errorCode of the create operation should be INVALID_FIELD").containsEntry("errorCode",
"INVALID_FIELD");
}
static void assertSuccessfulResponse(final SObjectCompositeResponse response) {
final List<SObjectCompositeResult> compositeResponse = response.getCompositeResponse();
final List<SObjectCompositeResult> results = compositeResponse;
assertThat(results).as("It should contain 2 results").hasSize(2);
// create 1
final SObjectCompositeResult firstResponse = compositeResponse.get(0);
assertThat(firstResponse.getHttpHeaders()).as("Location of the create resource should be populated")
.containsEntry("Location", "/services/data/v41.0/sobjects/blng__Payment__c/a1V3E000000EXomUAM");
assertThat(firstResponse.getHttpStatusCode()).as("httpStatusCode of the create operation should be 201").isEqualTo(201);
assertThat(firstResponse.getReferenceId()).as("ReferenceId of the create operation should be NewPayment1")
.isEqualTo("NewPayment1");
@SuppressWarnings("unchecked")
final Map<String, Object> firstResponseMap = (Map<String, Object>) firstResponse.getBody();
assertThat(firstResponseMap).as("id of the create operation should be a1V3E000000EXomUAM").containsEntry("id",
"a1V3E000000EXomUAM");
assertThat(firstResponseMap).as("success of the create operation should be true").containsEntry("success",
Boolean.TRUE);
// create 2
final SObjectCompositeResult secondResponse = compositeResponse.get(1);
assertThat(secondResponse.getHttpHeaders()).as("Location of the create resource should be populated")
.containsEntry("Location", "/services/data/v41.0/sobjects/blng__Payment__c/a1V3E000000EXomUAG");
assertThat(secondResponse.getHttpStatusCode()).as("httpStatusCode of the create operation should be 201")
.isEqualTo(201);
assertThat(secondResponse.getReferenceId()).as("ReferenceId of the create operation should be NewPayment2")
.isEqualTo("NewPayment2");
@SuppressWarnings("unchecked")
final Map<String, Object> secondResponseMap = (Map<String, Object>) secondResponse.getBody();
assertThat(secondResponseMap).as("id of the create operation should be a1V3E000000EXomUAG").containsEntry("id",
"a1V3E000000EXomUAG");
assertThat(secondResponseMap).as("success of the create operation should be true").containsEntry("success",
Boolean.TRUE);
}
}
| SObjectCompositeResponseTest |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java | {
"start": 20394,
"end": 21988
} | class ____ when calling a static interface
// method since java 8 did not check, but java 9 and 10 do
if (painlessMethod.javaMethod().getDeclaringClass().isInterface()) {
visitMethodInsn(
Opcodes.INVOKESTATIC,
type.getInternalName(),
painlessMethod.javaMethod().getName(),
method.getDescriptor(),
true
);
} else {
invokeStatic(type, method);
}
} else if (painlessMethod.javaMethod().getDeclaringClass().isInterface()) {
invokeInterface(type, method);
} else {
invokeVirtual(type, method);
}
}
public void invokeLambdaCall(FunctionRef functionRef) {
Object[] args = new Object[7 + functionRef.delegateInjections.length];
args[0] = Type.getMethodType(functionRef.interfaceMethodType.toMethodDescriptorString());
args[1] = functionRef.delegateClassName;
args[2] = functionRef.delegateInvokeType;
args[3] = functionRef.delegateMethodName;
args[4] = Type.getMethodType(functionRef.delegateMethodType.toMethodDescriptorString());
args[5] = functionRef.isDelegateInterface ? 1 : 0;
args[6] = functionRef.isDelegateAugmented ? 1 : 0;
System.arraycopy(functionRef.delegateInjections, 0, args, 7, functionRef.delegateInjections.length);
invokeDynamic(functionRef.interfaceMethodName, functionRef.getFactoryMethodDescriptor(), LAMBDA_BOOTSTRAP_HANDLE, args);
}
}
| constant |
java | spring-projects__spring-framework | spring-websocket/src/test/java/org/springframework/web/socket/server/standard/SpringConfiguratorTests.java | {
"start": 1405,
"end": 2956
} | class ____ {
private MockServletContext servletContext;
private ContextLoader contextLoader;
private AnnotationConfigWebApplicationContext webAppContext;
private SpringConfigurator configurator;
@BeforeEach
void setup() {
this.servletContext = new MockServletContext();
this.webAppContext = new AnnotationConfigWebApplicationContext();
this.webAppContext.register(Config.class);
this.contextLoader = new ContextLoader(this.webAppContext);
this.contextLoader.initWebApplicationContext(this.servletContext);
this.configurator = new SpringConfigurator();
}
@AfterEach
void destroy() {
this.contextLoader.closeWebApplicationContext(this.servletContext);
}
@Test
void getEndpointPerConnection() throws Exception {
PerConnectionEchoEndpoint endpoint = this.configurator.getEndpointInstance(PerConnectionEchoEndpoint.class);
assertThat(endpoint).isNotNull();
}
@Test
void getEndpointSingletonByType() throws Exception {
EchoEndpoint expected = this.webAppContext.getBean(EchoEndpoint.class);
EchoEndpoint actual = this.configurator.getEndpointInstance(EchoEndpoint.class);
assertThat(actual).isSameAs(expected);
}
@Test
void getEndpointSingletonByComponentName() throws Exception {
ComponentEchoEndpoint expected = this.webAppContext.getBean(ComponentEchoEndpoint.class);
ComponentEchoEndpoint actual = this.configurator.getEndpointInstance(ComponentEchoEndpoint.class);
assertThat(actual).isSameAs(expected);
}
@Configuration
@Import(ComponentEchoEndpoint.class)
static | SpringConfiguratorTests |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_3104/Issue3104Mapper.java | {
"start": 838,
"end": 1274
} | class ____ {
private List<Child> children = Collections.emptyList();
public List<Child> getChildren() {
return children;
}
public void setChildren(List<Child> children) {
if ( children == null ) {
throw new IllegalArgumentException( "children is null" );
}
this.children = Collections.unmodifiableList( children );
}
}
| Target |
java | quarkusio__quarkus | independent-projects/qute/core/src/main/java/io/quarkus/qute/SectionHelperFactory.java | {
"start": 7151,
"end": 10611
} | interface ____ extends ParserDelegate {
/**
*
* @return the section name
*/
String getName();
/**
*
* @return the parameters of the main block
* @see SectionBlock#parameters
*/
default public Map<String, String> getParameters() {
return getBlocks().get(0).parameters;
}
/**
*
* @return {@code true} if the main block declares a parameter of the given name
*/
default public boolean hasParameter(String name) {
return getParameters().containsKey(name);
}
/**
*
* @return the parameter, or null/{@link Parameter.EMPTY} if the main block does not declare a parameter of the given
* name
*/
default public String getParameter(String name) {
return getParameters().get(name);
}
/**
*
* @return the parameter for the specified position
* @see SectionBlock#getParameter(int)
*/
default public String getParameter(int position) {
return getBlocks().get(0).getParameter(position);
}
/**
*
* @param name
* @param defaultValue
* @return the param or the default value if not specified
*/
default public String getParameterOrDefault(String name, String defaultValue) {
String param = getParameter(name);
return param == null || Parameter.EMPTY.equals(param) ? defaultValue : param;
}
/**
* Note that the expression must be registered in the {@link SectionHelperFactory#initializeBlock(Scope, BlockInfo)}
* first.
*
* @param parameterName
* @return the expression registered for the main block under the specified param name, or {@code null}
* @see BlockInfo#addExpression(String, String)
*/
public Expression getExpression(String parameterName);
/**
* Parse the specified value. The expression is not registered in the template.
*
* @param value
* @return a new expression
*/
public Expression parseValue(String value);
public List<SectionBlock> getBlocks();
/**
*
* @param label
* @return the first block with the given label, or {code null} if no such exists
*/
default SectionBlock getBlock(String label) {
for (SectionBlock block : getBlocks()) {
if (label.equals(block.label)) {
return block;
}
}
return null;
}
/**
*
* @return the engine
*/
public Engine getEngine();
/**
*
* @return the origin of the section start tag
*/
public default Origin getOrigin() {
return getBlocks().get(0).origin;
}
/**
* Note that the returned supplier may only be used after the template is parsed, e.g. during the invocation of
* {@link SectionHelper#resolve(io.quarkus.qute.SectionHelper.SectionResolutionContext)}.
*
* @return the current template
*/
Supplier<Template> getCurrentTemplate();
}
/**
*
* @see Parameter
*/
public static final | SectionInitContext |
java | jhy__jsoup | src/test/java/org/jsoup/integration/TestServer.java | {
"start": 1524,
"end": 5357
} | class ____ {
static int Port;
static int TlsPort;
private static final String Localhost = "localhost";
private static final String KeystorePassword = "hunter2";
private static final Server Jetty = newServer();
private static final ServletHandler JettyHandler = new ServletHandler();
private static final Server Proxy = newServer();
private static final Server AuthedProxy = newServer();
private static final HandlerWrapper ProxyHandler = new HandlerWrapper();
private static final HandlerWrapper AuthedProxyHandler = new HandlerWrapper();
private static final ProxySettings ProxySettings = new ProxySettings();
private static Server newServer() {
// logs to stdout, so not highlighted as errors in Maven test runs
StdErrLog logger = new StdErrLog();
logger.setStdErrStream(System.out);
Log.setLog(logger);
return new Server(new InetSocketAddress(Localhost, 0));
}
static {
Jetty.setHandler(JettyHandler);
Proxy.setHandler(ProxyHandler);
AuthedProxy.setHandler(AuthedProxyHandler);
// TLS setup:
try {
File keystoreFile = ParseTest.getFile("/local-cert/server.pfx");
if (!keystoreFile.exists()) throw new FileNotFoundException(keystoreFile.toString());
addHttpsConnector(keystoreFile, Jetty);
setupDefaultTrust(keystoreFile);
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
private TestServer() {
}
public static void start() {
synchronized (Jetty) {
if (Jetty.isStarted()) return;
try {
Jetty.start();
JettyHandler.addFilterWithMapping(new FilterHolder(new AuthFilter(false, false)), "/*", FilterMapping.ALL);
Connector[] jcons = Jetty.getConnectors();
Port = ((ServerConnector) jcons[0]).getLocalPort();
TlsPort = ((ServerConnector) jcons[1]).getLocalPort();
ProxyHandler.setHandler(ProxyServlet.createHandler(false)); // includes proxy, CONNECT proxy, and Auth filters
Proxy.start();
ProxySettings.port = ((ServerConnector) Proxy.getConnectors()[0]).getLocalPort();
AuthedProxyHandler.setHandler(ProxyServlet.createHandler(true));
AuthedProxy.start();
ProxySettings.authedPort = ((ServerConnector) AuthedProxy.getConnectors()[0]).getLocalPort();
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
}
/**
Close any current connections to the authed proxy. Tunneled connections only authenticate in their first
CONNECT, and may be kept alive and reused. So when we want to test unauthed - authed flows, we need to disconnect
them first.
*/
static int closeAuthedProxyConnections() {
ServerConnector connector = (ServerConnector) AuthedProxy.getConnectors()[0];
AtomicInteger count = new AtomicInteger();
connector.getConnectedEndPoints().forEach(endPoint -> {
endPoint.close();
count.getAndIncrement();
});
return count.get();
}
public static ServletUrls map(Class<? extends BaseServlet> servletClass) {
synchronized (Jetty) {
if (!Jetty.isStarted())
start(); // if running out of the test cases
String path = "/" + servletClass.getSimpleName();
JettyHandler.addServletWithMapping(servletClass, path + "/*");
String url = "http://" + Localhost + ":" + Port + path;
String tlsUrl = "https://" + Localhost + ":" + TlsPort + path;
return new ServletUrls(url, tlsUrl);
}
}
public static | TestServer |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng3710PollutedClonedPluginsTest.java | {
"start": 1300,
"end": 3381
} | class ____ extends AbstractMavenIntegrationTestCase {
@Test
public void testitMNG3710POMInheritance() throws Exception {
File testDir = extractResources("/mng-3710/pom-inheritance");
File pluginDir = new File(testDir, "maven-mng3710-pomInheritance-plugin");
File projectsDir = new File(testDir, "projects");
Verifier verifier;
verifier = newVerifier(pluginDir.getAbsolutePath());
verifier.addCliArgument("install");
verifier.execute();
verifier.verifyErrorFreeLog();
verifier = newVerifier(projectsDir.getAbsolutePath());
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
File topLevelTouchFile = new File(projectsDir, "target/touch.txt");
assertFalse(topLevelTouchFile.exists(), "Top-level touch file should NOT be created in projects tree.");
File midLevelTouchFile = new File(projectsDir, "middle/target/touch.txt");
assertTrue(midLevelTouchFile.exists(), "Mid-level touch file should have been created in projects tree.");
File childLevelTouchFile = new File(projectsDir, "middle/child/target/touch.txt");
assertTrue(childLevelTouchFile.exists(), "Child-level touch file should have been created in projects tree.");
}
@Test
public void testitMNG3710OriginalModel() throws Exception {
File testDir = extractResources("/mng-3710/original-model");
File pluginsDir = new File(testDir, "plugins");
File projectDir = new File(testDir, "project");
Verifier verifier;
verifier = newVerifier(pluginsDir.getAbsolutePath());
verifier.addCliArgument("install");
verifier.execute();
verifier.verifyErrorFreeLog();
verifier = newVerifier(projectDir.getAbsolutePath());
verifier.addCliArguments("org.apache.maven.its.mng3710:mavenit-mng3710-directInvoke-plugin:1:run", "validate");
verifier.execute();
verifier.verifyErrorFreeLog();
}
}
| MavenITmng3710PollutedClonedPluginsTest |
java | spring-projects__spring-security | oauth2/oauth2-jose/src/test/java/org/springframework/security/oauth2/jwt/NimbusJweEncoderTests.java | {
"start": 2346,
"end": 7650
} | class ____ {
// @formatter:off
private static final JweHeader DEFAULT_JWE_HEADER =
JweHeader.with(JweAlgorithm.RSA_OAEP_256, EncryptionMethod.A256GCM.getName()).build();
// @formatter:on
private List<JWK> jwkList;
private JWKSource<SecurityContext> jwkSource;
private NimbusJweEncoder jweEncoder;
@BeforeEach
public void setUp() {
this.jwkList = new ArrayList<>();
this.jwkSource = (jwkSelector, securityContext) -> jwkSelector.select(new JWKSet(this.jwkList));
this.jweEncoder = new NimbusJweEncoder(this.jwkSource);
}
@Test
public void encodeWhenJwtClaimsSetThenEncodes() {
RSAKey rsaJwk = TestJwks.DEFAULT_RSA_JWK;
this.jwkList.add(rsaJwk);
JwtClaimsSet jwtClaimsSet = TestJwtClaimsSets.jwtClaimsSet().build();
// @formatter:off
// **********************
// Assume future API:
// JwtEncoderParameters.with(JweHeader jweHeader, JwtClaimsSet claims)
// **********************
// @formatter:on
Jwt encodedJwe = this.jweEncoder.encode(JwtEncoderParameters.from(jwtClaimsSet));
assertThat(encodedJwe.getHeaders()).containsEntry(JoseHeaderNames.ALG, DEFAULT_JWE_HEADER.getAlgorithm());
assertThat(encodedJwe.getHeaders()).containsEntry("enc", DEFAULT_JWE_HEADER.<String>getHeader("enc"));
assertThat(encodedJwe.getHeaders().get(JoseHeaderNames.JKU)).isNull();
assertThat(encodedJwe.getHeaders().get(JoseHeaderNames.JWK)).isNull();
assertThat(encodedJwe.getHeaders()).containsEntry(JoseHeaderNames.KID, rsaJwk.getKeyID());
assertThat(encodedJwe.getHeaders().get(JoseHeaderNames.X5U)).isNull();
assertThat(encodedJwe.getHeaders().get(JoseHeaderNames.X5C)).isNull();
assertThat(encodedJwe.getHeaders().get(JoseHeaderNames.X5T)).isNull();
assertThat(encodedJwe.getHeaders().get(JoseHeaderNames.X5T_S256)).isNull();
assertThat(encodedJwe.getHeaders().get(JoseHeaderNames.TYP)).isNull();
assertThat(encodedJwe.getHeaders().get(JoseHeaderNames.CTY)).isNull();
assertThat(encodedJwe.getHeaders().get(JoseHeaderNames.CRIT)).isNull();
assertThat(encodedJwe.getIssuer()).isEqualTo(jwtClaimsSet.getIssuer());
assertThat(encodedJwe.getSubject()).isEqualTo(jwtClaimsSet.getSubject());
assertThat(encodedJwe.getAudience()).isEqualTo(jwtClaimsSet.getAudience());
assertThat(encodedJwe.getExpiresAt()).isEqualTo(jwtClaimsSet.getExpiresAt());
assertThat(encodedJwe.getNotBefore()).isEqualTo(jwtClaimsSet.getNotBefore());
assertThat(encodedJwe.getIssuedAt()).isEqualTo(jwtClaimsSet.getIssuedAt());
assertThat(encodedJwe.getId()).isEqualTo(jwtClaimsSet.getId());
assertThat(encodedJwe.<String>getClaim("custom-claim-name")).isEqualTo("custom-claim-value");
assertThat(encodedJwe.getTokenValue()).isNotNull();
}
@Test
public void encodeWhenNestedJwsThenEncodes() {
// See Nimbus example -> Nested signed and encrypted JWT
// https://connect2id.com/products/nimbus-jose-jwt/examples/signed-and-encrypted-jwt
RSAKey rsaJwk = TestJwks.DEFAULT_RSA_JWK;
this.jwkList.add(rsaJwk);
JwsHeader jwsHeader = JwsHeader.with(SignatureAlgorithm.RS256).build();
JwtClaimsSet jwtClaimsSet = TestJwtClaimsSets.jwtClaimsSet().build();
// @formatter:off
// **********************
// Assume future API:
// JwtEncoderParameters.with(JwsHeader jwsHeader, JweHeader jweHeader, JwtClaimsSet claims)
// **********************
// @formatter:on
Jwt encodedJweNestedJws = this.jweEncoder.encode(JwtEncoderParameters.from(jwsHeader, jwtClaimsSet));
assertThat(encodedJweNestedJws.getHeaders()).containsEntry(JoseHeaderNames.ALG,
DEFAULT_JWE_HEADER.getAlgorithm());
assertThat(encodedJweNestedJws.getHeaders()).containsEntry("enc", DEFAULT_JWE_HEADER.<String>getHeader("enc"));
assertThat(encodedJweNestedJws.getHeaders().get(JoseHeaderNames.JKU)).isNull();
assertThat(encodedJweNestedJws.getHeaders().get(JoseHeaderNames.JWK)).isNull();
assertThat(encodedJweNestedJws.getHeaders()).containsEntry(JoseHeaderNames.KID, rsaJwk.getKeyID());
assertThat(encodedJweNestedJws.getHeaders().get(JoseHeaderNames.X5U)).isNull();
assertThat(encodedJweNestedJws.getHeaders().get(JoseHeaderNames.X5C)).isNull();
assertThat(encodedJweNestedJws.getHeaders().get(JoseHeaderNames.X5T)).isNull();
assertThat(encodedJweNestedJws.getHeaders().get(JoseHeaderNames.X5T_S256)).isNull();
assertThat(encodedJweNestedJws.getHeaders().get(JoseHeaderNames.TYP)).isNull();
assertThat(encodedJweNestedJws.getHeaders()).containsEntry(JoseHeaderNames.CTY, "JWT");
assertThat(encodedJweNestedJws.getHeaders().get(JoseHeaderNames.CRIT)).isNull();
assertThat(encodedJweNestedJws.getIssuer()).isEqualTo(jwtClaimsSet.getIssuer());
assertThat(encodedJweNestedJws.getSubject()).isEqualTo(jwtClaimsSet.getSubject());
assertThat(encodedJweNestedJws.getAudience()).isEqualTo(jwtClaimsSet.getAudience());
assertThat(encodedJweNestedJws.getExpiresAt()).isEqualTo(jwtClaimsSet.getExpiresAt());
assertThat(encodedJweNestedJws.getNotBefore()).isEqualTo(jwtClaimsSet.getNotBefore());
assertThat(encodedJweNestedJws.getIssuedAt()).isEqualTo(jwtClaimsSet.getIssuedAt());
assertThat(encodedJweNestedJws.getId()).isEqualTo(jwtClaimsSet.getId());
assertThat(encodedJweNestedJws.<String>getClaim("custom-claim-name")).isEqualTo("custom-claim-value");
assertThat(encodedJweNestedJws.getTokenValue()).isNotNull();
}
| NimbusJweEncoderTests |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/taskmanager/TaskManagerRuntimeInfo.java | {
"start": 1115,
"end": 2412
} | interface ____ {
/**
* Gets the configuration that the TaskManager was started with.
*
* @return The configuration that the TaskManager was started with.
*/
Configuration getConfiguration();
/**
* Gets the list of temporary file directories.
*
* @return The list of temporary file directories.
*/
String[] getTmpDirectories();
/**
* Checks whether the TaskManager should exit the JVM when the task thread throws an
* OutOfMemoryError.
*
* @return True to terminate the JVM on an OutOfMemoryError, false otherwise.
*/
boolean shouldExitJvmOnOutOfMemoryError();
/**
* Gets the external address of the TaskManager.
*
* @return The external address of the TaskManager.
*/
String getTaskManagerExternalAddress();
/**
* Gets the bind address of the Taskmanager.
*
* @return The bind address of the TaskManager.
*/
default String getTaskManagerBindAddress() {
return getConfiguration().get(TaskManagerOptions.BIND_HOST);
}
/**
* Gets the temporary working directory of the TaskManager instance.
*
* @return The temporary working directory of the TaskManager.
*/
File getTmpWorkingDirectory();
}
| TaskManagerRuntimeInfo |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/aot/samples/basic/BasicSpringTestNGTests.java | {
"start": 2099,
"end": 2439
} | class ____ implements ApplicationContextInitializer<ConfigurableApplicationContext> {
@Override
public void initialize(ConfigurableApplicationContext applicationContext) {
applicationContext.getEnvironment().getPropertySources()
.addFirst(new MockPropertySource().withProperty("test.engine", "testng"));
}
}
}
| CustomInitializer |
java | bumptech__glide | instrumentation/src/androidTest/java/com/bumptech/glide/FitCenterRegressionTest.java | {
"start": 960,
"end": 4494
} | class ____ {
@Rule public final TestName testName = new TestName();
@Rule public final TearDownGlide tearDownGlide = new TearDownGlide();
private BitmapRegressionTester bitmapRegressionTester;
private Context context;
private CanonicalBitmap canonical;
@Before
public void setUp() {
context = ApplicationProvider.getApplicationContext();
bitmapRegressionTester =
BitmapRegressionTester.newInstance(getClass(), testName).assumeShouldRun();
canonical = new CanonicalBitmap();
}
@Test
public void fitCenter_withSquareSmallerThanImage_returnsImageFitWithinSquare()
throws ExecutionException, InterruptedException {
Bitmap result =
bitmapRegressionTester.test(
GlideApp.with(context).asBitmap().load(canonical.getBitmap()).fitCenter().override(50));
assertThat(result.getWidth()).isEqualTo(50);
assertThat(result.getHeight()).isEqualTo(37);
}
@Test
public void fitCenter_withSquareLargerThanImage_returnsUpscaledSquare()
throws ExecutionException, InterruptedException {
float multiplier = 1.1f;
int multipliedWidth = (int) (canonical.getWidth() * multiplier);
int multipliedHeight = (int) (canonical.getHeight() * multiplier);
Bitmap result =
bitmapRegressionTester.test(
GlideApp.with(context)
.asBitmap()
.load(canonical.getBitmap())
.fitCenter()
.override(multipliedWidth));
assertThat(result.getWidth()).isEqualTo(multipliedWidth);
assertThat(result.getHeight()).isEqualTo(multipliedHeight);
}
@Test
public void fitCenter_withNarrowRectangle_fitsWithinMaintainingAspectRatio()
throws ExecutionException, InterruptedException {
Bitmap result =
bitmapRegressionTester.test(
GlideApp.with(context)
.asBitmap()
.load(canonical.getBitmap())
.fitCenter()
.override(canonical.getWidth() / 10, canonical.getHeight()));
assertThat(result.getWidth()).isEqualTo(canonical.getWidth() / 10);
assertThat(result.getHeight()).isEqualTo(canonical.getHeight() / 10);
}
@Test
public void fitCenter_withShortRectangle_fitsWithinMaintainingAspectRatio()
throws ExecutionException, InterruptedException {
Bitmap result =
bitmapRegressionTester.test(
GlideApp.with(context)
.asBitmap()
.load(canonical.getBitmap())
.fitCenter()
.override(canonical.getWidth(), canonical.getHeight() / 2));
assertThat(result.getWidth()).isEqualTo(canonical.getWidth() / 2);
assertThat(result.getHeight()).isEqualTo(canonical.getHeight() / 2);
}
@Test
public void fitCenter_withHugeRectangle_throwsOOM()
throws ExecutionException, InterruptedException {
float multiplier = Integer.MAX_VALUE / (canonical.getWidth() * canonical.getHeight() * 2);
final int overrideWidth = (int) multiplier * canonical.getWidth();
final int overrideHeight = (int) multiplier * canonical.getHeight();
assertThrows(
ExecutionException.class,
new ThrowingRunnable() {
@Override
public void run() throws Throwable {
GlideApp.with(context)
.asBitmap()
.load(canonical.getBitmap())
.fitCenter()
.override(overrideWidth, overrideHeight)
.submit()
.get();
}
});
}
}
| FitCenterRegressionTest |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/internals/ShareConsumerImpl.java | {
"start": 8467,
"end": 26348
} | class ____ implements EventProcessor<BackgroundEvent> {
public BackgroundEventProcessor() {}
@Override
public void process(final BackgroundEvent event) {
if (event.type() == BackgroundEvent.Type.ERROR) {
process((ErrorEvent) event);
} else {
throw new IllegalArgumentException("Background event type " + event.type() + " was not expected");
}
}
private void process(final ErrorEvent event) {
throw event.error();
}
}
private final ApplicationEventHandler applicationEventHandler;
private final Time time;
private final KafkaShareConsumerMetrics kafkaShareConsumerMetrics;
private final AsyncConsumerMetrics asyncConsumerMetrics;
private Logger log;
private final String clientId;
private final String groupId;
private final ShareAcknowledgementEventHandler acknowledgementEventHandler;
private final BlockingQueue<ShareAcknowledgementEvent> acknowledgementEventQueue;
private final ShareAcknowledgementEventProcessor acknowledgementEventProcessor;
private final BlockingQueue<BackgroundEvent> backgroundEventQueue;
private final BackgroundEventHandler backgroundEventHandler;
private final BackgroundEventProcessor backgroundEventProcessor;
private final CompletableEventReaper backgroundEventReaper;
private final Deserializers<K, V> deserializers;
private ShareFetch<K, V> currentFetch;
private AcknowledgementCommitCallbackHandler acknowledgementCommitCallbackHandler;
private final List<Map<TopicIdPartition, Acknowledgements>> completedAcknowledgements;
private final ShareAcknowledgementMode acknowledgementMode;
/**
* A thread-safe {@link ShareFetchBuffer fetch buffer} for the results that are populated in the
* {@link ConsumerNetworkThread network thread} when the results are available. Because of the interaction
* of the fetch buffer in the application thread and the network I/O thread, this is shared between the
* two threads and is thus designed to be thread-safe.
*/
private final ShareFetchBuffer fetchBuffer;
private final ShareFetchCollector<K, V> fetchCollector;
private final SubscriptionState subscriptions;
private final ShareConsumerMetadata metadata;
private final Metrics metrics;
private final int requestTimeoutMs;
private final int defaultApiTimeoutMs;
private volatile boolean closed = false;
// Init value is needed to avoid NPE in case of exception raised in the constructor
private Optional<ClientTelemetryReporter> clientTelemetryReporter = Optional.empty();
private final WakeupTrigger wakeupTrigger = new WakeupTrigger();
// currentThread holds the threadId of the current thread accessing the KafkaShareConsumer
// and is used to prevent multithreaded access
private final AtomicLong currentThread = new AtomicLong(NO_CURRENT_THREAD);
private final AtomicInteger refCount = new AtomicInteger(0);
private boolean shouldSendShareFetchEvent = false;
ShareConsumerImpl(final ConsumerConfig config,
final Deserializer<K> keyDeserializer,
final Deserializer<V> valueDeserializer) {
this(
config,
keyDeserializer,
valueDeserializer,
Time.SYSTEM,
ApplicationEventHandler::new,
CompletableEventReaper::new,
ShareFetchCollector::new,
new LinkedBlockingQueue<>(),
new LinkedBlockingQueue<>()
);
}
// Visible for testing
ShareConsumerImpl(final ConsumerConfig config,
final Deserializer<K> keyDeserializer,
final Deserializer<V> valueDeserializer,
final Time time,
final ApplicationEventHandlerFactory applicationEventHandlerFactory,
final AsyncKafkaConsumer.CompletableEventReaperFactory backgroundEventReaperFactory,
final ShareFetchCollectorFactory<K, V> fetchCollectorFactory,
final LinkedBlockingQueue<ShareAcknowledgementEvent> acknowledgementEventQueue,
final LinkedBlockingQueue<BackgroundEvent> backgroundEventQueue) {
try {
GroupRebalanceConfig groupRebalanceConfig = new GroupRebalanceConfig(
config,
GroupRebalanceConfig.ProtocolType.SHARE
);
this.clientId = config.getString(ConsumerConfig.CLIENT_ID_CONFIG);
this.groupId = config.getString(ConsumerConfig.GROUP_ID_CONFIG);
maybeThrowInvalidGroupIdException();
LogContext logContext = createLogContext(clientId, groupId);
this.acknowledgementEventQueue = acknowledgementEventQueue;
this.backgroundEventQueue = backgroundEventQueue;
this.log = logContext.logger(getClass());
log.debug("Initializing the Kafka share consumer");
this.requestTimeoutMs = config.getInt(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG);
this.defaultApiTimeoutMs = config.getInt(ConsumerConfig.DEFAULT_API_TIMEOUT_MS_CONFIG);
this.time = time;
List<MetricsReporter> reporters = CommonClientConfigs.metricsReporters(clientId, config);
this.clientTelemetryReporter = CommonClientConfigs.telemetryReporter(clientId, config);
this.clientTelemetryReporter.ifPresent(reporters::add);
this.metrics = createMetrics(config, time, reporters);
this.asyncConsumerMetrics = new AsyncConsumerMetrics(metrics, CONSUMER_SHARE_METRIC_GROUP);
this.acknowledgementMode = initializeAcknowledgementMode(config, log);
this.deserializers = new Deserializers<>(config, keyDeserializer, valueDeserializer, metrics);
this.currentFetch = ShareFetch.empty();
this.subscriptions = createSubscriptionState(config, logContext);
ClusterResourceListeners clusterResourceListeners = ClientUtils.configureClusterResourceListeners(
metrics.reporters(),
Arrays.asList(deserializers.keyDeserializer(), deserializers.valueDeserializer()));
this.metadata = new ShareConsumerMetadata(config, subscriptions, logContext, clusterResourceListeners);
final List<InetSocketAddress> addresses = ClientUtils.parseAndValidateAddresses(config);
metadata.bootstrap(addresses);
ShareFetchMetricsManager shareFetchMetricsManager = createShareFetchMetricsManager(metrics);
ApiVersions apiVersions = new ApiVersions();
final BlockingQueue<ApplicationEvent> applicationEventQueue = new LinkedBlockingQueue<>();
this.acknowledgementEventHandler = new ShareAcknowledgementEventHandler(acknowledgementEventQueue);
this.backgroundEventHandler = new BackgroundEventHandler(
backgroundEventQueue, time, asyncConsumerMetrics);
// This FetchBuffer is shared between the application and network threads.
this.fetchBuffer = new ShareFetchBuffer(logContext);
final Supplier<NetworkClientDelegate> networkClientDelegateSupplier = NetworkClientDelegate.supplier(
time,
logContext,
metadata,
config,
apiVersions,
metrics,
shareFetchMetricsManager.throttleTimeSensor(),
clientTelemetryReporter.map(ClientTelemetryReporter::telemetrySender).orElse(null),
backgroundEventHandler,
true,
asyncConsumerMetrics
);
this.completedAcknowledgements = new LinkedList<>();
final Supplier<RequestManagers> requestManagersSupplier = RequestManagers.supplier(
time,
logContext,
acknowledgementEventHandler,
backgroundEventHandler,
metadata,
subscriptions,
fetchBuffer,
config,
groupRebalanceConfig,
shareFetchMetricsManager,
clientTelemetryReporter,
metrics
);
final Supplier<ApplicationEventProcessor> applicationEventProcessorSupplier = ApplicationEventProcessor.supplier(
logContext,
metadata,
subscriptions,
requestManagersSupplier
);
this.applicationEventHandler = applicationEventHandlerFactory.build(
logContext,
time,
config.getInt(CommonClientConfigs.DEFAULT_API_TIMEOUT_MS_CONFIG),
applicationEventQueue,
new CompletableEventReaper(logContext),
applicationEventProcessorSupplier,
networkClientDelegateSupplier,
requestManagersSupplier,
asyncConsumerMetrics);
this.acknowledgementEventProcessor = new ShareAcknowledgementEventProcessor();
this.backgroundEventProcessor = new BackgroundEventProcessor();
this.backgroundEventReaper = backgroundEventReaperFactory.build(logContext);
this.fetchCollector = fetchCollectorFactory.build(
logContext,
metadata,
subscriptions,
new ShareFetchConfig(config),
deserializers);
this.kafkaShareConsumerMetrics = new KafkaShareConsumerMetrics(metrics);
config.logUnused();
AppInfoParser.registerAppInfo(CONSUMER_JMX_PREFIX, clientId, metrics, time.milliseconds());
log.debug("Kafka share consumer initialized");
} catch (Throwable t) {
// Call close methods if internal objects are already constructed; this is to prevent resource leak.
// We do not need to call `close` at all when `log` is null, which means no internal objects were initialized.
if (this.log != null) {
close(Duration.ZERO, true);
}
// Now propagate the exception
throw new KafkaException("Failed to construct Kafka share consumer", t);
}
}
// Visible for testing
ShareConsumerImpl(final LogContext logContext,
final String clientId,
final String groupId,
final ConsumerConfig config,
final Deserializer<K> keyDeserializer,
final Deserializer<V> valueDeserializer,
final Time time,
final KafkaClient client,
final SubscriptionState subscriptions,
final ShareConsumerMetadata metadata) {
this.clientId = clientId;
this.groupId = groupId;
this.log = logContext.logger(getClass());
this.time = time;
this.metrics = new Metrics(time);
this.clientTelemetryReporter = Optional.empty();
this.deserializers = new Deserializers<>(config, keyDeserializer, valueDeserializer, metrics);
this.currentFetch = ShareFetch.empty();
this.subscriptions = subscriptions;
this.metadata = metadata;
this.requestTimeoutMs = config.getInt(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG);
this.defaultApiTimeoutMs = config.getInt(ConsumerConfig.DEFAULT_API_TIMEOUT_MS_CONFIG);
this.acknowledgementMode = initializeAcknowledgementMode(config, log);
this.fetchBuffer = new ShareFetchBuffer(logContext);
this.completedAcknowledgements = new LinkedList<>();
ShareConsumerMetrics metricsRegistry = new ShareConsumerMetrics();
ShareFetchMetricsManager shareFetchMetricsManager = new ShareFetchMetricsManager(metrics, metricsRegistry.shareFetchMetrics);
this.fetchCollector = new ShareFetchCollector<>(
logContext,
metadata,
subscriptions,
new ShareFetchConfig(config),
deserializers);
this.kafkaShareConsumerMetrics = new KafkaShareConsumerMetrics(metrics);
this.asyncConsumerMetrics = new AsyncConsumerMetrics(metrics, CONSUMER_SHARE_METRIC_GROUP);
final BlockingQueue<ApplicationEvent> applicationEventQueue = new LinkedBlockingQueue<>();
this.acknowledgementEventQueue = new LinkedBlockingQueue<>();
this.acknowledgementEventHandler = new ShareAcknowledgementEventHandler(acknowledgementEventQueue);
this.backgroundEventQueue = new LinkedBlockingQueue<>();
this.backgroundEventHandler = new BackgroundEventHandler(
backgroundEventQueue, time, asyncConsumerMetrics);
final Supplier<NetworkClientDelegate> networkClientDelegateSupplier =
NetworkClientDelegate.supplier(time, config, logContext, client, metadata, backgroundEventHandler, true, asyncConsumerMetrics);
GroupRebalanceConfig groupRebalanceConfig = new GroupRebalanceConfig(
config,
GroupRebalanceConfig.ProtocolType.SHARE);
final Supplier<RequestManagers> requestManagersSupplier = RequestManagers.supplier(
time,
logContext,
acknowledgementEventHandler,
backgroundEventHandler,
metadata,
subscriptions,
fetchBuffer,
config,
groupRebalanceConfig,
shareFetchMetricsManager,
clientTelemetryReporter,
metrics
);
final Supplier<ApplicationEventProcessor> applicationEventProcessorSupplier = ApplicationEventProcessor.supplier(
logContext,
metadata,
subscriptions,
requestManagersSupplier
);
this.applicationEventHandler = new ApplicationEventHandler(
logContext,
time,
config.getInt(CommonClientConfigs.DEFAULT_API_TIMEOUT_MS_CONFIG),
applicationEventQueue,
new CompletableEventReaper(logContext),
applicationEventProcessorSupplier,
networkClientDelegateSupplier,
requestManagersSupplier,
asyncConsumerMetrics);
this.acknowledgementEventProcessor = new ShareAcknowledgementEventProcessor();
this.backgroundEventProcessor = new BackgroundEventProcessor();
this.backgroundEventReaper = new CompletableEventReaper(logContext);
config.logUnused();
AppInfoParser.registerAppInfo(CONSUMER_JMX_PREFIX, clientId, metrics, time.milliseconds());
}
// Visible for testing
@SuppressWarnings("ParameterNumber")
ShareConsumerImpl(final LogContext logContext,
final String clientId,
final Deserializer<K> keyDeserializer,
final Deserializer<V> valueDeserializer,
final ShareFetchBuffer fetchBuffer,
final ShareFetchCollector<K, V> fetchCollector,
final Time time,
final ApplicationEventHandler applicationEventHandler,
final BlockingQueue<ShareAcknowledgementEvent> acknowledgementEventQueue,
final BlockingQueue<BackgroundEvent> backgroundEventQueue,
final CompletableEventReaper backgroundEventReaper,
final Metrics metrics,
final SubscriptionState subscriptions,
final ShareConsumerMetadata metadata,
final int requestTimeoutMs,
final int defaultApiTimeoutMs,
final String groupId,
final String acknowledgementModeConfig) {
this.log = logContext.logger(getClass());
this.subscriptions = subscriptions;
this.clientId = clientId;
this.groupId = groupId;
this.fetchBuffer = fetchBuffer;
this.fetchCollector = fetchCollector;
this.time = time;
this.acknowledgementEventQueue = acknowledgementEventQueue;
this.acknowledgementEventProcessor = new ShareAcknowledgementEventProcessor();
this.backgroundEventQueue = backgroundEventQueue;
this.backgroundEventProcessor = new BackgroundEventProcessor();
this.backgroundEventReaper = backgroundEventReaper;
this.metrics = metrics;
this.metadata = metadata;
this.requestTimeoutMs = requestTimeoutMs;
this.defaultApiTimeoutMs = defaultApiTimeoutMs;
this.acknowledgementMode = ShareAcknowledgementMode.fromString(acknowledgementModeConfig);
this.deserializers = new Deserializers<>(keyDeserializer, valueDeserializer, metrics);
this.currentFetch = ShareFetch.empty();
this.applicationEventHandler = applicationEventHandler;
this.kafkaShareConsumerMetrics = new KafkaShareConsumerMetrics(metrics);
this.clientTelemetryReporter = Optional.empty();
this.completedAcknowledgements = Collections.emptyList();
this.asyncConsumerMetrics = new AsyncConsumerMetrics(metrics, CONSUMER_SHARE_METRIC_GROUP);
this.acknowledgementEventHandler = new ShareAcknowledgementEventHandler(acknowledgementEventQueue);
this.backgroundEventHandler = new BackgroundEventHandler(
backgroundEventQueue, time, asyncConsumerMetrics);
}
// auxiliary | BackgroundEventProcessor |
java | spring-projects__spring-boot | module/spring-boot-security/src/test/java/org/springframework/boot/security/autoconfigure/web/servlet/SecurityFilterAutoConfigurationEarlyInitializationTests.java | {
"start": 6020,
"end": 6203
} | class ____ implements Converter<SourceType, DestinationType> {
@Override
public DestinationType convert(SourceType source) {
return new DestinationType();
}
}
}
| ConverterBean |
java | greenrobot__greendao | DaoCore/src/main/java/org/greenrobot/greendao/database/DatabaseOpenHelper.java | {
"start": 1196,
"end": 4232
} | class ____ extends SQLiteOpenHelper {
private final Context context;
private final String name;
private final int version;
private EncryptedHelper encryptedHelper;
private boolean loadSQLCipherNativeLibs = true;
public DatabaseOpenHelper(Context context, String name, int version) {
this(context, name, null, version);
}
public DatabaseOpenHelper(Context context, String name, CursorFactory factory, int version) {
super(context, name, factory, version);
this.context = context;
this.name = name;
this.version = version;
}
@SuppressLint("NewApi")
public DatabaseOpenHelper(Context context, String name, CursorFactory factory, int version, DatabaseErrorHandler errorHandler) {
super(context, name, factory, version, errorHandler);
this.context = context;
this.name = name;
this.version = version;
}
/**
* Flag to load SQLCipher native libs (default: true).
*/
public void setLoadSQLCipherNativeLibs(boolean loadSQLCipherNativeLibs) {
this.loadSQLCipherNativeLibs = loadSQLCipherNativeLibs;
}
/**
* Like {@link #getWritableDatabase()}, but returns a greenDAO abstraction of the database.
* The backing DB is an standard {@link SQLiteDatabase}.
*/
public Database getWritableDb() {
return wrap(getWritableDatabase());
}
/**
* Like {@link #getReadableDatabase()}, but returns a greenDAO abstraction of the database.
* The backing DB is an standard {@link SQLiteDatabase}.
*/
public Database getReadableDb() {
return wrap(getReadableDatabase());
}
protected Database wrap(SQLiteDatabase sqLiteDatabase) {
return new StandardDatabase(sqLiteDatabase);
}
/**
* Delegates to {@link #onCreate(Database)}, which uses greenDAO's database abstraction.
*/
@Override
public void onCreate(SQLiteDatabase db) {
onCreate(wrap(db));
}
/**
* Override this if you do not want to depend on {@link SQLiteDatabase}.
*/
public void onCreate(Database db) {
// Do nothing by default
}
/**
* Delegates to {@link #onUpgrade(Database, int, int)}, which uses greenDAO's database abstraction.
*/
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
onUpgrade(wrap(db), oldVersion, newVersion);
}
/**
* Override this if you do not want to depend on {@link SQLiteDatabase}.
*/
public void onUpgrade(Database db, int oldVersion, int newVersion) {
// Do nothing by default
}
/**
* Delegates to {@link #onOpen(Database)}, which uses greenDAO's database abstraction.
*/
@Override
public void onOpen(SQLiteDatabase db) {
onOpen(wrap(db));
}
/**
* Override this if you do not want to depend on {@link SQLiteDatabase}.
*/
public void onOpen(Database db) {
// Do nothing by default
}
| DatabaseOpenHelper |
java | grpc__grpc-java | api/src/test/java/io/grpc/ForwardingChannelBuilderTest.java | {
"start": 1308,
"end": 3234
} | class ____ extends ForwardingChannelBuilder<TestBuilder> {
@Override
@SuppressWarnings("deprecation")
protected ManagedChannelBuilder<?> delegate() {
return mockDelegate;
}
}
@Test
public void allMethodsForwarded() throws Exception {
ForwardingTestUtil.testMethodsForwarded(
ManagedChannelBuilder.class,
mockDelegate,
testChannelBuilder,
Collections.<Method>emptyList(),
new ForwardingTestUtil.ArgumentProvider() {
@Override
public Object get(Method method, int argPos, Class<?> clazz) {
if (method.getName().equals("maxInboundMetadataSize")) {
assertThat(argPos).isEqualTo(0);
return 1; // an arbitrary positive number
}
return null;
}
});
}
@Test
public void allBuilderMethodsReturnThis() throws Exception {
for (Method method : ManagedChannelBuilder.class.getDeclaredMethods()) {
if (Modifier.isStatic(method.getModifiers()) || Modifier.isPrivate(method.getModifiers())) {
continue;
}
if (method.getName().equals("build")) {
continue;
}
Class<?>[] argTypes = method.getParameterTypes();
Object[] args = new Object[argTypes.length];
for (int i = 0; i < argTypes.length; i++) {
args[i] = Defaults.defaultValue(argTypes[i]);
}
if (method.getName().equals("maxInboundMetadataSize")) {
args[0] = 1; // an arbitrary positive number
}
Object returnedValue = method.invoke(testChannelBuilder, args);
assertThat(returnedValue).isSameInstanceAs(testChannelBuilder);
}
}
@Test
public void buildReturnsDelegateBuildByDefault() {
ManagedChannel mockChannel = mock(ManagedChannel.class);
doReturn(mockChannel).when(mockDelegate).build();
assertThat(testChannelBuilder.build()).isSameInstanceAs(mockChannel);
}
}
| TestBuilder |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/callbacks/typematching/CallbackMethodTypeMatchingTest.java | {
"start": 582,
"end": 890
} | class ____ {
@ProcessorTest
public void callbackMethodAreCalled() {
CarEntity carEntity = CarMapper.INSTANCE.toCarEntity( new CarDto() );
assertThat( carEntity.getId() ).isEqualTo( 2 );
assertThat( carEntity.getSeatCount() ).isEqualTo( 5 );
}
}
| CallbackMethodTypeMatchingTest |
java | spring-projects__spring-boot | module/spring-boot-webservices-test/src/test/java/org/springframework/boot/webservices/test/autoconfigure/server/WebServiceServerPropertiesIntegrationTests.java | {
"start": 1417,
"end": 1775
} | class ____ {
@Autowired
private Environment innerEnvironment;
@Test
void propertiesFromEnclosingClassAffectNestedTests() {
assertThat(WebServiceServerPropertiesIntegrationTests.this.environment.getActiveProfiles())
.containsExactly("test");
assertThat(this.innerEnvironment.getActiveProfiles()).containsExactly("test");
}
}
}
| NestedTests |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformAction.java | {
"start": 1677,
"end": 2222
} | class ____ extends ActionType<GetTransformAction.Response> {
public static final GetTransformAction INSTANCE = new GetTransformAction();
public static final String NAME = "cluster:monitor/transform/get";
static final TransportVersion DANGLING_TASKS = TransportVersion.fromName("transform_check_for_dangling_tasks");
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(GetTransformAction.class);
private GetTransformAction() {
super(NAME);
}
public static | GetTransformAction |
java | spring-projects__spring-boot | module/spring-boot-mongodb/src/test/java/org/springframework/boot/data/mongodb/autoconfigure/health/MongoHealthContributorAutoConfigurationTests.java | {
"start": 1381,
"end": 2072
} | class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(MongoAutoConfiguration.class,
MongoHealthContributorAutoConfiguration.class, HealthContributorAutoConfiguration.class));
@Test
void runShouldCreateIndicator() {
this.contextRunner.run((context) -> assertThat(context).hasSingleBean(MongoHealthIndicator.class));
}
@Test
void runWhenDisabledShouldNotCreateIndicator() {
this.contextRunner.withPropertyValues("management.health.mongodb.enabled:false")
.run((context) -> assertThat(context).doesNotHaveBean(MongoHealthIndicator.class));
}
}
| MongoHealthContributorAutoConfigurationTests |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/operators/hash/MutableHashTable.java | {
"start": 71286,
"end": 80137
} | class ____<BT, PT> implements MutableObjectIterator<BT> {
private final TypeSerializer<BT> accessor;
private final TypePairComparator<PT, BT> comparator;
private MemorySegment bucket;
private MemorySegment[] overflowSegments;
private HashPartition<BT, PT> partition;
private int bucketInSegmentOffset;
private int searchHashCode;
private int posInSegment;
private int countInSegment;
private int numInSegment;
private int originalBucketInSegmentOffset;
private MemorySegment originalBucket;
private long lastPointer;
private BitSet probedSet;
private boolean isBuildOuterJoin = false;
HashBucketIterator(
TypeSerializer<BT> accessor,
TypePairComparator<PT, BT> comparator,
BitSet probedSet,
boolean isBuildOuterJoin) {
this.accessor = accessor;
this.comparator = comparator;
this.probedSet = probedSet;
this.isBuildOuterJoin = isBuildOuterJoin;
}
void set(
MemorySegment bucket,
MemorySegment[] overflowSegments,
HashPartition<BT, PT> partition,
int searchHashCode,
int bucketInSegmentOffset) {
this.bucket = bucket;
this.originalBucket = bucket;
this.overflowSegments = overflowSegments;
this.partition = partition;
this.searchHashCode = searchHashCode;
this.bucketInSegmentOffset = bucketInSegmentOffset;
this.originalBucketInSegmentOffset = bucketInSegmentOffset;
this.posInSegment = this.bucketInSegmentOffset + BUCKET_HEADER_LENGTH;
this.countInSegment = bucket.getShort(bucketInSegmentOffset + HEADER_COUNT_OFFSET);
this.numInSegment = 0;
}
public BT next(BT reuse) {
// loop over all segments that are involved in the bucket (original bucket plus overflow
// buckets)
while (true) {
probedSet.setMemorySegment(
bucket, this.bucketInSegmentOffset + HEADER_PROBED_FLAGS_OFFSET);
while (this.numInSegment < this.countInSegment) {
final int thisCode = this.bucket.getInt(this.posInSegment);
this.posInSegment += HASH_CODE_LEN;
// check if the hash code matches
if (thisCode == this.searchHashCode) {
// get the pointer to the pair
final long pointer =
this.bucket.getLong(
this.bucketInSegmentOffset
+ BUCKET_POINTER_START_OFFSET
+ (this.numInSegment * POINTER_LEN));
this.numInSegment++;
// deserialize the key to check whether it is really equal, or whether we
// had only a hash collision
try {
this.partition.setReadPosition(pointer);
reuse = this.accessor.deserialize(reuse, this.partition);
if (this.comparator.equalToReference(reuse)) {
if (isBuildOuterJoin) {
probedSet.set(numInSegment - 1);
}
this.lastPointer = pointer;
return reuse;
}
} catch (IOException ioex) {
throw new RuntimeException(
"Error deserializing key or value from the hashtable: "
+ ioex.getMessage(),
ioex);
}
} else {
this.numInSegment++;
}
}
// this segment is done. check if there is another chained bucket
final long forwardPointer =
this.bucket.getLong(this.bucketInSegmentOffset + HEADER_FORWARD_OFFSET);
if (forwardPointer == BUCKET_FORWARD_POINTER_NOT_SET) {
return null;
}
final int overflowSegNum = (int) (forwardPointer >>> 32);
this.bucket = this.overflowSegments[overflowSegNum];
this.bucketInSegmentOffset = (int) forwardPointer;
this.countInSegment =
this.bucket.getShort(this.bucketInSegmentOffset + HEADER_COUNT_OFFSET);
this.posInSegment = this.bucketInSegmentOffset + BUCKET_HEADER_LENGTH;
this.numInSegment = 0;
}
}
public BT next() {
// loop over all segments that are involved in the bucket (original bucket plus overflow
// buckets)
while (true) {
probedSet.setMemorySegment(
bucket, this.bucketInSegmentOffset + HEADER_PROBED_FLAGS_OFFSET);
while (this.numInSegment < this.countInSegment) {
final int thisCode = this.bucket.getInt(this.posInSegment);
this.posInSegment += HASH_CODE_LEN;
// check if the hash code matches
if (thisCode == this.searchHashCode) {
// get the pointer to the pair
final long pointer =
this.bucket.getLong(
this.bucketInSegmentOffset
+ BUCKET_POINTER_START_OFFSET
+ (this.numInSegment * POINTER_LEN));
this.numInSegment++;
// deserialize the key to check whether it is really equal, or whether we
// had only a hash collision
try {
this.partition.setReadPosition(pointer);
BT result = this.accessor.deserialize(this.partition);
if (this.comparator.equalToReference(result)) {
if (isBuildOuterJoin) {
probedSet.set(numInSegment - 1);
}
this.lastPointer = pointer;
return result;
}
} catch (IOException ioex) {
throw new RuntimeException(
"Error deserializing key or value from the hashtable: "
+ ioex.getMessage(),
ioex);
}
} else {
this.numInSegment++;
}
}
// this segment is done. check if there is another chained bucket
final long forwardPointer =
this.bucket.getLong(this.bucketInSegmentOffset + HEADER_FORWARD_OFFSET);
if (forwardPointer == BUCKET_FORWARD_POINTER_NOT_SET) {
return null;
}
final int overflowSegNum = (int) (forwardPointer >>> 32);
this.bucket = this.overflowSegments[overflowSegNum];
this.bucketInSegmentOffset = (int) forwardPointer;
this.countInSegment =
this.bucket.getShort(this.bucketInSegmentOffset + HEADER_COUNT_OFFSET);
this.posInSegment = this.bucketInSegmentOffset + BUCKET_HEADER_LENGTH;
this.numInSegment = 0;
}
}
public void writeBack(BT value) throws IOException {
final SeekableDataOutputView outView = this.partition.getWriteView();
outView.setWritePosition(this.lastPointer);
this.accessor.serialize(value, outView);
}
public void reset() {
this.bucket = this.originalBucket;
this.bucketInSegmentOffset = this.originalBucketInSegmentOffset;
this.posInSegment = this.bucketInSegmentOffset + BUCKET_HEADER_LENGTH;
this.countInSegment = bucket.getShort(bucketInSegmentOffset + HEADER_COUNT_OFFSET);
this.numInSegment = 0;
}
} // end HashBucketIterator
/** Iterate all the elements in memory which has not been probed during probe phase. */
public static | HashBucketIterator |
java | google__error-prone | core/src/test/java/com/google/errorprone/refaster/UnificationTest.java | {
"start": 15316,
"end": 16343
} | class ____ {",
" public void example(String x) {",
" if (Math.random() > 0.5) {",
" x = \"foo\";",
" } else {",
" x = \"bar\";",
" }",
" }",
"}");
expectMatches(
blockTemplate,
Match.create(
ImmutableMap.of(
"cond", "(Math.random() > 0.5)",
"x", "x",
"y", "\"foo\"",
"z", "\"bar\"",
"T", "java.lang.String")));
}
@Test
public void newArray() {
// Template: new String[] {str}
ExpressionTemplate template =
ExpressionTemplate.create(
ImmutableMap.of("str", UClassType.create("java.lang.String")),
UNewArray.create(
UClassIdent.create("java.lang.String"),
ImmutableList.<UExpression>of(),
ImmutableList.of(UFreeIdent.create("str"))),
UArrayType.create(UClassType.create("java.lang.String")));
compile(
" | IfBlockExample |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java | {
"start": 61033,
"end": 63970
} | class ____ implements EventHandler<Event> {
@Override
public void handle(Event event) {
//Empty
}
}
private static void validateInputParam(String value, String param)
throws IOException {
if (value == null) {
String msg = param + " is null";
LOG.error(msg);
throw new IOException(msg);
}
}
public static void main(String[] args) {
try {
mainStarted = true;
Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
String containerIdStr =
System.getenv(Environment.CONTAINER_ID.name());
String nodeHostString = System.getenv(Environment.NM_HOST.name());
String nodePortString = System.getenv(Environment.NM_PORT.name());
String nodeHttpPortString =
System.getenv(Environment.NM_HTTP_PORT.name());
String appSubmitTimeStr =
System.getenv(ApplicationConstants.APP_SUBMIT_TIME_ENV);
validateInputParam(containerIdStr,
Environment.CONTAINER_ID.name());
validateInputParam(nodeHostString, Environment.NM_HOST.name());
validateInputParam(nodePortString, Environment.NM_PORT.name());
validateInputParam(nodeHttpPortString,
Environment.NM_HTTP_PORT.name());
validateInputParam(appSubmitTimeStr,
ApplicationConstants.APP_SUBMIT_TIME_ENV);
ContainerId containerId = ContainerId.fromString(containerIdStr);
ApplicationAttemptId applicationAttemptId =
containerId.getApplicationAttemptId();
if (applicationAttemptId != null) {
CallerContext.setCurrent(new CallerContext.Builder(
"mr_appmaster_" + applicationAttemptId.toString()).build());
}
long appSubmitTime = Long.parseLong(appSubmitTimeStr);
MRAppMaster appMaster =
new MRAppMaster(applicationAttemptId, containerId, nodeHostString,
Integer.parseInt(nodePortString),
Integer.parseInt(nodeHttpPortString), appSubmitTime);
ShutdownHookManager.get().addShutdownHook(
new MRAppMasterShutdownHook(appMaster), SHUTDOWN_HOOK_PRIORITY);
JobConf conf = new JobConf(new YarnConfiguration());
conf.addResource(new Path(MRJobConfig.JOB_CONF_FILE));
MRWebAppUtil.initialize(conf);
// log the system properties
String systemPropsToLog = MRApps.getSystemPropertiesToLog(conf);
if (systemPropsToLog != null) {
LOG.info(systemPropsToLog);
}
String jobUserName = System
.getenv(ApplicationConstants.Environment.USER.name());
conf.set(MRJobConfig.USER_NAME, jobUserName);
initAndStartAppMaster(appMaster, conf, jobUserName);
} catch (Throwable t) {
LOG.error("Error starting MRAppMaster", t);
ExitUtil.terminate(1, t);
}
}
// The shutdown hook that runs when a signal is received AND during normal
// close of the JVM.
static | NoopEventHandler |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/config/ConfigDataLocationRuntimeHintsTests.java | {
"start": 4774,
"end": 5634
} | class ____ extends ConfigDataLocationRuntimeHints {
private final MockSpringFactoriesLoader springFactoriesLoader;
TestConfigDataLocationRuntimeHints(MockSpringFactoriesLoader springFactoriesLoader) {
this.springFactoriesLoader = springFactoriesLoader;
}
TestConfigDataLocationRuntimeHints() {
this(springFactoriesLoader());
}
private static MockSpringFactoriesLoader springFactoriesLoader() {
MockSpringFactoriesLoader springFactoriesLoader = new MockSpringFactoriesLoader();
springFactoriesLoader.add(PropertySourceLoader.class, PropertiesPropertySourceLoader.class,
YamlPropertySourceLoader.class);
return springFactoriesLoader;
}
@Override
protected SpringFactoriesLoader getSpringFactoriesLoader(@Nullable ClassLoader classLoader) {
return this.springFactoriesLoader;
}
}
}
| TestConfigDataLocationRuntimeHints |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java | {
"start": 1473,
"end": 11339
} | class ____ implements Writeable, ToXContentFragment {
private static final ParseField FILTER = new ParseField("filter");
private static final ParseField ROUTING = new ParseField("routing");
private static final ParseField INDEX_ROUTING = new ParseField("index_routing", "indexRouting", "index-routing");
private static final ParseField SEARCH_ROUTING = new ParseField("search_routing", "searchRouting", "search-routing");
private static final ParseField IS_WRITE_INDEX = new ParseField("is_write_index");
private static final ParseField IS_HIDDEN = new ParseField("is_hidden");
private static final Set<String> KNOWN_FIELDS = Set.of(
FILTER.getPreferredName(),
ROUTING.getPreferredName(),
INDEX_ROUTING.getPreferredName(),
SEARCH_ROUTING.getPreferredName(),
IS_WRITE_INDEX.getPreferredName(),
IS_HIDDEN.getPreferredName()
);
private String name;
@Nullable
private String filter;
@Nullable
private String indexRouting;
@Nullable
private String searchRouting;
@Nullable
private Boolean writeIndex;
@Nullable
private Boolean isHidden;
public Alias(StreamInput in) throws IOException {
name = in.readString();
filter = in.readOptionalString();
indexRouting = in.readOptionalString();
searchRouting = in.readOptionalString();
writeIndex = in.readOptionalBoolean();
isHidden = in.readOptionalBoolean();
}
public Alias(String name) {
this.name = name;
}
/**
* Returns the alias name
*/
public String name() {
return name;
}
/**
Modify the alias name only
*/
public Alias name(String name) {
this.name = name;
return this;
}
/**
* Returns the filter associated with the alias
*/
public String filter() {
return filter;
}
/**
* Associates a filter to the alias
*/
public Alias filter(String filter) {
this.filter = filter;
return this;
}
/**
* Associates a filter to the alias
*/
public Alias filter(Map<String, Object> filter) {
if (filter == null || filter.isEmpty()) {
this.filter = null;
return this;
}
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(filter);
this.filter = Strings.toString(builder);
return this;
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + filter + "]", e);
}
}
/**
* Associates a filter to the alias
*/
public Alias filter(QueryBuilder filterBuilder) {
if (filterBuilder == null) {
this.filter = null;
return this;
}
try {
XContentBuilder builder = XContentFactory.jsonBuilder();
filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.close();
this.filter = Strings.toString(builder);
return this;
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to build json for alias request", e);
}
}
/**
* Associates a routing value to the alias
*/
public Alias routing(String routing) {
this.indexRouting = routing;
this.searchRouting = routing;
return this;
}
/**
* Returns the index routing value associated with the alias
*/
public String indexRouting() {
return indexRouting;
}
/**
* Associates an index routing value to the alias
*/
public Alias indexRouting(String indexRouting) {
this.indexRouting = indexRouting;
return this;
}
/**
* Returns the search routing value associated with the alias
*/
public String searchRouting() {
return searchRouting;
}
/**
* Associates a search routing value to the alias
*/
public Alias searchRouting(String searchRouting) {
this.searchRouting = searchRouting;
return this;
}
/**
* @return the write index flag for the alias
*/
public Boolean writeIndex() {
return writeIndex;
}
/**
* Sets whether an alias is pointing to a write-index
*/
public Alias writeIndex(@Nullable Boolean writeIndex) {
this.writeIndex = writeIndex;
return this;
}
/**
* @return whether this alias is hidden or not
*/
public Boolean isHidden() {
return isHidden;
}
/**
* Sets whether this alias is hidden
*/
public Alias isHidden(@Nullable Boolean isHidden) {
this.isHidden = isHidden;
return this;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeOptionalString(filter);
out.writeOptionalString(indexRouting);
out.writeOptionalString(searchRouting);
out.writeOptionalBoolean(writeIndex);
out.writeOptionalBoolean(isHidden);
}
/**
* Parses an alias and returns its parsed representation
*/
public static Alias fromXContent(XContentParser parser) throws IOException {
Alias alias = new Alias(parser.currentName());
String currentFieldName = null;
XContentParser.Token token = parser.nextToken();
if (token == null) {
throw new IllegalArgumentException("No alias is specified");
}
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
// check if there are any unknown fields
if (KNOWN_FIELDS.contains(currentFieldName) == false) {
throw new IllegalArgumentException("Unknown field [" + currentFieldName + "] in alias [" + alias.name + "]");
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (FILTER.match(currentFieldName, parser.getDeprecationHandler())) {
Map<String, Object> filter = parser.mapOrdered();
alias.filter(filter);
}
} else if (token == XContentParser.Token.VALUE_STRING) {
if (ROUTING.match(currentFieldName, parser.getDeprecationHandler())) {
alias.routing(parser.text());
} else if (INDEX_ROUTING.match(currentFieldName, parser.getDeprecationHandler())) {
alias.indexRouting(parser.text());
} else if (SEARCH_ROUTING.match(currentFieldName, parser.getDeprecationHandler())) {
alias.searchRouting(parser.text());
} else {
throw new IllegalArgumentException(
"Unsupported String type value ["
+ parser.text()
+ "] for field ["
+ currentFieldName
+ "] in alias ["
+ alias.name
+ "]"
);
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if (IS_WRITE_INDEX.match(currentFieldName, parser.getDeprecationHandler())) {
alias.writeIndex(parser.booleanValue());
} else if (IS_HIDDEN.match(currentFieldName, parser.getDeprecationHandler())) {
alias.isHidden(parser.booleanValue());
} else {
throw new IllegalArgumentException(
"Unsupported boolean type value ["
+ parser.text()
+ "] for field ["
+ currentFieldName
+ "] in alias ["
+ alias.name
+ "]"
);
}
} else {
throw new IllegalArgumentException("Unknown token [" + token + "] in alias [" + alias.name + "]");
}
}
return alias;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
if (filter != null) {
try (InputStream stream = new BytesArray(filter).streamInput()) {
builder.rawField(FILTER.getPreferredName(), stream, XContentType.JSON);
}
}
if (indexRouting != null && indexRouting.equals(searchRouting)) {
builder.field(ROUTING.getPreferredName(), indexRouting);
} else {
if (indexRouting != null) {
builder.field(INDEX_ROUTING.getPreferredName(), indexRouting);
}
if (searchRouting != null) {
builder.field(SEARCH_ROUTING.getPreferredName(), searchRouting);
}
}
if (writeIndex != null) {
builder.field(IS_WRITE_INDEX.getPreferredName(), writeIndex);
}
if (isHidden != null) {
builder.field(IS_HIDDEN.getPreferredName(), isHidden);
}
builder.endObject();
return builder;
}
@Override
public String toString() {
return Strings.toString(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Alias alias = (Alias) o;
return Objects.equals(name, alias.name);
}
@Override
public int hashCode() {
return name != null ? name.hashCode() : 0;
}
}
| Alias |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java | {
"start": 2326,
"end": 8111
} | class ____ extends HtmlBlock {
final AppContext appContext;
@Inject JobBlock(AppContext appctx) {
appContext = appctx;
}
@Override protected void render(Block html) {
String jid = $(JOB_ID);
if (jid.isEmpty()) {
html.
p().__("Sorry, can't do anything without a JobID.").__();
return;
}
JobId jobID = MRApps.toJobID(jid);
Job job = appContext.getJob(jobID);
if (job == null) {
html.
p().__("Sorry, ", jid, " not found.").__();
return;
}
List<AMInfo> amInfos = job.getAMInfos();
String amString =
amInfos.size() == 1 ? "ApplicationMaster" : "ApplicationMasters";
JobInfo jinfo = new JobInfo(job, true);
info("Job Overview").
__("Job Name:", jinfo.getName()).
__("User Name:", jinfo.getUserName()).
__("Queue Name:", jinfo.getQueueName()).
__("State:", jinfo.getState()).
__("Uberized:", jinfo.isUberized()).
__("Started:", new Date(jinfo.getStartTime())).
__("Elapsed:", StringUtils.formatTime(jinfo.getElapsedTime()));
DIV<Hamlet> div = html.
__(InfoBlock.class).
div(_INFO_WRAP);
// MRAppMasters Table
TABLE<DIV<Hamlet>> table = div.table("#job");
table.
tr().
th(amString).
__().
tr().
th(_TH, "Attempt Number").
th(_TH, "Start Time").
th(_TH, "Node").
th(_TH, "Logs").
__();
for (AMInfo amInfo : amInfos) {
AMAttemptInfo attempt = new AMAttemptInfo(amInfo,
jinfo.getId(), jinfo.getUserName());
table.tr().
td(String.valueOf(attempt.getAttemptId())).
td(new Date(attempt.getStartTime()).toString()).
td().a(".nodelink", url(MRWebAppUtil.getYARNWebappScheme(),
attempt.getNodeHttpAddress()),
attempt.getNodeHttpAddress()).__().
td().a(".logslink", url(attempt.getLogsLink()),
"logs").__().
__();
}
table.__();
div.__();
html.div(_INFO_WRAP).
// Tasks table
table("#job").
tr().
th(_TH, "Task Type").
th(_TH, "Progress").
th(_TH, "Total").
th(_TH, "Pending").
th(_TH, "Running").
th(_TH, "Complete").__().
tr(_ODD).
th("Map").
td().
div(_PROGRESSBAR).
$title(join(jinfo.getMapProgressPercent(), '%')). // tooltip
div(_PROGRESSBAR_VALUE).
$style(join("width:", jinfo.getMapProgressPercent(), '%')).__().__().__().
td().a(url("tasks", jid, "m", "ALL"), String.valueOf(jinfo.getMapsTotal())).__().
td().a(url("tasks", jid, "m", "PENDING"), String.valueOf(jinfo.getMapsPending())).__().
td().a(url("tasks", jid, "m", "RUNNING"), String.valueOf(jinfo.getMapsRunning())).__().
td().a(url("tasks", jid, "m", "COMPLETED"), String.valueOf(jinfo.getMapsCompleted())).__().__().
tr(_EVEN).
th("Reduce").
td().
div(_PROGRESSBAR).
$title(join(jinfo.getReduceProgressPercent(), '%')). // tooltip
div(_PROGRESSBAR_VALUE).
$style(join("width:", jinfo.getReduceProgressPercent(), '%')).__().__().__().
td().a(url("tasks", jid, "r", "ALL"), String.valueOf(jinfo.getReducesTotal())).__().
td().a(url("tasks", jid, "r", "PENDING"), String.valueOf(jinfo.getReducesPending())).__().
td().a(url("tasks", jid, "r", "RUNNING"), String.valueOf(jinfo.getReducesRunning())).__().
td().a(url("tasks", jid, "r", "COMPLETED"), String.valueOf(jinfo.getReducesCompleted())).__().__()
.__().
// Attempts table
table("#job").
tr().
th(_TH, "Attempt Type").
th(_TH, "New").
th(_TH, "Running").
th(_TH, "Failed").
th(_TH, "Killed").
th(_TH, "Successful").__().
tr(_ODD).
th("Maps").
td().a(url("attempts", jid, "m",
TaskAttemptStateUI.NEW.toString()),
String.valueOf(jinfo.getNewMapAttempts())).__().
td().a(url("attempts", jid, "m",
TaskAttemptStateUI.RUNNING.toString()),
String.valueOf(jinfo.getRunningMapAttempts())).__().
td().a(url("attempts", jid, "m",
TaskAttemptStateUI.FAILED.toString()),
String.valueOf(jinfo.getFailedMapAttempts())).__().
td().a(url("attempts", jid, "m",
TaskAttemptStateUI.KILLED.toString()),
String.valueOf(jinfo.getKilledMapAttempts())).__().
td().a(url("attempts", jid, "m",
TaskAttemptStateUI.SUCCESSFUL.toString()),
String.valueOf(jinfo.getSuccessfulMapAttempts())).__().
__().
tr(_EVEN).
th("Reduces").
td().a(url("attempts", jid, "r",
TaskAttemptStateUI.NEW.toString()),
String.valueOf(jinfo.getNewReduceAttempts())).__().
td().a(url("attempts", jid, "r",
TaskAttemptStateUI.RUNNING.toString()),
String.valueOf(jinfo.getRunningReduceAttempts())).__().
td().a(url("attempts", jid, "r",
TaskAttemptStateUI.FAILED.toString()),
String.valueOf(jinfo.getFailedReduceAttempts())).__().
td().a(url("attempts", jid, "r",
TaskAttemptStateUI.KILLED.toString()),
String.valueOf(jinfo.getKilledReduceAttempts())).__().
td().a(url("attempts", jid, "r",
TaskAttemptStateUI.SUCCESSFUL.toString()),
String.valueOf(jinfo.getSuccessfulReduceAttempts())).__().
__().
__().
__();
}
}
| JobBlock |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-jackson/deployment/src/test/java/io/quarkus/resteasy/reactive/jackson/deployment/test/streams/Demands.java | {
"start": 102,
"end": 278
} | class ____ {
public List<Long> demands;
public Demands(List<Long> demands) {
this.demands = demands;
}
// for Jsonb
public Demands() {
}
}
| Demands |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/deser/list/ListStringFieldTest_dom.java | {
"start": 432,
"end": 5496
} | class ____ extends TestCase {
public void test_list() throws Exception {
String text = "{\"values\":[\"a\",null,\"b\",\"ab\\\\c\"]}";
Model model = JSON.parseObject(text, Model.class);
Assert.assertEquals(4, model.values.size());
Assert.assertEquals("a", model.values.get(0));
Assert.assertEquals(null, model.values.get(1));
Assert.assertEquals("b", model.values.get(2));
Assert.assertEquals("ab\\c", model.values.get(3));
}
public void test_null() throws Exception {
String text = "{\"values\":null}";
Model model = JSON.parseObject(text, Model.class);
Assert.assertNull(model.values);
}
public void test_empty() throws Exception {
String text = "{\"values\":[]}";
Model model = JSON.parseObject(text, Model.class);
Assert.assertEquals(0, model.values.size());
}
public void test_null_element() throws Exception {
String text = "{\"values\":[\"abc\",null]}";
Model model = JSON.parseObject(text, Model.class);
Assert.assertEquals(2, model.values.size());
Assert.assertEquals("abc", model.values.get(0));
Assert.assertEquals(null, model.values.get(1));
}
public void test_map_empty() throws Exception {
String text = "{\"model\":{\"values\":[]}}";
Map<String, Model> map = JSON.parseObject(text, new TypeReference<Map<String, Model>>() {
});
Model model = (Model) map.get("model");
Assert.assertEquals(0, model.values.size());
}
public void test_notMatch() throws Exception {
String text = "{\"value\":[]}";
Model model = JSON.parseObject(text, Model.class);
Assert.assertNull(model.values);
}
public void test_error() throws Exception {
String text = "{\"values\":[1";
Exception error = null;
try {
JSON.parseObject(text, Model.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_1() throws Exception {
String text = "{\"values\":[\"b\"[";
Exception error = null;
try {
JSON.parseObject(text, Model.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_2() throws Exception {
String text = "{\"model\":{\"values\":[][";
Exception error = null;
try {
JSON.parseObject(text, new TypeReference<Map<String, Model>>() {
});
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_3() throws Exception {
String text = "{\"model\":{\"values\":[]}[";
Exception error = null;
try {
JSON.parseObject(text, new TypeReference<Map<String, Model>>() {
});
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_4() throws Exception {
String text = "{\"model\":{\"values\":[\"aaa]}[";
Exception error = null;
try {
JSON.parseObject(text, new TypeReference<Map<String, Model>>() {
});
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_n() throws Exception {
String text = "{\"values\":[n";
Exception error = null;
try {
JSON.parseObject(text, Model.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_nu() throws Exception {
String text = "{\"values\":[nu";
Exception error = null;
try {
JSON.parseObject(text, Model.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_nul() throws Exception {
String text = "{\"values\":[nul";
Exception error = null;
try {
JSON.parseObject(text, Model.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_null() throws Exception {
String text = "{\"values\":[null";
Exception error = null;
try {
JSON.parseObject(text, Model.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_rbacket() throws Exception {
String text = "{\"values\":[null,]";
Exception error = null;
try {
JSON.parseObject(text, Model.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public static | ListStringFieldTest_dom |
java | alibaba__nacos | consistency/src/main/java/com/alibaba/nacos/consistency/cp/RequestProcessor4CP.java | {
"start": 977,
"end": 1357
} | class ____ extends RequestProcessor {
/**
* Discovery snapshot handler It is up to LogProcessor to decide which SnapshotOperate should be loaded and saved by
* itself.
*
* @return {@link List <SnapshotOperate>}
*/
public List<SnapshotOperation> loadSnapshotOperate() {
return Collections.emptyList();
}
}
| RequestProcessor4CP |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequestTests.java | {
"start": 1536,
"end": 14885
} | class ____ extends ESTestCase {
private static final String NODE_IDENTIFIERS_INCORRECTLY_SET_MSG = "You must set [node_names] or [node_ids] but not both";
public void testSerializationForNodeIdOrNodeName() throws IOException {
AddVotingConfigExclusionsRequest originalRequest = new AddVotingConfigExclusionsRequest(
TEST_REQUEST_TIMEOUT,
new String[] { "nodeId1", "nodeId2" },
Strings.EMPTY_ARRAY,
TimeValue.ZERO
);
AddVotingConfigExclusionsRequest deserialized = copyWriteable(
originalRequest,
writableRegistry(),
AddVotingConfigExclusionsRequest::new
);
assertThat(deserialized.getNodeIds(), equalTo(originalRequest.getNodeIds()));
assertThat(deserialized.getNodeNames(), equalTo(originalRequest.getNodeNames()));
assertThat(deserialized.getTimeout(), equalTo(originalRequest.getTimeout()));
originalRequest = new AddVotingConfigExclusionsRequest(TEST_REQUEST_TIMEOUT, "nodeName1", "nodeName2");
deserialized = copyWriteable(originalRequest, writableRegistry(), AddVotingConfigExclusionsRequest::new);
assertThat(deserialized.getNodeIds(), equalTo(originalRequest.getNodeIds()));
assertThat(deserialized.getNodeNames(), equalTo(originalRequest.getNodeNames()));
assertThat(deserialized.getTimeout(), equalTo(originalRequest.getTimeout()));
}
public void testResolve() {
final DiscoveryNode localNode = DiscoveryNodeUtils.builder("local")
.name("local")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final VotingConfigExclusion localNodeExclusion = new VotingConfigExclusion(localNode);
final DiscoveryNode otherNode1 = DiscoveryNodeUtils.builder("other1")
.name("other1")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final VotingConfigExclusion otherNode1Exclusion = new VotingConfigExclusion(otherNode1);
final DiscoveryNode otherNode2 = DiscoveryNodeUtils.builder("other2")
.name("other2")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final VotingConfigExclusion otherNode2Exclusion = new VotingConfigExclusion(otherNode2);
final DiscoveryNode otherDataNode = DiscoveryNodeUtils.builder("data").name("data").roles(emptySet()).build();
final ClusterState clusterState = ClusterState.builder(new ClusterName("cluster"))
.nodes(new Builder().add(localNode).add(otherNode1).add(otherNode2).add(otherDataNode).localNodeId(localNode.getId()))
.build();
assertThat(
new AddVotingConfigExclusionsRequest(TEST_REQUEST_TIMEOUT, "local", "other1", "other2").resolveVotingConfigExclusions(
clusterState
),
containsInAnyOrder(localNodeExclusion, otherNode1Exclusion, otherNode2Exclusion)
);
assertThat(
new AddVotingConfigExclusionsRequest(TEST_REQUEST_TIMEOUT, "local").resolveVotingConfigExclusions(clusterState),
contains(localNodeExclusion)
);
assertThat(
new AddVotingConfigExclusionsRequest(TEST_REQUEST_TIMEOUT, "other1", "other2").resolveVotingConfigExclusions(clusterState),
containsInAnyOrder(otherNode1Exclusion, otherNode2Exclusion)
);
assertThat(
new AddVotingConfigExclusionsRequest(
TEST_REQUEST_TIMEOUT,
Strings.EMPTY_ARRAY,
new String[] { "other1", "other2" },
TimeValue.ZERO
).resolveVotingConfigExclusions(clusterState),
containsInAnyOrder(otherNode1Exclusion, otherNode2Exclusion)
);
}
public void testResolveAllNodeIdentifiersNullOrEmpty() {
assertThat(
expectThrows(
IllegalArgumentException.class,
() -> new AddVotingConfigExclusionsRequest(TEST_REQUEST_TIMEOUT, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, TimeValue.ZERO)
).getMessage(),
equalTo(NODE_IDENTIFIERS_INCORRECTLY_SET_MSG)
);
}
public void testResolveMoreThanOneNodeIdentifiersSet() {
assertThat(
expectThrows(
IllegalArgumentException.class,
() -> new AddVotingConfigExclusionsRequest(
TEST_REQUEST_TIMEOUT,
new String[] { "nodeId" },
new String[] { "nodeName" },
TimeValue.ZERO
)
).getMessage(),
equalTo(NODE_IDENTIFIERS_INCORRECTLY_SET_MSG)
);
}
public void testResolveByNodeIds() {
final DiscoveryNode node1 = DiscoveryNodeUtils.builder("nodeId1")
.name("nodeName1")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final VotingConfigExclusion node1Exclusion = new VotingConfigExclusion(node1);
final DiscoveryNode node2 = DiscoveryNodeUtils.builder("nodeId2")
.name("nodeName2")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final VotingConfigExclusion node2Exclusion = new VotingConfigExclusion(node2);
final DiscoveryNode node3 = DiscoveryNodeUtils.builder("nodeId3")
.name("nodeName3")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final VotingConfigExclusion unresolvableVotingConfigExclusion = new VotingConfigExclusion(
"unresolvableNodeId",
VotingConfigExclusion.MISSING_VALUE_MARKER
);
final ClusterState clusterState = ClusterState.builder(new ClusterName("cluster"))
.nodes(new Builder().add(node1).add(node2).add(node3).localNodeId(node1.getId()))
.build();
assertThat(
new AddVotingConfigExclusionsRequest(
TEST_REQUEST_TIMEOUT,
new String[] { "nodeId1", "nodeId2" },
Strings.EMPTY_ARRAY,
TimeValue.ZERO
).resolveVotingConfigExclusions(clusterState),
containsInAnyOrder(node1Exclusion, node2Exclusion)
);
assertThat(
new AddVotingConfigExclusionsRequest(
TEST_REQUEST_TIMEOUT,
new String[] { "nodeId1", "unresolvableNodeId" },
Strings.EMPTY_ARRAY,
TimeValue.ZERO
).resolveVotingConfigExclusions(clusterState),
containsInAnyOrder(node1Exclusion, unresolvableVotingConfigExclusion)
);
}
public void testResolveByNodeNames() {
final DiscoveryNode node1 = DiscoveryNodeUtils.builder("nodeId1")
.name("nodeName1")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final VotingConfigExclusion node1Exclusion = new VotingConfigExclusion(node1);
final DiscoveryNode node2 = DiscoveryNodeUtils.builder("nodeId2")
.name("nodeName2")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final VotingConfigExclusion node2Exclusion = new VotingConfigExclusion(node2);
final DiscoveryNode node3 = DiscoveryNodeUtils.builder("nodeId3")
.name("nodeName3")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final VotingConfigExclusion unresolvableVotingConfigExclusion = new VotingConfigExclusion(
VotingConfigExclusion.MISSING_VALUE_MARKER,
"unresolvableNodeName"
);
final ClusterState clusterState = ClusterState.builder(new ClusterName("cluster"))
.nodes(new Builder().add(node1).add(node2).add(node3).localNodeId(node1.getId()))
.build();
assertThat(
new AddVotingConfigExclusionsRequest(TEST_REQUEST_TIMEOUT, "nodeName1", "nodeName2").resolveVotingConfigExclusions(
clusterState
),
containsInAnyOrder(node1Exclusion, node2Exclusion)
);
assertThat(
new AddVotingConfigExclusionsRequest(TEST_REQUEST_TIMEOUT, "nodeName1", "unresolvableNodeName").resolveVotingConfigExclusions(
clusterState
),
containsInAnyOrder(node1Exclusion, unresolvableVotingConfigExclusion)
);
}
public void testResolveAmbiguousName() {
final DiscoveryNode node1 = DiscoveryNodeUtils.builder("nodeId1")
.name("ambiguous-name")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final DiscoveryNode node2 = DiscoveryNodeUtils.builder("nodeId2")
.name("ambiguous-name")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final ClusterState clusterState = ClusterState.builder(new ClusterName("cluster"))
.nodes(new Builder().add(node1).add(node2).localNodeId(node1.getId()))
.build();
final var request = new AddVotingConfigExclusionsRequest(TEST_REQUEST_TIMEOUT, "ambiguous-name");
assertThat(
expectThrows(IllegalArgumentException.class, () -> request.resolveVotingConfigExclusions(clusterState)).getMessage(),
allOf(
containsString("node name [ambiguous-name] is ambiguous"),
containsString(node1.descriptionWithoutAttributes()),
containsString(node2.descriptionWithoutAttributes())
)
);
}
public void testResolveRemoveExistingVotingConfigExclusions() {
final DiscoveryNode node1 = DiscoveryNodeUtils.builder("nodeId1")
.name("nodeName1")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final DiscoveryNode node2 = DiscoveryNodeUtils.builder("nodeId2")
.name("nodeName2")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final VotingConfigExclusion node2Exclusion = new VotingConfigExclusion(node2);
final DiscoveryNode node3 = DiscoveryNodeUtils.builder("nodeId3")
.name("nodeName3")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final VotingConfigExclusion existingVotingConfigExclusion = new VotingConfigExclusion(node1);
Metadata metadata = Metadata.builder()
.coordinationMetadata(CoordinationMetadata.builder().addVotingConfigExclusion(existingVotingConfigExclusion).build())
.build();
final ClusterState clusterState = ClusterState.builder(new ClusterName("cluster"))
.metadata(metadata)
.nodes(new Builder().add(node1).add(node2).add(node3).localNodeId(node1.getId()))
.build();
assertThat(
new AddVotingConfigExclusionsRequest(
TEST_REQUEST_TIMEOUT,
new String[] { "nodeId1", "nodeId2" },
Strings.EMPTY_ARRAY,
TimeValue.ZERO
).resolveVotingConfigExclusions(clusterState),
contains(node2Exclusion)
);
}
public void testResolveAndCheckMaximum() {
final DiscoveryNode localNode = DiscoveryNodeUtils.builder("local")
.name("local")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final VotingConfigExclusion localNodeExclusion = new VotingConfigExclusion(localNode);
final DiscoveryNode otherNode1 = DiscoveryNodeUtils.builder("other1")
.name("other1")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final VotingConfigExclusion otherNode1Exclusion = new VotingConfigExclusion(otherNode1);
final DiscoveryNode otherNode2 = DiscoveryNodeUtils.builder("other2")
.name("other2")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE))
.build();
final ClusterState.Builder builder = ClusterState.builder(new ClusterName("cluster"))
.nodes(new Builder().add(localNode).add(otherNode1).add(otherNode2).localNodeId(localNode.getId()));
builder.metadata(
Metadata.builder().coordinationMetadata(CoordinationMetadata.builder().addVotingConfigExclusion(otherNode1Exclusion).build())
);
final ClusterState clusterState = builder.build();
assertThat(
new AddVotingConfigExclusionsRequest(TEST_REQUEST_TIMEOUT, "local").resolveVotingConfigExclusionsAndCheckMaximum(
clusterState,
2,
"setting.name"
),
contains(localNodeExclusion)
);
assertThat(
expectThrows(
IllegalArgumentException.class,
() -> new AddVotingConfigExclusionsRequest(TEST_REQUEST_TIMEOUT, "local").resolveVotingConfigExclusionsAndCheckMaximum(
clusterState,
1,
"setting.name"
)
).getMessage(),
equalTo(
"add voting config exclusions request for nodes named [local] would add [1] exclusions to the existing [1] which "
+ "would exceed the maximum of [1] set by [setting.name]"
)
);
}
}
| AddVotingConfigExclusionsRequestTests |
java | dropwizard__dropwizard | dropwizard-jersey/src/main/java/io/dropwizard/jersey/jsr310/LocalDateTimeParam.java | {
"start": 326,
"end": 629
} | class ____ extends AbstractParam<LocalDateTime> {
public LocalDateTimeParam(@Nullable final String input) {
super(input);
}
@Override
protected LocalDateTime parse(@Nullable final String input) throws Exception {
return LocalDateTime.parse(input);
}
}
| LocalDateTimeParam |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/util/WhatWgUrlParser.java | {
"start": 94144,
"end": 96129
} | class ____ implements Path {
private @Nullable StringBuilder builder = null;
@Nullable String segment;
PathSegment(String segment) {
this.segment = segment;
}
PathSegment(int codePoint) {
append(codePoint);
}
public String segment() {
String result = this.segment;
if (result == null) {
Assert.state(this.builder != null, "String nor StringBuilder available");
result = this.builder.toString();
this.segment = result;
}
return result;
}
@Override
public void append(int codePoint) {
this.segment = null;
if (this.builder == null) {
this.builder = new StringBuilder(2);
}
this.builder.appendCodePoint(codePoint);
}
@Override
public void append(String s) {
this.segment = null;
if (this.builder == null) {
this.builder = new StringBuilder(s);
}
else {
this.builder.append(s);
}
}
@Override
public String name() {
String name = segment();
if (name.startsWith("/")) {
name = name.substring(1);
}
return name;
}
@Override
public boolean isEmpty() {
if (this.segment != null) {
return this.segment.isEmpty();
}
else {
Assert.state(this.builder != null, "String nor StringBuilder available");
return this.builder.isEmpty();
}
}
@Override
public void shorten(String scheme) {
throw new IllegalStateException("Opaque path not expected");
}
@Override
public boolean isOpaque() {
return true;
}
@SuppressWarnings("MethodDoesntCallSuperMethod")
@Override
public Path clone() {
return new PathSegment(segment());
}
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
}
else if (o instanceof PathSegment other) {
return segment().equals(other.segment());
}
else {
return false;
}
}
@Override
public int hashCode() {
return segment().hashCode();
}
@Override
public String toString() {
return segment();
}
}
static final | PathSegment |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/introspect/TestPropertyRename.java | {
"start": 738,
"end": 1079
} | class ____ {
@JsonProperty("a")
private int a;
public Bean323WithExplicitCleave1(@JsonProperty("a") final int a ) {
this.a = a;
}
@JsonProperty("b")
private int getA () {
return a;
}
}
@JsonPropertyOrder({ "a","b" })
static | Bean323WithExplicitCleave1 |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/SpringApplicationAotProcessorTests.java | {
"start": 5941,
"end": 6167
} | class ____ {
public static void main() {
invoker.invoke(null, () -> SpringApplication.run(PublicParameterlessMainMethod.class));
}
}
@Configuration(proxyBeanMethods = false)
public static | PublicParameterlessMainMethod |
java | micronaut-projects__micronaut-core | test-suite/src/test/java/io/micronaut/docs/propagation/MdcService.java | {
"start": 364,
"end": 1109
} | class ____ {
// tag::createUser[]
public String createUser(String name) {
try {
UUID newUserId = UUID.randomUUID();
MDC.put("userId", newUserId.toString());
try (PropagatedContext.Scope ignore = PropagatedContext.getOrEmpty().plus(new MdcPropagationContext()).propagate()) {
return createUserInternal(newUserId, name);
}
} finally {
MDC.remove("userId");
}
}
// end::createUser[]
private String createUserInternal(UUID id, String name) {
if (MDC.get("userId") == null) {
throw new IllegalStateException("Missing userId");
}
return "New user id: " + id + " name: " + name;
}
}
| MdcService |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java | {
"start": 1257,
"end": 4623
} | class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ToBase64Evaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator field;
private final BytesRefBuilder oScratch;
private final DriverContext driverContext;
private Warnings warnings;
public ToBase64Evaluator(Source source, EvalOperator.ExpressionEvaluator field,
BytesRefBuilder oScratch, DriverContext driverContext) {
this.source = source;
this.field = field;
this.oScratch = oScratch;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (BytesRefBlock fieldBlock = (BytesRefBlock) field.eval(page)) {
BytesRefVector fieldVector = fieldBlock.asVector();
if (fieldVector == null) {
return eval(page.getPositionCount(), fieldBlock);
}
return eval(page.getPositionCount(), fieldVector);
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += field.baseRamBytesUsed();
return baseRamBytesUsed;
}
public BytesRefBlock eval(int positionCount, BytesRefBlock fieldBlock) {
try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
BytesRef fieldScratch = new BytesRef();
position: for (int p = 0; p < positionCount; p++) {
switch (fieldBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
BytesRef field = fieldBlock.getBytesRef(fieldBlock.getFirstValueIndex(p), fieldScratch);
try {
result.appendBytesRef(ToBase64.process(field, this.oScratch));
} catch (ArithmeticException e) {
warnings().registerException(e);
result.appendNull();
}
}
return result.build();
}
}
public BytesRefBlock eval(int positionCount, BytesRefVector fieldVector) {
try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
BytesRef fieldScratch = new BytesRef();
position: for (int p = 0; p < positionCount; p++) {
BytesRef field = fieldVector.getBytesRef(p, fieldScratch);
try {
result.appendBytesRef(ToBase64.process(field, this.oScratch));
} catch (ArithmeticException e) {
warnings().registerException(e);
result.appendNull();
}
}
return result.build();
}
}
@Override
public String toString() {
return "ToBase64Evaluator[" + "field=" + field + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(field);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static | ToBase64Evaluator |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/aot/nativex/substitution/Target_ClassFinder.java | {
"start": 1131,
"end": 1286
} | class ____ {
@Alias
public static Class<?> findClass(String name, ClassLoader loader) throws ClassNotFoundException {
return null;
}
}
| Target_ClassFinder |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/deser/ThrowableDeserializerTest.java | {
"start": 2234,
"end": 2365
} | class ____ extends Exception {
public MyException2() {
throw new RuntimeException();
}
}
}
| MyException2 |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/result_handler_type/DefaultResultHandlerTypeTest.java | {
"start": 1109,
"end": 3296
} | class ____ {
@Test
void selectList() throws Exception {
String xmlConfig = "org/apache/ibatis/submitted/result_handler_type/MapperConfig.xml";
SqlSessionFactory sqlSessionFactory = getSqlSessionFactoryXmlConfig(xmlConfig);
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
List<Person> list = sqlSession
.selectList("org.apache.ibatis.submitted.result_handler_type.PersonMapper.doSelect");
assertEquals(2, list.size());
assertEquals("java.util.LinkedList", list.getClass().getCanonicalName());
}
}
@Test
void selectMap() throws Exception {
String xmlConfig = "org/apache/ibatis/submitted/result_handler_type/MapperConfig.xml";
SqlSessionFactory sqlSessionFactory = getSqlSessionFactoryXmlConfig(xmlConfig);
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Map<Integer, Person> map = sqlSession
.selectMap("org.apache.ibatis.submitted.result_handler_type.PersonMapper.doSelect", "id");
assertEquals(2, map.size());
assertEquals("java.util.LinkedHashMap", map.getClass().getCanonicalName());
}
}
@Test
void selectMapAnnotation() throws Exception {
String xmlConfig = "org/apache/ibatis/submitted/result_handler_type/MapperConfig.xml";
SqlSessionFactory sqlSessionFactory = getSqlSessionFactoryXmlConfig(xmlConfig);
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
PersonMapper mapper = sqlSession.getMapper(PersonMapper.class);
Map<Integer, Person> map = mapper.selectAsMap();
assertEquals(2, map.size());
assertEquals("java.util.LinkedHashMap", map.getClass().getCanonicalName());
}
}
private SqlSessionFactory getSqlSessionFactoryXmlConfig(String resource) throws Exception {
try (Reader configReader = Resources.getResourceAsReader(resource)) {
SqlSessionFactory sqlSessionFactory = new SqlSessionFactoryBuilder().build(configReader);
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/result_handler_type/CreateDB.sql");
return sqlSessionFactory;
}
}
}
| DefaultResultHandlerTypeTest |
java | apache__rocketmq | remoting/src/test/java/org/apache/rocketmq/remoting/protocol/body/SubscriptionGroupWrapperTest.java | {
"start": 1201,
"end": 2590
} | class ____ {
@Test
public void testFromJson() {
SubscriptionGroupWrapper subscriptionGroupWrapper = new SubscriptionGroupWrapper();
ConcurrentHashMap<String, SubscriptionGroupConfig> subscriptions = new ConcurrentHashMap<>();
SubscriptionGroupConfig subscriptionGroupConfig = new SubscriptionGroupConfig();
subscriptionGroupConfig.setConsumeBroadcastEnable(true);
subscriptionGroupConfig.setBrokerId(1234);
subscriptionGroupConfig.setGroupName("Consumer-group-one");
subscriptions.put("Consumer-group-one", subscriptionGroupConfig);
subscriptionGroupWrapper.setSubscriptionGroupTable(subscriptions);
DataVersion dataVersion = new DataVersion();
dataVersion.nextVersion();
subscriptionGroupWrapper.setDataVersion(dataVersion);
String json = RemotingSerializable.toJson(subscriptionGroupWrapper, true);
SubscriptionGroupWrapper fromJson = RemotingSerializable.fromJson(json, SubscriptionGroupWrapper.class);
assertThat(fromJson.getSubscriptionGroupTable()).containsKey("Consumer-group-one");
assertThat(fromJson.getSubscriptionGroupTable().get("Consumer-group-one").getGroupName()).isEqualTo("Consumer-group-one");
assertThat(fromJson.getSubscriptionGroupTable().get("Consumer-group-one").getBrokerId()).isEqualTo(1234);
}
}
| SubscriptionGroupWrapperTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/EqualsBrokenForNullTest.java | {
"start": 9230,
"end": 9463
} | class ____ {
@Override
public boolean equals(Object obj) {
if (obj != null && !getClass().equals(obj.getClass())) {
return false;
}
return true;
}
}
private | NullCheckAndObjectGetClassArgToEquals |
java | elastic__elasticsearch | x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportPostSecretAction.java | {
"start": 1004,
"end": 1994
} | class ____ extends HandledTransportAction<PostSecretRequest, PostSecretResponse> {
private final Client client;
@Inject
public TransportPostSecretAction(TransportService transportService, ActionFilters actionFilters, Client client) {
super(PostSecretAction.NAME, transportService, actionFilters, PostSecretRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE);
this.client = new OriginSettingClient(client, FLEET_ORIGIN);
}
protected void doExecute(Task task, PostSecretRequest request, ActionListener<PostSecretResponse> listener) {
try {
client.prepareIndex(FLEET_SECRETS_INDEX_NAME)
.setSource(request.toXContent(jsonBuilder()))
.execute(
listener.delegateFailureAndWrap((l, indexResponse) -> l.onResponse(new PostSecretResponse(indexResponse.getId())))
);
} catch (Exception e) {
listener.onFailure(e);
}
}
}
| TransportPostSecretAction |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/connector/source/ExternallyInducedSourceReader.java | {
"start": 1019,
"end": 2080
} | interface ____ checkpoints when receiving a trigger message from the
* checkpoint coordinator to the point when their input data/events indicate that a checkpoint
* should be triggered.
*
* <p>The ExternallyInducedSourceReader tells the Flink runtime that a checkpoint needs to be made
* by returning a checkpointId when {@link #shouldTriggerCheckpoint()} is invoked.
*
* <p>The implementations typically works together with the {@link SplitEnumerator} which informs
* the external system to trigger a checkpoint. The external system also needs to forward the
* Checkpoint ID to the source, so the source knows which checkpoint to trigger.
*
* <p><b>Important:</b> It is crucial that all parallel source tasks trigger their checkpoints at
* roughly the same time. Otherwise this leads to performance issues due to long checkpoint
* alignment phases or large alignment data snapshots.
*
* @param <T> The type of records produced by the source.
* @param <SplitT> The type of splits handled by the source.
*/
@Experimental
@PublicEvolving
public | delay |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.