language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/charsequence/CharSequenceAssert_isVisible_Test.java | {
"start": 1137,
"end": 2141
} | class ____ {
@ParameterizedTest
@ValueSource(strings = { "abc", "foo", "foo123", "!", "\"", "#", "$", "%", "&", "'", "(", ")", "*", "+", ",", "-", ".",
"/", ":", ";", "<", "=", ">", "?", "@", "[", "\\", "]", "^", "_", "`", "{", "|", "}", "~" })
void should_pass_when_actual_is_visible(CharSequence actual) {
assertThat(actual).isVisible();
}
@ParameterizedTest
@ValueSource(strings = { "\t", "\n", "½", "§", "©", "«abc»", " ", "" })
void should_fail_if_actual_is_not_visible(CharSequence actual) {
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actual).isVisible());
// THEN
then(assertionError).hasMessage(shouldBeVisible(actual).create());
}
@Test
void should_fail_when_actual_is_null() {
// GIVEN
CharSequence actual = null;
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actual).isVisible());
// THEN
then(assertionError).hasMessage(actualIsNull());
}
}
| CharSequenceAssert_isVisible_Test |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapReduceBase.java | {
"start": 1306,
"end": 1563
} | class ____ implements Closeable, JobConfigurable {
/** Default implementation that does nothing. */
public void close() throws IOException {
}
/** Default implementation that does nothing. */
public void configure(JobConf job) {
}
}
| MapReduceBase |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetAllResourceProfilesResponsePBImpl.java | {
"start": 1746,
"end": 1859
} | class ____ the GetAllResourceProfilesResponse.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public | for |
java | grpc__grpc-java | xds/src/test/java/io/grpc/xds/WrrLocalityLoadBalancerProviderTest.java | {
"start": 1345,
"end": 2672
} | class ____ {
@Test
public void provided() {
LoadBalancerProvider provider =
LoadBalancerRegistry.getDefaultRegistry().getProvider(
XdsLbPolicies.WRR_LOCALITY_POLICY_NAME);
assertThat(provider).isInstanceOf(WrrLocalityLoadBalancerProvider.class);
}
@Test
public void providesLoadBalancer() {
Helper helper = mock(Helper.class);
when(helper.getAuthority()).thenReturn("api.google.com");
LoadBalancerProvider provider = new WrrLocalityLoadBalancerProvider();
LoadBalancer loadBalancer = provider.newLoadBalancer(helper);
assertThat(loadBalancer).isInstanceOf(WrrLocalityLoadBalancer.class);
}
@Test
public void parseConfig() {
Map<String, ?> rawConfig = ImmutableMap.of("childPolicy",
ImmutableList.of(ImmutableMap.of("round_robin", ImmutableMap.of())));
WrrLocalityLoadBalancerProvider provider = new WrrLocalityLoadBalancerProvider();
NameResolver.ConfigOrError configOrError = provider.parseLoadBalancingPolicyConfig(rawConfig);
WrrLocalityConfig config = (WrrLocalityConfig) configOrError.getConfig();
LoadBalancerProvider childProvider =
GracefulSwitchLoadBalancerAccessor.getChildProvider(config.childConfig);
assertThat(childProvider.getPolicyName()).isEqualTo("round_robin");
}
}
| WrrLocalityLoadBalancerProviderTest |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/paths/PathsSimpleBaseTest.java | {
"start": 1231,
"end": 2346
} | class ____ {
protected static final AssertionInfo INFO = someInfo();
protected Path tempDir;
protected Paths paths;
protected Failures failures;
@BeforeEach
public void setUp(@TempDir Path tempDir) {
this.tempDir = tempDir;
failures = spy(Failures.instance());
paths = Paths.instance();
writeField(paths, "failures", failures);
}
Path createDirectory(Path parent, String name, String... files) {
Path directory = parent.resolve(name);
try {
java.nio.file.Files.createDirectory(directory);
stream(files).forEach(f -> createFile(directory, f));
} catch (IOException e) {
throw new UncheckedIOException("error during fixture directory creation", e);
}
return directory;
}
Path createDirectoryFromRoot(String... files) {
return createDirectory(tempDir, "root", files);
}
private void createFile(Path directory, String f) {
try {
java.nio.file.Files.createFile(directory.resolve(f));
} catch (IOException e) {
throw new UncheckedIOException("error during fixture file creation", e);
}
}
}
| PathsSimpleBaseTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/ConnectionsReleaseTest.java | {
"start": 1479,
"end": 2347
} | class ____ implements ServiceRegistryProducer {
@BeforeAll
static void beforeAll(DomainModelScope modelScope) {
modelScope.getDomainModel().orderColumns( false );
modelScope.getDomainModel().validate();
}
@Test
@Order(1)
public void testSchemaUpdateReleasesAllConnections(DomainModelScope modelScope) {
new SchemaUpdate().execute( EnumSet.of( TargetType.DATABASE ), modelScope.getDomainModel() );
assertThat( SharedDriverManagerConnectionProvider.getInstance().getOpenConnections(), is( 0 ) );
}
@Test
@Order(2)
public void testSchemaValidatorReleasesAllConnections(DomainModelScope modelScope) {
new SchemaValidator().validate( modelScope.getDomainModel() );
assertThat( SharedDriverManagerConnectionProvider.getInstance().getOpenConnections(), is( 0 ) );
}
@Entity(name = "Thing")
@Table(name = "Thing")
public static | ConnectionsReleaseTest |
java | mockito__mockito | mockito-integration-tests/inline-mocks-tests/src/test/java/org/mockitoinline/HierarchyPreInitializationTest.java | {
"start": 558,
"end": 629
} | interface ____ extends TestInterface {}
public static | TestSubInterface |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webmvc/src/main/java/org/springframework/cloud/gateway/server/mvc/common/AttributedArugmentSuppliedEvent.java | {
"start": 871,
"end": 1478
} | class ____<T> extends ApplicationEvent implements ArgumentSuppliedEvent<T> {
private final ArgumentSuppliedEvent<T> event;
private final Map<String, Object> attributes;
public AttributedArugmentSuppliedEvent(ArgumentSuppliedEvent<T> event, Map<String, Object> attributes) {
super(event.getSource());
this.event = event;
this.attributes = attributes;
}
@Override
public Class<T> getType() {
return event.getType();
}
@Override
public T getArgument() {
return event.getArgument();
}
public Map<String, Object> getAttributes() {
return attributes;
}
}
| AttributedArugmentSuppliedEvent |
java | quarkusio__quarkus | extensions/spring-data-jpa/deployment/src/test/java/io/quarkus/spring/data/deployment/MethodNameParserTest.java | {
"start": 892,
"end": 10788
} | class ____ {
private final Class<?> repositoryClass = PersonRepository.class;
private final Class<?> entityClass = Person.class;
private final Class[] additionalClasses = new Class[] { Person.Address.class, Person.Country.class };
@Test
public void testFindAllByAddressZipCode() throws Exception {
MethodNameParser.Result result = parseMethod(repositoryClass, "findAllByAddressZipCode", entityClass,
additionalClasses);
assertThat(result).isNotNull();
assertSameClass(result.getEntityClass(), entityClass);
assertThat(result.getParamCount()).isEqualTo(1);
assertThat(result.getQuery())
.isEqualTo("SELECT person FROM Person AS person LEFT JOIN person.address address WHERE address.zipCode = ?1");
}
@Test
public void testFindAllByNameAndOrder() throws Exception {
MethodNameParser.Result result = parseMethod(repositoryClass, "findAllByNameAndOrder", entityClass,
additionalClasses);
assertThat(result).isNotNull();
assertSameClass(result.getEntityClass(), entityClass);
assertThat(result.getParamCount()).isEqualTo(2);
assertThat(result.getQuery())
.isEqualTo("SELECT person FROM Person AS person WHERE name = ?1 AND order = ?2");
}
@Test
public void testFindAllByNameOrOrder() throws Exception {
MethodNameParser.Result result = parseMethod(repositoryClass, "findAllByNameOrOrder", entityClass,
additionalClasses);
assertThat(result).isNotNull();
assertSameClass(result.getEntityClass(), entityClass);
assertThat(result.getParamCount()).isEqualTo(2);
assertThat(result.getQuery())
.isEqualTo("SELECT person FROM Person AS person WHERE name = ?1 OR order = ?2");
}
@Test
public void testFindAllByAddressCountry() throws Exception {
MethodNameParser.Result result = parseMethod(repositoryClass, "findAllByAddressCountry", entityClass,
additionalClasses);
assertThat(result).isNotNull();
assertSameClass(result.getEntityClass(), entityClass);
assertThat(result.getQuery()).isEqualTo("SELECT person FROM Person AS person WHERE addressCountry = ?1");
assertThat(result.getParamCount()).isEqualTo(1);
}
@Test
public void findAllByNameOrAgeOrActive() throws Exception {
MethodNameParser.Result result = parseMethod(repositoryClass, "findAllByNameOrAgeOrActive", entityClass,
additionalClasses);
assertThat(result).isNotNull();
assertSameClass(result.getEntityClass(), entityClass);
assertThat(result.getQuery())
.isEqualTo("SELECT person FROM Person AS person WHERE name = ?1 OR age = ?2 OR active = ?3");
assertThat(result.getParamCount()).isEqualTo(3);
}
@Test
public void findAllByNameAndAgeOrActive() throws Exception {
MethodNameParser.Result result = parseMethod(repositoryClass, "findAllByNameAndAgeOrActive", entityClass,
additionalClasses);
assertThat(result).isNotNull();
assertSameClass(result.getEntityClass(), entityClass);
assertThat(result.getQuery())
.isEqualTo("SELECT person FROM Person AS person WHERE name = ?1 AND age = ?2 OR active = ?3");
assertThat(result.getParamCount()).isEqualTo(3);
}
@Test
public void findAllByNameAndAgeAndActive() throws Exception {
MethodNameParser.Result result = parseMethod(repositoryClass, "findAllByNameAndAgeAndActive", entityClass,
additionalClasses);
assertThat(result).isNotNull();
assertSameClass(result.getEntityClass(), entityClass);
assertThat(result.getQuery())
.isEqualTo("SELECT person FROM Person AS person WHERE name = ?1 AND age = ?2 AND active = ?3");
assertThat(result.getParamCount()).isEqualTo(3);
}
@Test
public void testFindAllByAddress_Country() throws Exception {
MethodNameParser.Result result = parseMethod(repositoryClass, "findAllByAddress_Country", entityClass,
additionalClasses);
assertThat(result).isNotNull();
assertSameClass(result.getEntityClass(), entityClass);
assertThat(result.getQuery())
.isEqualTo("SELECT person FROM Person AS person LEFT JOIN person.address address WHERE address.country = ?1");
assertThat(result.getParamCount()).isEqualTo(1);
}
@Test
public void testFindAllByAddressCountryIsoCode() throws Exception {
UnableToParseMethodException exception = assertThrows(UnableToParseMethodException.class,
() -> parseMethod(repositoryClass, "findAllByAddressCountryIsoCode", entityClass, additionalClasses));
assertThat(exception).hasMessageContaining("Person does not contain a field named: addressCountryIsoCode");
}
@Test
public void testFindAllByAddress_CountryIsoCode() throws Exception {
MethodNameParser.Result result = parseMethod(repositoryClass, "findAllByAddress_CountryIsoCode", entityClass,
additionalClasses);
assertThat(result).isNotNull();
assertSameClass(result.getEntityClass(), entityClass);
assertThat(result.getQuery())
.isEqualTo(
"SELECT person FROM Person AS person LEFT JOIN person.address address WHERE address.country.isoCode = ?1");
assertThat(result.getParamCount()).isEqualTo(1);
}
@Test
public void testFindAllByAddress_Country_IsoCode() throws Exception {
MethodNameParser.Result result = parseMethod(repositoryClass, "findAllByAddress_Country_IsoCode", entityClass,
additionalClasses);
assertThat(result).isNotNull();
assertSameClass(result.getEntityClass(), entityClass);
assertThat(result.getQuery())
.isEqualTo(
"SELECT person FROM Person AS person LEFT JOIN person.address address WHERE address.country.isoCode = ?1");
assertThat(result.getParamCount()).isEqualTo(1);
}
@Test
public void testFindAllByAddress_CountryInvalid() throws Exception {
UnableToParseMethodException exception = assertThrows(UnableToParseMethodException.class,
() -> parseMethod(repositoryClass, "findAllByAddress_CountryInvalid", entityClass, additionalClasses));
assertThat(exception).hasMessageContaining("Person does not contain a field named: address_CountryInvalid");
assertThat(exception).hasMessageContaining("Country.invalid");
}
@Test
public void testFindAllBy_() throws Exception {
UnableToParseMethodException exception = assertThrows(UnableToParseMethodException.class,
() -> parseMethod(repositoryClass, "findAllBy_", entityClass, additionalClasses));
assertThat(exception).hasMessageContaining("Person does not contain a field named: _");
}
@Test
public void testGenericsWithWildcard() throws Exception {
Class[] additionalClasses = new Class[] { ChildBase.class };
MethodNameParser.Result result = parseMethod(ParentBaseRepository.class, "countParentsByChildren_Nombre",
ParentBase.class,
additionalClasses);
assertThat(result).isNotNull();
assertSameClass(result.getEntityClass(), ParentBase.class);
assertThat(result.getQuery())
.isEqualTo(
"FROM ParentBase AS parentbase LEFT JOIN parentbase.children children WHERE children.nombre = ?1");
assertThat(result.getParamCount()).isEqualTo(1);
}
@Test
public void shouldParseRepositoryMethodOverEntityContainingACollection() throws Exception {
Class[] additionalClasses = new Class[] { LoginEvent.class };
MethodNameParser.Result result = parseMethod(UserRepository.class, "countUsersByLoginEvents_Id",
User.class,
additionalClasses);
assertThat(result).isNotNull();
assertSameClass(result.getEntityClass(), User.class);
assertThat(result.getParamCount()).isEqualTo(1);
assertThat(result.getQuery()).isEqualTo(
"FROM User AS user LEFT JOIN loginEvents loginEvents ON user.userId = loginEvents.user.userId WHERE loginEvents.id = ?1");
}
private AbstractStringAssert<?> assertSameClass(ClassInfo classInfo, Class<?> aClass) {
return assertThat(classInfo.name().toString()).isEqualTo(aClass.getName());
}
private MethodNameParser.Result parseMethod(Class<?> repositoryClass, String methodToParse,
Class<?> entityClass, Class<?>... additionalClasses) throws IOException {
IndexView indexView = index(ArrayUtils.addAll(additionalClasses, repositoryClass, entityClass));
DotName repository = DotName.createSimple(repositoryClass.getName());
DotName entity = DotName.createSimple(entityClass.getName());
ClassInfo entityClassInfo = indexView.getClassByName(entity);
ClassInfo repositoryClassInfo = indexView.getClassByName(repository);
MethodNameParser methodNameParser = new MethodNameParser(entityClassInfo, indexView);
MethodInfo repositoryMethod = repositoryClassInfo.firstMethod(methodToParse);
MethodNameParser.Result result = methodNameParser.parse(repositoryMethod);
return result;
}
public static Index index(Class<?>... classes) throws IOException {
Indexer indexer = new Indexer();
for (Class<?> clazz : classes) {
try (InputStream stream = MethodNameParserTest.class.getClassLoader()
.getResourceAsStream(fromClassNameToResourceName(clazz.getName()))) {
indexer.index(stream);
}
}
return indexer.complete();
}
}
| MethodNameParserTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/TransportDeleteAsyncResultAction.java | {
"start": 1297,
"end": 3577
} | class ____ extends HandledTransportAction<DeleteAsyncResultRequest, AcknowledgedResponse> {
public static final ActionType<AcknowledgedResponse> TYPE = new ActionType<>("indices:data/read/async_search/delete");
private final DeleteAsyncResultsService deleteResultsService;
private final ClusterService clusterService;
private final TransportService transportService;
@Inject
public TransportDeleteAsyncResultAction(
TransportService transportService,
ActionFilters actionFilters,
ClusterService clusterService,
NamedWriteableRegistry registry,
Client client,
ThreadPool threadPool,
BigArrays bigArrays
) {
super(TYPE.name(), transportService, actionFilters, DeleteAsyncResultRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE);
this.transportService = transportService;
this.clusterService = clusterService;
AsyncTaskIndexService<?> store = new AsyncTaskIndexService<>(
XPackPlugin.ASYNC_RESULTS_INDEX,
clusterService,
threadPool.getThreadContext(),
client,
ASYNC_SEARCH_ORIGIN,
(in) -> {
throw new UnsupportedOperationException("Reading is not supported during deletion");
},
registry,
bigArrays
);
this.deleteResultsService = new DeleteAsyncResultsService(store, transportService.getTaskManager());
}
@Override
protected void doExecute(Task task, DeleteAsyncResultRequest request, ActionListener<AcknowledgedResponse> listener) {
AsyncExecutionId searchId = AsyncExecutionId.decode(request.getId());
DiscoveryNode node = clusterService.state().nodes().get(searchId.getTaskId().getNodeId());
if (clusterService.localNode().getId().equals(searchId.getTaskId().getNodeId()) || node == null) {
deleteResultsService.deleteResponse(request, listener);
} else {
transportService.sendRequest(
node,
TYPE.name(),
request,
new ActionListenerResponseHandler<>(listener, AcknowledgedResponse::readFrom, EsExecutors.DIRECT_EXECUTOR_SERVICE)
);
}
}
}
| TransportDeleteAsyncResultAction |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/health/HealthCheckAware.java | {
"start": 847,
"end": 948
} | interface ____ represent an object which wishes to be injected with the {@link HealthCheck}
*/
public | to |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/apigenerator/CreateReactiveApi.java | {
"start": 1849,
"end": 9002
} | class ____ {
public static Set<String> KEEP_METHOD_RESULT_TYPE = LettuceSets.unmodifiableSet("digest", "close", "isOpen",
"BaseRedisCommands.reset", "getStatefulConnection", "setAutoFlushCommands", "flushCommands");
public static Set<String> FORCE_FLUX_RESULT = LettuceSets.unmodifiableSet("eval", "evalsha", "evalReadOnly",
"evalshaReadOnly", "fcall", "fcallReadOnly", "dispatch");
public static Set<String> VALUE_WRAP = LettuceSets.unmodifiableSet("geopos", "bitfield");
private static final Map<String, String> RESULT_SPEC;
static {
Map<String, String> resultSpec = new HashMap<>();
resultSpec.put("geopos", "Flux<Value<GeoCoordinates>>");
resultSpec.put("aclCat()", "Mono<Set<AclCategory>>");
resultSpec.put("aclCat(AclCategory category)", "Mono<Set<CommandType>>");
resultSpec.put("aclGetuser", "Mono<List<Object>>");
resultSpec.put("bitfield", "Flux<Value<Long>>");
resultSpec.put("hgetall", "Flux<KeyValue<K, V>>");
resultSpec.put("zmscore", "Mono<List<Double>>"); // Redis returns null if element was not found
resultSpec.put("hgetall(KeyValueStreamingChannel<K, V> channel, K key)", "Mono<Long>");
RESULT_SPEC = resultSpec;
}
protected Consumer<MethodDeclaration> methodMutator() {
return method -> {
if (isStreamingChannelMethod(method)) {
if (!method.getAnnotationByClass(Deprecated.class).isPresent()) {
method.addAnnotation(new MarkerAnnotationExpr("Deprecated"));
}
}
if (method.getNameAsString().equals("dispatch")) {
Parameter output = method.getParameterByName("output").get();
output.setType("CommandOutput<K, V, ?>");
}
};
}
protected boolean isStreamingChannelMethod(MethodDeclaration method) {
return method.getParameters().stream().anyMatch(p -> p.getType().asString().contains("StreamingChannel"));
}
/**
* Mutate type comment.
*
* @return
*/
Function<String, String> commentMutator() {
return s -> s.replaceAll("\\$\\{intent\\}", "Reactive executed commands").replaceAll("@since 3.0", "@since 4.0")
+ "* @generated by " + getClass().getName() + "\r\n ";
}
BiFunction<MethodDeclaration, Comment, Comment> methodCommentMutator() {
return (method, comment) -> {
String commentText = comment != null ? comment.getContent() : null;
if (commentText != null) {
commentText = commentText.replaceAll("List<(.*)>", "$1").replaceAll("Set<(.*)>", "$1");
if (isStreamingChannelMethod(method)) {
commentText += "* @deprecated since 6.0 in favor of consuming large results through the {@link org.reactivestreams.Publisher} returned by {@link #"
+ method.getNameAsString() + "}.";
}
comment.setContent(commentText);
}
return comment;
};
}
/**
* Mutate type to async result.
*
* @return
*/
Function<MethodDeclaration, Type> methodTypeMutator() {
return method -> {
ClassOrInterfaceDeclaration declaringClass = (ClassOrInterfaceDeclaration) method.getParentNode().get();
String baseType = "Mono";
String typeArgument = method.getType().toString().trim();
String fixedResultType = getResultType(method, declaringClass);
if (fixedResultType != null) {
return new ClassOrInterfaceType(fixedResultType);
} else if (CompilationUnitFactory.contains(FORCE_FLUX_RESULT, method)) {
baseType = "Flux";
} else if (typeArgument.startsWith("List<")) {
baseType = "Flux";
typeArgument = typeArgument.substring(5, typeArgument.length() - 1);
} else if (typeArgument.startsWith("Set<")) {
baseType = "Flux";
typeArgument = typeArgument.substring(4, typeArgument.length() - 1);
} else {
baseType = "Mono";
}
if (fixedResultType == null && CompilationUnitFactory.contains(VALUE_WRAP, method)) {
typeArgument = String.format("Value<%s>", typeArgument);
}
return CompilationUnitFactory.createParametrizedType(baseType, typeArgument);
};
}
private String getResultType(MethodDeclaration method, ClassOrInterfaceDeclaration classOfMethod) {
String declaration = nameAndParameters(method);
if (RESULT_SPEC.containsKey(declaration)) {
return RESULT_SPEC.get(declaration);
}
if (RESULT_SPEC.containsKey(method.getNameAsString())) {
return RESULT_SPEC.get(method.getNameAsString());
}
String key = classOfMethod.getNameAsString() + "." + method.getNameAsString();
if (RESULT_SPEC.containsKey(key)) {
return RESULT_SPEC.get(key);
}
return null;
}
/**
* Supply additional imports.
*
* @return
*/
Supplier<List<String>> importSupplier() {
return () -> Arrays.asList("reactor.core.publisher.Flux", "reactor.core.publisher.Mono");
}
@ParameterizedTest
@MethodSource("arguments")
@Tag(API_GENERATOR)
void createInterface(String argument) throws Exception {
createFactory(argument).createInterface();
}
static List<String> arguments() {
return Arrays.asList(Constants.TEMPLATE_NAMES);
}
private CompilationUnitFactory createFactory(String templateName) {
String targetName = templateName.replace("Commands", "ReactiveCommands");
File templateFile = new File(Constants.TEMPLATES, "io/lettuce/core/api/" + templateName + ".java");
String targetPackage;
if (templateName.contains("RedisSentinel")) {
targetPackage = "io.lettuce.core.sentinel.api.reactive";
} else {
targetPackage = "io.lettuce.core.api.reactive";
}
CompilationUnitFactory factory = new CompilationUnitFactory(templateFile, Constants.SOURCES, targetPackage, targetName,
commentMutator(), methodTypeMutator(), methodMutator(), methodDeclaration -> true, importSupplier(), null,
methodCommentMutator());
factory.keepMethodSignaturesFor(KEEP_METHOD_RESULT_TYPE);
return factory;
}
static String nameAndParameters(MethodDeclaration method) {
StringBuilder sb = new StringBuilder();
sb.append(method.getName());
sb.append("(");
boolean firstParam = true;
for (Parameter param : method.getParameters()) {
if (firstParam) {
firstParam = false;
} else {
sb.append(", ");
}
sb.append(param.toString(new PrettyPrinterConfiguration().setPrintComments(false)));
}
sb.append(")");
return sb.toString();
}
}
| CreateReactiveApi |
java | spring-projects__spring-security | oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/jackson/OAuth2UserAuthorityMixin.java | {
"start": 1447,
"end": 1637
} | class ____ {
@JsonCreator
OAuth2UserAuthorityMixin(@JsonProperty("authority") String authority,
@JsonProperty("attributes") Map<String, Object> attributes) {
}
}
| OAuth2UserAuthorityMixin |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelAliasAction.java | {
"start": 1063,
"end": 2081
} | class ____ extends BaseRestHandler {
@Override
public List<Route> routes() {
return List.of(
new Route(PUT, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}/model_aliases/{" + MODEL_ALIAS + "}")
);
}
@Override
public String getName() {
return "ml_put_trained_model_alias_action";
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
String modelAlias = restRequest.param(MODEL_ALIAS);
String modelId = restRequest.param(TrainedModelConfig.MODEL_ID.getPreferredName());
boolean reassign = restRequest.paramAsBoolean(PutTrainedModelAliasAction.Request.REASSIGN, false);
return channel -> client.execute(
PutTrainedModelAliasAction.INSTANCE,
new PutTrainedModelAliasAction.Request(modelAlias, modelId, reassign),
new RestToXContentListener<>(channel)
);
}
}
| RestPutTrainedModelAliasAction |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/search/query/GeoFilterRadius.java | {
"start": 735,
"end": 998
} | interface ____ {
/**
* Defines search within radius
*
* @param radius - radius in geo units
* @param geoUnit - geo unit
* @return search conditions object
*/
QueryFilter radius(double radius, GeoUnit geoUnit);
}
| GeoFilterRadius |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/util/OrderedParameterParser.java | {
"start": 251,
"end": 408
} | class ____ extends ParameterParser {
@Override
protected <K, V> Map<K, V> newMap() {
return new LinkedHashMap<>();
}
} | OrderedParameterParser |
java | spring-projects__spring-framework | spring-tx/src/test/java/org/springframework/transaction/event/TransactionalApplicationListenerAdapterTests.java | {
"start": 1096,
"end": 4235
} | class ____ {
@Test
void invokesCompletionCallbackOnSuccess() {
CapturingSynchronizationCallback callback = new CapturingSynchronizationCallback();
PayloadApplicationEvent<Object> event = new PayloadApplicationEvent<>(this, new Object());
TransactionalApplicationListener<PayloadApplicationEvent<Object>> adapter =
TransactionalApplicationListener.forPayload(p -> {});
adapter.addCallback(callback);
runInTransaction(() -> adapter.onApplicationEvent(event));
assertThat(callback.preEvent).isEqualTo(event);
assertThat(callback.postEvent).isEqualTo(event);
assertThat(callback.ex).isNull();
assertThat(adapter.getTransactionPhase()).isEqualTo(TransactionPhase.AFTER_COMMIT);
assertThat(adapter.getListenerId()).isEmpty();
}
@Test
void invokesExceptionHandlerOnException() {
CapturingSynchronizationCallback callback = new CapturingSynchronizationCallback();
PayloadApplicationEvent<String> event = new PayloadApplicationEvent<>(this, "event");
RuntimeException ex = new RuntimeException("event");
TransactionalApplicationListener<PayloadApplicationEvent<String>> adapter =
TransactionalApplicationListener.forPayload(
TransactionPhase.BEFORE_COMMIT, p -> {throw ex;});
adapter.addCallback(callback);
assertThatRuntimeException()
.isThrownBy(() -> runInTransaction(() -> adapter.onApplicationEvent(event)))
.withMessage("event");
assertThat(callback.preEvent).isEqualTo(event);
assertThat(callback.postEvent).isEqualTo(event);
assertThat(callback.ex).isEqualTo(ex);
assertThat(adapter.getTransactionPhase()).isEqualTo(TransactionPhase.BEFORE_COMMIT);
assertThat(adapter.getListenerId()).isEmpty();
}
@Test
void useSpecifiedIdentifier() {
CapturingSynchronizationCallback callback = new CapturingSynchronizationCallback();
PayloadApplicationEvent<String> event = new PayloadApplicationEvent<>(this, "event");
TransactionalApplicationListenerAdapter<PayloadApplicationEvent<String>> adapter =
new TransactionalApplicationListenerAdapter<>(e -> {});
adapter.setTransactionPhase(TransactionPhase.BEFORE_COMMIT);
adapter.setListenerId("identifier");
adapter.addCallback(callback);
runInTransaction(() -> adapter.onApplicationEvent(event));
assertThat(callback.preEvent).isEqualTo(event);
assertThat(callback.postEvent).isEqualTo(event);
assertThat(callback.ex).isNull();
assertThat(adapter.getTransactionPhase()).isEqualTo(TransactionPhase.BEFORE_COMMIT);
assertThat(adapter.getListenerId()).isEqualTo("identifier");
}
private static void runInTransaction(Runnable runnable) {
TransactionSynchronizationManager.setActualTransactionActive(true);
TransactionSynchronizationManager.initSynchronization();
try {
runnable.run();
TransactionSynchronizationManager.getSynchronizations().forEach(it -> {
it.beforeCommit(false);
it.afterCommit();
it.afterCompletion(TransactionSynchronization.STATUS_COMMITTED);
});
}
finally {
TransactionSynchronizationManager.clearSynchronization();
TransactionSynchronizationManager.setActualTransactionActive(false);
}
}
}
| TransactionalApplicationListenerAdapterTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/query/TupleNativeQueryTest.java | {
"start": 1096,
"end": 24644
} | class ____ {
@BeforeEach
public void setUp(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
User user = new User("Arnold");
entityManager.persist(user);
});
}
@AfterEach
public void tearDown(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
CriteriaDelete<User> delete = entityManager.getCriteriaBuilder().createCriteriaDelete(User.class);
delete.from(User.class);
entityManager.createQuery(delete).executeUpdate();
});
}
@Test
public void testPositionalGetterShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleResult(entityManager);
Tuple tuple = result.get(0);
assertEquals(1L, tuple.get(0));
assertEquals("Arnold", tuple.get(1));
});
}
@Test
public void testPositionalGetterWithClassShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleResult(entityManager);
Tuple tuple = result.get(0);
assertEquals(Long.valueOf(1L), tuple.get(0, Long.class));
assertEquals("Arnold", tuple.get(1, String.class));
});
}
@Test
public void testPositionalGetterShouldThrowExceptionWhenLessThanZeroGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getTupleResult(entityManager);
Tuple tuple = result.get(0);
tuple.get(-1);
}
);
});
}
@Test
public void testPositionalGetterShouldThrowExceptionWhenTupleSizePositionGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getTupleResult(entityManager);
Tuple tuple = result.get(0);
tuple.get(2);
}
);
});
}
@Test
public void testPositionalGetterShouldThrowExceptionWhenExceedingPositionGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getTupleResult(entityManager);
Tuple tuple = result.get(0);
tuple.get(3);
}
);
});
}
@Test
public void testAliasGetterWithoutExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleResult(entityManager);
Tuple tuple = result.get(0);
assertEquals(1L, tuple.get("ID"));
assertEquals("Arnold", tuple.get("FIRSTNAME"));
});
}
@Test
public void testAliasGetterShouldWorkWithoutExplicitAliasWhenLowerCaseAliasGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleResult(entityManager);
Tuple tuple = result.get(0);
tuple.get("id");
});
}
@Test
public void testAliasGetterShouldThrowExceptionWithoutExplicitAliasWhenWrongAliasGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getTupleResult(entityManager);
Tuple tuple = result.get(0);
tuple.get("e");
}
);
});
}
@Test
public void testAliasGetterWithClassWithoutExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleResult(entityManager);
Tuple tuple = result.get(0);
assertEquals(Long.valueOf(1L), tuple.get("ID", Long.class));
assertEquals("Arnold", tuple.get("FIRSTNAME", String.class));
});
}
@Test
public void testAliasGetterWithExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleAliasedResult(entityManager);
Tuple tuple = result.get(0);
assertEquals(1L, tuple.get("ALIAS1"));
assertEquals("Arnold", tuple.get("ALIAS2"));
});
}
@Test
public void testAliasGetterWithClassWithExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleAliasedResult(entityManager);
Tuple tuple = result.get(0);
assertEquals(Long.valueOf(1L), tuple.get("ALIAS1", Long.class));
assertEquals("Arnold", tuple.get("ALIAS2", String.class));
});
}
@Test
public void testToArrayShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> tuples = getTupleResult(entityManager);
Object[] result = tuples.get(0).toArray();
assertArrayEquals(new Object[]{1L, "Arnold"}, result);
});
}
@Test
public void testGetElementsShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> tuples = getTupleResult(entityManager);
List<TupleElement<?>> result = tuples.get(0).getElements();
assertEquals(2, result.size());
assertEquals(Long.class, result.get(0).getJavaType());
assertEquals("ID", result.get(0).getAlias());
assertEquals(String.class, result.get(1).getJavaType());
assertEquals("FIRSTNAME", result.get(1).getAlias());
});
}
@Test
public void testPositionalGetterWithNamedNativeQueryShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleNamedResult(entityManager, "standard");
Tuple tuple = result.get(0);
assertEquals(1L, tuple.get(0));
assertEquals("Arnold", tuple.get(1));
});
}
@Test
public void testPositionalGetterWithNamedNativeQueryWithClassShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleNamedResult(entityManager, "standard");
Tuple tuple = result.get(0);
assertEquals(Long.valueOf(1L), tuple.get(0, Long.class));
assertEquals("Arnold", tuple.get(1, String.class));
});
}
@Test
public void testPositionalGetterWithNamedNativeQueryShouldThrowExceptionWhenLessThanZeroGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getTupleNamedResult(entityManager, "standard");
Tuple tuple = result.get(0);
tuple.get(-1);
}
);
});
}
@Test
public void testPositionalGetterWithNamedNativeQueryShouldThrowExceptionWhenTupleSizePositionGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getTupleNamedResult(entityManager, "standard");
Tuple tuple = result.get(0);
tuple.get(2);
}
);
});
}
@Test
public void testPositionalGetterWithNamedNativeQueryShouldThrowExceptionWhenExceedingPositionGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getTupleNamedResult(entityManager, "standard");
Tuple tuple = result.get(0);
tuple.get(3);
}
);
});
}
@Test
public void testAliasGetterWithNamedNativeQueryWithoutExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleNamedResult(entityManager, "standard");
Tuple tuple = result.get(0);
assertEquals(1L, tuple.get("ID"));
assertEquals("Arnold", tuple.get("FIRSTNAME"));
});
}
@Test
public void testAliasGetterWithNamedNativeQueryShouldWorkWithoutExplicitAliasWhenLowerCaseAliasGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleNamedResult(entityManager, "standard");
Tuple tuple = result.get(0);
tuple.get("id");
});
}
@Test
public void testAliasGetterWithNamedNativeQueryShouldThrowExceptionWithoutExplicitAliasWhenWrongAliasGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getTupleNamedResult(entityManager, "standard");
Tuple tuple = result.get(0);
tuple.get("e");
}
);
});
}
@Test
public void testAliasGetterWithNamedNativeQueryWithClassWithoutExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleNamedResult(entityManager, "standard");
Tuple tuple = result.get(0);
assertEquals(Long.valueOf(1L), tuple.get("ID", Long.class));
assertEquals("Arnold", tuple.get("FIRSTNAME", String.class));
});
}
@Test
public void testAliasGetterWithNamedNativeQueryWithExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleNamedResult(entityManager, "standard_with_alias");
Tuple tuple = result.get(0);
assertEquals(1L, tuple.get("ALIAS1"));
assertEquals("Arnold", tuple.get("ALIAS2"));
});
}
@Test
public void testAliasGetterWithNamedNativeQueryWithClassWithExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleNamedResult(entityManager, "standard_with_alias");
Tuple tuple = result.get(0);
assertEquals(Long.valueOf(1L), tuple.get("ALIAS1", Long.class));
assertEquals("Arnold", tuple.get("ALIAS2", String.class));
});
}
@Test
public void testToArrayShouldWithNamedNativeQueryWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> tuples = getTupleNamedResult(entityManager, "standard");
Object[] result = tuples.get(0).toArray();
assertArrayEquals(new Object[]{1L, "Arnold"}, result);
});
}
@Test
public void testGetElementsWithNamedNativeQueryShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> tuples = getTupleNamedResult(entityManager, "standard");
List<TupleElement<?>> result = tuples.get(0).getElements();
assertEquals(2, result.size());
assertEquals(Long.class, result.get(0).getJavaType());
assertEquals("ID", result.get(0).getAlias());
assertEquals(String.class, result.get(1).getJavaType());
assertEquals("FIRSTNAME", result.get(1).getAlias());
});
}
@Test
public void testStreamedPositionalGetterShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getStreamedTupleResult(entityManager);
Tuple tuple = result.get(0);
assertEquals(1L, tuple.get(0));
assertEquals("Arnold", tuple.get(1));
});
}
@Test
public void testStreamedPositionalGetterWithClassShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getStreamedTupleResult(entityManager);
Tuple tuple = result.get(0);
assertEquals(Long.valueOf(1L), tuple.get(0, Long.class));
assertEquals("Arnold", tuple.get(1, String.class));
});
}
@Test
public void testStreamedPositionalGetterShouldThrowExceptionWhenLessThanZeroGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getStreamedTupleResult(entityManager);
Tuple tuple = result.get(0);
tuple.get(-1);
}
);
});
}
@Test
public void testStreamedPositionalGetterShouldThrowExceptionWhenTupleSizePositionGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getStreamedTupleResult(entityManager);
Tuple tuple = result.get(0);
tuple.get(2);
}
);
});
}
@Test
public void testStreamedPositionalGetterShouldThrowExceptionWhenExceedingPositionGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getStreamedTupleResult(entityManager);
Tuple tuple = result.get(0);
tuple.get(3);
}
);
});
}
@Test
public void testStreamedAliasGetterWithoutExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getStreamedTupleResult(entityManager);
Tuple tuple = result.get(0);
assertEquals(1L, tuple.get("ID"));
assertEquals("Arnold", tuple.get("FIRSTNAME"));
});
}
@Test
public void testStreamedAliasGetterShouldWorkWithoutExplicitAliasWhenLowerCaseAliasGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getStreamedTupleResult(entityManager);
Tuple tuple = result.get(0);
tuple.get("id");
});
}
@Test
public void testStreamedAliasGetterShouldThrowExceptionWithoutExplicitAliasWhenWrongAliasGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getStreamedTupleResult(entityManager);
Tuple tuple = result.get(0);
tuple.get("e");
}
);
});
}
@Test
public void testStreamedAliasGetterWithClassWithoutExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getStreamedTupleResult(entityManager);
Tuple tuple = result.get(0);
assertEquals(Long.valueOf(1L), tuple.get("ID", Long.class));
assertEquals("Arnold", tuple.get("FIRSTNAME", String.class));
});
}
@Test
public void testStreamedAliasGetterWithExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleAliasedResult(entityManager);
Tuple tuple = result.get(0);
assertEquals(1L, tuple.get("ALIAS1"));
assertEquals("Arnold", tuple.get("ALIAS2"));
});
}
@Test
public void testStreamedAliasGetterWithClassWithExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getTupleAliasedResult(entityManager);
Tuple tuple = result.get(0);
assertEquals(Long.valueOf(1L), tuple.get("ALIAS1", Long.class));
assertEquals("Arnold", tuple.get("ALIAS2", String.class));
});
}
@Test
public void testStreamedToArrayShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> tuples = getStreamedTupleResult(entityManager);
Object[] result = tuples.get(0).toArray();
assertArrayEquals(new Object[]{1L, "Arnold"}, result);
});
}
@Test
public void testStreamedGetElementsShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> tuples = getStreamedTupleResult(entityManager);
List<TupleElement<?>> result = tuples.get(0).getElements();
assertEquals(2, result.size());
assertEquals(Long.class, result.get(0).getJavaType());
assertEquals("ID", result.get(0).getAlias());
assertEquals(String.class, result.get(1).getJavaType());
assertEquals("FIRSTNAME", result.get(1).getAlias());
});
}
@Test
public void testStreamedPositionalGetterWithNamedNativeQueryShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getStreamedNamedTupleResult(entityManager, "standard");
Tuple tuple = result.get(0);
assertEquals(1L, tuple.get(0));
assertEquals("Arnold", tuple.get(1));
});
}
@Test
public void testStreamedPositionalGetterWithNamedNativeQueryWithClassShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getStreamedNamedTupleResult(entityManager, "standard");
Tuple tuple = result.get(0);
assertEquals(Long.valueOf(1L), tuple.get(0, Long.class));
assertEquals("Arnold", tuple.get(1, String.class));
});
}
@Test
public void testStreamedPositionalGetterWithNamedNativeQueryShouldThrowExceptionWhenLessThanZeroGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getStreamedNamedTupleResult(entityManager, "standard");
Tuple tuple = result.get(0);
tuple.get(-1);
}
);
});
}
@Test
public void testStreamedPositionalGetterWithNamedNativeQueryShouldThrowExceptionWhenTupleSizePositionGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getStreamedNamedTupleResult(entityManager, "standard");
Tuple tuple = result.get(0);
tuple.get(2);
}
);
});
}
@Test
public void testStreamedPositionalGetterWithNamedNativeQueryShouldThrowExceptionWhenExceedingPositionGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getStreamedNamedTupleResult(entityManager, "standard");
Tuple tuple = result.get(0);
tuple.get(3);
}
);
});
}
@Test
public void testStreamedAliasGetterWithNamedNativeQueryWithoutExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getStreamedNamedTupleResult(entityManager, "standard");
Tuple tuple = result.get(0);
assertEquals(1L, tuple.get("ID"));
assertEquals("Arnold", tuple.get("FIRSTNAME"));
});
}
@Test
public void testStreamedAliasGetterWithNamedNativeQueryShouldWorkWithoutExplicitAliasWhenLowerCaseAliasGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getStreamedNamedTupleResult(entityManager, "standard");
Tuple tuple = result.get(0);
tuple.get("id");
});
}
@Test
public void testStreamedAliasGetterWithNamedNativeQueryShouldThrowExceptionWithoutExplicitAliasWhenWrongAliasGiven(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
assertThrows(
IllegalArgumentException.class,
() -> {
List<Tuple> result = getStreamedNamedTupleResult(entityManager, "standard");
Tuple tuple = result.get(0);
tuple.get("e");
}
);
});
}
@Test
public void testStreamedAliasGetterWithNamedNativeQueryWithClassWithoutExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getStreamedNamedTupleResult(entityManager, "standard");
Tuple tuple = result.get(0);
assertEquals(Long.valueOf(1L), tuple.get("ID", Long.class));
assertEquals("Arnold", tuple.get("FIRSTNAME", String.class));
});
}
@Test
public void testStreamedAliasGetterWithNamedNativeQueryWithExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getStreamedNamedTupleResult(entityManager, "standard_with_alias");
Tuple tuple = result.get(0);
assertEquals(1L, tuple.get("ALIAS1"));
assertEquals("Arnold", tuple.get("ALIAS2"));
});
}
@Test
public void testStreamedAliasGetterWithNamedNativeQueryWithClassWithExplicitAliasShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> result = getStreamedNamedTupleResult(entityManager, "standard_with_alias");
Tuple tuple = result.get(0);
assertEquals(Long.valueOf(1L), tuple.get("ALIAS1", Long.class));
assertEquals("Arnold", tuple.get("ALIAS2", String.class));
});
}
@Test
public void testStreamedToArrayShouldWithNamedNativeQueryWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> tuples = getStreamedNamedTupleResult(entityManager, "standard");
Object[] result = tuples.get(0).toArray();
assertArrayEquals(new Object[]{1L, "Arnold"}, result);
});
}
@Test
public void testStreamedGetElementsWithNamedNativeQueryShouldWorkProperly(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> tuples = getStreamedNamedTupleResult(entityManager, "standard");
List<TupleElement<?>> result = tuples.get(0).getElements();
assertEquals(2, result.size());
assertEquals(Long.class, result.get(0).getJavaType());
assertEquals("ID", result.get(0).getAlias());
assertEquals(String.class, result.get(1).getJavaType());
assertEquals("FIRSTNAME", result.get(1).getAlias());
});
}
	@Test
	@JiraKey(value = "HHH-11897")
	public void testGetElementsShouldNotThrowExceptionWhenResultContainsNullValue(EntityManagerFactoryScope scope) {
		// Regression test: getElements() used to fail when a selected column was NULL.
		// First transaction: null out the firstname column of the seeded user.
		scope.inTransaction( entityManager -> {
			User user = entityManager.find(User.class, 1L);
			user.firstName = null;
		});
		// Second transaction: re-query and verify the tuple metadata is still usable.
		scope.inTransaction( entityManager -> {
			List<Tuple> tuples = getTupleResult(entityManager);
			final Tuple tuple = tuples.get(0);
			List<TupleElement<?>> result = tuple.getElements();
			assertEquals(2, result.size());
			final TupleElement<?> firstTupleElement = result.get(0);
			assertEquals(Long.class, firstTupleElement.getJavaType());
			assertEquals("ID", firstTupleElement.getAlias());
			assertEquals(1L, tuple.get(firstTupleElement.getAlias()));
			final TupleElement<?> secondTupleElement = result.get(1);
			// With a NULL value there is no concrete class to report, so the element's
			// Java type degrades to Object.class rather than String.class.
			assertEquals(Object.class, secondTupleElement.getJavaType());
			assertEquals("FIRSTNAME", secondTupleElement.getAlias());
			assertNull(tuple.get(secondTupleElement.getAlias()));
		});
	}
@SuppressWarnings("unchecked")
private List<Tuple> getTupleAliasedResult(EntityManager entityManager) {
Query query = entityManager.createNativeQuery("SELECT id AS alias1, firstname AS alias2 FROM users", Tuple.class);
return (List<Tuple>) query.getResultList();
}
@SuppressWarnings("unchecked")
private List<Tuple> getStreamedTupleAliasedResult(EntityManager entityManager) {
NativeQueryImpl query = (NativeQueryImpl) entityManager.createNativeQuery(
"SELECT id AS alias1, firstname AS alias2 FROM users",
Tuple.class
);
return (List<Tuple>) query.stream().collect(Collectors.toList());
}
@SuppressWarnings("unchecked")
private List<Tuple> getTupleResult(EntityManager entityManager) {
Query query = entityManager.createNativeQuery("SELECT id, firstname FROM users", Tuple.class);
return (List<Tuple>) query.getResultList();
}
private List<Tuple> getTupleNamedResult(EntityManager entityManager, String name) {
return entityManager.createNamedQuery(name, Tuple.class).getResultList();
}
@SuppressWarnings("unchecked")
private List<Tuple> getStreamedTupleResult(EntityManager entityManager) {
NativeQuery query = (NativeQuery) entityManager.createNativeQuery( "SELECT id, firstname FROM users", Tuple.class);
return (List<Tuple>) query.stream().collect(Collectors.toList());
}
@SuppressWarnings("unchecked")
private List<Tuple> getStreamedNamedTupleResult(EntityManager entityManager, String name) {
return (List<Tuple>)((NativeQuery) entityManager.createNamedQuery(name, Tuple.class)).stream().collect(Collectors.toList());
}
@Entity
@Table(name = "users")
@NamedNativeQueries({
@NamedNativeQuery(
name = "standard",
query = "SELECT id, firstname FROM users"
),
@NamedNativeQuery(
name = "standard_with_alias",
query = "SELECT id AS alias1, firstname AS alias2 FROM users"
)
})
public static | TupleNativeQueryTest |
java | quarkusio__quarkus | core/processor/src/main/java/io/quarkus/annotation/processor/documentation/config/scanner/ConfigAnnotationListener.java | {
"start": 612,
"end": 1543
} | interface ____ {
default Optional<DiscoveryConfigRoot> onConfigRoot(TypeElement configRoot) {
return Optional.empty();
}
default void onSuperclass(DiscoveryRootElement discoveryRootElement, TypeElement superClass) {
}
default void onInterface(DiscoveryRootElement discoveryRootElement, TypeElement interfaze) {
}
default Optional<DiscoveryConfigGroup> onConfigGroup(TypeElement configGroup) {
return Optional.empty();
}
default void onEnclosedMethod(DiscoveryRootElement discoveryRootElement, TypeElement clazz, ExecutableElement method,
ResolvedType type) {
}
default void onEnclosedField(DiscoveryRootElement discoveryRootElement, TypeElement clazz, VariableElement field,
ResolvedType type) {
}
default void onResolvedEnum(TypeElement enumTypeElement) {
}
default void finalizeProcessing() {
}
}
| ConfigAnnotationListener |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/tracing/Tracer.java | {
"start": 256,
"end": 882
} | class ____ {
/**
* Returns a new trace {@link Tracer.Span}.
*
* @return a new {@link Span}.
*/
public abstract Span nextSpan();
/**
* Returns a new trace {@link Tracer.Span} associated with {@link TraceContext} or a new one if {@link TraceContext} is
* {@code null}.
*
* @param traceContext the trace context.
* @return a new {@link Span}.
*/
public abstract Span nextSpan(TraceContext traceContext);
/**
* Used to model the latency of an operation along with tags such as name or the {@link Tracing.Endpoint}.
*/
public abstract static | Tracer |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/FopEndpointBuilderFactory.java | {
"start": 1583,
"end": 3637
} | interface ____
extends
EndpointProducerBuilder {
default AdvancedFopEndpointBuilder advanced() {
return (AdvancedFopEndpointBuilder) this;
}
/**
* Allows to use a custom configured or implementation of
* org.apache.fop.apps.FopFactory.
*
* The option is a: <code>org.apache.fop.apps.FopFactory</code> type.
*
* Group: producer
*
* @param fopFactory the value to set
* @return the dsl builder
*/
default FopEndpointBuilder fopFactory(org.apache.fop.apps.FopFactory fopFactory) {
doSetProperty("fopFactory", fopFactory);
return this;
}
/**
* Allows to use a custom configured or implementation of
* org.apache.fop.apps.FopFactory.
*
* The option will be converted to a
* <code>org.apache.fop.apps.FopFactory</code> type.
*
* Group: producer
*
* @param fopFactory the value to set
* @return the dsl builder
*/
default FopEndpointBuilder fopFactory(String fopFactory) {
doSetProperty("fopFactory", fopFactory);
return this;
}
/**
* The location of a configuration file which can be loaded from
* classpath or file system.
*
* This option can also be loaded from an existing file, by prefixing
* with file: or classpath: followed by the location of the file.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param userConfigURL the value to set
* @return the dsl builder
*/
default FopEndpointBuilder userConfigURL(String userConfigURL) {
doSetProperty("userConfigURL", userConfigURL);
return this;
}
}
/**
* Advanced builder for endpoint for the FOP component.
*/
public | FopEndpointBuilder |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ParameterCommentTest.java | {
"start": 4130,
"end": 4565
} | class ____ {
Test(int x, int y) {}
{
new Test(/* x= */ 0, /* y= */ 1);
new Test(/* x= */ 0, /* y= */ 1);
}
}
""")
.doTest(TestMode.TEXT_MATCH);
}
@Test
public void parameterComment_doesNotChange_whenNestedComment() {
testHelper
.addInputLines(
"in/Test.java",
"""
abstract | Test |
java | google__auto | common/src/main/java/com/google/auto/common/AnnotationValues.java | {
"start": 1220,
"end": 1326
} | class ____ working with {@link AnnotationValue} instances.
*
* @author Christian Gruber
*/
public final | for |
java | elastic__elasticsearch | x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java | {
"start": 5717,
"end": 55985
} | class ____ extends MlSingleNodeTestCase {
private JobResultsProvider jobProvider;
private ResultsPersisterService resultsPersisterService;
private JobResultsPersister jobResultsPersister;
private AnomalyDetectionAuditor auditor;
@Before
public void createComponents() throws Exception {
Settings.Builder builder = Settings.builder()
.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueSeconds(1));
jobProvider = new JobResultsProvider(client(), builder.build(), TestIndexNameExpressionResolver.newInstance());
ThreadPool tp = mockThreadPool();
ClusterSettings clusterSettings = new ClusterSettings(
builder.build(),
new HashSet<>(
Arrays.asList(
InferenceProcessor.MAX_INFERENCE_PROCESSORS,
MasterService.MASTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING,
OperationRouting.USE_ADAPTIVE_REPLICA_SELECTION_SETTING,
ResultsPersisterService.PERSIST_RESULTS_MAX_RETRIES,
ClusterService.USER_DEFINED_METADATA,
ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING,
ClusterApplierService.CLUSTER_SERVICE_SLOW_TASK_THREAD_DUMP_TIMEOUT_SETTING,
ClusterApplierService.CLUSTER_APPLIER_THREAD_WATCHDOG_INTERVAL,
ClusterApplierService.CLUSTER_APPLIER_THREAD_WATCHDOG_QUIET_TIME
)
)
);
ClusterService clusterService = new ClusterService(builder.build(), clusterSettings, tp, null);
OriginSettingClient originSettingClient = new OriginSettingClient(client(), ClientHelper.ML_ORIGIN);
resultsPersisterService = new ResultsPersisterService(tp, originSettingClient, clusterService, builder.build());
jobResultsPersister = new JobResultsPersister(originSettingClient, resultsPersisterService);
// We can't change the signature of createComponents to e.g. pass differing values of includeNodeInfo to pass to the
// AnomalyDetectionAuditor constructor. Instead we generate a random boolean value for that purpose.
boolean includeNodeInfo = randomBoolean();
auditor = new AnomalyDetectionAuditor(client(), clusterService, TestIndexNameExpressionResolver.newInstance(), includeNodeInfo);
waitForMlTemplates();
}
public void testPutJob_CreatesResultsIndex() {
Job.Builder job1 = new Job.Builder("first_job");
job1.setAnalysisConfig(createAnalysisConfig("by_field_1", Collections.emptyList()));
job1.setDataDescription(new DataDescription.Builder());
// Put fist job. This should create the results index as it's the first job.
client().execute(PutJobAction.INSTANCE, new PutJobAction.Request(job1)).actionGet();
String sharedResultsIndex = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT
+ MlIndexAndAlias.FIRST_INDEX_SIX_DIGIT_SUFFIX;
Map<String, Object> mappingProperties = getIndexMappingProperties(sharedResultsIndex);
// Assert mappings have a few fields from the template
assertThat(mappingProperties.keySet(), hasItems("anomaly_score", "bucket_count"));
// Assert mappings have the by field
assertThat(mappingProperties.keySet(), hasItem("by_field_1"));
// Check aliases have been created
assertThat(
getAliases(sharedResultsIndex),
containsInAnyOrder(
AnomalyDetectorsIndex.jobResultsAliasedName(job1.getId()),
AnomalyDetectorsIndex.resultsWriteAlias(job1.getId())
)
);
// Now let's create a second job to test things work when the index exists already
assertThat(mappingProperties.keySet(), not(hasItem("by_field_2")));
Job.Builder job2 = new Job.Builder("second_job");
job2.setAnalysisConfig(createAnalysisConfig("by_field_2", Collections.emptyList()));
job2.setDataDescription(new DataDescription.Builder());
client().execute(PutJobAction.INSTANCE, new PutJobAction.Request(job2)).actionGet();
mappingProperties = getIndexMappingProperties(sharedResultsIndex);
// Assert mappings have a few fields from the template
assertThat(mappingProperties.keySet(), hasItems("anomaly_score", "bucket_count"));
// Assert mappings have the by field
assertThat(mappingProperties.keySet(), hasItems("by_field_1", "by_field_2"));
// Check aliases have been created
assertThat(
getAliases(sharedResultsIndex),
containsInAnyOrder(
AnomalyDetectorsIndex.jobResultsAliasedName(job1.getId()),
AnomalyDetectorsIndex.resultsWriteAlias(job1.getId()),
AnomalyDetectorsIndex.jobResultsAliasedName(job2.getId()),
AnomalyDetectorsIndex.resultsWriteAlias(job2.getId())
)
);
}
public void testPutJob_WithCustomResultsIndex() {
Job.Builder job = new Job.Builder("foo");
job.setResultsIndexName("bar");
job.setAnalysisConfig(createAnalysisConfig("by_field", Collections.emptyList()));
job.setDataDescription(new DataDescription.Builder());
client().execute(PutJobAction.INSTANCE, new PutJobAction.Request(job)).actionGet();
String customIndex = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-bar-000001";
Map<String, Object> mappingProperties = getIndexMappingProperties(customIndex);
// Assert mappings have a few fields from the template
assertThat(mappingProperties.keySet(), hasItems("anomaly_score", "bucket_count"));
// Assert mappings have the by field
assertThat(mappingProperties.keySet(), hasItem("by_field"));
// Check aliases have been created
assertThat(
getAliases(customIndex),
containsInAnyOrder(
AnomalyDetectorsIndex.jobResultsAliasedName(job.getId()),
AnomalyDetectorsIndex.resultsWriteAlias(job.getId())
)
);
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/40134")
public void testMultipleSimultaneousJobCreations() {
int numJobs = randomIntBetween(4, 7);
// Each job should result in one extra field being added to the results index mappings: field1, field2, field3, etc.
// Due to all being created simultaneously this test may reveal race conditions in the code that updates the mappings.
List<PutJobAction.Request> requests = new ArrayList<>(numJobs);
for (int i = 1; i <= numJobs; ++i) {
Job.Builder builder = new Job.Builder("job" + i);
AnalysisConfig.Builder ac = createAnalysisConfig("field" + i, Collections.emptyList());
DataDescription.Builder dc = new DataDescription.Builder();
builder.setAnalysisConfig(ac);
builder.setDataDescription(dc);
requests.add(new PutJobAction.Request(builder));
}
// Start the requests as close together as possible, without waiting for each to complete before starting the next one.
List<ActionFuture<PutJobAction.Response>> futures = new ArrayList<>(numJobs);
for (PutJobAction.Request request : requests) {
futures.add(client().execute(PutJobAction.INSTANCE, request));
}
// Only after all requests are in-flight, wait for all the requests to complete.
for (ActionFuture<PutJobAction.Response> future : futures) {
future.actionGet();
}
// Assert that the mappings contain all the additional fields: field1, field2, field3, etc.
String sharedResultsIndex = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT;
GetMappingsRequest request = new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(sharedResultsIndex);
GetMappingsResponse response = client().execute(GetMappingsAction.INSTANCE, request).actionGet();
Map<String, MappingMetadata> indexMappings = response.getMappings();
assertNotNull(indexMappings);
MappingMetadata typeMappings = indexMappings.get(sharedResultsIndex);
assertNotNull("expected " + sharedResultsIndex + " in " + indexMappings, typeMappings);
Map<String, Object> mappings = typeMappings.getSourceAsMap();
assertNotNull(mappings);
@SuppressWarnings("unchecked")
Map<String, Object> properties = (Map<String, Object>) mappings.get("properties");
assertNotNull("expected 'properties' field in " + mappings, properties);
for (int i = 1; i <= numJobs; ++i) {
String fieldName = "field" + i;
assertNotNull("expected '" + fieldName + "' field in " + properties, properties.get(fieldName));
}
}
public void testGetCalandarByJobId() throws Exception {
List<Calendar> calendars = new ArrayList<>();
calendars.add(new Calendar("empty calendar", Collections.emptyList(), null));
calendars.add(new Calendar("foo calendar", Collections.singletonList("foo"), null));
calendars.add(new Calendar("foo bar calendar", Arrays.asList("foo", "bar"), null));
calendars.add(new Calendar("cat calendar", Collections.singletonList("cat"), null));
calendars.add(new Calendar("cat foo calendar", Arrays.asList("cat", "foo"), null));
indexCalendars(calendars);
List<Calendar> queryResult = getCalendars(CalendarQueryBuilder.builder().jobId("ted"));
assertThat(queryResult, is(empty()));
queryResult = getCalendars(CalendarQueryBuilder.builder().jobId("foo"));
assertThat(queryResult, hasSize(3));
Long matchedCount = queryResult.stream()
.filter(c -> c.getId().equals("foo calendar") || c.getId().equals("foo bar calendar") || c.getId().equals("cat foo calendar"))
.count();
assertEquals(Long.valueOf(3), matchedCount);
queryResult = getCalendars(CalendarQueryBuilder.builder().jobId("bar"));
assertThat(queryResult, hasSize(1));
assertEquals("foo bar calendar", queryResult.get(0).getId());
}
/**
 * Verifies querying calendars by calendar ID tokens: a trailing-wildcard token
 * matches by prefix, and multiple exact tokens match their respective
 * calendars; results come back sorted when {@code sort(true)} is set.
 */
public void testGetCalendarById() throws Exception {
    List<Calendar> calendars = new ArrayList<>();
    calendars.add(new Calendar("empty calendar", Collections.emptyList(), null));
    calendars.add(new Calendar("foo calendar", Collections.singletonList("foo"), null));
    calendars.add(new Calendar("foo bar calendar", Arrays.asList("foo", "bar"), null));
    calendars.add(new Calendar("cat calendar", Collections.singletonList("cat"), null));
    calendars.add(new Calendar("cat foo calendar", Arrays.asList("cat", "foo"), null));
    indexCalendars(calendars);

    // Wildcard token matches both calendars whose ID starts with "foo"
    List<Calendar> queryResult = getCalendars(CalendarQueryBuilder.builder().calendarIdTokens(new String[] { "foo*" }).sort(true));
    assertThat(queryResult, hasSize(2));
    assertThat(queryResult.get(0).getId(), equalTo("foo bar calendar"));
    assertThat(queryResult.get(1).getId(), equalTo("foo calendar"));

    // Multiple exact tokens match exactly those calendars, in sorted order
    queryResult = getCalendars(
        CalendarQueryBuilder.builder().calendarIdTokens(new String[] { "foo calendar", "cat calendar" }).sort(true)
    );
    assertThat(queryResult, hasSize(2));
    assertThat(queryResult.get(0).getId(), equalTo("cat calendar"));
    assertThat(queryResult.get(1).getId(), equalTo("foo calendar"));
}
/**
 * Verifies that calendar ID queries honour {@link PageParams} (from/size)
 * combined with sorted results: each page returns a single calendar at the
 * expected position in sort order.
 */
public void testGetCalendarByIdAndPaging() throws Exception {
List<Calendar> calendars = new ArrayList<>();
calendars.add(new Calendar("empty calendar", Collections.emptyList(), null));
calendars.add(new Calendar("foo calendar", Collections.singletonList("foo"), null));
calendars.add(new Calendar("foo bar calendar", Arrays.asList("foo", "bar"), null));
calendars.add(new Calendar("cat calendar", Collections.singletonList("cat"), null));
calendars.add(new Calendar("cat foo calendar", Arrays.asList("cat", "foo"), null));
indexCalendars(calendars);
// Page (from=0, size=1) of the two "foo*" matches -> first in sort order
List<Calendar> queryResult = getCalendars(
CalendarQueryBuilder.builder().calendarIdTokens(new String[] { "foo*" }).pageParams(new PageParams(0, 1)).sort(true)
);
assertThat(queryResult, hasSize(1));
assertThat(queryResult.get(0).getId(), equalTo("foo bar calendar"));
// Page (from=1, size=1) of two exact-ID matches -> second in sort order
queryResult = getCalendars(
CalendarQueryBuilder.builder()
.calendarIdTokens(new String[] { "foo calendar", "cat calendar" })
.sort(true)
.pageParams(new PageParams(1, 1))
);
assertThat(queryResult, hasSize(1));
assertThat(queryResult.get(0).getId(), equalTo("foo calendar"));
}
/**
 * Verifies that {@code updateCalendar} can add job IDs to an empty calendar
 * and subsequently remove one of them, leaving the remaining ID intact.
 */
public void testUpdateCalendar() throws Exception {
String calendarId = "empty calendar";
Calendar emptyCal = new Calendar(calendarId, Collections.emptyList(), null);
indexCalendars(Collections.singletonList(emptyCal));
// Add two job IDs to the initially empty calendar
Set<String> addedIds = new HashSet<>();
addedIds.add("foo");
addedIds.add("bar");
updateCalendar(calendarId, addedIds, Collections.emptySet());
Calendar updated = getCalendar(calendarId);
assertEquals(calendarId, updated.getId());
assertEquals(addedIds, new HashSet<>(updated.getJobIds()));
// Remove "foo"; only "bar" should remain
updateCalendar(calendarId, Collections.emptySet(), Collections.singleton("foo"));
updated = getCalendar(calendarId);
assertEquals(calendarId, updated.getId());
assertEquals(1, updated.getJobIds().size());
assertEquals("bar", updated.getJobIds().get(0));
}
/**
 * Verifies that {@code removeJobFromCalendars} removes a job ID from every
 * calendar that references it, leaves calendars that don't reference it
 * untouched, and never deletes the calendars themselves.
 */
public void testRemoveJobFromCalendar() throws Exception {
List<Calendar> calendars = new ArrayList<>();
calendars.add(new Calendar("empty calendar", Collections.emptyList(), null));
calendars.add(new Calendar("foo calendar", Collections.singletonList("foo"), null));
calendars.add(new Calendar("foo bar calendar", Arrays.asList("foo", "bar"), null));
calendars.add(new Calendar("cat calendar", Collections.singletonList("cat"), null));
calendars.add(new Calendar("cat foo calendar", Arrays.asList("cat", "foo"), null));
indexCalendars(calendars);
// Remove "bar" and block until the async call completes
CountDownLatch latch = new CountDownLatch(1);
final AtomicReference<Exception> exceptionHolder = new AtomicReference<>();
jobProvider.removeJobFromCalendars("bar", ActionListener.wrap(r -> latch.countDown(), e -> {
exceptionHolder.set(e);
latch.countDown();
}));
latch.await();
if (exceptionHolder.get() != null) {
throw exceptionHolder.get();
}
// All five calendars still exist, none references "bar" any more
List<Calendar> updatedCalendars = getCalendars(CalendarQueryBuilder.builder());
assertEquals(5, updatedCalendars.size());
for (Calendar cal : updatedCalendars) {
assertThat("bar", is(not(in(cal.getJobIds()))));
}
// Calendars not referencing "bar" are untouched
Calendar catFoo = getCalendar("cat foo calendar");
assertThat(catFoo.getJobIds(), contains("cat", "foo"));
// Now remove "cat" as well
CountDownLatch latch2 = new CountDownLatch(1);
jobProvider.removeJobFromCalendars("cat", ActionListener.wrap(r -> latch2.countDown(), e -> {
exceptionHolder.set(e);
latch2.countDown();
}));
latch2.await();
if (exceptionHolder.get() != null) {
throw exceptionHolder.get();
}
// Both removed IDs are gone from every calendar
updatedCalendars = getCalendars(CalendarQueryBuilder.builder());
assertEquals(5, updatedCalendars.size());
for (Calendar cal : updatedCalendars) {
assertThat("bar", is(not(in(cal.getJobIds()))));
assertThat("cat", is(not(in(cal.getJobIds()))));
}
}
/**
 * Verifies that when no stats documents have been written for a job,
 * {@code getDataCountsModelSizeAndTimingStats} still returns non-null
 * default objects carrying the job's ID rather than failing.
 */
public void testGetDataCountsModelSizeAndTimingStatsWithNoDocs() throws Exception {
Job.Builder job = new Job.Builder("first_job");
job.setAnalysisConfig(createAnalysisConfig("by_field_1", Collections.emptyList()));
job.setDataDescription(new DataDescription.Builder());
// Put first job. This should create the results index as it's the first job.
client().execute(PutJobAction.INSTANCE, new PutJobAction.Request(job)).actionGet();
AtomicReference<DataCounts> dataCountsAtomicReference = new AtomicReference<>();
AtomicReference<ModelSizeStats> modelSizeStatsAtomicReference = new AtomicReference<>();
AtomicReference<TimingStats> timingStatsAtomicReference = new AtomicReference<>();
AtomicReference<Exception> exceptionAtomicReference = new AtomicReference<>();
getDataCountsModelSizeAndTimingStats(
job.getId(),
dataCountsAtomicReference::set,
modelSizeStatsAtomicReference::set,
timingStatsAtomicReference::set,
exceptionAtomicReference::set
);
if (exceptionAtomicReference.get() != null) {
throw exceptionAtomicReference.get();
}
// With no stored docs, each result is a default instance tagged with the job ID
assertThat(dataCountsAtomicReference.get().getJobId(), equalTo(job.getId()));
assertThat(modelSizeStatsAtomicReference.get().getJobId(), equalTo(job.getId()));
assertThat(timingStatsAtomicReference.get().getJobId(), equalTo(job.getId()));
}
/**
 * Verifies that {@code getDataCountsModelSizeAndTimingStats} picks up each
 * stats document as it is persisted: model size stats, then timing stats,
 * then data counts, with the other two results unaffected at each step.
 */
public void testGetDataCountsModelSizeAndTimingStatsWithSomeDocs() throws Exception {
Job.Builder job = new Job.Builder("first_job");
job.setAnalysisConfig(createAnalysisConfig("by_field_1", Collections.emptyList()));
job.setDataDescription(new DataDescription.Builder());
// Put first job. This should create the results index as it's the first job.
client().execute(PutJobAction.INSTANCE, new PutJobAction.Request(job)).actionGet();
AtomicReference<DataCounts> dataCountsAtomicReference = new AtomicReference<>();
AtomicReference<ModelSizeStats> modelSizeStatsAtomicReference = new AtomicReference<>();
AtomicReference<TimingStats> timingStatsAtomicReference = new AtomicReference<>();
AtomicReference<Exception> exceptionAtomicReference = new AtomicReference<>();
// Helper that re-queries the stats and rethrows any async failure
CheckedSupplier<Void, Exception> setOrThrow = () -> {
getDataCountsModelSizeAndTimingStats(
job.getId(),
dataCountsAtomicReference::set,
modelSizeStatsAtomicReference::set,
timingStatsAtomicReference::set,
exceptionAtomicReference::set
);
if (exceptionAtomicReference.get() != null) {
throw exceptionAtomicReference.get();
}
return null;
};
// Persist model size stats only; the other two stay as defaults
ModelSizeStats storedModelSizeStats = new ModelSizeStats.Builder(job.getId()).setModelBytes(10L).build();
jobResultsPersister.persistModelSizeStats(storedModelSizeStats, () -> false);
jobResultsPersister.commitWrites(job.getId(), JobResultsPersister.CommitType.RESULTS);
setOrThrow.get();
assertThat(dataCountsAtomicReference.get().getJobId(), equalTo(job.getId()));
assertThat(modelSizeStatsAtomicReference.get(), equalTo(storedModelSizeStats));
assertThat(timingStatsAtomicReference.get().getJobId(), equalTo(job.getId()));
// Persist timing stats; model size stats result is unchanged
TimingStats storedTimingStats = new TimingStats(job.getId());
storedTimingStats.updateStats(10);
jobResultsPersister.bulkPersisterBuilder(job.getId()).persistTimingStats(storedTimingStats).executeRequest();
jobResultsPersister.commitWrites(job.getId(), JobResultsPersister.CommitType.RESULTS);
setOrThrow.get();
assertThat(dataCountsAtomicReference.get().getJobId(), equalTo(job.getId()));
assertThat(modelSizeStatsAtomicReference.get(), equalTo(storedModelSizeStats));
assertThat(timingStatsAtomicReference.get(), equalTo(storedTimingStats));
// Finally persist data counts; now all three stored docs are returned
DataCounts storedDataCounts = new DataCounts(job.getId());
storedDataCounts.incrementInputBytes(1L);
storedDataCounts.incrementMissingFieldCount(1L);
JobDataCountsPersister jobDataCountsPersister = new JobDataCountsPersister(client(), resultsPersisterService, auditor);
jobDataCountsPersister.persistDataCounts(job.getId(), storedDataCounts, true);
jobResultsPersister.commitWrites(job.getId(), JobResultsPersister.CommitType.RESULTS);
setOrThrow.get();
assertThat(dataCountsAtomicReference.get(), equalTo(storedDataCounts));
assertThat(modelSizeStatsAtomicReference.get(), equalTo(storedModelSizeStats));
assertThat(timingStatsAtomicReference.get(), equalTo(storedTimingStats));
}
/**
 * Fetches the mappings of {@code index} and returns its top-level
 * {@code properties} map, asserting along the way that the {@code _meta}
 * section carries the expected mappings version information.
 *
 * @param index the concrete index name whose mappings to read
 * @return the {@code properties} section of the index mapping
 */
private Map<String, Object> getIndexMappingProperties(String index) {
GetMappingsRequest request = new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(index);
GetMappingsResponse response = client().execute(GetMappingsAction.INSTANCE, request).actionGet();
Map<String, MappingMetadata> indexMappings = response.getMappings();
assertNotNull(indexMappings);
MappingMetadata typeMappings = indexMappings.get(index);
assertNotNull("expected " + index + " in " + indexMappings, typeMappings);
Map<String, Object> mappings = typeMappings.getSourceAsMap();
assertNotNull(mappings);
// Assert _meta info is present
assertThat(mappings.keySet(), hasItem("_meta"));
@SuppressWarnings("unchecked")
Map<String, Object> meta = (Map<String, Object>) mappings.get("_meta");
assertThat(meta.keySet(), hasItem("version"));
assertThat(meta.get("version"), equalTo(MlIndexAndAlias.BWC_MAPPINGS_VERSION));
assertThat(meta.get("managed_index_mappings_version"), equalTo(AnomalyDetectorsIndex.RESULTS_INDEX_MAPPINGS_VERSION));
@SuppressWarnings("unchecked")
Map<String, Object> properties = (Map<String, Object>) mappings.get("properties");
assertNotNull("expected 'properties' field in " + mappings, properties);
return properties;
}
/**
 * Returns the names of all aliases attached to {@code index}, asserting that
 * each alias is marked hidden (anomaly results aliases must be hidden).
 */
private Set<String> getAliases(String index) {
    GetAliasesRequest request = new GetAliasesRequest(TEST_REQUEST_TIMEOUT).indices(index);
    GetAliasesResponse response = client().admin().indices().getAliases(request).actionGet();
    Map<String, List<AliasMetadata>> aliasesByIndex = response.getAliases();
    assertThat(aliasesByIndex.containsKey(index), is(true));
    Set<String> aliasNames = new HashSet<>();
    for (AliasMetadata metadata : aliasesByIndex.get(index)) {
        assertThat("Anomalies aliases should be hidden but are not: " + aliasesByIndex, metadata.isHidden(), is(true));
        aliasNames.add(metadata.alias());
    }
    return aliasNames;
}
/**
 * Runs a calendar query synchronously, blocking on the async provider call.
 *
 * @throws Exception any failure reported by the provider listener
 */
private List<Calendar> getCalendars(CalendarQueryBuilder query) throws Exception {
    AtomicReference<QueryPage<Calendar>> pageHolder = new AtomicReference<>();
    AtomicReference<Exception> errorHolder = new AtomicReference<>();
    CountDownLatch done = new CountDownLatch(1);
    jobProvider.calendars(query, ActionListener.wrap(page -> {
        pageHolder.set(page);
        done.countDown();
    }, e -> {
        errorHolder.set(e);
        done.countDown();
    }));
    done.await();
    Exception error = errorHolder.get();
    if (error != null) {
        throw error;
    }
    return pageHolder.get().results();
}
/**
 * Synchronously adds and removes job IDs on a calendar via the provider,
 * then refreshes the ML meta index so subsequent reads see the update.
 *
 * @throws Exception any failure reported by the provider's error handler
 */
private void updateCalendar(String calendarId, Set<String> idsToAdd, Set<String> idsToRemove) throws Exception {
CountDownLatch latch = new CountDownLatch(1);
AtomicReference<Exception> exceptionHolder = new AtomicReference<>();
jobProvider.updateCalendar(calendarId, idsToAdd, idsToRemove, r -> latch.countDown(), e -> {
exceptionHolder.set(e);
latch.countDown();
});
latch.await();
if (exceptionHolder.get() != null) {
throw exceptionHolder.get();
}
// Make the update visible to searches performed right after this call
client().admin().indices().prepareRefresh(MlMetaIndex.indexName()).get();
}
/**
 * Fetches a single calendar by ID, blocking on the async provider call.
 *
 * @throws Exception any failure reported by the provider listener
 */
private Calendar getCalendar(String calendarId) throws Exception {
    AtomicReference<Calendar> resultHolder = new AtomicReference<>();
    AtomicReference<Exception> errorHolder = new AtomicReference<>();
    CountDownLatch done = new CountDownLatch(1);
    jobProvider.calendar(calendarId, ActionListener.wrap(calendar -> {
        resultHolder.set(calendar);
        done.countDown();
    }, e -> {
        errorHolder.set(e);
        done.countDown();
    }));
    done.await();
    Exception error = errorHolder.get();
    if (error != null) {
        throw error;
    }
    return resultHolder.get();
}
/**
 * Synchronous wrapper around the provider's async stats lookup: fans the
 * three results out to the given consumers, or passes any failure to
 * {@code exceptionConsumer}, and returns only once one of the two paths
 * has completed.
 */
private void getDataCountsModelSizeAndTimingStats(
String jobId,
Consumer<DataCounts> dataCountsConsumer,
Consumer<ModelSizeStats> modelSizeStatsConsumer,
Consumer<TimingStats> timingStatsConsumer,
Consumer<Exception> exceptionConsumer
) throws Exception {
CountDownLatch latch = new CountDownLatch(1);
jobProvider.getDataCountsModelSizeAndTimingStats(jobId, null, (dataCounts, modelSizeStats, timingStats) -> {
dataCountsConsumer.accept(dataCounts);
modelSizeStatsConsumer.accept(modelSizeStats);
timingStatsConsumer.accept(timingStats);
latch.countDown();
}, e -> {
exceptionConsumer.accept(e);
latch.countDown();
});
latch.await();
}
/**
 * Verifies that scheduled events are resolved per job via calendar
 * membership: a job sees the events of every calendar listing it, events
 * come back ordered by start time, and the start/end query filters include
 * events that merely overlap the window.
 */
public void testScheduledEventsForJobs() throws Exception {
Job.Builder jobA = createJob("job_a");
Job.Builder jobB = createJob("job_b");
Job.Builder jobC = createJob("job_c");
// Calendar A applies to job_a only and has three events
String calendarAId = "maintenance_a";
List<Calendar> calendars = new ArrayList<>();
calendars.add(new Calendar(calendarAId, Collections.singletonList("job_a"), null));
ZonedDateTime now = ZonedDateTime.now();
List<ScheduledEvent> events = new ArrayList<>();
events.add(buildScheduledEvent("downtime", now.plusDays(1), now.plusDays(2), calendarAId));
events.add(buildScheduledEvent("downtime_AA", now.plusDays(8), now.plusDays(9), calendarAId));
events.add(buildScheduledEvent("downtime_AAA", now.plusDays(15), now.plusDays(16), calendarAId));
// Calendar AB applies to both job_a and job_b and has one event
String calendarABId = "maintenance_a_and_b";
calendars.add(new Calendar(calendarABId, Arrays.asList("job_a", "job_b"), null));
events.add(buildScheduledEvent("downtime_AB", now.plusDays(12), now.plusDays(13), calendarABId));
indexCalendars(calendars);
indexScheduledEvents(events);
// job_a is in both calendars -> all four events, ordered by start time
ScheduledEventsQueryBuilder query = new ScheduledEventsQueryBuilder();
List<ScheduledEvent> returnedEvents = getScheduledEventsForJob(jobA.getId(), Collections.emptyList(), query);
assertEquals(4, returnedEvents.size());
assertEquals(events.get(0), returnedEvents.get(0));
assertEquals(events.get(1), returnedEvents.get(1));
assertEquals(events.get(3), returnedEvents.get(2));
assertEquals(events.get(2), returnedEvents.get(3));
// job_b is only in calendar AB; an unrelated group changes nothing
returnedEvents = getScheduledEventsForJob(jobB.getId(), Collections.singletonList("unrelated-job-group"), query);
assertEquals(1, returnedEvents.size());
assertEquals(events.get(3), returnedEvents.get(0));
// job_c is in no calendar
returnedEvents = getScheduledEventsForJob(jobC.getId(), Collections.emptyList(), query);
assertEquals(0, returnedEvents.size());
// Test time filters
// Lands halfway through the second event which should be returned
query.start(Long.toString(now.plusDays(8).plusHours(1).toInstant().toEpochMilli()));
// Lands halfway through the 3rd event which should be returned
query.end(Long.toString(now.plusDays(12).plusHours(1).toInstant().toEpochMilli()));
returnedEvents = getScheduledEventsForJob(jobA.getId(), Collections.emptyList(), query);
assertEquals(2, returnedEvents.size());
assertEquals(events.get(1), returnedEvents.get(0));
assertEquals(events.get(3), returnedEvents.get(1));
}
/**
 * Verifies the calendar-scoped scheduled-event query: no filter returns all
 * events, exact calendar IDs restrict to those calendars, and a trailing
 * wildcard matches calendar IDs by prefix. Results are ordered by start time.
 */
public void testScheduledEvents() throws Exception {
createJob("job_a");
createJob("job_b");
createJob("job_c");
// Calendar A (job_a) has three events; calendar AB (job_a, job_b) has one
String calendarAId = "maintenance_a";
List<Calendar> calendars = new ArrayList<>();
calendars.add(new Calendar(calendarAId, Collections.singletonList("job_a"), null));
ZonedDateTime now = ZonedDateTime.now();
List<ScheduledEvent> events = new ArrayList<>();
events.add(buildScheduledEvent("downtime", now.plusDays(1), now.plusDays(2), calendarAId));
events.add(buildScheduledEvent("downtime_AA", now.plusDays(8), now.plusDays(9), calendarAId));
events.add(buildScheduledEvent("downtime_AAA", now.plusDays(15), now.plusDays(16), calendarAId));
String calendarABId = "maintenance_a_and_b";
calendars.add(new Calendar(calendarABId, Arrays.asList("job_a", "job_b"), null));
events.add(buildScheduledEvent("downtime_AB", now.plusDays(12), now.plusDays(13), calendarABId));
indexCalendars(calendars);
indexScheduledEvents(events);
// No calendar filter -> every event, ordered by start time
List<ScheduledEvent> returnedEvents = getScheduledEvents(new ScheduledEventsQueryBuilder());
assertEquals(4, returnedEvents.size());
assertEquals(events.get(0), returnedEvents.get(0));
assertEquals(events.get(1), returnedEvents.get(1));
assertEquals(events.get(3), returnedEvents.get(2));
assertEquals(events.get(2), returnedEvents.get(3));
// Exact calendar ID -> only that calendar's events
returnedEvents = getScheduledEvents(ScheduledEventsQueryBuilder.builder().calendarIds(new String[] { "maintenance_a" }));
assertEquals(3, returnedEvents.size());
assertEquals(events.get(0), returnedEvents.get(0));
assertEquals(events.get(1), returnedEvents.get(1));
assertEquals(events.get(2), returnedEvents.get(2));
// Two exact calendar IDs -> union of their events
returnedEvents = getScheduledEvents(
ScheduledEventsQueryBuilder.builder().calendarIds(new String[] { "maintenance_a", "maintenance_a_and_b" })
);
assertEquals(4, returnedEvents.size());
assertEquals(events.get(0), returnedEvents.get(0));
assertEquals(events.get(1), returnedEvents.get(1));
assertEquals(events.get(3), returnedEvents.get(2));
assertEquals(events.get(2), returnedEvents.get(3));
// Wildcard matches both calendars by prefix
returnedEvents = getScheduledEvents(ScheduledEventsQueryBuilder.builder().calendarIds(new String[] { "maintenance_a*" }));
assertEquals(4, returnedEvents.size());
assertEquals(events.get(0), returnedEvents.get(0));
assertEquals(events.get(1), returnedEvents.get(1));
assertEquals(events.get(3), returnedEvents.get(2));
assertEquals(events.get(2), returnedEvents.get(3));
}
/**
 * Verifies that scheduled events are resolved through job GROUP membership:
 * a calendar referencing a group applies its events to jobs queried with
 * that group.
 */
public void testScheduledEventsForJob_withGroup() throws Exception {
String groupA = "group-a";
String groupB = "group-b";
createJob("job-in-group-a", Collections.emptyList(), Collections.singletonList(groupA));
createJob("job-in-group-a-and-b", Collections.emptyList(), Arrays.asList(groupA, groupB));
// calendar_a targets group-a; calendar_b targets group-b
String calendarAId = "calendar_a";
List<Calendar> calendars = new ArrayList<>();
calendars.add(new Calendar(calendarAId, Collections.singletonList(groupA), null));
ZonedDateTime now = ZonedDateTime.now();
List<ScheduledEvent> events = new ArrayList<>();
events.add(buildScheduledEvent("downtime_A", now.plusDays(1), now.plusDays(2), calendarAId));
String calendarBId = "calendar_b";
calendars.add(new Calendar(calendarBId, Collections.singletonList(groupB), null));
events.add(buildScheduledEvent("downtime_B", now.plusDays(12), now.plusDays(13), calendarBId));
indexCalendars(calendars);
indexScheduledEvents(events);
// Querying with group-a yields calendar_a's event
ScheduledEventsQueryBuilder query = new ScheduledEventsQueryBuilder();
List<ScheduledEvent> returnedEvents = getScheduledEventsForJob("job-in-group-a", Collections.singletonList(groupA), query);
assertEquals(1, returnedEvents.size());
assertEquals(events.get(0), returnedEvents.get(0));
// Querying with group-b yields calendar_b's event
query = new ScheduledEventsQueryBuilder();
returnedEvents = getScheduledEventsForJob("job-in-group-a-and-b", Collections.singletonList(groupB), query);
assertEquals(1, returnedEvents.size());
assertEquals(events.get(1), returnedEvents.get(0));
}
/**
 * Builds a {@link ScheduledEvent} for the given calendar spanning
 * {@code start} to {@code end}.
 */
private ScheduledEvent buildScheduledEvent(String description, ZonedDateTime start, ZonedDateTime end, String calendarId) {
    ScheduledEvent.Builder event = new ScheduledEvent.Builder();
    event.description(description);
    event.startTime(start.toInstant());
    event.endTime(end.toInstant());
    event.calendarId(calendarId);
    return event.build();
}
/**
 * Verifies model snapshot retrieval: comma-separated snapshot IDs and
 * wildcards filter correctly, custom sort fields work (including a snapshot
 * document with no min_version field), bulk listings never load quantiles,
 * and the single-snapshot lookup loads quantiles only when asked to.
 */
public void testGetSnapshots() {
String jobId = "test_get_snapshots";
createJob(jobId);
// Three snapshots for the job under test, each with quantiles stored
indexModelSnapshot(
new ModelSnapshot.Builder(jobId).setSnapshotId("snap_2")
.setTimestamp(Date.from(Instant.ofEpochMilli(10)))
.setMinVersion(MlConfigVersion.V_7_4_0)
.setQuantiles(new Quantiles(jobId, Date.from(Instant.ofEpochMilli(10)), randomAlphaOfLength(20)))
.build()
);
indexModelSnapshot(
new ModelSnapshot.Builder(jobId).setSnapshotId("snap_1")
.setTimestamp(Date.from(Instant.ofEpochMilli(11)))
.setMinVersion(MlConfigVersion.V_7_2_0)
.setQuantiles(new Quantiles(jobId, Date.from(Instant.ofEpochMilli(11)), randomAlphaOfLength(20)))
.build()
);
indexModelSnapshot(
new ModelSnapshot.Builder(jobId).setSnapshotId("other_snap")
.setTimestamp(Date.from(Instant.ofEpochMilli(12)))
.setMinVersion(MlConfigVersion.V_7_3_0)
.setQuantiles(new Quantiles(jobId, Date.from(Instant.ofEpochMilli(12)), randomAlphaOfLength(20)))
.build()
);
// A second job whose snapshots must not leak into per-job queries
createJob("other_job");
indexModelSnapshot(
new ModelSnapshot.Builder("other_job").setSnapshotId("other_snap")
.setTimestamp(Date.from(Instant.ofEpochMilli(10)))
.setMinVersion(MlConfigVersion.CURRENT)
.setQuantiles(new Quantiles("other_job", Date.from(Instant.ofEpochMilli(10)), randomAlphaOfLength(20)))
.build()
);
// Add a snapshot WITHOUT a min version.
prepareIndex(AnomalyDetectorsIndex.jobResultsAliasedName("other_job")).setId(ModelSnapshot.documentId("other_job", "11"))
.setSource("""
{"job_id":"other_job","snapshot_id":"11", "snapshot_doc_count":1,"retain":false}""", XContentType.JSON)
.get();
indicesAdmin().prepareRefresh(AnomalyDetectorsIndex.jobStateIndexPattern(), AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*")
.get();
// Explicit comma-separated snapshot IDs
PlainActionFuture<QueryPage<ModelSnapshot>> future = new PlainActionFuture<>();
jobProvider.modelSnapshots(jobId, 0, 4, "9", "15", "", false, "snap_2,snap_1", null, future::onResponse, future::onFailure);
List<ModelSnapshot> snapshots = future.actionGet().results();
assertThat(snapshots.get(0).getSnapshotId(), equalTo("snap_2"));
assertNull(snapshots.get(0).getQuantiles());
assertThat(snapshots.get(1).getSnapshotId(), equalTo("snap_1"));
assertNull(snapshots.get(1).getQuantiles());
// Wildcard snapshot ID
future = new PlainActionFuture<>();
jobProvider.modelSnapshots(jobId, 0, 4, "9", "15", "", false, "snap_*", null, future::onResponse, future::onFailure);
snapshots = future.actionGet().results();
assertThat(snapshots.get(0).getSnapshotId(), equalTo("snap_2"));
assertThat(snapshots.get(1).getSnapshotId(), equalTo("snap_1"));
assertNull(snapshots.get(0).getQuantiles());
assertNull(snapshots.get(1).getQuantiles());
// Mixed wildcard and exact ID
future = new PlainActionFuture<>();
jobProvider.modelSnapshots(jobId, 0, 4, "9", "15", "", false, "snap_*,other_snap", null, future::onResponse, future::onFailure);
snapshots = future.actionGet().results();
assertThat(snapshots.get(0).getSnapshotId(), equalTo("snap_2"));
assertThat(snapshots.get(1).getSnapshotId(), equalTo("snap_1"));
assertThat(snapshots.get(2).getSnapshotId(), equalTo("other_snap"));
// Match-all wildcard, still scoped to this job
future = new PlainActionFuture<>();
jobProvider.modelSnapshots(jobId, 0, 4, "9", "15", "", false, "*", null, future::onResponse, future::onFailure);
snapshots = future.actionGet().results();
assertThat(snapshots.get(0).getSnapshotId(), equalTo("snap_2"));
assertThat(snapshots.get(1).getSnapshotId(), equalTo("snap_1"));
assertThat(snapshots.get(2).getSnapshotId(), equalTo("other_snap"));
// All jobs, sorted by min_version; includes the doc with no min_version
future = new PlainActionFuture<>();
jobProvider.modelSnapshots("*", 0, 5, null, null, "min_version", false, null, null, future::onResponse, future::onFailure);
snapshots = future.actionGet().results();
assertThat(snapshots.get(0).getSnapshotId(), equalTo("other_snap"));
assertThat(snapshots.get(1).getSnapshotId(), equalTo("11"));
assertThat(snapshots.get(2).getSnapshotId(), equalTo("snap_1"));
assertThat(snapshots.get(3).getSnapshotId(), equalTo("other_snap"));
assertThat(snapshots.get(4).getSnapshotId(), equalTo("snap_2"));
// assert that quantiles are not loaded
assertNull(snapshots.get(0).getQuantiles());
assertNull(snapshots.get(1).getQuantiles());
assertNull(snapshots.get(2).getQuantiles());
assertNull(snapshots.get(3).getQuantiles());
assertNull(snapshots.get(4).getQuantiles());
// test get single snapshot
PlainActionFuture<Result<ModelSnapshot>> singleFuture = new PlainActionFuture<>();
jobProvider.getModelSnapshot(jobId, "snap_1", true, singleFuture::onResponse, singleFuture::onFailure);
ModelSnapshot withQuantiles = singleFuture.actionGet().result;
assertThat(withQuantiles.getQuantiles().getTimestamp().getTime(), equalTo(11L));
singleFuture = new PlainActionFuture<>();
jobProvider.getModelSnapshot(jobId, "snap_2", false, singleFuture::onResponse, singleFuture::onFailure);
ModelSnapshot withoutQuantiles = singleFuture.actionGet().result;
assertNull(withoutQuantiles.getQuantiles());
}
/**
 * Verifies that {@code getAutodetectParams} aggregates every process input
 * for a job: calendar scheduled events (including past ones, per the
 * assertions below), referenced filters, the LATEST data counts and model
 * size stats, the job's model snapshot, and its quantiles.
 */
public void testGetAutodetectParams() throws Exception {
String jobId = "test_get_autodetect_params";
Job.Builder job = createJob(jobId, Arrays.asList("fruit", "tea"));
String calendarId = "downtime";
Calendar calendar = new Calendar(calendarId, Collections.singletonList(jobId), null);
indexCalendars(Collections.singletonList(calendar));
// index the param docs
ZonedDateTime now = ZonedDateTime.now();
List<ScheduledEvent> events = new ArrayList<>();
// events in the past should be filtered out
events.add(buildScheduledEvent("In the past", now.minusDays(7), now.minusDays(6), calendarId));
events.add(buildScheduledEvent("A_downtime", now.plusDays(1), now.plusDays(2), calendarId));
events.add(buildScheduledEvent("A_downtime2", now.plusDays(8), now.plusDays(9), calendarId));
indexScheduledEvents(events);
// Filters referenced by the job's detector rules
List<MlFilter> filters = new ArrayList<>();
filters.add(MlFilter.builder("fruit").setItems("apple", "pear").build());
filters.add(MlFilter.builder("tea").setItems("green", "builders").build());
indexFilters(filters);
// Two data-counts docs; only the one with the later record timestamp should win
DataCounts earliestCounts = DataCountsTests.createTestInstance(jobId);
earliestCounts.setLatestRecordTimeStamp(new Date(1500000000000L));
indexDataCounts(earliestCounts, jobId);
DataCounts latestCounts = DataCountsTests.createTestInstance(jobId);
latestCounts.setLatestRecordTimeStamp(new Date(1510000000000L));
indexDataCounts(latestCounts, jobId);
// Two model-size-stats docs; the one with the later log time should win
ModelSizeStats earliestSizeStats = new ModelSizeStats.Builder(jobId).setLogTime(new Date(1500000000000L)).build();
ModelSizeStats latestSizeStats = new ModelSizeStats.Builder(jobId).setLogTime(new Date(1510000000000L)).build();
indexModelSizeStats(earliestSizeStats);
indexModelSizeStats(latestSizeStats);
// The job references snapshot "snap_1"; quantiles are stored separately
job.setModelSnapshotId("snap_1");
ModelSnapshot snapshot = new ModelSnapshot.Builder(jobId).setSnapshotId("snap_1").build();
indexModelSnapshot(snapshot);
Quantiles quantiles = new Quantiles(jobId, new Date(), "quantile-state");
indexQuantiles(quantiles);
indicesAdmin().prepareRefresh(
MlMetaIndex.indexName(),
AnomalyDetectorsIndex.jobStateIndexPattern(),
AnomalyDetectorsIndex.jobResultsAliasedName(jobId)
).get();
AutodetectParams params = getAutodetectParams(job.build(new Date()));
// events
assertNotNull(params.scheduledEvents());
assertEquals(3, params.scheduledEvents().size());
assertEquals(events.get(0), params.scheduledEvents().get(0));
assertEquals(events.get(1), params.scheduledEvents().get(1));
assertEquals(events.get(2), params.scheduledEvents().get(2));
// filters
assertNotNull(params.filters());
assertEquals(2, params.filters().size());
assertTrue(params.filters().contains(filters.get(0)));
assertTrue(params.filters().contains(filters.get(1)));
// datacounts
assertNotNull(params.dataCounts());
assertEquals(latestCounts, params.dataCounts());
// model size stats
assertNotNull(params.modelSizeStats());
assertEquals(latestSizeStats, params.modelSizeStats());
// model snapshot
assertNotNull(params.modelSnapshot());
assertEquals(snapshot, params.modelSnapshot());
// quantiles
assertNotNull(params.quantiles());
assertEquals(quantiles, params.quantiles());
}
/**
 * Fetches the autodetect process parameters for a job, blocking on the
 * async provider call.
 *
 * @throws Exception any failure reported by the provider's error handler
 */
private AutodetectParams getAutodetectParams(Job job) throws Exception {
    AtomicReference<AutodetectParams> paramsHolder = new AtomicReference<>();
    AtomicReference<Exception> errorHolder = new AtomicReference<>();
    CountDownLatch done = new CountDownLatch(1);
    jobProvider.getAutodetectParams(job, fetched -> {
        paramsHolder.set(fetched);
        done.countDown();
    }, e -> {
        errorHolder.set(e);
        done.countDown();
    });
    done.await();
    Exception error = errorHolder.get();
    if (error != null) {
        throw error;
    }
    return paramsHolder.get();
}
/**
 * Synchronously fetches the scheduled events applying to a job (directly or
 * via the given job groups), blocking on the async provider call.
 *
 * @throws Exception any failure reported by the provider listener
 */
private List<ScheduledEvent> getScheduledEventsForJob(String jobId, List<String> jobGroups, ScheduledEventsQueryBuilder query)
throws Exception {
AtomicReference<Exception> errorHolder = new AtomicReference<>();
AtomicReference<QueryPage<ScheduledEvent>> searchResultHolder = new AtomicReference<>();
CountDownLatch latch = new CountDownLatch(1);
jobProvider.scheduledEventsForJob(jobId, jobGroups, query, ActionListener.wrap(params -> {
searchResultHolder.set(params);
latch.countDown();
}, e -> {
errorHolder.set(e);
latch.countDown();
}));
latch.await();
if (errorHolder.get() != null) {
throw errorHolder.get();
}
return searchResultHolder.get().results();
}
/**
 * Synchronously runs a scheduled-events query, blocking on the async
 * provider call.
 *
 * @throws Exception any failure reported by the provider listener
 */
private List<ScheduledEvent> getScheduledEvents(ScheduledEventsQueryBuilder query) throws Exception {
AtomicReference<Exception> errorHolder = new AtomicReference<>();
AtomicReference<QueryPage<ScheduledEvent>> searchResultHolder = new AtomicReference<>();
CountDownLatch latch = new CountDownLatch(1);
jobProvider.scheduledEvents(query, ActionListener.wrap(params -> {
searchResultHolder.set(params);
latch.countDown();
}, e -> {
errorHolder.set(e);
latch.countDown();
}));
latch.await();
if (errorHolder.get() != null) {
throw errorHolder.get();
}
return searchResultHolder.get().results();
}
/** Creates and registers a job with no detector-rule filters and no groups. */
private Job.Builder createJob(String jobId) {
    return createJob(jobId, Collections.emptyList(), Collections.emptyList());
}
/** Creates and registers a job with the given detector-rule filters and no groups. */
private Job.Builder createJob(String jobId, List<String> filterIds) {
    return createJob(jobId, filterIds, Collections.emptyList());
}
/**
 * Builds a job with the given groups and a single detector whose rules are
 * scoped to {@code filterIds}, then registers it with the cluster via
 * {@link PutJobAction}.
 *
 * @return the builder used to create the job, for further inspection
 */
private Job.Builder createJob(String jobId, List<String> filterIds, List<String> jobGroups) {
Job.Builder builder = new Job.Builder(jobId);
builder.setGroups(jobGroups);
AnalysisConfig.Builder ac = createAnalysisConfig("by_field", filterIds);
DataDescription.Builder dc = new DataDescription.Builder();
builder.setAnalysisConfig(ac);
builder.setDataDescription(dc);
PutJobAction.Request request = new PutJobAction.Request(builder);
// Blocking call: the job must exist before the test proceeds
client().execute(PutJobAction.INSTANCE, request).actionGet();
return builder;
}
/**
 * Builds an analysis config with a single mean("field") detector split by
 * {@code byFieldName}, adding one SKIP_RESULT detection rule scoped to each
 * of the given filter IDs.
 */
private AnalysisConfig.Builder createAnalysisConfig(String byFieldName, List<String> filterIds) {
    Detector.Builder detector = new Detector.Builder("mean", "field");
    detector.setByFieldName(byFieldName);
    List<DetectionRule> detectionRules = new ArrayList<>();
    for (String filterId : filterIds) {
        RuleScope.Builder scope = RuleScope.builder();
        scope.include(byFieldName, filterId);
        DetectionRule.Builder rule = new DetectionRule.Builder(scope).setActions(RuleAction.SKIP_RESULT);
        detectionRules.add(rule.build());
    }
    detector.setRules(detectionRules);
    return new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
}
/**
 * Bulk-indexes scheduled events into the ML meta index with an immediate
 * refresh, failing the test if any bulk item fails.
 */
private void indexScheduledEvents(List<ScheduledEvent> events) throws IOException {
BulkRequestBuilder bulkRequest = client().prepareBulk();
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (ScheduledEvent event : events) {
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName());
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
// FOR_INTERNAL_STORAGE serializes the doc in the on-disk format
ToXContent.MapParams params = new ToXContent.MapParams(
Collections.singletonMap(ToXContentParams.FOR_INTERNAL_STORAGE, "true")
);
indexRequest.source(event.toXContent(builder, params));
bulkRequest.add(indexRequest);
}
}
BulkResponse response = bulkRequest.get();
if (response.hasFailures()) {
throw new IllegalStateException(Strings.toString(response));
}
}
/** Synchronously persists the given data counts for {@code jobId}. */
private void indexDataCounts(DataCounts counts, String jobId) throws InterruptedException {
    new JobDataCountsPersister(client(), resultsPersisterService, auditor).persistDataCounts(jobId, counts, true);
}
/**
 * Bulk-indexes the given filters into the ML meta index with an immediate
 * refresh. Fails fast if any bulk item fails, consistently with
 * {@code indexScheduledEvents}, instead of silently ignoring the response.
 */
private void indexFilters(List<MlFilter> filters) throws IOException {
    BulkRequestBuilder bulkRequest = client().prepareBulk();
    bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
    for (MlFilter filter : filters) {
        IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName()).id(filter.documentId());
        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
            ToXContent.MapParams params = new ToXContent.MapParams(
                Collections.singletonMap(ToXContentParams.FOR_INTERNAL_STORAGE, "true")
            );
            indexRequest.source(filter.toXContent(builder, params));
            bulkRequest.add(indexRequest);
        }
    }
    // Surface indexing failures immediately rather than letting later
    // assertions fail with a confusing "document not found"
    BulkResponse response = bulkRequest.get();
    if (response.hasFailures()) {
        throw new IllegalStateException(Strings.toString(response));
    }
}
private void indexModelSizeStats(ModelSizeStats modelSizeStats) {
JobResultsPersister persister = new JobResultsPersister(
new OriginSettingClient(client(), ClientHelper.ML_ORIGIN),
resultsPersisterService
);
persister.persistModelSizeStats(modelSizeStats, () -> true);
}
private void indexModelSnapshot(ModelSnapshot snapshot) {
JobResultsPersister persister = new JobResultsPersister(
new OriginSettingClient(client(), ClientHelper.ML_ORIGIN),
resultsPersisterService
);
persister.persistModelSnapshot(snapshot, WriteRequest.RefreshPolicy.IMMEDIATE, () -> true);
}
private void indexQuantiles(Quantiles quantiles) {
PlainActionFuture<Boolean> future = new PlainActionFuture<>();
createStateIndexAndAliasIfNecessary(
client(),
ClusterState.EMPTY_STATE,
TestIndexNameExpressionResolver.newInstance(),
TEST_REQUEST_TIMEOUT,
future
);
future.actionGet();
JobResultsPersister persister = new JobResultsPersister(
new OriginSettingClient(client(), ClientHelper.ML_ORIGIN),
resultsPersisterService
);
persister.persistQuantiles(quantiles, () -> true);
}
private void indexCalendars(List<Calendar> calendars) throws IOException {
BulkRequestBuilder bulkRequest = client().prepareBulk();
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (Calendar calendar : calendars) {
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName()).id(calendar.documentId());
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
ToXContent.MapParams params = new ToXContent.MapParams(
Collections.singletonMap(ToXContentParams.FOR_INTERNAL_STORAGE, "true")
);
indexRequest.source(calendar.toXContent(builder, params));
bulkRequest.add(indexRequest);
}
}
bulkRequest.get();
}
}
| JobResultsProviderIT |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/customwordembedding/FeatureValue.java | {
"start": 405,
"end": 523
} | class ____ {
public double getWeight() {
return 1.0;
}
public abstract int getRow();
}
| FeatureValue |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/junit/jupiter/SoftAssertionsExtension_InjectionSanityChecking_Test.java | {
"start": 3387,
"end": 3843
} | class ____ extends TestBase {
@InjectSoftAssertions
static SoftAssertions usp = null;
@Override
@Test
void myTest() {}
}
@Test
void static_field_throws_exception() {
assertThatTest(StaticField.class).isInstanceOf(ExtensionConfigurationException.class)
.hasMessage("[usp] SoftAssertionsProvider field must not be static or final.");
}
@Disabled("Run by the testkit")
static | StaticField |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/component/bean/RouterBean.java | {
"start": 1032,
"end": 1201
} | class ____ {
@Consume("direct:start")
@RecipientList
public String[] route(String body) {
return new String[] { "mock:a", "mock:b" };
}
}
| RouterBean |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/XDR.java | {
"start": 1702,
"end": 7613
} | enum ____ {
READING, WRITING,
}
private final State state;
/**
* Construct a new XDR message buffer.
*
* @param initialCapacity
* the initial capacity of the buffer.
*/
public XDR(int initialCapacity) {
this(ByteBuffer.allocate(initialCapacity), State.WRITING);
}
public XDR() {
this(DEFAULT_INITIAL_CAPACITY);
}
public XDR(ByteBuffer buf, State state) {
this.buf = buf;
this.state = state;
}
/**
* Wraps a byte array as a read-only XDR message. There's no copy involved,
* thus it is the client's responsibility to ensure that the byte array
* remains unmodified when using the XDR object.
*
* @param src
* the byte array to be wrapped.
*/
public XDR(byte[] src) {
this(ByteBuffer.wrap(src).asReadOnlyBuffer(), State.READING);
}
public XDR asReadOnlyWrap() {
ByteBuffer b = buf.asReadOnlyBuffer();
if (state == State.WRITING) {
b.flip();
}
XDR n = new XDR(b, State.READING);
return n;
}
public ByteBuffer buffer() {
return buf.duplicate();
}
public int size() {
// TODO: This overloading intends to be compatible with the semantics of
// the previous version of the class. This function should be separated into
// two with clear semantics.
return state == State.READING ? buf.limit() : buf.position();
}
public int readInt() {
Preconditions.checkState(state == State.READING);
return buf.getInt();
}
public void writeInt(int v) {
ensureFreeSpace(SIZEOF_INT);
buf.putInt(v);
}
public boolean readBoolean() {
Preconditions.checkState(state == State.READING);
return buf.getInt() != 0;
}
public void writeBoolean(boolean v) {
ensureFreeSpace(SIZEOF_INT);
buf.putInt(v ? 1 : 0);
}
public long readHyper() {
Preconditions.checkState(state == State.READING);
return buf.getLong();
}
public void writeLongAsHyper(long v) {
ensureFreeSpace(SIZEOF_LONG);
buf.putLong(v);
}
public byte[] readFixedOpaque(int size) {
Preconditions.checkState(state == State.READING);
byte[] r = new byte[size];
buf.get(r);
alignPosition();
return r;
}
public void writeFixedOpaque(byte[] src, int length) {
ensureFreeSpace(alignUp(length));
buf.put(src, 0, length);
writePadding();
}
public void writeFixedOpaque(byte[] src) {
writeFixedOpaque(src, src.length);
}
public byte[] readVariableOpaque() {
Preconditions.checkState(state == State.READING);
int size = readInt();
return readFixedOpaque(size);
}
public void writeVariableOpaque(byte[] src) {
ensureFreeSpace(SIZEOF_INT + alignUp(src.length));
buf.putInt(src.length);
writeFixedOpaque(src);
}
public String readString() {
return new String(readVariableOpaque(), StandardCharsets.UTF_8);
}
public void writeString(String s) {
writeVariableOpaque(s.getBytes(StandardCharsets.UTF_8));
}
private void writePadding() {
Preconditions.checkState(state == State.WRITING);
int p = pad(buf.position());
ensureFreeSpace(p);
buf.put(PADDING_BYTES, 0, p);
}
private int alignUp(int length) {
return length + pad(length);
}
private int pad(int length) {
switch (length % 4) {
case 1:
return 3;
case 2:
return 2;
case 3:
return 1;
default:
return 0;
}
}
private void alignPosition() {
buf.position(alignUp(buf.position()));
}
private void ensureFreeSpace(int size) {
Preconditions.checkState(state == State.WRITING);
if (buf.remaining() < size) {
int newCapacity = buf.capacity() * 2;
int newRemaining = buf.capacity() + buf.remaining();
while (newRemaining < size) {
newRemaining += newCapacity;
newCapacity *= 2;
}
ByteBuffer newbuf = ByteBuffer.allocate(newCapacity);
buf.flip();
newbuf.put(buf);
buf = newbuf;
}
}
/**
* check if the rest of data has more than len bytes.
* @param xdr XDR message
* @param len minimum remaining length
* @return specify remaining length is enough or not
*/
public static boolean verifyLength(XDR xdr, int len) {
return xdr.buf.remaining() >= len;
}
static byte[] recordMark(int size, boolean last) {
byte[] b = new byte[SIZEOF_INT];
ByteBuffer buf = ByteBuffer.wrap(b);
buf.putInt(!last ? size : size | 0x80000000);
return b;
}
/**
* Write an XDR message to a TCP ChannelBuffer.
* @param request XDR request
* @param last specifies last request or not
* @return TCP buffer
*/
public static ByteBuf writeMessageTcp(XDR request, boolean last) {
Preconditions.checkState(request.state == XDR.State.WRITING);
ByteBuffer b = request.buf.duplicate();
b.flip();
byte[] fragmentHeader = XDR.recordMark(b.limit(), last);
ByteBuffer headerBuf = ByteBuffer.wrap(fragmentHeader);
// TODO: Investigate whether making a copy of the buffer is necessary.
return Unpooled.wrappedBuffer(headerBuf, b);
}
/**
* Write an XDR message to a UDP ChannelBuffer.
* @param response XDR response
* @return UDP buffer
*/
public static ByteBuf writeMessageUdp(XDR response) {
Preconditions.checkState(response.state == XDR.State.READING);
// TODO: Investigate whether making a copy of the buffer is necessary.
return Unpooled.copiedBuffer(response.buf);
}
public static int fragmentSize(byte[] mark) {
ByteBuffer b = ByteBuffer.wrap(mark);
int n = b.getInt();
return n & 0x7fffffff;
}
public static boolean isLastFragment(byte[] mark) {
ByteBuffer b = ByteBuffer.wrap(mark);
int n = b.getInt();
return (n & 0x80000000) != 0;
}
@VisibleForTesting
public byte[] getBytes() {
ByteBuffer d = asReadOnlyWrap().buffer();
byte[] b = new byte[d.remaining()];
d.get(b);
return b;
}
}
| State |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/inference/rerank/RerankOperatorTests.java | {
"start": 976,
"end": 4778
} | class ____ extends InferenceOperatorTestCase<RankedDocsResults> {
private static final String SIMPLE_INFERENCE_ID = "test_reranker";
private static final String SIMPLE_QUERY = "query text";
private int inputChannel;
private int scoreChannel;
@Override
protected Operator.OperatorFactory simple(SimpleOptions options) {
return new RerankOperator.Factory(
mockedInferenceService(),
SIMPLE_INFERENCE_ID,
SIMPLE_QUERY,
evaluatorFactory(inputChannel),
scoreChannel
);
}
@Before
public void initRerankChannels() {
inputChannel = between(0, inputsCount - 1);
scoreChannel = between(0, inputsCount - 1);
if (scoreChannel == inputChannel) {
scoreChannel++;
}
}
@Override
protected void assertSimpleOutput(List<Page> inputPages, List<Page> resultPages) {
assertThat(inputPages, hasSize(resultPages.size()));
for (int pageId = 0; pageId < inputPages.size(); pageId++) {
Page inputPage = inputPages.get(pageId);
Page resultPage = resultPages.get(pageId);
assertThat(resultPage.getPositionCount(), equalTo(inputPage.getPositionCount()));
assertThat(resultPage.getBlockCount(), equalTo(Integer.max(scoreChannel + 1, inputPage.getBlockCount())));
for (int channel = 0; channel < inputPage.getBlockCount(); channel++) {
Block resultBlock = resultPage.getBlock(channel);
if (channel == scoreChannel) {
assertExpectedScore(inputPage.getBlock(inputChannel), (DoubleBlock) resultBlock);
} else {
Block inputBlock = inputPage.getBlock(channel);
assertThat(resultBlock.getPositionCount(), equalTo(resultPage.getPositionCount()));
assertThat(resultBlock.elementType(), equalTo(inputBlock.elementType()));
assertBlockContentEquals(inputBlock, resultBlock);
}
}
}
}
private void assertExpectedScore(BytesRefBlock inputBlock, DoubleBlock scoreBlock) {
assertThat(scoreBlock.getPositionCount(), equalTo(inputBlock.getPositionCount()));
BlockStringReader inputBlockReader = new InferenceOperatorTestCase.BlockStringReader();
for (int pos = 0; pos < inputBlock.getPositionCount(); pos++) {
if (inputBlock.isNull(pos)) {
assertThat(scoreBlock.isNull(pos), equalTo(true));
} else {
double score = scoreBlock.getDouble(scoreBlock.getFirstValueIndex(pos));
double expectedScore = score(inputBlockReader.readString(inputBlock, pos));
assertThat(score, equalTo(expectedScore));
}
}
}
@Override
protected Matcher<String> expectedDescriptionOfSimple() {
return expectedToStringOfSimple();
}
@Override
protected Matcher<String> expectedToStringOfSimple() {
return equalTo(
"RerankOperator[inference_id=[" + SIMPLE_INFERENCE_ID + "], query=[" + SIMPLE_QUERY + "], score_channel=[" + scoreChannel + "]]"
);
}
@Override
protected RankedDocsResults mockInferenceResult(InferenceAction.Request request) {
List<RankedDocsResults.RankedDoc> rankedDocs = new ArrayList<>();
for (int rank = 0; rank < request.getInput().size(); rank++) {
String inputText = request.getInput().get(rank);
rankedDocs.add(new RankedDocsResults.RankedDoc(rank, score(inputText), inputText));
}
return new RankedDocsResults(rankedDocs);
}
private float score(String inputText) {
return (float) inputText.hashCode() / 100;
}
}
| RerankOperatorTests |
java | google__auto | common/src/test/java/com/google/auto/common/OverridesTest.java | {
"start": 4876,
"end": 4926
} | class ____ extends Parent {}
static | ChildOfParent |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/DefaultThreadPoolFactory.java | {
"start": 5831,
"end": 6128
} | class ____), which mean
// we could potentially keep adding tasks, and run out of memory.
if (profile.getMaxPoolSize() > 0) {
return new SizedScheduledExecutorService(answer, profile.getMaxQueueSize());
} else {
return answer;
}
}
}
| javadoc |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsAction.java | {
"start": 931,
"end": 2132
} | class ____ extends HandledTransportAction<
UpdateConnectorSyncJobIngestionStatsAction.Request,
ConnectorUpdateActionResponse> {
protected final ConnectorSyncJobIndexService connectorSyncJobIndexService;
@Inject
public TransportUpdateConnectorSyncJobIngestionStatsAction(
TransportService transportService,
ActionFilters actionFilters,
Client client
) {
super(
UpdateConnectorSyncJobIngestionStatsAction.NAME,
transportService,
actionFilters,
UpdateConnectorSyncJobIngestionStatsAction.Request::new,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client);
}
@Override
protected void doExecute(
Task task,
UpdateConnectorSyncJobIngestionStatsAction.Request request,
ActionListener<ConnectorUpdateActionResponse> listener
) {
connectorSyncJobIndexService.updateConnectorSyncJobIngestionStats(
request,
listener.map(r -> new ConnectorUpdateActionResponse(r.getResult()))
);
}
}
| TransportUpdateConnectorSyncJobIngestionStatsAction |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/KDE.java | {
"start": 534,
"end": 882
} | class ____ {
private static final double SQRT2 = FastMath.sqrt(2.0);
private static final double ESTIMATOR_EPS = 1e-10;
private static int lowerBound(double[] xs, double x) {
int retVal = Arrays.binarySearch(xs, x);
if (retVal < 0) {
retVal = -1 - retVal;
}
return retVal;
}
private | KDE |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/snapshots/MetadataLoadingDuringSnapshotRestoreIT.java | {
"start": 10326,
"end": 11310
} | class ____ extends MockRepository.Plugin {
public static final String TYPE = "countingmock";
@Override
public Map<String, Repository.Factory> getRepositories(
Environment env,
NamedXContentRegistry namedXContentRegistry,
ClusterService clusterService,
BigArrays bigArrays,
RecoverySettings recoverySettings,
RepositoriesMetrics repositoriesMetrics,
SnapshotMetrics snapshotMetrics
) {
return Collections.singletonMap(
TYPE,
(projectId, metadata) -> new CountingMockRepository(
projectId,
metadata,
env,
namedXContentRegistry,
clusterService,
bigArrays,
recoverySettings,
snapshotMetrics
)
);
}
}
}
| CountingMockRepositoryPlugin |
java | spring-projects__spring-framework | spring-aop/src/test/java/org/springframework/aop/framework/ProxyFactoryTests.java | {
"start": 16274,
"end": 16367
} | class ____ implements Runnable {
@Override
public void run() {
}
}
@Order(1)
static | A |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/doublearray/DoubleArrayAssert_doesNotContain_at_Index_Test.java | {
"start": 1287,
"end": 2177
} | class ____ extends DoubleArrayAssertBaseTest {
private final Index index = someIndex();
@Override
protected DoubleArrayAssert invoke_api_method() {
return assertions.doesNotContain(8d, index);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertDoesNotContain(getInfo(assertions), getActual(assertions), 8d, index);
}
@Test
void should_pass_with_precision_specified_as_last_argument() {
// GIVEN
double[] actual = arrayOf(1.0, 2.0);
// THEN
assertThat(actual).doesNotContain(1.01, atIndex(0), withPrecision(0.0001));
}
@Test
void should_pass_with_precision_specified_in_comparator() {
// GIVEN
double[] actual = arrayOf(1.0, 2.0);
// THEN
assertThat(actual).usingComparatorWithPrecision(0.1)
.doesNotContain(2.2, atIndex(1));
}
}
| DoubleArrayAssert_doesNotContain_at_Index_Test |
java | quarkusio__quarkus | extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/BlockingStubInjectionTest.java | {
"start": 758,
"end": 1598
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.addClasses(MyConsumer.class, GreeterGrpc.class, GreeterGrpc.GreeterBlockingStub.class,
HelloService.class, HelloRequest.class, HelloReply.class,
HelloReplyOrBuilder.class, HelloRequestOrBuilder.class))
.withConfigurationResource("hello-config.properties");
@Inject
MyConsumer service;
@Test
public void test() {
String neo = service.invoke("neo");
assertThat(neo).isEqualTo("Hello neo");
neo = service.invokeWeird("neo");
assertThat(neo).isEqualTo("Hello neo");
}
@ApplicationScoped
static | BlockingStubInjectionTest |
java | google__auto | value/src/it/functional/src/test/java/com/google/auto/value/AutoValueTest.java | {
"start": 28500,
"end": 28604
} | interface ____<M extends Mergeable<M>> {
M merge(M other);
}
@AutoValue
abstract static | Mergeable |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/relationship/JoinedInheritanceWithOneToManyTest.java | {
"start": 2469,
"end": 3142
} | class ____ {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "seq")
protected Integer id;
@Temporal(TemporalType.TIMESTAMP)
protected Date correctDate;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Date getCorrectDate() {
return correctDate;
}
public void setCorrectDate(Date correctDate) {
this.correctDate = correctDate;
}
}
@Entity(name = "BuildingList")
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "TB_BUILDING_LIST")
@SequenceGenerator(name = "seq",
sequenceName = "sq_building_list_id",
allocationSize = 1)
public static | DBObject |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/type/RecursiveType1658Test.java | {
"start": 645,
"end": 2043
} | class ____<T> extends HashMap<T, Tree<T>> // implements Serializable
{
public Tree() { }
public Tree(List<T> children) {
this();
for (final T t : children) {
this.put(t, new Tree<T>());
}
}
public List<Tree<T>> getLeafTrees() {
return null;
}
}
@Test
public void testRecursive1658() throws Exception
{
Tree<String> t = new Tree<String>(Arrays.asList("hello", "world"));
final TypeResolverBuilder<?> typer = new StdTypeResolverBuilder(JsonTypeInfo.Id.CLASS,
JsonTypeInfo.As.PROPERTY, null);
ObjectMapper mapper = jsonMapperBuilder()
.setDefaultTyping(typer)
.build();
String res = mapper.writeValueAsString(t);
Tree<?> tRead = mapper.readValue(res, Tree.class);
assertNotNull(tRead);
// 30-Oct-2019, tatu: Let's actually verify that description will be safe to use, too
JavaType resolved = mapper.getTypeFactory()
.constructType(new TypeReference<Tree<String>> () { });
final String namePath = Tree.class.getName().replace('.', '/');
assertEquals("L"+namePath+";", resolved.getErasedSignature());
assertEquals("L"+namePath+"<Ljava/lang/String;L"+namePath+";>;",
resolved.getGenericSignature());
}
}
| Tree |
java | quarkusio__quarkus | extensions/smallrye-reactive-messaging/deployment/src/test/java/io/quarkus/smallrye/reactivemessaging/channels/MutinyEmitterTest.java | {
"start": 680,
"end": 1311
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MutinyEmitterExample.class));
@Inject
MutinyEmitterExample example;
@Test
public void testMutinyEmitter() {
example.run();
List<String> list = example.list();
assertEquals(4, list.size());
assertEquals("a", list.get(0));
assertEquals("b", list.get(1));
assertEquals("c", list.get(2));
assertEquals("d", list.get(3));
}
@ApplicationScoped
public static | MutinyEmitterTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithTasksMatchTest.java | {
"start": 1132,
"end": 2395
} | class ____ extends ContextTestSupport {
@Test
public void testReplaceMultipleIds() throws Exception {
AdviceWith.adviceWith(context.getRouteDefinitions().get(0), context, new AdviceWithRouteBuilder() {
@Override
public void configure() throws Exception {
// replace all gold id's with the following route path
weaveById("gold*").replace().multicast().to("mock:a").to("mock:b");
}
});
getMockEndpoint("mock:foo").expectedMessageCount(0);
getMockEndpoint("mock:bar").expectedMessageCount(0);
getMockEndpoint("mock:a").expectedMessageCount(2);
getMockEndpoint("mock:b").expectedMessageCount(2);
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:start").to("mock:foo").id("gold-1").to("mock:bar").id("gold-2").to("mock:result").id("silver-1");
}
};
}
}
| AdviceWithTasksMatchTest |
java | spring-projects__spring-boot | module/spring-boot-health/src/main/java/org/springframework/boot/health/actuate/endpoint/HealthEndpointWebExtension.java | {
"start": 2134,
"end": 4584
} | class ____ extends HealthEndpointSupport<Health, HealthDescriptor> {
/**
* Create a new {@link HealthEndpointWebExtension} instance.
* @param registry the health contributor registry
* @param fallbackRegistry the fallback registry or {@code null}
* @param groups the health endpoint groups
* @param slowContributorLoggingThreshold duration after which slow health indicator
* logging should occur
*/
public HealthEndpointWebExtension(HealthContributorRegistry registry,
@Nullable ReactiveHealthContributorRegistry fallbackRegistry, HealthEndpointGroups groups,
@Nullable Duration slowContributorLoggingThreshold) {
super(Contributor.blocking(registry, fallbackRegistry), groups, slowContributorLoggingThreshold);
}
@ReadOperation
public WebEndpointResponse<HealthDescriptor> health(ApiVersion apiVersion, WebServerNamespace serverNamespace,
SecurityContext securityContext) {
return health(apiVersion, serverNamespace, securityContext, false, EMPTY_PATH);
}
@ReadOperation
public WebEndpointResponse<HealthDescriptor> health(ApiVersion apiVersion, WebServerNamespace serverNamespace,
SecurityContext securityContext, @Selector(match = Match.ALL_REMAINING) String... path) {
return health(apiVersion, serverNamespace, securityContext, false, path);
}
public WebEndpointResponse<HealthDescriptor> health(ApiVersion apiVersion,
@Nullable WebServerNamespace serverNamespace, SecurityContext securityContext, boolean showAll,
String... path) {
Result<HealthDescriptor> result = getResult(apiVersion, serverNamespace, securityContext, showAll, path);
if (result == null) {
return (Arrays.equals(path, EMPTY_PATH))
? new WebEndpointResponse<>(IndicatedHealthDescriptor.UP, WebEndpointResponse.STATUS_OK)
: new WebEndpointResponse<>(WebEndpointResponse.STATUS_NOT_FOUND);
}
HealthDescriptor descriptor = result.descriptor();
HealthEndpointGroup group = result.group();
int statusCode = group.getHttpCodeStatusMapper().getStatusCode(descriptor.getStatus());
return new WebEndpointResponse<>(descriptor, statusCode);
}
@Override
protected HealthDescriptor aggregateDescriptors(ApiVersion apiVersion, Map<String, HealthDescriptor> contributions,
StatusAggregator statusAggregator, boolean showComponents, @Nullable Set<String> groupNames) {
return getCompositeDescriptor(apiVersion, contributions, statusAggregator, showComponents, groupNames);
}
}
| HealthEndpointWebExtension |
java | netty__netty | handler/src/test/java/io/netty/handler/NativeImageHandlerMetadataTest.java | {
"start": 762,
"end": 940
} | class ____ {
@Test
public void collectAndCompareMetadata() {
ChannelHandlerMetadataUtil.generateMetadata("io.netty.handler");
}
}
| NativeImageHandlerMetadataTest |
java | google__guava | android/guava/src/com/google/common/util/concurrent/AbstractFuture.java | {
"start": 5579,
"end": 6212
} | class ____ {
static final Listener TOMBSTONE = new Listener();
// null only for TOMBSTONE
final @Nullable Runnable task;
// null only for TOMBSTONE
final @Nullable Executor executor;
// writes to next are made visible by subsequent CAS's on the listeners field
@Nullable Listener next;
Listener(Runnable task, Executor executor) {
this.task = task;
this.executor = executor;
}
Listener() {
this.task = null;
this.executor = null;
}
}
/** A special value to represent failure, when {@link #setException} is called successfully. */
private static final | Listener |
java | apache__kafka | clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/consumer/ShareConsumerTest.java | {
"start": 25168,
"end": 82213
} | class ____ implements AcknowledgementCommitCallback {
private final Map<TopicPartition, Set<Long>> partitionOffsetsMap;
private final Map<TopicPartition, Exception> partitionExceptionMap;
public TestableAcknowledgementCommitCallback(Map<TopicPartition, Set<Long>> partitionOffsetsMap,
Map<TopicPartition, Exception> partitionExceptionMap) {
this.partitionOffsetsMap = partitionOffsetsMap;
this.partitionExceptionMap = partitionExceptionMap;
}
@Override
public void onComplete(Map<TopicIdPartition, Set<Long>> offsetsMap, Exception exception) {
offsetsMap.forEach((partition, offsets) -> {
partitionOffsetsMap.merge(partition.topicPartition(), offsets, (oldOffsets, newOffsets) -> {
Set<Long> mergedOffsets = new HashSet<>();
mergedOffsets.addAll(oldOffsets);
mergedOffsets.addAll(newOffsets);
return mergedOffsets;
});
if (!partitionExceptionMap.containsKey(partition.topicPartition()) && exception != null) {
partitionExceptionMap.put(partition.topicPartition(), exception);
}
});
}
}
@ClusterTest
public void testHeaders() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1")) {
int numRecords = 1;
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
record.headers().add("headerKey", "headerValue".getBytes());
record.headers().add("headerKey2", "headerValue2".getBytes());
record.headers().add("headerKey3", "headerValue3".getBytes());
producer.send(record);
producer.flush();
shareConsumer.subscribe(Set.of(tp.topic()));
List<ConsumerRecord<byte[], byte[]>> records = consumeRecords(shareConsumer, numRecords);
assertEquals(numRecords, records.size());
Header header = records.get(0).headers().lastHeader("headerKey");
assertEquals("headerValue", new String(header.value()));
// Test the order of headers in a record is preserved when producing and consuming
Header[] headers = records.get(0).headers().toArray();
assertEquals("headerKey", headers[0].key());
assertEquals("headerKey2", headers[1].key());
assertEquals("headerKey3", headers[2].key());
verifyShareGroupStateTopicRecordsProduced();
}
}
private void testHeadersSerializeDeserialize(Serializer<byte[]> serializer, Deserializer<byte[]> deserializer) {
alterShareAutoOffsetReset("group1", "earliest");
Map<String, Object> producerConfig = Map.of(
ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, serializer.getClass().getName()
);
Map<String, Object> consumerConfig = Map.of(
ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, deserializer.getClass().getName()
);
try (Producer<byte[], byte[]> producer = createProducer(producerConfig);
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1", consumerConfig)) {
int numRecords = 1;
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record);
producer.flush();
shareConsumer.subscribe(Set.of(tp.topic()));
List<ConsumerRecord<byte[], byte[]>> records = consumeRecords(shareConsumer, numRecords);
assertEquals(numRecords, records.size());
}
}
@ClusterTest
public void testHeadersSerializerDeserializer() {
testHeadersSerializeDeserialize(new SerializerImpl(), new DeserializerImpl());
verifyShareGroupStateTopicRecordsProduced();
}
@ClusterTest
public void testMaxPollRecords() {
int numRecords = 10000;
int maxPollRecords = 2;
alterShareAutoOffsetReset("group1", "earliest");
try (ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1",
Map.of(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, String.valueOf(maxPollRecords)))) {
long startingTimestamp = System.currentTimeMillis();
produceMessagesWithTimestamp(numRecords, startingTimestamp);
shareConsumer.subscribe(Set.of(tp.topic()));
List<ConsumerRecord<byte[], byte[]>> records = consumeRecords(shareConsumer, numRecords);
long i = 0L;
for (ConsumerRecord<byte[], byte[]> record : records) {
assertEquals(tp.topic(), record.topic());
assertEquals(tp.partition(), record.partition());
assertEquals(TimestampType.CREATE_TIME, record.timestampType());
assertEquals(startingTimestamp + i, record.timestamp());
assertEquals("key " + i, new String(record.key()));
assertEquals("value " + i, new String(record.value()));
// this is true only because K and V are byte arrays
assertEquals(("key " + i).length(), record.serializedKeySize());
assertEquals(("value " + i).length(), record.serializedValueSize());
i++;
}
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testControlRecordsSkipped() throws Exception {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> transactionalProducer = createProducer("T1");
Producer<byte[], byte[]> nonTransactionalProducer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1")) {
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
transactionalProducer.initTransactions();
transactionalProducer.beginTransaction();
RecordMetadata transactional1 = transactionalProducer.send(record).get();
RecordMetadata nonTransactional1 = nonTransactionalProducer.send(record).get();
transactionalProducer.commitTransaction();
transactionalProducer.beginTransaction();
RecordMetadata transactional2 = transactionalProducer.send(record).get();
transactionalProducer.abortTransaction();
RecordMetadata nonTransactional2 = nonTransactionalProducer.send(record).get();
transactionalProducer.close();
nonTransactionalProducer.close();
shareConsumer.subscribe(Set.of(tp.topic()));
ConsumerRecords<byte[], byte[]> records = waitedPoll(shareConsumer, 2500L, 4);
assertEquals(4, records.count());
assertEquals(transactional1.offset(), records.records(tp).get(0).offset());
assertEquals(nonTransactional1.offset(), records.records(tp).get(1).offset());
assertEquals(transactional2.offset(), records.records(tp).get(2).offset());
assertEquals(nonTransactional2.offset(), records.records(tp).get(3).offset());
// There will be control records on the topic-partition, so the offsets of the non-control records
// are not 0, 1, 2, 3. Just assert that the offset of the final one is not 3.
assertNotEquals(3, nonTransactional2.offset());
records = shareConsumer.poll(Duration.ofMillis(500));
assertEquals(0, records.count());
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testExplicitAcknowledgeSuccess() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1", Map.of(ConsumerConfig.SHARE_ACKNOWLEDGEMENT_MODE_CONFIG, EXPLICIT))) {
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record);
producer.flush();
shareConsumer.subscribe(Set.of(tp.topic()));
ConsumerRecords<byte[], byte[]> records = waitedPoll(shareConsumer, 2500L, 1);
assertEquals(1, records.count());
records.forEach(shareConsumer::acknowledge);
producer.send(record);
records = shareConsumer.poll(Duration.ofMillis(5000));
assertEquals(1, records.count());
verifyShareGroupStateTopicRecordsProduced();
}
}
// Verifies that in EXPLICIT acknowledgement mode, explicitly acknowledged records can be
// committed with commitSync(), and that consumption then continues with new records.
@ClusterTest
public void testExplicitAcknowledgeCommitSuccess() {
    alterShareAutoOffsetReset("group1", "earliest");
    try (Producer<byte[], byte[]> producer = createProducer();
         ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1", Map.of(ConsumerConfig.SHARE_ACKNOWLEDGEMENT_MODE_CONFIG, EXPLICIT))) {
        ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
        producer.send(record);
        producer.flush();
        shareConsumer.subscribe(Set.of(tp.topic()));
        ConsumerRecords<byte[], byte[]> records = waitedPoll(shareConsumer, 2500L, 1);
        assertEquals(1, records.count());
        // Explicitly acknowledge the record before committing.
        records.forEach(shareConsumer::acknowledge);
        producer.send(record);
        // commitSync returns one map entry per topic-partition with acknowledgements in flight.
        Map<TopicIdPartition, Optional<KafkaException>> result = shareConsumer.commitSync();
        assertEquals(1, result.size());
        // The second record produced above should now be delivered.
        records = shareConsumer.poll(Duration.ofMillis(5000));
        assertEquals(1, records.count());
        verifyShareGroupStateTopicRecordsProduced();
    }
}
// Verifies commitAsync() in EXPLICIT mode with two consumers in the same share group:
// two records are ACCEPTed (default) and one is RELEASEd, after which the released
// record is redelivered to the second consumer. Also verifies that the acknowledgement
// commit callback fires on a subsequent poll and reports no errors.
@ClusterTest
public void testExplicitAcknowledgementCommitAsync() throws InterruptedException {
    alterShareAutoOffsetReset("group1", "earliest");
    try (Producer<byte[], byte[]> producer = createProducer();
         ShareConsumer<byte[], byte[]> shareConsumer1 = createShareConsumer("group1", Map.of(ConsumerConfig.SHARE_ACKNOWLEDGEMENT_MODE_CONFIG, EXPLICIT));
         ShareConsumer<byte[], byte[]> shareConsumer2 = createShareConsumer("group1")) {
        ProducerRecord<byte[], byte[]> record1 = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
        ProducerRecord<byte[], byte[]> record2 = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
        ProducerRecord<byte[], byte[]> record3 = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
        producer.send(record1);
        producer.send(record2);
        producer.send(record3);
        producer.flush();
        shareConsumer1.subscribe(Set.of(tp.topic()));
        shareConsumer2.subscribe(Set.of(tp.topic()));
        // Capture callback results: offsets acknowledged and any exceptions per partition.
        Map<TopicPartition, Set<Long>> partitionOffsetsMap1 = new HashMap<>();
        Map<TopicPartition, Exception> partitionExceptionMap1 = new HashMap<>();
        shareConsumer1.setAcknowledgementCommitCallback(new TestableAcknowledgementCommitCallback(partitionOffsetsMap1, partitionExceptionMap1));
        ConsumerRecords<byte[], byte[]> records = waitedPoll(shareConsumer1, 2500L, 3);
        assertEquals(3, records.count());
        Iterator<ConsumerRecord<byte[], byte[]>> iterator = records.iterator();
        // Acknowledging 2 out of the 3 records received via commitAsync.
        ConsumerRecord<byte[], byte[]> firstRecord = iterator.next();
        ConsumerRecord<byte[], byte[]> secondRecord = iterator.next();
        ConsumerRecord<byte[], byte[]> thirdRecord = iterator.next();
        assertEquals(0L, firstRecord.offset());
        assertEquals(1L, secondRecord.offset());
        shareConsumer1.acknowledge(firstRecord);
        shareConsumer1.acknowledge(secondRecord);
        // Release the third record so it becomes available for redelivery.
        shareConsumer1.acknowledge(thirdRecord, AcknowledgeType.RELEASE);
        shareConsumer1.commitAsync();
        // The 3rd record should be reassigned to 2nd consumer when it polls.
        TestUtils.waitForCondition(() -> {
            ConsumerRecords<byte[], byte[]> records2 = shareConsumer2.poll(Duration.ofMillis(1000));
            return records2.count() == 1 && records2.iterator().next().offset() == 2L;
        }, 30000, 100L, () -> "Didn't receive timed out record");
        assertFalse(partitionExceptionMap1.containsKey(tp));
        // The callback will receive the acknowledgement responses asynchronously after the next poll.
        TestUtils.waitForCondition(() -> {
            shareConsumer1.poll(Duration.ofMillis(1000));
            return partitionOffsetsMap1.containsKey(tp);
        }, 30000, 100L, () -> "Didn't receive call to callback");
        assertFalse(partitionExceptionMap1.containsKey(tp));
        verifyShareGroupStateTopicRecordsProduced();
    }
}
@ClusterTest
public void testExplicitAcknowledgementCommitAsyncPartialBatch() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer1 = createShareConsumer("group1", Map.of(ConsumerConfig.SHARE_ACKNOWLEDGEMENT_MODE_CONFIG, EXPLICIT))) {
ProducerRecord<byte[], byte[]> record1 = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
ProducerRecord<byte[], byte[]> record2 = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
ProducerRecord<byte[], byte[]> record3 = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
ProducerRecord<byte[], byte[]> record4 = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record1);
producer.send(record2);
producer.send(record3);
producer.flush();
shareConsumer1.subscribe(Set.of(tp.topic()));
Map<TopicPartition, Set<Long>> partitionOffsetsMap = new HashMap<>();
Map<TopicPartition, Exception> partitionExceptionMap = new HashMap<>();
shareConsumer1.setAcknowledgementCommitCallback(new TestableAcknowledgementCommitCallback(partitionOffsetsMap, partitionExceptionMap));
ConsumerRecords<byte[], byte[]> records = waitedPoll(shareConsumer1, 2500L, 3);
assertEquals(3, records.count());
Iterator<ConsumerRecord<byte[], byte[]>> iterator = records.iterator();
// Acknowledging 2 out of the 3 records received via commitAsync.
ConsumerRecord<byte[], byte[]> firstRecord = iterator.next();
ConsumerRecord<byte[], byte[]> secondRecord = iterator.next();
ConsumerRecord<byte[], byte[]> thirdRecord = iterator.next();
assertEquals(0L, firstRecord.offset());
assertEquals(1L, secondRecord.offset());
shareConsumer1.acknowledge(firstRecord);
shareConsumer1.acknowledge(secondRecord);
shareConsumer1.commitAsync();
producer.send(record4);
producer.flush();
// The next poll() should throw an IllegalStateException as there is still 1 unacknowledged record.
// In EXPLICIT acknowledgement mode, we are not allowed to have unacknowledged records from a batch.
assertThrows(IllegalStateException.class, () -> shareConsumer1.poll(Duration.ofMillis(5000)));
// Acknowledging the 3rd record
shareConsumer1.acknowledge(thirdRecord);
shareConsumer1.commitAsync();
// The next poll() will not throw an exception, it would continue to fetch more records.
records = shareConsumer1.poll(Duration.ofMillis(5000));
assertEquals(1, records.count());
iterator = records.iterator();
ConsumerRecord<byte[], byte[]> fourthRecord = iterator.next();
assertEquals(3L, fourthRecord.offset());
shareConsumer1.acknowledge(fourthRecord);
// The callback will receive the acknowledgement responses after polling. The callback is
// called on entry to the poll method or during close. The commit is being performed asynchronously, so
// we can only rely on the completion once the consumer has closed because that waits for the response.
shareConsumer1.poll(Duration.ofMillis(500));
shareConsumer1.close();
assertFalse(partitionExceptionMap.containsKey(tp));
assertTrue(partitionOffsetsMap.containsKey(tp) && partitionOffsetsMap.get(tp).size() == 4);
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testExplicitAcknowledgeReleasePollAccept() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1", Map.of(ConsumerConfig.SHARE_ACKNOWLEDGEMENT_MODE_CONFIG, EXPLICIT))) {
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record);
producer.flush();
shareConsumer.subscribe(Set.of(tp.topic()));
ConsumerRecords<byte[], byte[]> records = waitedPoll(shareConsumer, 2500L, 1);
assertEquals(1, records.count());
records.forEach(consumedRecord -> shareConsumer.acknowledge(consumedRecord, AcknowledgeType.RELEASE));
records = shareConsumer.poll(Duration.ofMillis(5000));
assertEquals(1, records.count());
records.forEach(consumedRecord -> shareConsumer.acknowledge(consumedRecord, AcknowledgeType.ACCEPT));
records = shareConsumer.poll(Duration.ofMillis(500));
assertEquals(0, records.count());
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testExplicitAcknowledgeReleaseAccept() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1", Map.of(ConsumerConfig.SHARE_ACKNOWLEDGEMENT_MODE_CONFIG, EXPLICIT))) {
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record);
producer.flush();
shareConsumer.subscribe(Set.of(tp.topic()));
ConsumerRecords<byte[], byte[]> records = waitedPoll(shareConsumer, 2500L, 1);
assertEquals(1, records.count());
records.forEach(consumedRecord -> shareConsumer.acknowledge(consumedRecord, AcknowledgeType.RELEASE));
records.forEach(consumedRecord -> shareConsumer.acknowledge(consumedRecord, AcknowledgeType.ACCEPT));
records = shareConsumer.poll(Duration.ofMillis(500));
assertEquals(0, records.count());
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testExplicitAcknowledgeReleaseClose() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1", Map.of(ConsumerConfig.SHARE_ACKNOWLEDGEMENT_MODE_CONFIG, EXPLICIT))) {
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record);
producer.flush();
shareConsumer.subscribe(Set.of(tp.topic()));
ConsumerRecords<byte[], byte[]> records = waitedPoll(shareConsumer, 2500L, 1);
assertEquals(1, records.count());
records.forEach(consumedRecord -> shareConsumer.acknowledge(consumedRecord, AcknowledgeType.RELEASE));
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testExplicitAcknowledgeThrowsNotInBatch() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1", Map.of(ConsumerConfig.SHARE_ACKNOWLEDGEMENT_MODE_CONFIG, EXPLICIT))) {
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record);
producer.flush();
shareConsumer.subscribe(Set.of(tp.topic()));
ConsumerRecords<byte[], byte[]> records = waitedPoll(shareConsumer, 2500L, 1);
assertEquals(1, records.count());
ConsumerRecord<byte[], byte[]> consumedRecord = records.records(tp).get(0);
shareConsumer.acknowledge(consumedRecord);
records = shareConsumer.poll(Duration.ofMillis(500));
assertEquals(0, records.count());
assertThrows(IllegalStateException.class, () -> shareConsumer.acknowledge(consumedRecord));
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testExplicitOverrideAcknowledgeCorruptedMessage() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer(
"group1",
Map.of(ConsumerConfig.SHARE_ACKNOWLEDGEMENT_MODE_CONFIG, EXPLICIT),
null,
mockErrorDeserializer(3))) {
ProducerRecord<byte[], byte[]> record1 = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
ProducerRecord<byte[], byte[]> record2 = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
ProducerRecord<byte[], byte[]> record3 = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record1);
producer.send(record2);
producer.send(record3);
producer.flush();
shareConsumer.subscribe(Set.of(tp.topic()));
ConsumerRecords<byte[], byte[]> records = shareConsumer.poll(Duration.ofSeconds(60));
assertEquals(2, records.count());
Iterator<ConsumerRecord<byte[], byte[]>> iterator = records.iterator();
ConsumerRecord<byte[], byte[]> firstRecord = iterator.next();
ConsumerRecord<byte[], byte[]> secondRecord = iterator.next();
assertEquals(0L, firstRecord.offset());
assertEquals(1L, secondRecord.offset());
shareConsumer.acknowledge(firstRecord);
shareConsumer.acknowledge(secondRecord);
RecordDeserializationException rde = assertThrows(RecordDeserializationException.class, () -> shareConsumer.poll(Duration.ofSeconds(60)));
assertEquals(2, rde.offset());
shareConsumer.commitSync();
// The corrupted record was automatically released, so we can still obtain it.
rde = assertThrows(RecordDeserializationException.class, () -> shareConsumer.poll(Duration.ofSeconds(60)));
assertEquals(2, rde.offset());
// Reject this record
shareConsumer.acknowledge(rde.topicPartition().topic(), rde.topicPartition().partition(), rde.offset(), AcknowledgeType.REJECT);
shareConsumer.commitSync();
records = shareConsumer.poll(Duration.ZERO);
assertEquals(0, records.count());
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testExplicitAcknowledgeOffsetThrowsNotException() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer(
"group1",
Map.of(ConsumerConfig.SHARE_ACKNOWLEDGEMENT_MODE_CONFIG, EXPLICIT))) {
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record);
producer.flush();
shareConsumer.subscribe(Set.of(tp.topic()));
ConsumerRecords<byte[], byte[]> records = shareConsumer.poll(Duration.ofSeconds(60));
assertEquals(1, records.count());
ConsumerRecord<byte[], byte[]> consumedRecord = records.records(tp).get(0);
assertEquals(0L, consumedRecord.offset());
assertThrows(IllegalStateException.class, () -> shareConsumer.acknowledge(tp.topic(), tp.partition(), consumedRecord.offset(), AcknowledgeType.ACCEPT));
shareConsumer.acknowledge(consumedRecord);
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testExplicitAcknowledgeOffsetThrowsParametersError() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer(
"group1",
Map.of(ConsumerConfig.SHARE_ACKNOWLEDGEMENT_MODE_CONFIG, EXPLICIT),
null,
mockErrorDeserializer(2))) {
ProducerRecord<byte[], byte[]> record1 = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
ProducerRecord<byte[], byte[]> record2 = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record1);
producer.send(record2);
producer.flush();
shareConsumer.subscribe(Set.of(tp.topic()));
ConsumerRecords<byte[], byte[]> records = shareConsumer.poll(Duration.ofSeconds(60));
assertEquals(1, records.count());
Iterator<ConsumerRecord<byte[], byte[]>> iterator = records.iterator();
ConsumerRecord<byte[], byte[]> firstRecord = iterator.next();
assertEquals(0L, firstRecord.offset());
shareConsumer.acknowledge(firstRecord);
final RecordDeserializationException rde = assertThrows(RecordDeserializationException.class, () -> shareConsumer.poll(Duration.ofSeconds(60)));
assertEquals(1, rde.offset());
assertThrows(IllegalStateException.class, () -> shareConsumer.acknowledge("foo", rde.topicPartition().partition(), rde.offset(), AcknowledgeType.REJECT));
assertThrows(IllegalStateException.class, () -> shareConsumer.acknowledge(rde.topicPartition().topic(), 1, rde.offset(), AcknowledgeType.REJECT));
assertThrows(IllegalStateException.class, () -> shareConsumer.acknowledge(rde.topicPartition().topic(), tp2.partition(), 0, AcknowledgeType.REJECT));
// Reject this record.
shareConsumer.acknowledge(rde.topicPartition().topic(), rde.topicPartition().partition(), rde.offset(), AcknowledgeType.REJECT);
shareConsumer.commitSync();
// The next acknowledge() should throw an IllegalStateException as the record has been acked.
assertThrows(IllegalStateException.class, () -> shareConsumer.acknowledge(rde.topicPartition().topic(), rde.topicPartition().partition(), rde.offset(), AcknowledgeType.REJECT));
records = shareConsumer.poll(Duration.ZERO);
assertEquals(0, records.count());
verifyShareGroupStateTopicRecordsProduced();
}
}
private ByteArrayDeserializer mockErrorDeserializer(int recordNumber) {
int recordIndex = recordNumber - 1;
return new ByteArrayDeserializer() {
int i = 0;
@Override
public byte[] deserialize(String topic, Headers headers, ByteBuffer data) {
if (i == recordIndex) {
throw new SerializationException();
} else {
i++;
return super.deserialize(topic, headers, data);
}
}
};
}
@ClusterTest
public void testImplicitAcknowledgeFailsExplicit() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1")) {
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record);
producer.flush();
shareConsumer.subscribe(Set.of(tp.topic()));
ConsumerRecords<byte[], byte[]> records = waitedPoll(shareConsumer, 2500L, 1);
assertEquals(1, records.count());
ConsumerRecord<byte[], byte[]> consumedRecord = records.records(tp).get(0);
records = shareConsumer.poll(Duration.ofMillis(500));
assertEquals(0, records.count());
assertThrows(IllegalStateException.class, () -> shareConsumer.acknowledge(consumedRecord));
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testImplicitAcknowledgeCommitSync() throws InterruptedException {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1")) {
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record);
producer.flush();
shareConsumer.subscribe(Set.of(tp.topic()));
AtomicReference<ConsumerRecords<byte[], byte[]>> recordsAtomic = new AtomicReference<>();
waitForCondition(() -> {
ConsumerRecords<byte[], byte[]> recs = shareConsumer.poll(Duration.ofMillis(2500L));
recordsAtomic.set(recs);
return recs.count() == 1;
},
DEFAULT_MAX_WAIT_MS,
500L,
() -> "records not found"
);
ConsumerRecords<byte[], byte[]> records = recordsAtomic.get();
assertEquals(1, records.count());
Map<TopicIdPartition, Optional<KafkaException>> result = shareConsumer.commitSync();
assertEquals(1, result.size());
result = shareConsumer.commitSync();
assertEquals(0, result.size());
records = shareConsumer.poll(Duration.ofMillis(500));
assertEquals(0, records.count());
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testImplicitAcknowledgementCommitAsync() throws InterruptedException {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1")) {
ProducerRecord<byte[], byte[]> record1 = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
ProducerRecord<byte[], byte[]> record2 = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
ProducerRecord<byte[], byte[]> record3 = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record1);
producer.send(record2);
producer.send(record3);
producer.flush();
shareConsumer.subscribe(Set.of(tp.topic()));
Map<TopicPartition, Set<Long>> partitionOffsetsMap1 = new HashMap<>();
Map<TopicPartition, Exception> partitionExceptionMap1 = new HashMap<>();
shareConsumer.setAcknowledgementCommitCallback(new TestableAcknowledgementCommitCallback(partitionOffsetsMap1, partitionExceptionMap1));
ConsumerRecords<byte[], byte[]> records = waitedPoll(shareConsumer, 2500L, 3);
assertEquals(3, records.count());
// Implicitly acknowledging all the records received.
shareConsumer.commitAsync();
assertFalse(partitionExceptionMap1.containsKey(tp));
// The callback will receive the acknowledgement responses after the next poll.
TestUtils.waitForCondition(() -> {
shareConsumer.poll(Duration.ofMillis(1000));
return partitionOffsetsMap1.containsKey(tp);
}, DEFAULT_MAX_WAIT_MS, 100L, () -> "Acknowledgement commit callback did not receive the response yet");
assertFalse(partitionExceptionMap1.containsKey(tp));
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testConfiguredExplicitAcknowledgeCommitSuccess() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer(
"group1",
Map.of(ConsumerConfig.SHARE_ACKNOWLEDGEMENT_MODE_CONFIG, "explicit"))) {
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record);
producer.flush();
shareConsumer.subscribe(Set.of(tp.topic()));
ConsumerRecords<byte[], byte[]> records = waitedPoll(shareConsumer, 2500L, 1);
assertEquals(1, records.count());
records.forEach(shareConsumer::acknowledge);
producer.send(record);
Map<TopicIdPartition, Optional<KafkaException>> result = shareConsumer.commitSync();
assertEquals(1, result.size());
records = shareConsumer.poll(Duration.ofMillis(5000));
assertEquals(1, records.count());
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testConfiguredImplicitAcknowledgeExplicitAcknowledgeFails() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer(
"group1",
Map.of(ConsumerConfig.SHARE_ACKNOWLEDGEMENT_MODE_CONFIG, IMPLICIT))) {
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record);
producer.flush();
shareConsumer.subscribe(Set.of(tp.topic()));
ConsumerRecords<byte[], byte[]> records = waitedPoll(shareConsumer, 2500L, 1);
assertEquals(1, records.count());
assertThrows(IllegalStateException.class, () -> shareConsumer.acknowledge(records.iterator().next()));
}
}
@ClusterTest
public void testFetchRecordLargerThanMaxPartitionFetchBytes() throws Exception {
int maxPartitionFetchBytes = 10000;
alterShareAutoOffsetReset("group1", "earliest");
try (
Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer(
"group1",
Map.of(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, String.valueOf(maxPartitionFetchBytes)))) {
ProducerRecord<byte[], byte[]> smallRecord = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
ProducerRecord<byte[], byte[]> bigRecord = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), new byte[maxPartitionFetchBytes]);
producer.send(smallRecord).get();
producer.send(bigRecord).get();
shareConsumer.subscribe(Set.of(tp.topic()));
ConsumerRecords<byte[], byte[]> records = waitedPoll(shareConsumer, 2500L, 2);
assertEquals(2, records.count());
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testMultipleConsumersWithDifferentGroupIds() throws InterruptedException {
alterShareAutoOffsetReset("group1", "earliest");
alterShareAutoOffsetReset("group2", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer1 = createShareConsumer("group1");
ShareConsumer<byte[], byte[]> shareConsumer2 = createShareConsumer("group2")) {
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
shareConsumer1.subscribe(Set.of(tp.topic()));
shareConsumer2.subscribe(Set.of(tp.topic()));
// producing 3 records to the topic
producer.send(record);
producer.send(record);
producer.send(record);
producer.flush();
// Both the consumers should read all the messages, because they are part of different share groups (both have different group IDs)
AtomicInteger shareConsumer1Records = new AtomicInteger();
AtomicInteger shareConsumer2Records = new AtomicInteger();
TestUtils.waitForCondition(() -> {
int records1 = shareConsumer1Records.addAndGet(shareConsumer1.poll(Duration.ofMillis(2000)).count());
int records2 = shareConsumer2Records.addAndGet(shareConsumer2.poll(Duration.ofMillis(2000)).count());
return records1 == 3 && records2 == 3;
}, DEFAULT_MAX_WAIT_MS, 100L, () -> "Failed to consume records for both consumers");
producer.send(record);
producer.send(record);
shareConsumer1Records.set(0);
TestUtils.waitForCondition(() -> shareConsumer1Records.addAndGet(shareConsumer1.poll(Duration.ofMillis(2000)).count()) == 2,
DEFAULT_MAX_WAIT_MS, 100L, () -> "Failed to consume records for share consumer 1");
producer.send(record);
producer.send(record);
producer.send(record);
shareConsumer1Records.set(0);
shareConsumer2Records.set(0);
TestUtils.waitForCondition(() -> {
int records1 = shareConsumer1Records.addAndGet(shareConsumer1.poll(Duration.ofMillis(2000)).count());
int records2 = shareConsumer2Records.addAndGet(shareConsumer2.poll(Duration.ofMillis(2000)).count());
return records1 == 3 && records2 == 5;
}, DEFAULT_MAX_WAIT_MS, 100L, () -> "Failed to consume records for both consumers for the last batch");
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testConsumerCloseOnBrokerShutdown() {
alterShareAutoOffsetReset("group1", "earliest");
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1");
shareConsumer.subscribe(Set.of(tp.topic()));
// To ensure coordinator discovery is complete before shutting down the broker
shareConsumer.poll(Duration.ofMillis(100));
// Shutdown the broker.
assertEquals(1, cluster.brokers().size());
KafkaBroker broker = cluster.brokers().get(0);
cluster.shutdownBroker(0);
broker.awaitShutdown();
// Assert that close completes in less than 5 seconds, not the full 30-second timeout.
assertTimeoutPreemptively(Duration.ofSeconds(5), () -> {
shareConsumer.close();
}, "Consumer close should not wait for full timeout when broker is already shutdown");
}
@ClusterTest
public void testMultipleConsumersInGroupSequentialConsumption() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer1 = createShareConsumer("group1");
ShareConsumer<byte[], byte[]> shareConsumer2 = createShareConsumer("group1")) {
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
shareConsumer1.subscribe(Set.of(tp.topic()));
shareConsumer2.subscribe(Set.of(tp.topic()));
int totalMessages = 2000;
for (int i = 0; i < totalMessages; i++) {
producer.send(record);
}
producer.flush();
int consumer1MessageCount = 0;
int consumer2MessageCount = 0;
int maxRetries = 10;
int retries = 0;
while (retries < maxRetries) {
ConsumerRecords<byte[], byte[]> records1 = shareConsumer1.poll(Duration.ofMillis(2000));
consumer1MessageCount += records1.count();
ConsumerRecords<byte[], byte[]> records2 = shareConsumer2.poll(Duration.ofMillis(2000));
consumer2MessageCount += records2.count();
if (records1.count() + records2.count() == 0)
break;
retries++;
}
assertEquals(totalMessages, consumer1MessageCount + consumer2MessageCount);
}
}
@ClusterTest
public void testMultipleConsumersInGroupConcurrentConsumption()
throws InterruptedException, ExecutionException, TimeoutException {
AtomicInteger totalMessagesConsumed = new AtomicInteger(0);
int consumerCount = 4;
int producerCount = 4;
int messagesPerProducer = 5000;
String groupId = "group1";
alterShareAutoOffsetReset(groupId, "earliest");
List<CompletableFuture<Void>> producerFutures = new ArrayList<>();
for (int i = 0; i < producerCount; i++) {
producerFutures.add(CompletableFuture.runAsync(() -> produceMessages(messagesPerProducer)));
}
int maxBytes = 100000;
List<CompletableFuture<Integer>> consumerFutures = new ArrayList<>();
for (int i = 0; i < consumerCount; i++) {
final int consumerNumber = i + 1;
consumerFutures.add(CompletableFuture.supplyAsync(() ->
consumeMessages(totalMessagesConsumed,
producerCount * messagesPerProducer, groupId, consumerNumber,
30, true, maxBytes)));
}
CompletableFuture.allOf(producerFutures.toArray(CompletableFuture[]::new)).get(60, TimeUnit.SECONDS);
CompletableFuture.allOf(consumerFutures.toArray(CompletableFuture[]::new)).get(60, TimeUnit.SECONDS);
int totalResult = consumerFutures.stream().mapToInt(CompletableFuture::join).sum();
assertEquals(producerCount * messagesPerProducer, totalResult);
}
@ClusterTest
public void testMultipleConsumersInMultipleGroupsConcurrentConsumption()
throws ExecutionException, InterruptedException, TimeoutException {
AtomicInteger totalMessagesConsumedGroup1 = new AtomicInteger(0);
AtomicInteger totalMessagesConsumedGroup2 = new AtomicInteger(0);
AtomicInteger totalMessagesConsumedGroup3 = new AtomicInteger(0);
int producerCount = 4;
int messagesPerProducer = 2000;
final int totalMessagesSent = producerCount * messagesPerProducer;
String groupId1 = "group1";
String groupId2 = "group2";
String groupId3 = "group3";
alterShareAutoOffsetReset(groupId1, "earliest");
alterShareAutoOffsetReset(groupId2, "earliest");
alterShareAutoOffsetReset(groupId3, "earliest");
List<CompletableFuture<Integer>> producerFutures = new ArrayList<>();
for (int i = 0; i < producerCount; i++) {
producerFutures.add(CompletableFuture.supplyAsync(() -> produceMessages(messagesPerProducer)));
}
// Wait for the producers to run
assertDoesNotThrow(() -> CompletableFuture.allOf(producerFutures.toArray(CompletableFuture[]::new))
.get(15, TimeUnit.SECONDS), "Exception awaiting produceMessages");
int actualMessageSent = producerFutures.stream().mapToInt(CompletableFuture::join).sum();
List<CompletableFuture<Integer>> consumeMessagesFutures1 = new ArrayList<>();
List<CompletableFuture<Integer>> consumeMessagesFutures2 = new ArrayList<>();
List<CompletableFuture<Integer>> consumeMessagesFutures3 = new ArrayList<>();
int maxBytes = 100000;
for (int i = 0; i < 2; i++) {
final int consumerNumber = i + 1;
consumeMessagesFutures1.add(CompletableFuture.supplyAsync(() ->
consumeMessages(totalMessagesConsumedGroup1, totalMessagesSent,
"group1", consumerNumber, 100, true, maxBytes)));
consumeMessagesFutures2.add(CompletableFuture.supplyAsync(() ->
consumeMessages(totalMessagesConsumedGroup2, totalMessagesSent,
"group2", consumerNumber, 100, true, maxBytes)));
consumeMessagesFutures3.add(CompletableFuture.supplyAsync(() ->
consumeMessages(totalMessagesConsumedGroup3, totalMessagesSent,
"group3", consumerNumber, 100, true, maxBytes)));
}
CompletableFuture.allOf(Stream.of(consumeMessagesFutures1.stream(), consumeMessagesFutures2.stream(),
consumeMessagesFutures3.stream()).flatMap(Function.identity()).toArray(CompletableFuture[]::new))
.get(120, TimeUnit.SECONDS);
int totalResult1 = consumeMessagesFutures1.stream().mapToInt(CompletableFuture::join).sum();
int totalResult2 = consumeMessagesFutures2.stream().mapToInt(CompletableFuture::join).sum();
int totalResult3 = consumeMessagesFutures3.stream().mapToInt(CompletableFuture::join).sum();
assertEquals(totalMessagesSent, totalResult1);
assertEquals(totalMessagesSent, totalResult2);
assertEquals(totalMessagesSent, totalResult3);
assertEquals(totalMessagesSent, actualMessageSent);
verifyShareGroupStateTopicRecordsProduced();
}
@ClusterTest
public void testConsumerCloseInGroupSequential() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer1 = createShareConsumer("group1");
ShareConsumer<byte[], byte[]> shareConsumer2 = createShareConsumer("group1")) {
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
shareConsumer1.subscribe(Set.of(tp.topic()));
shareConsumer2.subscribe(Set.of(tp.topic()));
int totalMessages = 1500;
for (int i = 0; i < totalMessages; i++) {
producer.send(record);
}
producer.close();
int consumer1MessageCount = 0;
int consumer2MessageCount = 0;
// Poll until we receive all the records. The second poll acknowledges the records
// from the first poll, and so on.
// The last poll's records are not acknowledged
// because the consumer is closed, which makes the broker release the records fetched.
int maxRetries = 10;
int retries = 0;
int lastPollRecordCount = 0;
while (consumer1MessageCount < totalMessages && retries < maxRetries) {
lastPollRecordCount = shareConsumer1.poll(Duration.ofMillis(5000)).count();
consumer1MessageCount += lastPollRecordCount;
retries++;
}
assertEquals(totalMessages, consumer1MessageCount);
shareConsumer1.close();
// These records are released after the first consumer closes.
consumer1MessageCount -= lastPollRecordCount;
retries = 0;
while (consumer1MessageCount + consumer2MessageCount < totalMessages && retries < maxRetries) {
ConsumerRecords<byte[], byte[]> records2 = shareConsumer2.poll(Duration.ofMillis(5000));
consumer2MessageCount += records2.count();
retries++;
}
shareConsumer2.close();
assertEquals(totalMessages, consumer1MessageCount + consumer2MessageCount);
verifyShareGroupStateTopicRecordsProduced();
}
}
@ClusterTest
public void testMultipleConsumersInGroupFailureConcurrentConsumption()
throws InterruptedException, ExecutionException, TimeoutException {
AtomicInteger totalMessagesConsumed = new AtomicInteger(0);
int consumerCount = 4;
int producerCount = 4;
int messagesPerProducer = 5000;
String groupId = "group1";
alterShareAutoOffsetReset(groupId, "earliest");
List<CompletableFuture<Void>> produceMessageFutures = new ArrayList<>();
for (int i = 0; i < producerCount; i++) {
produceMessageFutures.add(CompletableFuture.runAsync(() -> produceMessages(messagesPerProducer)));
}
int maxBytes = 1000000;
// The "failing" consumer polls but immediately closes, which releases the records for the other consumers
CompletableFuture<Integer> failedMessagesConsumedFuture = CompletableFuture.supplyAsync(
() -> consumeMessages(new AtomicInteger(0), producerCount * messagesPerProducer, groupId,
0, 1, false));
// Wait for the failed consumer to run
assertDoesNotThrow(() -> failedMessagesConsumedFuture.get(15, TimeUnit.SECONDS),
"Exception awaiting consumeMessages");
List<CompletableFuture<Integer>> consumeMessagesFutures = new ArrayList<>();
for (int i = 0; i < consumerCount; i++) {
final int consumerNumber = i + 1;
consumeMessagesFutures.add(CompletableFuture.supplyAsync(
() -> consumeMessages(totalMessagesConsumed, producerCount * messagesPerProducer,
groupId, consumerNumber, 40, true, maxBytes)));
}
CompletableFuture.allOf(produceMessageFutures.toArray(CompletableFuture[]::new)).get(60, TimeUnit.SECONDS);
CompletableFuture.allOf(consumeMessagesFutures.toArray(CompletableFuture[]::new)).get(60, TimeUnit.SECONDS);
int totalSuccessResult = consumeMessagesFutures.stream().mapToInt(CompletableFuture::join).sum();
assertEquals(producerCount * messagesPerProducer, totalSuccessResult);
verifyShareGroupStateTopicRecordsProduced();
}
@ClusterTest
public void testAcquisitionLockTimeoutOnConsumer() throws InterruptedException {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1")) {
ProducerRecord<byte[], byte[]> producerRecord1 = new ProducerRecord<>(tp.topic(), tp.partition(), null,
"key_1".getBytes(), "value_1".getBytes());
ProducerRecord<byte[], byte[]> producerRecord2 = new ProducerRecord<>(tp.topic(), tp.partition(), null,
"key_2".getBytes(), "value_2".getBytes());
shareConsumer.subscribe(Set.of(tp.topic()));
// Produce a first record which is consumed and acknowledged normally.
producer.send(producerRecord1);
producer.flush();
// Poll twice to receive records. The first poll fetches the record and starts the acquisition lock timer.
// Since, we are only sending one record and the acquisition lock hasn't timed out, the second poll only
// acknowledges the record from the first poll and does not fetch any more records.
ConsumerRecords<byte[], byte[]> consumerRecords = waitedPoll(shareConsumer, 2500L, 1);
ConsumerRecord<byte[], byte[]> consumerRecord = consumerRecords.records(tp).get(0);
assertEquals("key_1", new String(consumerRecord.key()));
assertEquals("value_1", new String(consumerRecord.value()));
assertEquals(1, consumerRecords.count());
consumerRecords = shareConsumer.poll(Duration.ofMillis(1000));
assertEquals(0, consumerRecords.count());
// Produce a second record which is fetched, but not acknowledged before it times out. The record will
// be released automatically by the broker. It is then fetched again and acknowledged normally.
producer.send(producerRecord2);
producer.flush();
// Poll three more times. The first poll fetches the second record and starts the acquisition lock timer.
// Before the second poll, acquisition lock times out and hence the consumer needs to fetch the record again.
// The acquisition lock doesn't time out between the second and third polls, so the third poll only acknowledges
// the record from the second poll and does not fetch any more records.
consumerRecords = shareConsumer.poll(Duration.ofMillis(5000));
consumerRecord = consumerRecords.records(tp).get(0);
assertEquals("key_2", new String(consumerRecord.key()));
assertEquals("value_2", new String(consumerRecord.value()));
assertEquals(1, consumerRecords.count());
// Allow the acquisition lock to time out.
Thread.sleep(20000);
consumerRecords = shareConsumer.poll(Duration.ofMillis(5000));
consumerRecord = consumerRecords.records(tp).get(0);
// By checking the key and value before the count, we get a bit more information if too many records are returned.
// This test has been observed to fail very occasionally because of this.
assertEquals("key_2", new String(consumerRecord.key()));
assertEquals("value_2", new String(consumerRecord.value()));
assertEquals(1, consumerRecords.count());
consumerRecords = shareConsumer.poll(Duration.ofMillis(1000));
assertEquals(0, consumerRecords.count());
verifyShareGroupStateTopicRecordsProduced();
}
}
/**
* Test to verify that the acknowledgement commit callback cannot invoke methods of ShareConsumer.
* The exception thrown is verified in {@link TestableAcknowledgementCommitCallbackWithShareConsumer}
*/
@ClusterTest
public void testAcknowledgementCommitCallbackCallsShareConsumerDisallowed() {
alterShareAutoOffsetReset("group1", "earliest");
try (Producer<byte[], byte[]> producer = createProducer();
ShareConsumer<byte[], byte[]> shareConsumer = createShareConsumer("group1")) {
ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(tp.topic(), tp.partition(), null, "key".getBytes(), "value".getBytes());
producer.send(record);
producer.flush();
shareConsumer.setAcknowledgementCommitCallback(new TestableAcknowledgementCommitCallbackWithShareConsumer<>(shareConsumer));
shareConsumer.subscribe(Set.of(tp.topic()));
// The acknowledgement commit callback will try to call a method of ShareConsumer
shareConsumer.poll(Duration.ofMillis(5000));
// The second poll sends the acknowledgements implicitly.
// The acknowledgement commit callback will be called and the exception is thrown.
// This is verified inside the onComplete() method implementation.
shareConsumer.poll(Duration.ofMillis(500));
verifyShareGroupStateTopicRecordsProduced();
}
}
private | TestableAcknowledgementCommitCallback |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/TestTemplateInvocationTests.java | {
"start": 32740,
"end": 33141
} | class ____
implements TestTemplateInvocationContextProvider {
@Override
public boolean supportsTestTemplate(ExtensionContext context) {
return true;
}
@Override
public Stream<TestTemplateInvocationContext> provideTestTemplateInvocationContexts(ExtensionContext context) {
return Stream.empty();
}
}
private static | InvocationContextProviderThatSupportsEverythingButProvidesNothing |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SharedKeyCredentials.java | {
"start": 1797,
"end": 19420
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(
SharedKeyCredentials.class);
private static final int EXPECTED_BLOB_QUEUE_CANONICALIZED_STRING_LENGTH = 300;
private static final Pattern CRLF = Pattern.compile("\r\n", Pattern.LITERAL);
private static final String HMAC_SHA256 = "HmacSHA256";
/**
* Stores a reference to the RFC1123 date/time pattern.
*/
private static final String RFC1123_PATTERN = "EEE, dd MMM yyyy HH:mm:ss z";
private String accountName;
private byte[] accountKey;
private Mac hmacSha256;
public SharedKeyCredentials(final String accountName,
final String accountKey) {
if (accountName == null || accountName.isEmpty()) {
throw new IllegalArgumentException("Invalid account name.");
}
if (accountKey == null || accountKey.isEmpty()) {
throw new IllegalArgumentException("Invalid account key.");
}
this.accountName = accountName;
this.accountKey = Base64.decode(accountKey);
initializeMac();
}
public void signRequest(AbfsHttpOperation connection, final long contentLength) throws UnsupportedEncodingException {
String gmtTime = getGMTTime();
connection.setRequestProperty(HttpHeaderConfigurations.X_MS_DATE, gmtTime);
final String stringToSign = canonicalize(connection, accountName, contentLength);
final String computedBase64Signature = computeHmac256(stringToSign);
String signature = String.format("%s %s:%s", "SharedKey", accountName,
computedBase64Signature);
connection.setRequestProperty(HttpHeaderConfigurations.AUTHORIZATION,
signature);
LOG.debug("Signing request with timestamp of {} and signature {}",
gmtTime, signature);
}
private String computeHmac256(final String stringToSign) {
byte[] utf8Bytes = stringToSign.getBytes(StandardCharsets.UTF_8);
byte[] hmac;
synchronized (this) {
hmac = hmacSha256.doFinal(utf8Bytes);
}
return Base64.encode(hmac);
}
/**
* Add x-ms- prefixed headers in a fixed order.
*
* @param conn the HttpURLConnection for the operation
* @param canonicalizedString the canonicalized string to add the canonicalized headerst to.
*/
private static void addCanonicalizedHeaders(final AbfsHttpOperation conn, final StringBuilder canonicalizedString) {
// Look for header names that start with
// HeaderNames.PrefixForStorageHeader
// Then sort them in case-insensitive manner.
final Map<String, List<String>> headers = conn.getRequestProperties();
final ArrayList<String> httpStorageHeaderNameArray = new ArrayList<String>();
for (final String key : headers.keySet()) {
if (key.toLowerCase(Locale.ROOT).startsWith(AbfsHttpConstants.HTTP_HEADER_PREFIX)) {
httpStorageHeaderNameArray.add(key.toLowerCase(Locale.ROOT));
}
}
Collections.sort(httpStorageHeaderNameArray);
// Now go through each header's values in the sorted order and append
// them to the canonicalized string.
for (final String key : httpStorageHeaderNameArray) {
final StringBuilder canonicalizedElement = new StringBuilder(key);
String delimiter = ":";
final ArrayList<String> values = getHeaderValues(headers, key);
boolean appendCanonicalizedElement = false;
// Go through values, unfold them, and then append them to the
// canonicalized element string.
for (final String value : values) {
if (value != null) {
appendCanonicalizedElement = true;
}
// Unfolding is simply removal of CRLF.
final String unfoldedValue = CRLF.matcher(value)
.replaceAll(Matcher.quoteReplacement(""));
// Append it to the canonicalized element string.
canonicalizedElement.append(delimiter);
canonicalizedElement.append(unfoldedValue);
delimiter = ",";
}
// Now, add this canonicalized element to the canonicalized header
// string.
if (appendCanonicalizedElement) {
appendCanonicalizedElement(canonicalizedString, canonicalizedElement.toString());
}
}
}
/**
* Initialize the HmacSha256 associated with the account key.
*/
private void initializeMac() {
// Initializes the HMAC-SHA256 Mac and SecretKey.
try {
hmacSha256 = Mac.getInstance(HMAC_SHA256);
hmacSha256.init(new SecretKeySpec(accountKey, HMAC_SHA256));
} catch (final Exception e) {
throw new IllegalArgumentException(e);
}
}
/**
* Append a string to a string builder with a newline constant.
*
* @param builder the StringBuilder object
* @param element the string to append.
*/
private static void appendCanonicalizedElement(final StringBuilder builder, final String element) {
builder.append("\n");
builder.append(element);
}
/**
* Constructs a canonicalized string from the request's headers that will be used to construct the signature string
* for signing a Blob or Queue service request under the Shared Key Full authentication scheme.
*
* @param address the request URI
* @param accountName the account name associated with the request
* @param method the verb to be used for the HTTP request.
* @param contentType the content type of the HTTP request.
* @param contentLength the length of the content written to the outputstream in bytes, -1 if unknown
* @param date the date/time specification for the HTTP request
* @param conn the HttpURLConnection for the operation.
* @return A canonicalized string.
*/
private static String canonicalizeHttpRequest(final URL address,
final String accountName, final String method, final String contentType,
final long contentLength, final String date, final AbfsHttpOperation conn)
throws UnsupportedEncodingException {
// The first element should be the Method of the request.
// I.e. GET, POST, PUT, or HEAD.
final StringBuilder canonicalizedString = new StringBuilder(EXPECTED_BLOB_QUEUE_CANONICALIZED_STRING_LENGTH);
canonicalizedString.append(conn.getMethod());
// The next elements are
// If any element is missing it may be empty.
appendCanonicalizedElement(canonicalizedString,
getHeaderValue(conn, HttpHeaderConfigurations.CONTENT_ENCODING, AbfsHttpConstants.EMPTY_STRING));
appendCanonicalizedElement(canonicalizedString,
getHeaderValue(conn, HttpHeaderConfigurations.CONTENT_LANGUAGE, AbfsHttpConstants.EMPTY_STRING));
appendCanonicalizedElement(canonicalizedString,
contentLength <= 0 ? "" : String.valueOf(contentLength));
appendCanonicalizedElement(canonicalizedString,
getHeaderValue(conn, HttpHeaderConfigurations.CONTENT_MD5, AbfsHttpConstants.EMPTY_STRING));
appendCanonicalizedElement(canonicalizedString, contentType != null ? contentType : AbfsHttpConstants.EMPTY_STRING);
final String dateString = getHeaderValue(conn, HttpHeaderConfigurations.X_MS_DATE, AbfsHttpConstants.EMPTY_STRING);
// If x-ms-date header exists, Date should be empty string
appendCanonicalizedElement(canonicalizedString, dateString.equals(AbfsHttpConstants.EMPTY_STRING) ? date
: "");
appendCanonicalizedElement(canonicalizedString,
getHeaderValue(conn, HttpHeaderConfigurations.IF_MODIFIED_SINCE, AbfsHttpConstants.EMPTY_STRING));
appendCanonicalizedElement(canonicalizedString,
getHeaderValue(conn, HttpHeaderConfigurations.IF_MATCH, AbfsHttpConstants.EMPTY_STRING));
appendCanonicalizedElement(canonicalizedString,
getHeaderValue(conn, HttpHeaderConfigurations.IF_NONE_MATCH, AbfsHttpConstants.EMPTY_STRING));
appendCanonicalizedElement(canonicalizedString,
getHeaderValue(conn, HttpHeaderConfigurations.IF_UNMODIFIED_SINCE, AbfsHttpConstants.EMPTY_STRING));
appendCanonicalizedElement(canonicalizedString,
getHeaderValue(conn, HttpHeaderConfigurations.RANGE, AbfsHttpConstants.EMPTY_STRING));
addCanonicalizedHeaders(conn, canonicalizedString);
appendCanonicalizedElement(canonicalizedString, getCanonicalizedResource(address, accountName));
return canonicalizedString.toString();
}
/**
* Gets the canonicalized resource string for a Blob or Queue service request under the Shared Key Lite
* authentication scheme.
*
* @param address the resource URI.
* @param accountName the account name for the request.
* @return the canonicalized resource string.
*/
private static String getCanonicalizedResource(final URL address,
final String accountName) throws UnsupportedEncodingException {
// Resource path
final StringBuilder resourcepath = new StringBuilder(AbfsHttpConstants.FORWARD_SLASH);
resourcepath.append(accountName);
// Note that AbsolutePath starts with a '/'.
resourcepath.append(address.getPath());
final StringBuilder canonicalizedResource = new StringBuilder(resourcepath.toString());
// query parameters
if (address.getQuery() == null || !address.getQuery().contains(AbfsHttpConstants.EQUAL)) {
//no query params.
return canonicalizedResource.toString();
}
final Map<String, String[]> queryVariables = parseQueryString(address.getQuery());
final Map<String, String> lowercasedKeyNameValue = new HashMap<>();
for (final Entry<String, String[]> entry : queryVariables.entrySet()) {
// sort the value and organize it as comma separated values
final List<String> sortedValues = Arrays.asList(entry.getValue());
Collections.sort(sortedValues);
final StringBuilder stringValue = new StringBuilder();
for (final String value : sortedValues) {
if (stringValue.length() > 0) {
stringValue.append(AbfsHttpConstants.COMMA);
}
stringValue.append(value);
}
// key turns out to be null for ?a&b&c&d
lowercasedKeyNameValue.put((entry.getKey()) == null ? null
: entry.getKey().toLowerCase(Locale.ROOT), stringValue.toString());
}
final ArrayList<String> sortedKeys = new ArrayList<String>(lowercasedKeyNameValue.keySet());
Collections.sort(sortedKeys);
for (final String key : sortedKeys) {
final StringBuilder queryParamString = new StringBuilder();
queryParamString.append(key);
queryParamString.append(":");
queryParamString.append(lowercasedKeyNameValue.get(key));
appendCanonicalizedElement(canonicalizedResource, queryParamString.toString());
}
return canonicalizedResource.toString();
}
/**
* Gets all the values for the given header in the one to many map,
* performs a trimStart() on each return value.
*
* @param headers a one to many map of key / values representing the header values for the connection.
* @param headerName the name of the header to lookup
* @return an ArrayList<String> of all trimmed values corresponding to the requested headerName. This may be empty
* if the header is not found.
*/
private static ArrayList<String> getHeaderValues(
final Map<String, List<String>> headers,
final String headerName) {
final ArrayList<String> arrayOfValues = new ArrayList<String>();
List<String> values = null;
for (final Entry<String, List<String>> entry : headers.entrySet()) {
if (entry.getKey().toLowerCase(Locale.ROOT).equals(headerName)) {
values = entry.getValue();
break;
}
}
if (values != null) {
for (final String value : values) {
// canonicalization formula requires the string to be left
// trimmed.
arrayOfValues.add(trimStart(value));
}
}
return arrayOfValues;
}
/**
* Parses a query string into a one to many hashmap.
*
* @param parseString the string to parse
* @return a HashMap<String, String[]> of the key values.
*/
private static HashMap<String, String[]> parseQueryString(String parseString) throws UnsupportedEncodingException {
final HashMap<String, String[]> retVals = new HashMap<>();
if (parseString == null || parseString.isEmpty()) {
return retVals;
}
// 1. Remove ? if present
final int queryDex = parseString.indexOf(AbfsHttpConstants.QUESTION_MARK);
if (queryDex >= 0 && parseString.length() > 0) {
parseString = parseString.substring(queryDex + 1);
}
// 2. split name value pairs by splitting on the 'c&' character
final String[] valuePairs = parseString.contains(AbfsHttpConstants.AND_MARK)
? parseString.split(AbfsHttpConstants.AND_MARK)
: parseString.split(AbfsHttpConstants.SEMICOLON);
// 3. for each field value pair parse into appropriate map entries
for (int m = 0; m < valuePairs.length; m++) {
final int equalDex = valuePairs[m].indexOf(AbfsHttpConstants.EQUAL);
if (equalDex < 0 || equalDex == valuePairs[m].length() - 1) {
continue;
}
String key = valuePairs[m].substring(0, equalDex);
String value = valuePairs[m].substring(equalDex + 1);
key = safeDecode(key);
value = safeDecode(value);
// 3.1 add to map
String[] values = retVals.get(key);
if (values == null) {
values = new String[]{value};
if (!value.equals("")) {
retVals.put(key, values);
}
}
}
return retVals;
}
/**
* Performs safe decoding of the specified string, taking care to preserve each <code>+</code> character, rather
* than replacing it with a space character.
*
* @param stringToDecode A <code>String</code> that represents the string to decode.
* @return A <code>String</code> that represents the decoded string.
* <p>
* If a storage service error occurred.
*/
private static String safeDecode(final String stringToDecode) throws UnsupportedEncodingException {
if (stringToDecode == null) {
return null;
}
if (stringToDecode.length() == 0) {
return "";
}
if (stringToDecode.contains(AbfsHttpConstants.PLUS)) {
final StringBuilder outBuilder = new StringBuilder();
int startDex = 0;
for (int m = 0; m < stringToDecode.length(); m++) {
if (stringToDecode.charAt(m) == '+') {
if (m > startDex) {
outBuilder.append(URLDecoder.decode(stringToDecode.substring(startDex, m),
AbfsHttpConstants.UTF_8));
}
outBuilder.append(AbfsHttpConstants.PLUS);
startDex = m + 1;
}
}
if (startDex != stringToDecode.length()) {
outBuilder.append(URLDecoder.decode(stringToDecode.substring(startDex, stringToDecode.length()),
AbfsHttpConstants.UTF_8));
}
return outBuilder.toString();
} else {
return URLDecoder.decode(stringToDecode, AbfsHttpConstants.UTF_8);
}
}
private static String trimStart(final String value) {
int spaceDex = 0;
while (spaceDex < value.length() && value.charAt(spaceDex) == ' ') {
spaceDex++;
}
return value.substring(spaceDex);
}
private static String getHeaderValue(final AbfsHttpOperation conn, final String headerName, final String defaultValue) {
final String headerValue = conn.getRequestProperty(headerName);
return headerValue == null ? defaultValue : headerValue;
}
/**
* Constructs a canonicalized string for signing a request.
*
* @param conn the HttpURLConnection to canonicalize
* @param accountName the account name associated with the request
* @param contentLength the length of the content written to the outputstream in bytes,
* -1 if unknown
* @return a canonicalized string.
*/
private String canonicalize(final AbfsHttpOperation conn,
final String accountName,
final Long contentLength) throws UnsupportedEncodingException {
if (contentLength < -1) {
throw new IllegalArgumentException(
"The Content-Length header must be greater than or equal to -1.");
}
String contentType = getHeaderValue(conn, HttpHeaderConfigurations.CONTENT_TYPE, "");
return canonicalizeHttpRequest(conn.getConnUrl(), accountName,
conn.getMethod(), contentType, contentLength, null, conn);
}
/**
* Thread local for storing GMT date format.
*/
private static ThreadLocal<DateFormat> rfc1123GmtDateTimeFormatter
= new ThreadLocal<DateFormat>() {
@Override
protected DateFormat initialValue() {
final DateFormat formatter = new SimpleDateFormat(RFC1123_PATTERN, Locale.ROOT);
formatter.setTimeZone(GMT_ZONE);
return formatter;
}
};
public static final TimeZone GMT_ZONE = TimeZone.getTimeZone(AbfsHttpConstants.GMT_TIMEZONE);
/**
* Returns the current GMT date/time String using the RFC1123 pattern.
*
* @return A <code>String</code> that represents the current GMT date/time using the RFC1123 pattern.
*/
static String getGMTTime() {
return getGMTTime(new Date());
}
/**
* Returns the GTM date/time String for the specified value using the RFC1123 pattern.
*
* @param date
* A <code>Date</code> object that represents the date to convert to GMT date/time in the RFC1123
* pattern.
*
* @return A <code>String</code> that represents the GMT date/time for the specified value using the RFC1123
* pattern.
*/
static String getGMTTime(final Date date) {
return rfc1123GmtDateTimeFormatter.get().format(date);
}
}
| SharedKeyCredentials |
java | google__guava | android/guava-tests/test/com/google/common/reflect/TypeTokenResolutionTest.java | {
"start": 7808,
"end": 7873
} | class ____<T> {
@Keep public Inner field;
| ParameterizedOuter |
java | apache__camel | core/camel-util/src/main/java/org/apache/camel/util/HostUtils.java | {
"start": 1498,
"end": 4793
} | interface ____ (by using a TreeMap).
Map<String, Set<InetAddress>> interfaceAddressMap = new TreeMap<>();
try {
Enumeration<NetworkInterface> ifaces = NetworkInterface.getNetworkInterfaces();
while (ifaces.hasMoreElements()) {
NetworkInterface iface = ifaces.nextElement();
//We only care about usable non-loopback interfaces.
if (iface.isUp() && !iface.isLoopback() && !iface.isPointToPoint()) {
String name = iface.getName();
Enumeration<InetAddress> ifaceAdresses = iface.getInetAddresses();
while (ifaceAdresses.hasMoreElements()) {
InetAddress ia = ifaceAdresses.nextElement();
//We want to filter out mac addresses
if (!ia.isLoopbackAddress() && !ia.getHostAddress().contains(":")) {
Set<InetAddress> addresses = interfaceAddressMap.get(name);
if (addresses == null) {
addresses = new LinkedHashSet<>();
}
addresses.add(ia);
interfaceAddressMap.put(name, addresses);
}
}
}
}
} catch (SocketException ex) {
//noop
}
return interfaceAddressMap;
}
/**
* Returns a {@link Set} of {@link InetAddress} that are non-loopback or mac.
*/
public static Set<InetAddress> getAddresses() {
Set<InetAddress> allAddresses = new LinkedHashSet<>();
Map<String, Set<InetAddress>> interfaceAddressMap = getNetworkInterfaceAddresses();
for (Map.Entry<String, Set<InetAddress>> entry : interfaceAddressMap.entrySet()) {
Set<InetAddress> addresses = entry.getValue();
if (!addresses.isEmpty()) {
allAddresses.addAll(addresses);
}
}
return allAddresses;
}
/**
* Chooses one of the available {@link InetAddress} based on the specified preference.
*/
private static InetAddress chooseAddress() throws UnknownHostException {
Set<InetAddress> addresses = getAddresses();
if (addresses.contains(InetAddress.getLocalHost())) {
// if local host address is not bound to a loop-back interface, use it
return InetAddress.getLocalHost();
} else if (!addresses.isEmpty()) {
// else return the first available address
return addresses.iterator().next();
} else {
// else we are forced to use the localhost address.
return InetAddress.getLocalHost();
}
}
/**
* Returns the local hostname. It loops through the network interfaces and returns the first non loopback hostname
*/
public static String getLocalHostName() throws UnknownHostException {
return chooseAddress().getHostName();
}
/**
* Returns the local IP. It loops through the network interfaces and returns the first non loopback address
*/
public static String getLocalIp() throws UnknownHostException {
return chooseAddress().getHostAddress();
}
}
| name |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/SubclassGenerator.java | {
"start": 36897,
"end": 51068
} | interface ____ calls
// (see e.g. https://wiki.openjdk.org/display/HotSpot/InterfaceCalls)
fc.setType(Object.class);
});
nextDecoratorTypes.add(desc.type());
nextDecoratorToField.put(nextDecorator, desc);
}
// Constructor
cc.constructor(mc -> {
ParamVar subclassParam = mc.parameter("subclass", subclass.type());
delegateSubclassCtorParams.add(subclass.type());
List<ParamVar> nextDecoratorParams = new ArrayList<>();
for (int i = 0; i < nextDecoratorTypes.size(); i++) {
nextDecoratorParams.add(mc.parameter("nextDecorator" + i, nextDecoratorTypes.get(i)));
delegateSubclassCtorParams.add(nextDecoratorTypes.get(i));
}
mc.body(bc -> {
if (delegateTypeIsInterface) {
bc.invokeSpecial(MethodDescs.OBJECT_CONSTRUCTOR, cc.this_());
} else {
bc.invokeSpecial(ConstructorDesc.of(classDescOf(delegateTypeClass)), cc.this_());
}
// Set fields
bc.set(cc.this_().field(subclassField), subclassParam);
for (int i = 0; i < decoratorParameters.size(); i++) {
DecoratorInfo nextDecorator = decoratorParameters.get(i);
bc.set(cc.this_().field(nextDecoratorToField.get(nextDecorator)), nextDecoratorParams.get(i));
}
bc.return_();
});
});
IndexView index = bean.getDeployment().getBeanArchiveIndex();
// Identify the set of methods that should be delegated
// Note that the delegate subclass must override ALL methods from the delegate type
// This is not enough if the delegate type is parameterized
Set<MethodKey> methods = new HashSet<>();
Methods.addDelegateTypeMethods(index, delegateTypeClass, methods);
// The delegate type can declare type parameters
// For example @Delegate Converter<String> should result in a T -> String mapping
List<TypeVariable> typeParameters = delegateTypeClass.typeParameters();
Map<String, Type> resolvedTypeParameters;
if (!typeParameters.isEmpty()) {
resolvedTypeParameters = new HashMap<>();
// The delegate type can be used to infer the parameter types
Type delegateType = decorator.getDelegateType();
if (delegateType.kind() == Kind.PARAMETERIZED_TYPE) {
List<Type> typeArguments = delegateType.asParameterizedType().arguments();
for (int i = 0; i < typeParameters.size(); i++) {
resolvedTypeParameters.put(typeParameters.get(i).identifier(), typeArguments.get(i));
}
}
} else {
resolvedTypeParameters = Map.of();
}
for (MethodKey m : methods) {
MethodInfo method = m.method;
MethodDesc methodDescriptor = methodDescOf(method);
cc.method(method.name(), mc -> {
mc.public_();
mc.returning(classDescOf(method.returnType()));
List<ParamVar> params = new ArrayList<>();
for (int i = 0; i < method.parametersCount(); i++) {
String paramName = method.parameterName(i);
if (paramName == null || paramName.isBlank()) {
paramName = "param" + i;
}
params.add(mc.parameter(paramName, classDescOf(method.parameterType(i))));
}
for (Type exception : method.exceptions()) {
mc.throws_(classDescOf(exception));
}
mc.body(bc -> {
// Create a resolved descriptor variant if a param contains a type variable
// E.g. ping(T) -> ping(String)
MethodDesc resolvedMethodDesc;
if (typeParameters.isEmpty() || (!Methods.containsTypeVariableParameter(method)
&& !Types.containsTypeVariable(method.returnType()))) {
resolvedMethodDesc = null;
} else {
Type returnType = Types.resolveTypeParam(method.returnType(), resolvedTypeParameters, index);
List<Type> paramTypes = Types.getResolvedParameters(delegateTypeClass, resolvedTypeParameters,
method, index);
ClassDesc[] paramTypesArray = new ClassDesc[paramTypes.size()];
for (int i = 0; i < paramTypesArray.length; i++) {
paramTypesArray[i] = classDescOf(paramTypes.get(i));
}
resolvedMethodDesc = ClassMethodDesc.of(classDescOf(method.declaringClass()),
method.name(), MethodTypeDesc.of(classDescOf(returnType), paramTypesArray));
}
DecoratorMethod nextDecorator = null;
MethodDesc nextDecoratorDecorated = null;
for (Entry<MethodDesc, DecoratorMethod> e : nextDecorators.entrySet()) {
// Find the next decorator for the current delegate type method
if (Methods.descriptorMatches(e.getKey(), methodDescriptor)
|| (resolvedMethodDesc != null
&& Methods.descriptorMatches(e.getKey(), resolvedMethodDesc))
|| Methods.descriptorMatches(methodDescOf(e.getValue().method), methodDescriptor)) {
nextDecorator = e.getValue();
nextDecoratorDecorated = e.getKey();
break;
}
}
if (nextDecorator != null
&& isDecorated(decoratedMethodDescriptors, methodDescriptor, resolvedMethodDesc,
nextDecoratorDecorated)) {
// This method is decorated by this decorator and there is a next decorator in the chain
// Just delegate to the next decorator
FieldVar delegateTo = cc.this_().field(nextDecoratorToField.get(nextDecorator.decorator));
bc.return_(bc.invokeInterface(methodDescOf(nextDecorator.method), delegateTo, params));
} else {
// This method is not decorated or no next decorator was found in the chain
MethodDesc forwardingMethod = null;
MethodInfo decoratedMethod = bean.getDecoratedMethod(method, decorator);
MethodDesc decoratedMethodDesc = decoratedMethod != null ? methodDescOf(decoratedMethod) : null;
for (Entry<MethodDesc, MethodDesc> entry : forwardingMethods.entrySet()) {
if (Methods.descriptorMatches(entry.getKey(), methodDescriptor)
|| (resolvedMethodDesc != null // Also try to find the forwarding method for the resolved variant
&& Methods.descriptorMatches(entry.getKey(), resolvedMethodDesc))
|| (decoratedMethodDesc != null // Finally, try to match the decorated method
&& Methods.descriptorMatches(entry.getKey(), decoratedMethodDesc))) {
forwardingMethod = entry.getValue();
break;
}
}
InstanceFieldVar delegateTo = cc.this_().field(subclassField);
if (forwardingMethod != null) {
// Delegate to the subclass forwarding method
List<Expr> args = new ArrayList<>();
for (int i = 0; i < params.size(); i++) {
args.add(bc.cast(params.get(i), forwardingMethod.parameterType(i)));
}
bc.return_(bc.invokeVirtual(forwardingMethod, delegateTo, args));
} else {
// No forwarding method exists
// Simply delegate to subclass
if (method.declaringClass().isInterface()) {
bc.return_(bc.invokeInterface(methodDescriptor, delegateTo, params));
} else {
bc.return_(bc.invokeVirtual(methodDescriptor, delegateTo, params));
}
}
}
});
});
}
});
// Now modify the subclass constructor
LocalVar cc = subclassCtor.localVar("cc", subclassCtor.invokeStatic(MethodDescs.CREATIONAL_CTX_CHILD, ccParam));
// Create new delegate subclass instance
Expr[] params = new Expr[1 + decoratorParameters.size()];
params[0] = subclass.this_();
int paramIdx = 1;
for (DecoratorInfo decoratorParameter : decoratorParameters) {
LocalVar decoratorVar = decoratorToLocalVar.get(decoratorParameter.getIdentifier());
if (decoratorVar == null) {
throw new IllegalStateException("Decorator var must not be null");
}
params[paramIdx] = decoratorVar;
paramIdx++;
}
Expr delegateSubclassInstance = subclassCtor.new_(ConstructorDesc.of(
delegateSubclass, delegateSubclassCtorParams), params);
// Set the DecoratorDelegateProvider to satisfy the delegate IP
LocalVar prev = subclassCtor.localVar("prev", subclassCtor.invokeStatic(
MethodDescs.DECORATOR_DELEGATE_PROVIDER_SET, cc, delegateSubclassInstance));
// Create the decorator instance
LocalVar decoratorInstance = subclassCtor.localVar("decoratorInstance",
subclassCtor.invokeInterface(MethodDescs.INJECTABLE_REF_PROVIDER_GET, decoratorParam, cc));
// And unset the delegate IP afterwards
subclassCtor.invokeStatic(MethodDescs.DECORATOR_DELEGATE_PROVIDER_SET, cc, prev);
decoratorToLocalVar.put(decorator.getIdentifier(), decoratorInstance);
// Store the decorator instance in a field
FieldDesc decoratorField = subclass.field(decorator.getIdentifier(), fc -> {
fc.private_();
fc.final_();
fc.setType(Object.class);
});
subclassCtor.set(subclass.this_().field(decoratorField), decoratorInstance);
}
private boolean isDecorated(Set<MethodDesc> decoratedMethodDescriptors, MethodDesc original,
MethodDesc resolved, MethodDesc nextDecoratorDecorated) {
for (MethodDesc decorated : decoratedMethodDescriptors) {
if (Methods.descriptorMatches(decorated, original)
|| (resolved != null && Methods.descriptorMatches(decorated, resolved))
|| Methods.descriptorMatches(decorated, nextDecoratorDecorated)) {
return true;
}
}
return false;
}
static MethodDesc createForwardingMethod(io.quarkus.gizmo2.creator.ClassCreator subclass, ClassDesc providerType,
MethodInfo method, boolean implementingInterface) {
return subclass.method(method.name() + "$$superforward", mc -> {
mc.returning(classDescOf(method.returnType()));
List<ParamVar> params = new ArrayList<>(method.parametersCount());
for (MethodParameterInfo param : method.parameters()) {
params.add(mc.parameter(param.nameOrDefault(), classDescOf(param.type())));
}
mc.body(bc -> {
// `invokespecial` requires the descriptor to point to a method on a _direct_ supertype
// if we're extending a class, we have to always create a `ClassMethodDesc`
// if we're implementing an interface, we have to always create an `InterfaceMethodDesc`
// in both cases, the direct supertype is `providerType`
MethodDesc methodDesc = methodDescOf(method);
MethodDesc superMethod = implementingInterface
? InterfaceMethodDesc.of(providerType, methodDesc.name(), methodDesc.type())
: ClassMethodDesc.of(providerType, methodDesc.name(), methodDesc.type());
Expr result = bc.invokeSpecial(superMethod, subclass.this_(), params);
bc.return_(bc.cast(result, classDescOf(method.returnType())));
});
});
}
static void createInterceptedMethod(MethodInfo method, ClassCreator subclass,
FieldDesc metadataField, FieldDesc constructedField, MethodDesc forwardMethod,
Supplier<Expr> getTarget) {
subclass.method(methodDescOf(method), mc -> {
mc.public_();
List<ParamVar> params = IntStream.range(0, method.parametersCount())
.mapToObj(i -> mc.parameter("param" + i, i))
.toList();
for (Type exception : method.exceptions()) {
mc.throws_(classDescOf(exception));
}
mc.body(b0 -> {
// Delegate to super | method |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationprocessor/test/CompiledMetadataReader.java | {
"start": 1148,
"end": 1862
} | class ____ {
private static final String METADATA_FILE = "META-INF/spring-configuration-metadata.json";
private CompiledMetadataReader() {
}
public static ConfigurationMetadata getMetadata(Compiled compiled) {
return getMetadata(compiled, METADATA_FILE);
}
public static ConfigurationMetadata getMetadata(Compiled compiled, String location) {
InputStream inputStream = compiled.getClassLoader().getResourceAsStream(location);
try {
if (inputStream != null) {
return new JsonMarshaller().read(inputStream);
}
else {
return null;
}
}
catch (Exception ex) {
throw new RuntimeException("Failed to read metadata fom '%s'".formatted(location), ex);
}
}
}
| CompiledMetadataReader |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/batch/BatchAndEmbeddedIdId2Test.java | {
"start": 5238,
"end": 5559
} | class ____ implements Serializable {
@ManyToOne
@JoinColumn(name = "parent_id")
private Parent parent;
public NestedEmbeddableId() {
}
public NestedEmbeddableId(Parent parent) {
this.parent = parent;
}
public Parent getParent() {
return parent;
}
}
@Embeddable
public static | NestedEmbeddableId |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/placement/VariableContext.java | {
"start": 1281,
"end": 1562
} | class ____ support for immutable
* variables, which can be set only once, and has helper methods for replacing
* the variables with their respective values in provided strings.
* We don't extend the map interface, because we don't need all the features
* a map provides, this | gives |
java | hibernate__hibernate-orm | hibernate-community-dialects/src/test/java/org/hibernate/community/dialect/DerbyCustomSQLTest.java | {
"start": 479,
"end": 544
} | class ____ extends CustomStoredProcTestSupport {
}
| DerbyCustomSQLTest |
java | google__guice | core/test/com/googlecode/guice/BytecodeGenTest.java | {
"start": 7178,
"end": 7392
} | class ____ be marked as public or protected so that the Guice custom classloader
* will intercept it. Private and implementation classes are not intercepted by the custom
* classloader.
*/
public static | must |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/constructor/nestedtarget/NestedProductPropertiesConstructorTest.java | {
"start": 1144,
"end": 3601
} | class ____ {
@RegisterExtension
GeneratedSource generatedSource = new GeneratedSource().addComparisonToFixtureFor(
ChartEntryToArtist.class
);
@ProcessorTest
public void shouldMapNestedTarget() {
ChartEntry chartEntry = new ChartEntry(
"US Billboard Hot Rock Songs",
"Purple Rain",
"Prince",
"Live, First Avenue, Minneapolis",
"Minneapolis",
1
);
Chart result = ChartEntryToArtist.MAPPER.map( chartEntry );
assertThat( result.getName() ).isEqualTo( "US Billboard Hot Rock Songs" );
assertThat( result.getSong() ).isNotNull();
assertThat( result.getSong().getArtist() ).isNotNull();
assertThat( result.getSong().getTitle() ).isEqualTo( "Purple Rain" );
assertThat( result.getSong().getArtist().getName() ).isEqualTo( "Prince" );
assertThat( result.getSong().getArtist().getLabel() ).isNotNull();
assertThat( result.getSong().getArtist().getLabel().getStudio() ).isNotNull();
assertThat( result.getSong().getArtist().getLabel().getStudio().getName() )
.isEqualTo( "Live, First Avenue, Minneapolis" );
assertThat( result.getSong().getArtist().getLabel().getStudio().getCity() )
.isEqualTo( "Minneapolis" );
assertThat( result.getSong().getPositions() ).hasSize( 1 );
assertThat( result.getSong().getPositions().get( 0 ) ).isEqualTo( 1 );
}
@ProcessorTest
public void shouldReverseNestedTarget() {
ChartEntry chartEntry = new ChartEntry(
"US Billboard Hot Rock Songs",
"Purple Rain",
"Prince",
"Live, First Avenue, Minneapolis",
"Minneapolis",
1
);
Chart chart = ChartEntryToArtist.MAPPER.map( chartEntry );
ChartEntry result = ChartEntryToArtist.MAPPER.map( chart );
assertThat( result ).isNotNull();
assertThat( result.getArtistName() ).isEqualTo( "Prince" );
assertThat( result.getChartName() ).isEqualTo( "US Billboard Hot Rock Songs" );
assertThat( result.getCity() ).isEqualTo( "Minneapolis" );
assertThat( result.getPosition() ).isEqualTo( 1 );
assertThat( result.getRecordedAt() ).isEqualTo( "Live, First Avenue, Minneapolis" );
assertThat( result.getSongTitle() ).isEqualTo( "Purple Rain" );
}
}
| NestedProductPropertiesConstructorTest |
java | apache__rocketmq | common/src/test/java/org/apache/rocketmq/common/message/MessageDecoderTest.java | {
"start": 1524,
"end": 17758
} | class ____ {
@Test
public void testDecodeProperties() {
MessageExt messageExt = new MessageExt();
messageExt.setMsgId("645100FA00002A9F000000489A3AA09E");
messageExt.setTopic("abc");
messageExt.setBody("hello!q!".getBytes());
try {
messageExt.setBornHost(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 0));
} catch (UnknownHostException e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
messageExt.setBornTimestamp(System.currentTimeMillis());
messageExt.setCommitLogOffset(123456);
messageExt.setPreparedTransactionOffset(0);
messageExt.setQueueId(0);
messageExt.setQueueOffset(123);
messageExt.setReconsumeTimes(0);
try {
messageExt.setStoreHost(new InetSocketAddress(InetAddress.getLocalHost(), 0));
} catch (UnknownHostException e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
messageExt.putUserProperty("a", "123");
messageExt.putUserProperty("b", "hello");
messageExt.putUserProperty("c", "3.14");
{
byte[] msgBytes = new byte[0];
try {
msgBytes = MessageDecoder.encode(messageExt, false);
} catch (Exception e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
ByteBuffer byteBuffer = ByteBuffer.allocate(msgBytes.length);
byteBuffer.put(msgBytes);
Map<String, String> properties = MessageDecoder.decodeProperties(byteBuffer);
assertThat(properties).isNotNull();
assertThat("123").isEqualTo(properties.get("a"));
assertThat("hello").isEqualTo(properties.get("b"));
assertThat("3.14").isEqualTo(properties.get("c"));
}
{
byte[] msgBytes = new byte[0];
try {
msgBytes = MessageDecoder.encode(messageExt, false);
} catch (Exception e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
ByteBuffer byteBuffer = ByteBuffer.allocate(msgBytes.length);
byteBuffer.put(msgBytes);
Map<String, String> properties = MessageDecoder.decodeProperties(byteBuffer);
assertThat(properties).isNotNull();
assertThat("123").isEqualTo(properties.get("a"));
assertThat("hello").isEqualTo(properties.get("b"));
assertThat("3.14").isEqualTo(properties.get("c"));
}
}
@Test
public void testDecodePropertiesOnIPv6Host() {
MessageExt messageExt = new MessageExt();
messageExt.setMsgId("24084004018081003FAA1DDE2B3F898A00002A9F0000000000000CA0");
messageExt.setBornHostV6Flag();
messageExt.setStoreHostAddressV6Flag();
messageExt.setTopic("abc");
messageExt.setBody("hello!q!".getBytes());
try {
messageExt.setBornHost(new InetSocketAddress(InetAddress.getByName("1050:0000:0000:0000:0005:0600:300c:326b"), 0));
} catch (UnknownHostException e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
messageExt.setBornTimestamp(System.currentTimeMillis());
messageExt.setCommitLogOffset(123456);
messageExt.setPreparedTransactionOffset(0);
messageExt.setQueueId(0);
messageExt.setQueueOffset(123);
messageExt.setReconsumeTimes(0);
try {
messageExt.setStoreHost(new InetSocketAddress(InetAddress.getByName("::1"), 0));
} catch (UnknownHostException e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
messageExt.putUserProperty("a", "123");
messageExt.putUserProperty("b", "hello");
messageExt.putUserProperty("c", "3.14");
byte[] msgBytes = new byte[0];
try {
msgBytes = MessageDecoder.encode(messageExt, false);
} catch (Exception e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
ByteBuffer byteBuffer = ByteBuffer.allocate(msgBytes.length);
byteBuffer.put(msgBytes);
Map<String, String> properties = MessageDecoder.decodeProperties(byteBuffer);
assertThat(properties).isNotNull();
assertThat("123").isEqualTo(properties.get("a"));
assertThat("hello").isEqualTo(properties.get("b"));
assertThat("3.14").isEqualTo(properties.get("c"));
}
@Test
public void testEncodeAndDecode() {
MessageExt messageExt = new MessageExt();
messageExt.setMsgId("645100FA00002A9F000000489A3AA09E");
messageExt.setTopic("abc");
messageExt.setBody("hello!q!".getBytes());
try {
messageExt.setBornHost(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 0));
} catch (UnknownHostException e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
messageExt.setBornTimestamp(System.currentTimeMillis());
messageExt.setCommitLogOffset(123456);
messageExt.setPreparedTransactionOffset(0);
messageExt.setQueueId(1);
messageExt.setQueueOffset(123);
messageExt.setReconsumeTimes(0);
try {
messageExt.setStoreHost(new InetSocketAddress(InetAddress.getLocalHost(), 0));
} catch (UnknownHostException e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
messageExt.putUserProperty("a", "123");
messageExt.putUserProperty("b", "hello");
messageExt.putUserProperty("c", "3.14");
messageExt.setBodyCRC(UtilAll.crc32(messageExt.getBody()));
byte[] msgBytes = new byte[0];
try {
msgBytes = MessageDecoder.encode(messageExt, false);
} catch (Exception e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
ByteBuffer byteBuffer = ByteBuffer.allocate(msgBytes.length);
byteBuffer.put(msgBytes);
byteBuffer.flip();
MessageExt decodedMsg = MessageDecoder.decode(byteBuffer);
assertThat(decodedMsg).isNotNull();
assertThat(1).isEqualTo(decodedMsg.getQueueId());
assertThat(123456L).isEqualTo(decodedMsg.getCommitLogOffset());
assertThat("hello!q!".getBytes()).isEqualTo(decodedMsg.getBody());
int msgIDLength = 4 + 4 + 8;
ByteBuffer byteBufferMsgId = ByteBuffer.allocate(msgIDLength);
String msgId = createMessageId(byteBufferMsgId, messageExt.getStoreHostBytes(), messageExt.getCommitLogOffset());
assertThat(msgId).isEqualTo(decodedMsg.getMsgId());
assertThat("abc").isEqualTo(decodedMsg.getTopic());
}
@Test
public void testEncodeAndDecodeOnIPv6Host() {
MessageExt messageExt = new MessageExt();
messageExt.setMsgId("24084004018081003FAA1DDE2B3F898A00002A9F0000000000000CA0");
messageExt.setBornHostV6Flag();
messageExt.setStoreHostAddressV6Flag();
messageExt.setTopic("abc");
messageExt.setBody("hello!q!".getBytes());
try {
messageExt.setBornHost(new InetSocketAddress(InetAddress.getByName("1050:0000:0000:0000:0005:0600:300c:326b"), 0));
} catch (UnknownHostException e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
messageExt.setBornTimestamp(System.currentTimeMillis());
messageExt.setCommitLogOffset(123456);
messageExt.setPreparedTransactionOffset(0);
messageExt.setQueueId(1);
messageExt.setQueueOffset(123);
messageExt.setReconsumeTimes(0);
try {
messageExt.setStoreHost(new InetSocketAddress(InetAddress.getByName("::1"), 0));
} catch (UnknownHostException e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
messageExt.putUserProperty("a", "123");
messageExt.putUserProperty("b", "hello");
messageExt.putUserProperty("c", "3.14");
messageExt.setBodyCRC(UtilAll.crc32(messageExt.getBody()));
byte[] msgBytes = new byte[0];
try {
msgBytes = MessageDecoder.encode(messageExt, false);
} catch (Exception e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
ByteBuffer byteBuffer = ByteBuffer.allocate(msgBytes.length);
byteBuffer.put(msgBytes);
byteBuffer.flip();
MessageExt decodedMsg = MessageDecoder.decode(byteBuffer);
assertThat(decodedMsg).isNotNull();
assertThat(1).isEqualTo(decodedMsg.getQueueId());
assertThat(123456L).isEqualTo(decodedMsg.getCommitLogOffset());
assertThat("hello!q!".getBytes()).isEqualTo(decodedMsg.getBody());
// assertThat(48).isEqualTo(decodedMsg.getSysFlag());
assertThat(MessageSysFlag.check(messageExt.getSysFlag(), MessageSysFlag.STOREHOSTADDRESS_V6_FLAG)).isTrue();
int msgIDLength = 16 + 4 + 8;
ByteBuffer byteBufferMsgId = ByteBuffer.allocate(msgIDLength);
String msgId = createMessageId(byteBufferMsgId, messageExt.getStoreHostBytes(), messageExt.getCommitLogOffset());
assertThat(msgId).isEqualTo(decodedMsg.getMsgId());
assertThat("abc").isEqualTo(decodedMsg.getTopic());
}
@Test
public void testNullValueProperty() throws Exception {
MessageExt msg = new MessageExt();
msg.setBody("x".getBytes());
msg.setTopic("x");
msg.setBornHost(new InetSocketAddress("127.0.0.1", 9000));
msg.setStoreHost(new InetSocketAddress("127.0.0.1", 9000));
String key = "NullValueKey";
msg.putProperty(key, null);
try {
byte[] encode = MessageDecoder.encode(msg, false);
MessageExt decode = MessageDecoder.decode(ByteBuffer.wrap(encode));
assertThat(decode.getProperty(key)).isNull();
} catch (Exception e) {
e.printStackTrace();
assertThat(Boolean.FALSE).isTrue();
}
}
@Test
public void testString2messageProperties() {
StringBuilder sb = new StringBuilder();
sb.append("k1").append(NAME_VALUE_SEPARATOR).append("v1");
Map<String,String> m = MessageDecoder.string2messageProperties(sb.toString());
assertThat(m).size().isEqualTo(1);
assertThat(m.get("k1")).isEqualTo("v1");
m = MessageDecoder.string2messageProperties("");
assertThat(m).size().isEqualTo(0);
m = MessageDecoder.string2messageProperties(" ");
assertThat(m).size().isEqualTo(0);
m = MessageDecoder.string2messageProperties("aaa");
assertThat(m).size().isEqualTo(0);
sb.setLength(0);
sb.append("k1").append(NAME_VALUE_SEPARATOR);
m = MessageDecoder.string2messageProperties(sb.toString());
assertThat(m).size().isEqualTo(0);
sb.setLength(0);
sb.append(NAME_VALUE_SEPARATOR).append("v1");
m = MessageDecoder.string2messageProperties(sb.toString());
assertThat(m).size().isEqualTo(0);
sb.setLength(0);
sb.append("k1").append(NAME_VALUE_SEPARATOR).append("v1").append(PROPERTY_SEPARATOR);
m = MessageDecoder.string2messageProperties(sb.toString());
assertThat(m).size().isEqualTo(1);
assertThat(m.get("k1")).isEqualTo("v1");
sb.setLength(0);
sb.append("k1").append(NAME_VALUE_SEPARATOR).append("v1").append(PROPERTY_SEPARATOR)
.append("k2").append(NAME_VALUE_SEPARATOR).append("v2");
m = MessageDecoder.string2messageProperties(sb.toString());
assertThat(m).size().isEqualTo(2);
assertThat(m.get("k1")).isEqualTo("v1");
assertThat(m.get("k2")).isEqualTo("v2");
sb.setLength(0);
sb.append("k1").append(NAME_VALUE_SEPARATOR).append("v1").append(PROPERTY_SEPARATOR)
.append(NAME_VALUE_SEPARATOR).append("v2");
m = MessageDecoder.string2messageProperties(sb.toString());
assertThat(m).size().isEqualTo(1);
assertThat(m.get("k1")).isEqualTo("v1");
sb.setLength(0);
sb.append("k1").append(NAME_VALUE_SEPARATOR).append("v1").append(PROPERTY_SEPARATOR)
.append("k2").append(NAME_VALUE_SEPARATOR);
m = MessageDecoder.string2messageProperties(sb.toString());
assertThat(m).size().isEqualTo(1);
assertThat(m.get("k1")).isEqualTo("v1");
sb.setLength(0);
sb.append(NAME_VALUE_SEPARATOR).append("v1").append(PROPERTY_SEPARATOR)
.append("k2").append(NAME_VALUE_SEPARATOR).append("v2");
m = MessageDecoder.string2messageProperties(sb.toString());
assertThat(m).size().isEqualTo(1);
assertThat(m.get("k2")).isEqualTo("v2");
sb.setLength(0);
sb.append("k1").append(NAME_VALUE_SEPARATOR).append(PROPERTY_SEPARATOR)
.append("k2").append(NAME_VALUE_SEPARATOR).append("v2");
m = MessageDecoder.string2messageProperties(sb.toString());
assertThat(m).size().isEqualTo(1);
assertThat(m.get("k2")).isEqualTo("v2");
sb.setLength(0);
sb.append("1").append(NAME_VALUE_SEPARATOR).append("1").append(PROPERTY_SEPARATOR)
.append("2").append(NAME_VALUE_SEPARATOR).append("2");
m = MessageDecoder.string2messageProperties(sb.toString());
assertThat(m).size().isEqualTo(2);
assertThat(m.get("1")).isEqualTo("1");
assertThat(m.get("2")).isEqualTo("2");
sb.setLength(0);
sb.append("1").append(NAME_VALUE_SEPARATOR).append(PROPERTY_SEPARATOR)
.append("2").append(NAME_VALUE_SEPARATOR).append("2");
m = MessageDecoder.string2messageProperties(sb.toString());
assertThat(m).size().isEqualTo(1);
assertThat(m.get("2")).isEqualTo("2");
sb.setLength(0);
sb.append(NAME_VALUE_SEPARATOR).append("1").append(PROPERTY_SEPARATOR)
.append("2").append(NAME_VALUE_SEPARATOR).append("2");
m = MessageDecoder.string2messageProperties(sb.toString());
assertThat(m).size().isEqualTo(1);
assertThat(m.get("2")).isEqualTo("2");
sb.setLength(0);
sb.append("1").append(NAME_VALUE_SEPARATOR).append("1").append(PROPERTY_SEPARATOR)
.append("2").append(NAME_VALUE_SEPARATOR);
m = MessageDecoder.string2messageProperties(sb.toString());
assertThat(m).size().isEqualTo(1);
assertThat(m.get("1")).isEqualTo("1");
sb.setLength(0);
sb.append("1").append(NAME_VALUE_SEPARATOR).append("1").append(PROPERTY_SEPARATOR)
.append(NAME_VALUE_SEPARATOR).append("2");
m = MessageDecoder.string2messageProperties(sb.toString());
assertThat(m).size().isEqualTo(1);
assertThat(m.get("1")).isEqualTo("1");
}
@Test
public void testMessageId() throws Exception {
// ipv4 messageId test
MessageExt msgExt = new MessageExt();
msgExt.setStoreHost(new InetSocketAddress("127.0.0.1", 9103));
msgExt.setCommitLogOffset(123456);
verifyMessageId(msgExt);
// ipv6 messageId test
msgExt.setStoreHostAddressV6Flag();
msgExt.setStoreHost(new InetSocketAddress(InetAddress.getByName("::1"), 0));
verifyMessageId(msgExt);
}
private void verifyMessageId(MessageExt msgExt) throws UnknownHostException {
int storehostIPLength = (msgExt.getSysFlag() & MessageSysFlag.STOREHOSTADDRESS_V6_FLAG) == 0 ? 4 : 16;
int msgIDLength = storehostIPLength + 4 + 8;
ByteBuffer byteBufferMsgId = ByteBuffer.allocate(msgIDLength);
String msgId = createMessageId(byteBufferMsgId, msgExt.getStoreHostBytes(), msgExt.getCommitLogOffset());
MessageId messageId = decodeMessageId(msgId);
assertThat(messageId.getAddress()).isEqualTo(msgExt.getStoreHost());
assertThat(messageId.getOffset()).isEqualTo(msgExt.getCommitLogOffset());
}
} | MessageDecoderTest |
java | google__guice | core/src/com/google/inject/internal/ProvidedByInternalFactory.java | {
"start": 1105,
"end": 4620
} | class ____<T> extends ProviderInternalFactory<T> implements DelayedInitialize {
private final Class<? extends Provider<?>> providerType;
private final Key<? extends Provider<T>> providerKey;
private InternalFactory<? extends Provider<T>> providerFactory;
private ProvisionListenerStackCallback<T> provisionCallback;
ProvidedByInternalFactory(
Class<?> rawType,
Class<? extends Provider<?>> providerType,
Key<? extends Provider<T>> providerKey,
int circularFactoryId) {
super(rawType, providerKey, circularFactoryId);
this.providerType = providerType;
this.providerKey = providerKey;
}
void setProvisionListenerCallback(ProvisionListenerStackCallback<T> listener) {
provisionCallback = listener;
}
@Override
public void initialize(InjectorImpl injector, Errors errors) throws ErrorsException {
providerFactory =
injector.getInternalFactory(providerKey, errors, JitLimitation.NEW_OR_EXISTING_JIT);
}
@Override
public T get(InternalContext context, Dependency<?> dependency, boolean linked)
throws InternalProvisionException {
InternalFactory<? extends Provider<T>> localProviderFactory = providerFactory;
if (localProviderFactory == null) {
throw new IllegalStateException("not initialized");
}
try {
// TODO: lukes - Is this the right 'dependency' to pass?
Provider<? extends T> provider = localProviderFactory.get(context, dependency, true);
return circularGet(provider, context, dependency, provisionCallback);
} catch (InternalProvisionException ipe) {
throw ipe.addSource(providerKey);
}
}
@Override
MethodHandleResult makeHandle(LinkageContext context, boolean linked) {
return makeCachable(
InternalMethodHandles.catchInternalProvisionExceptionAndRethrowWithSource(
circularGetHandle(
providerFactory.getHandle(context, /* linked= */ true), provisionCallback),
providerKey));
}
@Override
protected MethodHandle validateReturnTypeHandle(MethodHandle providerHandle) {
return MethodHandles.filterReturnValue(
providerHandle,
MethodHandles.insertArguments(
CHECK_SUBTYPE_NOT_PROVIDED_MH, 1, source, providerType, providedRawType));
}
private static final MethodHandle CHECK_SUBTYPE_NOT_PROVIDED_MH =
InternalMethodHandles.findStaticOrDie(
ProvidedByInternalFactory.class,
"doCheckSubtypeNotProvided",
methodType(Object.class, Object.class, Object.class, Class.class, Class.class));
// Historically this had a different error check than other providers,
// so we preserve that behavior.
@Keep
static Object doCheckSubtypeNotProvided(
Object result,
Object source,
Class<? extends jakarta.inject.Provider<?>> providerType,
Class<?> providedType)
throws InternalProvisionException {
if (result != null && !providedType.isInstance(result)) {
throw InternalProvisionException.subtypeNotProvided(providerType, providedType)
.addSource(source);
}
return result;
}
// Historically this had a different error check than other providers,
// so we preserve that behavior.
@Override
protected void validateReturnType(T t) throws InternalProvisionException {
if (t != null && !providedRawType.isInstance(t)) {
throw InternalProvisionException.subtypeNotProvided(providerType, providedRawType)
.addSource(source);
}
}
}
| ProvidedByInternalFactory |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java | {
"start": 4010,
"end": 5749
} | class ____<KType, VType> extends AbstractSet<Map.Entry<KType, VType>> {
private final ObjectObjectHashMap<KType, VType> map;
private EntrySet(ObjectObjectHashMap<KType, VType> map) {
this.map = map;
}
@Override
public int size() {
return map.size();
}
@Override
public boolean isEmpty() {
return map.isEmpty();
}
@Override
public Iterator<Map.Entry<KType, VType>> iterator() {
return Iterators.map(map.iterator(), c -> new AbstractMap.SimpleImmutableEntry<>(c.key, c.value));
}
@Override
public Spliterator<Map.Entry<KType, VType>> spliterator() {
return Spliterators.spliterator(iterator(), size(), Spliterator.IMMUTABLE);
}
@Override
public void forEach(Consumer<? super Map.Entry<KType, VType>> action) {
map.forEach(
(Consumer<ObjectObjectCursor<KType, VType>>) c -> action.accept(new AbstractMap.SimpleImmutableEntry<>(c.key, c.value))
);
}
@SuppressWarnings("unchecked")
@Override
public boolean contains(Object o) {
if (o instanceof Map.Entry<?, ?> == false) {
return false;
}
Map.Entry<?, ?> e = (Map.Entry<?, ?>) o;
Object key = e.getKey();
Object v = map.get((KType) key);
if (v == null && map.containsKey((KType) key) == false) {
return false;
}
return Objects.equals(v, e.getValue());
}
@Override
public String toString() {
return map.toString();
}
}
private static | EntrySet |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/argumentselectiondefects/NameInCommentHeuristic.java | {
"start": 1408,
"end": 2867
} | class ____ implements Heuristic {
/**
* Return true if there are no comments on the original actual parameter of a change which match
* the name of the formal parameter.
*/
@Override
public boolean isAcceptableChange(
Changes changes, Tree node, MethodSymbol symbol, VisitorState state) {
// Now check to see if there is a comment in the position of any actual parameter we want to
// change which matches the formal parameter
ImmutableList<Commented<ExpressionTree>> comments = findCommentsForArguments(node, state);
return changes.changedPairs().stream()
.noneMatch(
p -> {
MatchType match =
NamedParameterComment.match(comments.get(p.formal().index()), p.formal().name())
.matchType();
return match == MatchType.EXACT_MATCH || match == MatchType.APPROXIMATE_MATCH;
});
}
private static ImmutableList<Commented<ExpressionTree>> findCommentsForArguments(
Tree tree, VisitorState state) {
return switch (tree) {
case MethodInvocationTree methodInvocationTree ->
Comments.findCommentsForArguments(methodInvocationTree, state);
case NewClassTree newClassTree -> Comments.findCommentsForArguments(newClassTree, state);
default ->
throw new IllegalArgumentException(
"Only MethodInvocationTree or NewClassTree is supported");
};
}
}
| NameInCommentHeuristic |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/jobgraph/topology/DefaultLogicalTopologyTest.java | {
"start": 2202,
"end": 4659
} | class ____ extends TestLogger {
private JobGraph jobGraph;
private DefaultLogicalTopology logicalTopology;
@Before
public void setUp() throws Exception {
jobGraph = createJobGraph();
logicalTopology = DefaultLogicalTopology.fromJobGraph(jobGraph);
}
@Test
public void testGetVertices() {
// vertices from getVertices() should be topologically sorted
final Iterable<JobVertex> jobVertices =
jobGraph.getVerticesSortedTopologicallyFromSources();
final Iterable<DefaultLogicalVertex> logicalVertices = logicalTopology.getVertices();
assertEquals(Iterables.size(jobVertices), Iterables.size(logicalVertices));
final Iterator<JobVertex> jobVertexIterator = jobVertices.iterator();
final Iterator<DefaultLogicalVertex> logicalVertexIterator = logicalVertices.iterator();
while (jobVertexIterator.hasNext()) {
assertVertexAndConnectedResultsEquals(
jobVertexIterator.next(), logicalVertexIterator.next());
}
}
@Test
public void testGetLogicalPipelinedRegions() {
assertEquals(2, IterableUtils.toStream(logicalTopology.getAllPipelinedRegions()).count());
}
private JobGraph createJobGraph() {
final JobVertex[] jobVertices = new JobVertex[3];
final int parallelism = 3;
jobVertices[0] = createNoOpVertex("v1", parallelism);
jobVertices[1] = createNoOpVertex("v2", parallelism);
jobVertices[2] = createNoOpVertex("v3", parallelism);
connectNewDataSetAsInput(jobVertices[1], jobVertices[0], ALL_TO_ALL, PIPELINED);
connectNewDataSetAsInput(jobVertices[2], jobVertices[1], ALL_TO_ALL, BLOCKING);
return JobGraphTestUtils.streamingJobGraph(jobVertices);
}
private static void assertVertexAndConnectedResultsEquals(
final JobVertex jobVertex, final DefaultLogicalVertex logicalVertex) {
assertVertexInfoEquals(jobVertex, logicalVertex);
final List<IntermediateDataSet> consumedResults =
jobVertex.getInputs().stream().map(JobEdge::getSource).collect(Collectors.toList());
assertResultsEquals(consumedResults, logicalVertex.getConsumedResults());
final List<IntermediateDataSet> producedResults = jobVertex.getProducedDataSets();
assertResultsEquals(producedResults, logicalVertex.getProducedResults());
}
}
| DefaultLogicalTopologyTest |
java | apache__flink | flink-libraries/flink-state-processing-api/src/main/java/org/apache/flink/state/api/input/operator/WindowReaderOperator.java | {
"start": 11280,
"end": 11852
} | class ____ extends DefaultKeyedStateStore {
W window;
PerWindowKeyedStateStore(KeyedStateBackend<?> keyedStateBackend) {
super(keyedStateBackend, WindowReaderOperator.this.getSerializerFactory());
}
@Override
protected <SS extends State> SS getPartitionedState(StateDescriptor<SS, ?> stateDescriptor)
throws Exception {
return keyedStateBackend.getPartitionedState(
window, namespaceSerializer, stateDescriptor);
}
}
private static | PerWindowKeyedStateStore |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/odps/ast/OdpsAddFileStatement.java | {
"start": 2090,
"end": 2145
} | enum ____ {
FILE, ARCHIVE, JAR, PY
}
}
| FileType |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/rest/RestBindingDefinition.java | {
"start": 8302,
"end": 8604
} | class ____ the input data. Append a [] to the end of the name if you want the input to be an array
* type.
*/
public void setOutType(String outType) {
this.outType = outType;
}
public Class<?> getOutTypeClass() {
return outTypeClass;
}
/**
* Sets the | of |
java | spring-projects__spring-boot | core/spring-boot-test/src/main/java/org/springframework/boot/test/context/SpringBootContextLoader.java | {
"start": 5070,
"end": 8457
} | class ____ extends AbstractContextLoader implements AotContextLoader {
private static final Consumer<SpringApplication> ALREADY_CONFIGURED = (springApplication) -> {
};
private static final Object NONE = new Object();
@Override
public ApplicationContext loadContext(MergedContextConfiguration mergedConfig) throws Exception {
return loadContext(mergedConfig, Mode.STANDARD, null, null);
}
@Override
public ApplicationContext loadContextForAotProcessing(MergedContextConfiguration mergedConfig,
RuntimeHints runtimeHints) throws Exception {
return loadContext(mergedConfig, Mode.AOT_PROCESSING, null, runtimeHints);
}
@Override
public ApplicationContext loadContextForAotRuntime(MergedContextConfiguration mergedConfig,
ApplicationContextInitializer<ConfigurableApplicationContext> initializer) throws Exception {
return loadContext(mergedConfig, Mode.AOT_RUNTIME, initializer, null);
}
private ApplicationContext loadContext(MergedContextConfiguration mergedConfig, Mode mode,
@Nullable ApplicationContextInitializer<ConfigurableApplicationContext> initializer,
@Nullable RuntimeHints runtimeHints) throws Exception {
assertHasClassesOrLocations(mergedConfig);
SpringBootTestAnnotation annotation = SpringBootTestAnnotation.get(mergedConfig);
String[] args = annotation.getArgs();
UseMainMethod useMainMethod = annotation.getUseMainMethod();
Method mainMethod = getMainMethod(mergedConfig, useMainMethod);
if (mainMethod != null) {
if (runtimeHints != null) {
runtimeHints.reflection().registerMethod(mainMethod, ExecutableMode.INVOKE);
}
ContextLoaderHook hook = new ContextLoaderHook(mode, initializer,
(application) -> configure(mergedConfig, application));
return hook.runMain(() -> {
if (mainMethod.getParameterCount() == 0) {
ReflectionUtils.invokeMethod(mainMethod, null);
}
else {
ReflectionUtils.invokeMethod(mainMethod, null, new Object[] { args });
}
});
}
SpringApplication application = getSpringApplication();
configure(mergedConfig, application);
ContextLoaderHook hook = new ContextLoaderHook(mode, initializer, ALREADY_CONFIGURED);
return hook.run(() -> application.run(args));
}
private void assertHasClassesOrLocations(MergedContextConfiguration mergedConfig) {
boolean hasClasses = !ObjectUtils.isEmpty(mergedConfig.getClasses());
boolean hasLocations = !ObjectUtils.isEmpty(mergedConfig.getLocations());
Assert.state(hasClasses || hasLocations,
() -> "No configuration classes or locations found in @SpringApplicationConfiguration. "
+ "For default configuration detection to work you need Spring 4.0.3 or better (found "
+ SpringVersion.getVersion() + ").");
}
private @Nullable Method getMainMethod(MergedContextConfiguration mergedConfig, UseMainMethod useMainMethod) {
if (useMainMethod == UseMainMethod.NEVER) {
return null;
}
Assert.state(mergedConfig.getParent() == null,
() -> "UseMainMethod.%s cannot be used with @ContextHierarchy tests".formatted(useMainMethod));
Class<?> springBootConfiguration = Arrays.stream(mergedConfig.getClasses())
.filter(this::isSpringBootConfiguration)
.findFirst()
.orElse(null);
Assert.state(springBootConfiguration != null || useMainMethod == UseMainMethod.WHEN_AVAILABLE,
"Cannot use main method as no @SpringBootConfiguration-annotated | SpringBootContextLoader |
java | netty__netty | microbench/src/main/java/io/netty/handler/codec/http/WriteBytesVsShortOrMediumBenchmark.java | {
"start": 1274,
"end": 2424
} | class ____ extends AbstractMicrobenchmark {
private static final int CRLF_SHORT = (CR << 8) + LF;
private static final byte[] CRLF = { CR, LF };
private static final int ZERO_CRLF_MEDIUM = ('0' << 16) + (CR << 8) + LF;
private static final byte[] ZERO_CRLF = { '0', CR, LF };
private final ByteBuf buf = Unpooled.directBuffer(16);
@Benchmark
public ByteBuf shortInt() {
return ByteBufUtil.writeShortBE(buf, CRLF_SHORT).resetWriterIndex();
}
@Benchmark
public ByteBuf mediumInt() {
return ByteBufUtil.writeMediumBE(buf, ZERO_CRLF_MEDIUM).resetWriterIndex();
}
@Benchmark
public ByteBuf byteArray2() {
return buf.writeBytes(CRLF).resetWriterIndex();
}
@Benchmark
public ByteBuf byteArray3() {
return buf.writeBytes(ZERO_CRLF).resetWriterIndex();
}
@Benchmark
public ByteBuf chainedBytes2() {
return buf.writeByte(CR).writeByte(LF).resetWriterIndex();
}
@Benchmark
public ByteBuf chainedBytes3() {
return buf.writeByte('0').writeByte(CR).writeByte(LF).resetWriterIndex();
}
}
| WriteBytesVsShortOrMediumBenchmark |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/spi/QueryEngine.java | {
"start": 1468,
"end": 1730
} | class ____
*/
@Internal
ClassLoaderService getClassLoaderService();
default <R> HqlInterpretation<R> interpretHql(String hql, Class<R> resultType) {
return getInterpretationCache().resolveHqlInterpretation( hql, resultType, getHqlTranslator() );
}
}
| loading |
java | apache__camel | components/camel-jslt/src/test/java/org/apache/camel/component/jslt/JsltVariablesTest.java | {
"start": 4595,
"end": 4648
} | class ____ {
private Master master;
}
}
| Slave |
java | FasterXML__jackson-core | src/main/java/tools/jackson/core/json/ReaderBasedJsonParser.java | {
"start": 575,
"end": 109768
} | class ____
extends JsonParserBase
{
private final static int FEAT_MASK_TRAILING_COMMA = JsonReadFeature.ALLOW_TRAILING_COMMA.getMask();
private final static int FEAT_MASK_ALLOW_MISSING = JsonReadFeature.ALLOW_MISSING_VALUES.getMask();
// Latin1 encoding is not supported, but we do use 8-bit subset for
// pre-processing task, to simplify first pass, keep it fast.
protected final static int[] _icLatin1 = CharTypes.getInputCodeLatin1();
/*
/**********************************************************************
/* Input configuration
/**********************************************************************
*/
/**
* Reader that can be used for reading more content, if one
* buffer from input source, but in some cases pre-loaded buffer
* is handed to the parser.
*/
protected Reader _reader;
/**
* Current buffer from which data is read; generally data is read into
* buffer from input source.
*/
protected char[] _inputBuffer;
/**
* Flag that indicates whether the input buffer is recycable (and
* needs to be returned to recycler once we are done) or not.
*<p>
* If it is not, it also means that parser CANNOT modify underlying
* buffer.
*/
protected boolean _bufferRecyclable;
/*
/**********************************************************************
/* Configuration
/**********************************************************************
*/
protected final CharsToNameCanonicalizer _symbols;
protected final int _hashSeed;
/*
/**********************************************************************
/* Parsing state
/**********************************************************************
*/
/**
* Flag that indicates that the current token has not yet
* been fully processed, and needs to be finished for
* some access (or skipped to obtain the next token)
*/
protected boolean _tokenIncomplete;
/**
* Value of {@link #_inputPtr} at the time when the first character of
* name token was read. Used for calculating token location when requested;
* combined with {@link #_currInputProcessed}, may be updated appropriately
* as needed.
*/
protected long _nameStartOffset;
protected int _nameStartRow;
protected int _nameStartCol;
/*
/**********************************************************************
/* Life-cycle
/**********************************************************************
*/
/**
* Constructor called when caller wants to provide input buffer directly
* (or needs to, in case of bootstrapping having read some of contents)
* and it may or may not be recyclable use standard recycle context.
*
* @param readCtxt Object read context to use
* @param ctxt I/O context to use
* @param stdFeatures Standard stream read features enabled
* @param formatFeatures Format-specific read features enabled
* @param r Reader used for reading actual content, if any; {@code null} if none
* @param st Name canonicalizer to use
* @param inputBuffer Input buffer to read initial content from (before Reader)
* @param start Pointer in {@code inputBuffer} that has the first content character to decode
* @param end Pointer past the last content character in {@code inputBuffer}
* @param bufferRecyclable Whether {@code inputBuffer} passed is managed by Jackson core
* (and thereby needs recycling)
*/
public ReaderBasedJsonParser(ObjectReadContext readCtxt, IOContext ctxt,
int stdFeatures, int formatFeatures, Reader r,
CharsToNameCanonicalizer st,
char[] inputBuffer, int start, int end,
boolean bufferRecyclable)
{
super(readCtxt, ctxt, stdFeatures, formatFeatures);
_reader = r;
_inputBuffer = inputBuffer;
_inputPtr = start;
_inputEnd = end;
_currInputRowStart = start;
// If we have offset, need to omit that from byte offset, so:
_currInputProcessed = -start;
_symbols = st;
_hashSeed = st.hashSeed();
_bufferRecyclable = bufferRecyclable;
}
/**
* Constructor called when input comes as a {@link java.io.Reader}, and buffer allocation
* can be done using default mechanism.
*
* @param readCtxt Object read context to use
* @param ctxt I/O context to use
* @param stdFeatures Standard stream read features enabled
* @param formatFeatures Format-specific read features enabled
* @param r Reader used for reading actual content, if any; {@code null} if none
* @param st Name canonicalizer to use
*/
public ReaderBasedJsonParser(ObjectReadContext readCtxt, IOContext ctxt,
int stdFeatures, int formatFeatures, Reader r,
CharsToNameCanonicalizer st)
{
super(readCtxt, ctxt, stdFeatures, formatFeatures);
_reader = r;
_inputBuffer = ctxt.allocTokenBuffer();
_inputPtr = 0;
_inputEnd = 0;
_symbols = st;
_hashSeed = st.hashSeed();
_bufferRecyclable = true;
}
/*
/**********************************************************************
/* Base method defs, overrides
/**********************************************************************
*/
@Override
public int releaseBuffered(Writer w) throws JacksonException {
int count = _inputEnd - _inputPtr;
if (count < 1) { return 0; }
// let's just advance ptr to end
int origPtr = _inputPtr;
_inputPtr += count;
try {
w.write(_inputBuffer, origPtr, count);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
return count;
}
@Override public Object streamReadInputSource() { return _reader; }
protected char getNextChar(String eofMsg, JsonToken forToken) throws JacksonException {
if (_inputPtr >= _inputEnd) {
if (!_loadMore()) {
_reportInvalidEOF(eofMsg, forToken);
}
}
return _inputBuffer[_inputPtr++];
}
@Override
protected void _closeInput() {
/* 25-Nov-2008, tatus: As per [JACKSON-16] we are not to call close()
* on the underlying Reader, unless we "own" it, or auto-closing
* feature is enabled.
* One downside is that when using our optimized
* Reader (granted, we only do that for UTF-32...) this
* means that buffer recycling won't work correctly.
*/
if (_reader != null) {
if (_ioContext.isResourceManaged() || isEnabled(StreamReadFeature.AUTO_CLOSE_SOURCE)) {
try {
_reader.close();
} catch (IOException e) {
throw _wrapIOFailure(e);
}
}
_reader = null;
}
}
/**
* Method called to release internal buffers owned by the base
* reader. This may be called along with {@link #_closeInput} (for
* example, when explicitly closing this reader instance), or
* separately (if need be).
*/
@Override
protected void _releaseBuffers()
{
super._releaseBuffers();
// merge new symbols, if any
_symbols.release();
// and release buffers, if they are recyclable ones
if (_bufferRecyclable) {
char[] buf = _inputBuffer;
if (buf != null) {
_inputBuffer = null;
_ioContext.releaseTokenBuffer(buf);
}
}
}
/*
/**********************************************************************
/* Low-level access, supporting
/**********************************************************************
*/
protected void _loadMoreGuaranteed() throws JacksonException {
if (!_loadMore()) { _reportInvalidEOF(); }
}
protected boolean _loadMore() throws JacksonException
{
if (_reader != null) {
final int count;
try {
count = _reader.read(_inputBuffer, 0, _inputBuffer.length);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
final int bufSize = _inputEnd;
_currInputProcessed += bufSize;
_currInputRowStart -= bufSize;
// 06-Sep-2023, tatu: [core#1046] Enforce max doc length limit
_streamReadConstraints.validateDocumentLength(_currInputProcessed);
if (count > 0) {
// 26-Nov-2015, tatu: Since name-offset requires it too, must offset
// this increase to avoid "moving" name-offset, resulting most likely
// in negative value, which is fine as combine value remains unchanged.
_nameStartOffset -= bufSize;
_inputPtr = 0;
_inputEnd = count;
return true;
}
_inputPtr = _inputEnd = 0;
// End of input
_closeInput();
// Should never return 0, so let's fail
if (count == 0) {
_reportBadReader(_inputBuffer.length);
}
}
return false;
}
/*
/**********************************************************************
/* Public API, data access
/**********************************************************************
*/
/**
* Method for accessing textual representation of the current event;
* if no current event (before first call to {@link #nextToken}, or
* after encountering end-of-input), returns null.
* Method can be called for any event.
*
* @throws JacksonException if there are general I/O or parse issues, including if the text is too large,
* see {@link tools.jackson.core.StreamReadConstraints.Builder#maxStringLength(int)}
*/
@Override
public final String getString() throws JacksonException
{
if (_currToken == JsonToken.VALUE_STRING) {
if (_tokenIncomplete) {
_tokenIncomplete = false;
_finishString(); // only strings can be incomplete
}
return _textBuffer.contentsAsString();
}
return _getText2(_currToken);
}
@Override
public int getString(Writer writer) throws JacksonException
{
final JsonToken t = _currToken;
try {
if (t == JsonToken.VALUE_STRING) {
if (_tokenIncomplete) {
_tokenIncomplete = false;
_finishString(); // only strings can be incomplete
}
return _textBuffer.contentsToWriter(writer);
}
if (t == JsonToken.PROPERTY_NAME) {
String n = _streamReadContext.currentName();
writer.write(n);
return n.length();
}
if (t != null) {
if (t.isNumeric()) {
return _textBuffer.contentsToWriter(writer);
}
char[] ch = t.asCharArray();
writer.write(ch);
return ch.length;
}
} catch (IOException e) {
throw _wrapIOFailure(e);
}
return 0;
}
// // // Let's override default impls for improved performance
@Override
public final String getValueAsString() throws JacksonException
{
if (_currToken == JsonToken.VALUE_STRING) {
if (_tokenIncomplete) {
_tokenIncomplete = false;
_finishString(); // only strings can be incomplete
}
return _textBuffer.contentsAsString();
}
if (_currToken == JsonToken.PROPERTY_NAME) {
return currentName();
}
return super.getValueAsString(null);
}
@Override
public final String getValueAsString(String defValue) throws JacksonException {
if (_currToken == JsonToken.VALUE_STRING) {
if (_tokenIncomplete) {
_tokenIncomplete = false;
_finishString(); // only strings can be incomplete
}
return _textBuffer.contentsAsString();
}
if (_currToken == JsonToken.PROPERTY_NAME) {
return currentName();
}
return super.getValueAsString(defValue);
}
protected final String _getText2(JsonToken t) throws JacksonException {
if (t == null) {
return null;
}
switch (t.id()) {
case ID_PROPERTY_NAME:
return _streamReadContext.currentName();
case ID_STRING:
// fall through
case ID_NUMBER_INT:
case ID_NUMBER_FLOAT:
return _textBuffer.contentsAsString();
default:
return t.asString();
}
}
@Override
public final char[] getStringCharacters() throws JacksonException
{
if (_currToken != null) { // null only before/after document
switch (_currToken.id()) {
case ID_PROPERTY_NAME:
return currentNameInBuffer();
case ID_STRING:
if (_tokenIncomplete) {
_tokenIncomplete = false;
_finishString(); // only strings can be incomplete
}
// fall through
case ID_NUMBER_INT:
case ID_NUMBER_FLOAT:
return _textBuffer.getTextBuffer();
default:
return _currToken.asCharArray();
}
}
return null;
}
@Override
public final int getStringLength() throws JacksonException
{
if (_currToken != null) { // null only before/after document
switch (_currToken.id()) {
case ID_PROPERTY_NAME:
return _streamReadContext.currentName().length();
case ID_STRING:
if (_tokenIncomplete) {
_tokenIncomplete = false;
_finishString(); // only strings can be incomplete
}
// fall through
case ID_NUMBER_INT:
case ID_NUMBER_FLOAT:
return _textBuffer.size();
default:
return _currToken.asCharArray().length;
}
}
return 0;
}
@Override
public final int getStringOffset() throws JacksonException
{
// Most have offset of 0, only some may have other values:
if (_currToken != null) {
switch (_currToken.id()) {
case ID_PROPERTY_NAME:
return 0;
case ID_STRING:
if (_tokenIncomplete) {
_tokenIncomplete = false;
_finishString(); // only strings can be incomplete
}
// fall through
case ID_NUMBER_INT:
case ID_NUMBER_FLOAT:
return _textBuffer.getTextOffset();
default:
}
}
return 0;
}
@Override
public byte[] getBinaryValue(Base64Variant b64variant) throws JacksonException
{
if ((_currToken == JsonToken.VALUE_EMBEDDED_OBJECT) && (_binaryValue != null)) {
return _binaryValue;
}
if (_currToken != JsonToken.VALUE_STRING) {
_reportError("Current token ("+_currToken+") not VALUE_STRING or VALUE_EMBEDDED_OBJECT, cannot access as binary");
}
// To ensure that we won't see inconsistent data, better clear up state
if (_tokenIncomplete) {
try {
_binaryValue = _decodeBase64(b64variant);
} catch (IllegalArgumentException iae) {
throw _constructReadException("Failed to decode VALUE_STRING as base64 (%s): %s",
b64variant, iae.getMessage());
}
// let's clear incomplete only now; allows for accessing other
// textual content in error cases
_tokenIncomplete = false;
} else { // may actually require conversion...
if (_binaryValue == null) {
@SuppressWarnings("resource")
ByteArrayBuilder builder = _getByteArrayBuilder();
_decodeBase64(getString(), builder, b64variant);
_binaryValue = builder.toByteArray();
}
}
return _binaryValue;
}
@Override
public int readBinaryValue(Base64Variant b64variant, OutputStream out) throws JacksonException
{
// if we have already read the token, just use whatever we may have
if (!_tokenIncomplete || _currToken != JsonToken.VALUE_STRING) {
byte[] b = getBinaryValue(b64variant);
try {
out.write(b);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
return b.length;
}
// otherwise do "real" incremental parsing...
byte[] buf = _ioContext.allocBase64Buffer();
try {
return _readBinary(b64variant, out, buf);
} finally {
_ioContext.releaseBase64Buffer(buf);
}
}
protected int _readBinary(Base64Variant b64variant, OutputStream out, byte[] buffer) throws JacksonException
{
int outputPtr = 0;
final int outputEnd = buffer.length - 3;
int outputCount = 0;
while (true) {
// first, we'll skip preceding white space, if any
char ch;
do {
if (_inputPtr >= _inputEnd) {
_loadMoreGuaranteed();
}
ch = _inputBuffer[_inputPtr++];
} while (ch <= INT_SPACE);
int bits = b64variant.decodeBase64Char(ch);
if (bits < 0) { // reached the end, fair and square?
if (ch == '"') {
break;
}
bits = _decodeBase64Escape(b64variant, ch, 0);
if (bits < 0) { // white space to skip
continue;
}
}
// enough room? If not, flush
if (outputPtr > outputEnd) {
outputCount += outputPtr;
try {
out.write(buffer, 0, outputPtr);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
outputPtr = 0;
}
int decodedData = bits;
// then second base64 char; can't get padding yet, nor ws
if (_inputPtr >= _inputEnd) {
_loadMoreGuaranteed();
}
ch = _inputBuffer[_inputPtr++];
bits = b64variant.decodeBase64Char(ch);
if (bits < 0) {
bits = _decodeBase64Escape(b64variant, ch, 1);
}
decodedData = (decodedData << 6) | bits;
// third base64 char; can be padding, but not ws
if (_inputPtr >= _inputEnd) {
_loadMoreGuaranteed();
}
ch = _inputBuffer[_inputPtr++];
bits = b64variant.decodeBase64Char(ch);
// First branch: can get padding (-> 1 byte)
if (bits < 0) {
if (bits != Base64Variant.BASE64_VALUE_PADDING) {
// could also just be missing padding
if (ch == '"') {
decodedData >>= 4;
buffer[outputPtr++] = (byte) decodedData;
if (b64variant.requiresPaddingOnRead()) {
--_inputPtr; // to keep parser state bit more consistent
_handleBase64MissingPadding(b64variant);
}
break;
}
bits = _decodeBase64Escape(b64variant, ch, 2);
}
if (bits == Base64Variant.BASE64_VALUE_PADDING) {
// Ok, must get padding
if (_inputPtr >= _inputEnd) {
_loadMoreGuaranteed();
}
ch = _inputBuffer[_inputPtr++];
if (!b64variant.usesPaddingChar(ch)) {
if (_decodeBase64Escape(b64variant, ch, 3) != Base64Variant.BASE64_VALUE_PADDING) {
_reportInvalidBase64Char(b64variant, ch, 3, "expected padding character '"+b64variant.getPaddingChar()+"'");
}
}
// Got 12 bits, only need 8, need to shift
decodedData >>= 4;
buffer[outputPtr++] = (byte) decodedData;
continue;
}
}
// Nope, 2 or 3 bytes
decodedData = (decodedData << 6) | bits;
// fourth and last base64 char; can be padding, but not ws
if (_inputPtr >= _inputEnd) {
_loadMoreGuaranteed();
}
ch = _inputBuffer[_inputPtr++];
bits = b64variant.decodeBase64Char(ch);
if (bits < 0) {
if (bits != Base64Variant.BASE64_VALUE_PADDING) {
// as per could also just be missing padding
if (ch == '"') {
decodedData >>= 2;
buffer[outputPtr++] = (byte) (decodedData >> 8);
buffer[outputPtr++] = (byte) decodedData;
if (b64variant.requiresPaddingOnRead()) {
--_inputPtr; // to keep parser state bit more consistent
_handleBase64MissingPadding(b64variant);
}
break;
}
bits = _decodeBase64Escape(b64variant, ch, 3);
}
if (bits == Base64Variant.BASE64_VALUE_PADDING) {
/* With padding we only get 2 bytes; but we have
* to shift it a bit so it is identical to triplet
* case with partial output.
* 3 chars gives 3x6 == 18 bits, of which 2 are
* dummies, need to discard:
*/
decodedData >>= 2;
buffer[outputPtr++] = (byte) (decodedData >> 8);
buffer[outputPtr++] = (byte) decodedData;
continue;
}
}
// otherwise, our triplet is now complete
decodedData = (decodedData << 6) | bits;
buffer[outputPtr++] = (byte) (decodedData >> 16);
buffer[outputPtr++] = (byte) (decodedData >> 8);
buffer[outputPtr++] = (byte) decodedData;
}
_tokenIncomplete = false;
if (outputPtr > 0) {
outputCount += outputPtr;
try {
out.write(buffer, 0, outputPtr);
} catch (IOException e) {
throw _wrapIOFailure(e);
}
}
return outputCount;
}
/*
/**********************************************************************
/* Public API, traversal
/**********************************************************************
*/
/**
* @return Next token from the stream, if any found, or null
* to indicate end-of-input
*/
@Override
public final JsonToken nextToken() throws JacksonException
{
/* First: Object Property names are special -- we will always tokenize
* (part of) value along with the property name to simplify
* state handling. If so, can and need to use secondary token:
*/
if (_currToken == JsonToken.PROPERTY_NAME) {
return _nextAfterName();
}
// But if we didn't already have a name, and (partially?) decode number,
// need to ensure no numeric information is leaked
_numTypesValid = NR_UNKNOWN;
if (_tokenIncomplete) {
_skipString(); // only strings can be partial
}
int i = _skipWSOrEnd();
if (i < 0) { // end-of-input
// Should actually close/release things
// like input source, symbol table and recyclable buffers now.
close();
return _updateTokenToNull();
}
// clear any data retained so far
_binaryValue = null;
// Closing scope?
if ((i | 0x20) == INT_RCURLY) { // ~ '}]'
_closeScope(i);
return _currToken;
}
// Nope: do we then expect a comma?
if (_streamReadContext.expectComma()) {
i = _skipComma(i);
// Was that a trailing comma?
if ((_formatReadFeatures & FEAT_MASK_TRAILING_COMMA) != 0) {
if ((i | 0x20) == INT_RCURLY) { // ~ '}]'
_closeScope(i);
return _currToken;
}
}
}
/* And should we now have a name? Always true for Object contexts, since
* the intermediate 'expect-value' state is never retained.
*/
boolean inObject = _streamReadContext.inObject();
if (inObject) {
// First, the property name itself:
_updateNameLocation();
String name = (i == INT_QUOTE) ? _parseName() : _handleOddName(i);
_streamReadContext.setCurrentName(name);
_updateToken(JsonToken.PROPERTY_NAME);
i = _skipColon();
}
_updateLocation();
// Ok: we must have a value... what is it?
JsonToken t;
switch (i) {
case '"':
_tokenIncomplete = true;
t = JsonToken.VALUE_STRING;
break;
case '[':
if (!inObject) {
createChildArrayContext(_tokenInputRow, _tokenInputCol);
}
t = JsonToken.START_ARRAY;
break;
case '{':
if (!inObject) {
createChildObjectContext(_tokenInputRow, _tokenInputCol);
}
t = JsonToken.START_OBJECT;
break;
case '}':
// Error: } is not valid at this point; valid closers have
// been handled earlier
_reportUnexpectedChar(i, "expected a value");
case 't':
_matchTrue();
t = JsonToken.VALUE_TRUE;
break;
case 'f':
_matchFalse();
t = JsonToken.VALUE_FALSE;
break;
case 'n':
_matchNull();
t = JsonToken.VALUE_NULL;
break;
case '-':
t = _parseSignedNumber(true);
break;
case '+':
if (isEnabled(JsonReadFeature.ALLOW_LEADING_PLUS_SIGN_FOR_NUMBERS)) {
t = _parseSignedNumber(false);
} else {
t = _handleOddValue(i);
}
break;
case '.': // [core#61]]
t = _parseFloatThatStartsWithPeriod();
break;
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
t = _parseUnsignedNumber(i);
break;
default:
t = _handleOddValue(i);
break;
}
if (inObject) {
_nextToken = t;
return _currToken;
}
return _updateToken(t);
}
private final JsonToken _nextAfterName() throws JacksonException
{
_nameCopied = false; // need to invalidate if it was copied
JsonToken t = _nextToken;
_nextToken = null;
// !!! 16-Nov-2015, tatu: TODO: fix [databind#37], copy next location to current here
// Also: may need to start new context?
if (t == JsonToken.START_ARRAY) {
createChildArrayContext(_tokenInputRow, _tokenInputCol);
} else if (t == JsonToken.START_OBJECT) {
createChildObjectContext(_tokenInputRow, _tokenInputCol);
}
return _updateToken(t);
}
@Override
public void finishToken() throws JacksonException {
if (_tokenIncomplete) {
_tokenIncomplete = false;
_finishString(); // only strings can be incomplete
}
}
/*
/**********************************************************************
/* Public API, nextXxx() overrides
/**********************************************************************
*/
@Override
public boolean nextName(SerializableString sstr) throws JacksonException
{
// // // Note: most of code below is copied from nextToken()
_numTypesValid = NR_UNKNOWN;
if (_currToken == JsonToken.PROPERTY_NAME) {
_nextAfterName();
return false;
}
if (_tokenIncomplete) {
_skipString();
}
int i = _skipWSOrEnd();
if (i < 0) {
close();
_updateTokenToNull();
return false;
}
_binaryValue = null;
// Closing scope?
if ((i | 0x20) == INT_RCURLY) { // ~ '}]'
_closeScope(i);
return false;
}
if (_streamReadContext.expectComma()) {
i = _skipComma(i);
// Was that a trailing comma?
if ((_formatReadFeatures & FEAT_MASK_TRAILING_COMMA) != 0) {
if ((i | 0x20) == INT_RCURLY) { // ~ '}]'
_closeScope(i);
return false;
}
}
}
if (!_streamReadContext.inObject()) {
_updateLocation();
_nextTokenNotInObject(i);
return false;
}
_updateNameLocation();
if (i == INT_QUOTE) {
// when doing literal match, must consider escaping:
char[] nameChars = sstr.asQuotedChars();
final int len = nameChars.length;
// Require 4 more bytes for faster skipping of colon that follows name
if ((_inputPtr + len + 4) < _inputEnd) { // maybe...
// first check length match by
final int end = _inputPtr+len;
if (_inputBuffer[end] == '"') {
int offset = 0;
int ptr = _inputPtr;
while (true) {
if (ptr == end) { // yes, match!
_streamReadContext.setCurrentName(sstr.getValue());
_isNextTokenNameYes(_skipColonFast(ptr+1));
return true;
}
if (nameChars[offset] != _inputBuffer[ptr]) {
break;
}
++offset;
++ptr;
}
}
}
}
return _isNextTokenNameMaybe(i, sstr.getValue());
}
@Override
public String nextName() throws JacksonException
{
// // // Note: this is almost a verbatim copy of nextToken() (minus comments)
_numTypesValid = NR_UNKNOWN;
if (_currToken == JsonToken.PROPERTY_NAME) {
_nextAfterName();
return null;
}
if (_tokenIncomplete) {
_skipString();
}
int i = _skipWSOrEnd();
if (i < 0) {
close();
_updateTokenToNull();
return null;
}
_binaryValue = null;
if ((i | 0x20) == INT_RCURLY) { // ~ '}]'
_closeScope(i);
return null;
}
if (_streamReadContext.expectComma()) {
i = _skipComma(i);
if ((_formatReadFeatures & FEAT_MASK_TRAILING_COMMA) != 0) {
if ((i | 0x20) == INT_RCURLY) { // ~ '}]'
_closeScope(i);
return null;
}
}
}
if (!_streamReadContext.inObject()) {
_updateLocation();
_nextTokenNotInObject(i);
return null;
}
_updateNameLocation();
String name = (i == INT_QUOTE) ? _parseName() : _handleOddName(i);
_streamReadContext.setCurrentName(name);
_updateToken(JsonToken.PROPERTY_NAME);
i = _skipColon();
_updateLocation();
if (i == INT_QUOTE) {
_tokenIncomplete = true;
_nextToken = JsonToken.VALUE_STRING;
return name;
}
// Ok: we must have a value... what is it?
JsonToken t;
switch (i) {
case '-':
t = _parseSignedNumber(true);
break;
case '+':
if (isEnabled(JsonReadFeature.ALLOW_LEADING_PLUS_SIGN_FOR_NUMBERS)) {
t = _parseSignedNumber(false);
} else {
t = _handleOddValue(i);
}
break;
case '.': // [core#61]]
t = _parseFloatThatStartsWithPeriod();
break;
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
t = _parseUnsignedNumber(i);
break;
case 'f':
_matchFalse();
t = JsonToken.VALUE_FALSE;
break;
case 'n':
_matchNull();
t = JsonToken.VALUE_NULL;
break;
case 't':
_matchTrue();
t = JsonToken.VALUE_TRUE;
break;
case '[':
t = JsonToken.START_ARRAY;
break;
case '{':
t = JsonToken.START_OBJECT;
break;
default:
t = _handleOddValue(i);
break;
}
_nextToken = t;
return name;
}
private final void _isNextTokenNameYes(int i) throws JacksonException
{
_updateToken(JsonToken.PROPERTY_NAME);
_updateLocation();
switch (i) {
case '"':
_tokenIncomplete = true;
_nextToken = JsonToken.VALUE_STRING;
return;
case '[':
_nextToken = JsonToken.START_ARRAY;
return;
case '{':
_nextToken = JsonToken.START_OBJECT;
return;
case 't':
_matchToken("true", 1);
_nextToken = JsonToken.VALUE_TRUE;
return;
case 'f':
_matchToken("false", 1);
_nextToken = JsonToken.VALUE_FALSE;
return;
case 'n':
_matchToken("null", 1);
_nextToken = JsonToken.VALUE_NULL;
return;
case '-':
_nextToken = _parseSignedNumber(true);
return;
case '+':
if (isEnabled(JsonReadFeature.ALLOW_LEADING_PLUS_SIGN_FOR_NUMBERS)) {
_nextToken = _parseSignedNumber(false);
} else {
_nextToken = _handleOddValue(i);
}
return;
case '.': // [core#61]]
_nextToken = _parseFloatThatStartsWithPeriod();
return;
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
_nextToken = _parseUnsignedNumber(i);
return;
}
_nextToken = _handleOddValue(i);
}
/**
 * Helper for {@code nextName(SerializableString)} for the case where the
 * name still needs decoding: decodes the name, skips the colon, pre-decodes
 * the start of the following value into {@code _nextToken}, and reports
 * whether the decoded name equals {@code nameToMatch}.
 *
 * @param i First character of the property name (quote, or odd-name start)
 * @param nameToMatch Name the caller is trying to match against
 * @return {@code true} if the decoded name equals {@code nameToMatch}
 */
protected boolean _isNextTokenNameMaybe(int i, String nameToMatch) throws JacksonException
{
    // // // and this is back to standard nextToken()
    String name = (i == INT_QUOTE) ? _parseName() : _handleOddName(i);
    _streamReadContext.setCurrentName(name);
    _updateToken(JsonToken.PROPERTY_NAME);
    i = _skipColon();
    _updateLocation();
    if (i == INT_QUOTE) {
        // String value: defer actual decoding until the value is accessed
        _tokenIncomplete = true;
        _nextToken = JsonToken.VALUE_STRING;
        return nameToMatch.equals(name);
    }
    // Ok: we must have a value... what is it?
    JsonToken t;
    switch (i) {
    case '-':
        t = _parseSignedNumber(true);
        break;
    case '+':
        // leading plus sign is non-standard JSON; only allowed via feature
        if (isEnabled(JsonReadFeature.ALLOW_LEADING_PLUS_SIGN_FOR_NUMBERS)) {
            t = _parseSignedNumber(false);
        } else {
            t = _handleOddValue(i);
        }
        break;
    case '.': // [core#61]
        t = _parseFloatThatStartsWithPeriod();
        break;
    case '0':
    case '1':
    case '2':
    case '3':
    case '4':
    case '5':
    case '6':
    case '7':
    case '8':
    case '9':
        t = _parseUnsignedNumber(i);
        break;
    case 'f':
        _matchFalse();
        t = JsonToken.VALUE_FALSE;
        break;
    case 'n':
        _matchNull();
        t = JsonToken.VALUE_NULL;
        break;
    case 't':
        _matchTrue();
        t = JsonToken.VALUE_TRUE;
        break;
    case '[':
        t = JsonToken.START_ARRAY;
        break;
    case '{':
        t = JsonToken.START_OBJECT;
        break;
    default:
        t = _handleOddValue(i);
        break;
    }
    _nextToken = t;
    return nameToMatch.equals(name);
}
/**
 * Decodes the next token when the current context is NOT an Object (so no
 * property name is expected): i.e. a root-level or Array-element value.
 *
 * @param i First character of the value to decode
 * @return Token decoded (also set as current token)
 */
private final JsonToken _nextTokenNotInObject(int i) throws JacksonException
{
    if (i == INT_QUOTE) {
        // String value: defer actual decoding until the value is accessed
        _tokenIncomplete = true;
        return _updateToken(JsonToken.VALUE_STRING);
    }
    switch (i) {
    case '[':
        createChildArrayContext(_tokenInputRow, _tokenInputCol);
        return _updateToken(JsonToken.START_ARRAY);
    case '{':
        createChildObjectContext(_tokenInputRow, _tokenInputCol);
        return _updateToken(JsonToken.START_OBJECT);
    case 't':
        _matchToken("true", 1);
        return _updateToken(JsonToken.VALUE_TRUE);
    case 'f':
        _matchToken("false", 1);
        return _updateToken(JsonToken.VALUE_FALSE);
    case 'n':
        _matchToken("null", 1);
        return _updateToken(JsonToken.VALUE_NULL);
    case '-':
        return _updateToken(_parseSignedNumber(true));
    /* Should we have separate handling for plus? Although
     * it is not allowed per se, it may be erroneously used,
     * and could be indicated by a more specific error message.
     */
    case '.': // [core#61]
        return _updateToken(_parseFloatThatStartsWithPeriod());
    case '0':
    case '1':
    case '2':
    case '3':
    case '4':
    case '5':
    case '6':
    case '7':
    case '8':
    case '9':
        return _updateToken(_parseUnsignedNumber(i));
    /*
     * This check proceeds only if the Feature.ALLOW_MISSING_VALUES is enabled
     * The Check is for missing values. In case of missing values in an array, the next token will be either ',' or ']'.
     * This case, decrements the already incremented _inputPtr in the buffer in case of comma(,)
     * so that the existing flow goes back to checking the next token which will be comma again and
     * it continues the parsing.
     * Also the case returns NULL as current token in case of ',' or ']'.
     */
    // case ']': // 11-May-2020, tatu: related to [core#616], this should never be reached
    case ',':
        // 11-May-2020, tatu: [core#616] No commas in root level
        if (!_streamReadContext.inRoot()) {
            if ((_formatReadFeatures & FEAT_MASK_ALLOW_MISSING) != 0) {
                --_inputPtr;
                return _updateToken(JsonToken.VALUE_NULL);
            }
        }
    }
    // Not a standard value start; may still be decodable in non-standard modes
    return _updateToken(_handleOddValue(i));
}
// note: identical to one in UTF8StreamJsonParser
/**
 * Optimized alternative to calling {@code nextToken()} followed by
 * {@code getString()}: returns text of the next token if it is a
 * {@link JsonToken#VALUE_STRING}, {@code null} otherwise.
 *
 * @return String value of next token, if a String token; {@code null} otherwise
 */
@Override
public final String nextStringValue() throws JacksonException
{
    if (_currToken == JsonToken.PROPERTY_NAME) { // mostly copied from '_nextAfterName'
        _nameCopied = false;
        JsonToken t = _nextToken;
        _nextToken = null;
        _updateToken(t);
        if (t == JsonToken.VALUE_STRING) {
            if (_tokenIncomplete) {
                // value was lazily deferred; finish decoding now
                _tokenIncomplete = false;
                _finishString();
            }
            return _textBuffer.contentsAsString();
        }
        if (t == JsonToken.START_ARRAY) {
            createChildArrayContext(_tokenInputRow, _tokenInputCol);
        } else if (t == JsonToken.START_OBJECT) {
            createChildObjectContext(_tokenInputRow, _tokenInputCol);
        }
        return null;
    }
    // !!! TODO: optimize this case as well
    return (nextToken() == JsonToken.VALUE_STRING) ? getString() : null;
}
// note: identical to one in Utf8StreamParser
/**
 * Optimized alternative to calling {@code nextToken()} followed by
 * {@code getIntValue()}: returns the int value of the next token if it is
 * {@link JsonToken#VALUE_NUMBER_INT}, {@code defaultValue} otherwise.
 *
 * @param defaultValue Value to return if next token is not an integer number
 * @return int value of the next token, or {@code defaultValue}
 */
@Override
public final int nextIntValue(int defaultValue) throws JacksonException
{
    if (_currToken == JsonToken.PROPERTY_NAME) {
        _nameCopied = false;
        JsonToken t = _nextToken;
        _nextToken = null;
        _updateToken(t);
        if (t == JsonToken.VALUE_NUMBER_INT) {
            return getIntValue();
        }
        if (t == JsonToken.START_ARRAY) {
            createChildArrayContext(_tokenInputRow, _tokenInputCol);
        } else if (t == JsonToken.START_OBJECT) {
            createChildObjectContext(_tokenInputRow, _tokenInputCol);
        }
        return defaultValue;
    }
    // !!! TODO: optimize this case as well
    return (nextToken() == JsonToken.VALUE_NUMBER_INT) ? getIntValue() : defaultValue;
}
// note: identical to one in Utf8StreamParser
/**
 * Optimized alternative to calling {@code nextToken()} followed by
 * {@code getLongValue()}: returns the long value of the next token if it is
 * {@link JsonToken#VALUE_NUMBER_INT}, {@code defaultValue} otherwise.
 *
 * @param defaultValue Value to return if next token is not an integer number
 * @return long value of the next token, or {@code defaultValue}
 */
@Override
public final long nextLongValue(long defaultValue) throws JacksonException
{
    if (_currToken == JsonToken.PROPERTY_NAME) { // mostly copied from '_nextAfterName'
        _nameCopied = false;
        JsonToken t = _nextToken;
        _nextToken = null;
        _updateToken(t);
        if (t == JsonToken.VALUE_NUMBER_INT) {
            return getLongValue();
        }
        // Use context-creation helpers, consistent with sibling methods
        // (nextIntValue(), nextBooleanValue(), nextStringValue())
        if (t == JsonToken.START_ARRAY) {
            createChildArrayContext(_tokenInputRow, _tokenInputCol);
        } else if (t == JsonToken.START_OBJECT) {
            createChildObjectContext(_tokenInputRow, _tokenInputCol);
        }
        return defaultValue;
    }
    // !!! TODO: optimize this case as well
    return (nextToken() == JsonToken.VALUE_NUMBER_INT) ? getLongValue() : defaultValue;
}
// note: identical to one in UTF8StreamJsonParser
/**
 * Optimized alternative to calling {@code nextToken()} followed by
 * {@code getBooleanValue()}: returns {@link Boolean#TRUE} or
 * {@link Boolean#FALSE} if the next token is a boolean value, and
 * {@code null} otherwise.
 *
 * @return Boolean value of next token, if a boolean; {@code null} otherwise
 */
@Override
public final Boolean nextBooleanValue() throws JacksonException
{
    if (_currToken == JsonToken.PROPERTY_NAME) { // mostly copied from '_nextAfterName'
        _nameCopied = false;
        JsonToken t = _nextToken;
        _nextToken = null;
        _updateToken(t);
        if (t == JsonToken.VALUE_TRUE) {
            return Boolean.TRUE;
        }
        if (t == JsonToken.VALUE_FALSE) {
            return Boolean.FALSE;
        }
        if (t == JsonToken.START_ARRAY) {
            createChildArrayContext(_tokenInputRow, _tokenInputCol);
        } else if (t == JsonToken.START_OBJECT) {
            createChildObjectContext(_tokenInputRow, _tokenInputCol);
        }
        return null;
    }
    JsonToken t = nextToken();
    if (t != null) {
        int id = t.id();
        if (id == ID_TRUE) return Boolean.TRUE;
        if (id == ID_FALSE) return Boolean.FALSE;
    }
    return null;
}
/*
/**********************************************************************
/* Internal methods, number parsing
/**********************************************************************
*/
// NOTE: number starts with '.' character WITHOUT leading sign
//
// @since 3.1
protected final JsonToken _parseFloatThatStartsWithPeriod()
    throws JacksonException
{
    // [core#611]: a leading decimal point (".5") is non-standard JSON,
    // only accepted when the corresponding read feature is enabled
    if (isEnabled(JsonReadFeature.ALLOW_LEADING_DECIMAL_POINT_FOR_NUMBERS)) {
        final int startPtr = _inputPtr - 1; // back up so number text includes the '.'
        return _parseFloat(INT_PERIOD, startPtr, _inputPtr, false, 0);
    }
    // Feature disabled: treat as unexpected ("odd") value start
    return _handleOddValue('.');
}
/**
 * Initial parsing method for number values. It needs to be able
 * to parse enough input to be able to determine whether the
 * value is to be considered a simple integer value, or a more
 * generic decimal value: latter of which needs to be expressed
 * as a floating point number. The basic rule is that if the number
 * has no fractional or exponential part, it is an integer; otherwise
 * a floating point number.
 *<p>
 * Because much of input has to be processed in any case, no partial
 * parsing is done: all input text will be stored for further
 * processing. However, actual numeric value conversion will be
 * deferred, since it is usually the most complicated and costliest
 * part of processing.
 *
 * @param ch The first non-null digit character of the number to parse
 *
 * @return Type of token decoded, usually {@link JsonToken#VALUE_NUMBER_INT}
 *   or {@link JsonToken#VALUE_NUMBER_FLOAT}
 *
 * @throws JacksonIOException for low-level read issues
 * @throws StreamReadException for decoding problems
 */
protected final JsonToken _parseUnsignedNumber(int ch) throws JacksonException
{
    /* Although we will always be complete with respect to textual
     * representation (that is, all characters will be parsed),
     * actual conversion to a number is deferred. Thus, need to
     * note that no representations are valid yet
     */
    int ptr = _inputPtr;
    int startPtr = ptr-1; // to include digit already read
    final int inputLen = _inputEnd;
    // One special case, leading zero(es):
    if (ch == INT_0) {
        // leading zero needs verification; handled by the slower path
        return _parseNumber2(false, startPtr);
    }
    /* First, let's see if the whole number is contained within
     * the input buffer unsplit. This should be the common case;
     * and to simplify processing, we will just reparse contents
     * in the alternative case (number split on buffer boundary)
     */
    int intLen = 1; // already got one
    // First let's get the obligatory integer part:
    int_loop:
    while (true) {
        if (ptr >= inputLen) {
            // number crosses buffer boundary: fall back to slower copying path
            _inputPtr = startPtr;
            return _parseNumber2(false, startPtr);
        }
        ch = _inputBuffer[ptr++];
        if (ch < INT_0 || ch > INT_9) {
            break int_loop;
        }
        ++intLen;
    }
    if (ch == INT_PERIOD || (ch | 0x20) == INT_e) { // ~ '.eE'
        _inputPtr = ptr;
        return _parseFloat(ch, startPtr, ptr, false, intLen);
    }
    // Got it all: let's add to text buffer for parsing, access
    --ptr; // need to push back following separator
    _inputPtr = ptr;
    // As per #105, need separating space between root values; check here
    if (_streamReadContext.inRoot()) {
        _verifyRootSpace(ch);
    }
    int len = ptr-startPtr;
    // share the input buffer directly; no copy needed in this fast path
    _textBuffer.resetWithShared(_inputBuffer, startPtr, len);
    return resetInt(false, intLen);
}
/**
 * Continues number decoding for the fractional and/or exponent parts, after
 * the integer part has been scanned in the shared input buffer. Falls back
 * to {@link #_parseNumber2} if the number crosses the buffer boundary.
 *
 * @param ch Current character (the '.' or 'e'/'E' that ended the integer part)
 * @param startPtr Offset of the first character of the number in the input buffer
 * @param ptr Current input offset, just past {@code ch}
 * @param neg Whether a leading minus sign was seen
 * @param intLen Number of digits in the integer part seen so far
 */
private final JsonToken _parseFloat(int ch, int startPtr, int ptr, boolean neg, int intLen)
    throws JacksonException
{
    final int inputEnd = _inputEnd;
    int fractLen = 0;
    // And then see if we get other parts
    if (ch == '.') { // yes, fraction
        fract_loop:
        while (true) {
            if (ptr >= inputEnd) {
                // crosses buffer boundary: fall back to slower copying path
                return _parseNumber2(neg, startPtr);
            }
            ch = _inputBuffer[ptr++];
            if (ch < INT_0 || ch > INT_9) {
                break fract_loop;
            }
            ++fractLen;
        }
        // must be followed by sequence of ints, one minimum
        if (fractLen == 0) {
            if (!isEnabled(JsonReadFeature.ALLOW_TRAILING_DECIMAL_POINT_FOR_NUMBERS)) {
                _reportUnexpectedNumberChar(ch, "Decimal point not followed by a digit");
            }
        }
    }
    int expLen = 0;
    if ((ch | 0x20) == INT_e) { // ~ 'eE' and/or exponent
        if (ptr >= inputEnd) {
            _inputPtr = startPtr;
            return _parseNumber2(neg, startPtr);
        }
        // Sign indicator?
        ch = _inputBuffer[ptr++];
        if (ch == INT_MINUS || ch == INT_PLUS) { // yup, skip for now
            if (ptr >= inputEnd) {
                _inputPtr = startPtr;
                return _parseNumber2(neg, startPtr);
            }
            ch = _inputBuffer[ptr++];
        }
        while (ch <= INT_9 && ch >= INT_0) {
            ++expLen;
            if (ptr >= inputEnd) {
                _inputPtr = startPtr;
                return _parseNumber2(neg, startPtr);
            }
            ch = _inputBuffer[ptr++];
        }
        // must be followed by sequence of ints, one minimum
        if (expLen == 0) {
            _reportUnexpectedNumberChar(ch, "Exponent indicator not followed by a digit");
        }
    }
    --ptr; // need to push back following separator
    _inputPtr = ptr;
    // As per #105, need separating space between root values; check here
    if (_streamReadContext.inRoot()) {
        _verifyRootSpace(ch);
    }
    int len = ptr-startPtr;
    // share the input buffer directly; no copy needed in this fast path
    _textBuffer.resetWithShared(_inputBuffer, startPtr, len);
    // And there we have it!
    return resetFloat(neg, intLen, fractLen, expLen);
}
/**
 * Parses a number value whose sign character ('-', or '+' when allowed)
 * has already been consumed. Fast path works on the shared input buffer;
 * falls back to {@link #_parseNumber2} at buffer boundary or for numbers
 * with a leading zero.
 *
 * @param negative Whether the consumed sign was a minus sign
 */
private final JsonToken _parseSignedNumber(final boolean negative) throws JacksonException
{
    int ptr = _inputPtr;
    // [core#784]: Include sign character ('+' or '-') in textual representation
    int startPtr = ptr - 1; // to include sign already read
    final int inputEnd = _inputEnd;
    if (ptr >= inputEnd) {
        return _parseNumber2(negative, startPtr);
    }
    int ch = _inputBuffer[ptr++];
    // First check: must have a digit to follow minus sign
    if (ch > INT_9 || ch < INT_0) {
        _inputPtr = ptr;
        if (ch == INT_PERIOD) {
            // [core#611]: allow optionally leading decimal point
            if (!isEnabled(JsonReadFeature.ALLOW_LEADING_DECIMAL_POINT_FOR_NUMBERS)) {
                return _handleOddValue('.');
            }
            return _parseFloat(INT_PERIOD, startPtr, _inputPtr, negative, 0);
        }
        return _handleInvalidNumberStart(ch, negative, true);
    }
    // One special case, leading zero(es):
    if (ch == INT_0) {
        // leading zero needs verification; handled by the slower path
        return _parseNumber2(negative, startPtr);
    }
    int intLen = 1; // already got one
    // First let's get the obligatory integer part:
    int_loop:
    while (true) {
        if (ptr >= inputEnd) {
            // crosses buffer boundary: fall back to slower copying path
            return _parseNumber2(negative, startPtr);
        }
        ch = _inputBuffer[ptr++];
        if (ch < INT_0 || ch > INT_9) {
            break int_loop;
        }
        ++intLen;
    }
    if (ch == INT_PERIOD || (ch | 0x20) == INT_e) { // ~ '.eE'
        _inputPtr = ptr;
        return _parseFloat(ch, startPtr, ptr, negative, intLen);
    }
    --ptr; // push back the separator that ended the number
    _inputPtr = ptr;
    // As per #105, need separating space between root values; check here
    if (_streamReadContext.inRoot()) {
        _verifyRootSpace(ch);
    }
    int len = ptr-startPtr;
    // share the input buffer directly; no copy needed in this fast path
    _textBuffer.resetWithShared(_inputBuffer, startPtr, len);
    return resetInt(negative, intLen);
}
/**
 * Method called to parse a number, when the primary parse
 * method has failed to parse it, due to it being split on
 * buffer boundary. As a result code is very similar, except
 * that it has to explicitly copy contents to the text buffer
 * instead of just sharing the main input buffer.
 *
 * @param neg Whether number being decoded is negative or not
 * @param startPtr Offset in input buffer for the next character of content
 *
 * @return Type of token decoded, usually {@link JsonToken#VALUE_NUMBER_INT}
 *   or {@link JsonToken#VALUE_NUMBER_FLOAT}
 *
 * @throws JacksonIOException for low-level read issues
 * @throws StreamReadException for decoding problems
 */
private final JsonToken _parseNumber2(boolean neg, int startPtr) throws JacksonException
{
    // Check if there's a sign character at startPtr
    boolean hasSign = neg || ((startPtr < _inputEnd)
            && _inputBuffer[startPtr] == '+');
    _inputPtr = hasSign ? (startPtr + 1) : startPtr;
    char[] outBuf = _textBuffer.emptyAndGetCurrentSegment();
    int outPtr = 0;
    // Need to prepend sign?
    if (hasSign) {
        outBuf[outPtr++] = neg ? '-' : '+'; // Include actual sign ('+' or '-')
    }
    // This is the place to do leading-zero check(s) too:
    int intLen = 0;
    char c = (_inputPtr < _inputEnd) ? _inputBuffer[_inputPtr++]
            : getNextChar("No digit following sign", JsonToken.VALUE_NUMBER_INT);
    if (c == '0') {
        // may collapse redundant leading zeroes (or fail, if not allowed)
        c = _verifyNoLeadingZeroes();
    }
    boolean eof = false;
    // Ok, first the obligatory integer part:
    int_loop:
    while (c >= '0' && c <= '9') {
        ++intLen;
        if (outPtr >= outBuf.length) {
            outBuf = _textBuffer.finishCurrentSegment();
            outPtr = 0;
        }
        outBuf[outPtr++] = c;
        if (_inputPtr >= _inputEnd && !_loadMore()) {
            // EOF is legal for main level int values
            c = CHAR_NULL;
            eof = true;
            break int_loop;
        }
        c = _inputBuffer[_inputPtr++];
    }
    // Also, integer part is not optional
    if (intLen == 0) {
        // [core#611]: allow optionally leading decimal point
        if ((c != '.') || !isEnabled(JsonReadFeature.ALLOW_LEADING_DECIMAL_POINT_FOR_NUMBERS)) {
            return _handleInvalidNumberStart(c, neg);
        }
    }
    int fractLen = -1;
    // And then see if we get other parts
    if (c == '.') { // yes, fraction
        fractLen = 0;
        if (outPtr >= outBuf.length) {
            outBuf = _textBuffer.finishCurrentSegment();
            outPtr = 0;
        }
        outBuf[outPtr++] = c;
        fract_loop:
        while (true) {
            if (_inputPtr >= _inputEnd && !_loadMore()) {
                eof = true;
                break fract_loop;
            }
            c = _inputBuffer[_inputPtr++];
            if (c < INT_0 || c > INT_9) {
                break fract_loop;
            }
            ++fractLen;
            if (outPtr >= outBuf.length) {
                outBuf = _textBuffer.finishCurrentSegment();
                outPtr = 0;
            }
            outBuf[outPtr++] = c;
        }
        // must be followed by sequence of ints, one minimum
        if (fractLen == 0) {
            if (!isEnabled(JsonReadFeature.ALLOW_TRAILING_DECIMAL_POINT_FOR_NUMBERS)) {
                _reportUnexpectedNumberChar(c, "Decimal point not followed by a digit");
            }
        }
    }
    int expLen = -1;
    if ((c | 0x20) == INT_e) { // ~ 'eE' exponent?
        expLen = 0;
        if (outPtr >= outBuf.length) {
            outBuf = _textBuffer.finishCurrentSegment();
            outPtr = 0;
        }
        outBuf[outPtr++] = c;
        // Not optional, can require that we get one more char
        c = (_inputPtr < _inputEnd) ? _inputBuffer[_inputPtr++]
                : getNextChar("expected a digit for number exponent", JsonToken.VALUE_NUMBER_FLOAT);
        // Sign indicator?
        if (c == '-' || c == '+') {
            if (outPtr >= outBuf.length) {
                outBuf = _textBuffer.finishCurrentSegment();
                outPtr = 0;
            }
            outBuf[outPtr++] = c;
            // Likewise, non optional:
            c = (_inputPtr < _inputEnd) ? _inputBuffer[_inputPtr++]
                    : getNextChar("expected a digit for number exponent", JsonToken.VALUE_NUMBER_FLOAT);
        }
        exp_loop:
        while (c <= INT_9 && c >= INT_0) {
            ++expLen;
            if (outPtr >= outBuf.length) {
                outBuf = _textBuffer.finishCurrentSegment();
                outPtr = 0;
            }
            outBuf[outPtr++] = c;
            if (_inputPtr >= _inputEnd && !_loadMore()) {
                eof = true;
                break exp_loop;
            }
            c = _inputBuffer[_inputPtr++];
        }
        // must be followed by sequence of ints, one minimum
        if (expLen == 0) {
            _reportUnexpectedNumberChar(c, "Exponent indicator not followed by a digit");
        }
    }
    // Ok; unless we hit end-of-input, need to push last char read back
    if (!eof) {
        --_inputPtr;
        if (_streamReadContext.inRoot()) {
            _verifyRootSpace(c);
        }
    }
    _textBuffer.setCurrentLength(outPtr);
    // And there we have it!
    // 26-Jun-2022, tatu: Careful here, as non-standard numbers can
    //   cause surprises - cannot use plain "reset()" but apply diff logic
    if (fractLen < 0 && expLen < 0) { // integer
        return resetInt(neg, intLen);
    }
    return resetFloat(neg, intLen, fractLen, expLen);
}
// Method called when we have seen one zero, and want to ensure
// it is not followed by another (which would be a redundant leading zero)
private final char _verifyNoLeadingZeroes() throws JacksonException
{
    // Common fast case: next buffered character is not a digit, so the
    // single zero we already consumed is the whole integer part.
    if (_inputPtr < _inputEnd) {
        final char next = _inputBuffer[_inputPtr];
        final boolean digitFollows = (next >= '0') && (next <= '9');
        if (!digitFollows) {
            return '0';
        }
    }
    // Buffer boundary, or an actual extra digit: handle via slower path
    return _verifyNLZ2();
}
/**
 * Slower path of leading-zero verification: handles buffer boundary and
 * actual extra leading zeroes (allowed only if
 * {@code JsonReadFeature.ALLOW_LEADING_ZEROS_FOR_NUMBERS} is enabled).
 *
 * @return Character to use as the first digit of the number
 */
private char _verifyNLZ2() throws JacksonException
{
    if (_inputPtr >= _inputEnd && !_loadMore()) {
        // end of input: lone zero stands as-is
        return '0';
    }
    char ch = _inputBuffer[_inputPtr];
    if (ch < '0' || ch > '9') {
        return '0';
    }
    if (!isEnabled(JsonReadFeature.ALLOW_LEADING_ZEROS_FOR_NUMBERS)) {
        _reportInvalidNumber("Leading zeroes not allowed");
    }
    // if so, just need to skip either all zeroes (if followed by number); or all but one (if non-number)
    ++_inputPtr; // Leading zero to be skipped
    if (ch == INT_0) {
        while (_inputPtr < _inputEnd || _loadMore()) {
            ch = _inputBuffer[_inputPtr];
            if (ch < '0' || ch > '9') { // followed by non-number; retain one zero
                return '0';
            }
            ++_inputPtr; // skip previous zero
            if (ch != '0') { // followed by other number; return
                break;
            }
        }
    }
    return ch;
}
// Method called if expected numeric value (due to leading sign) does not
// look like a number
/**
 * Convenience overload of
 * {@link #_handleInvalidNumberStart(int, boolean, boolean)} for the case
 * where no explicit sign character was seen.
 */
protected JsonToken _handleInvalidNumberStart(int ch, boolean negative)
    throws JacksonException
{
    return _handleInvalidNumberStart(ch, negative, false);
}
/**
 * Called when a sign character was seen but what follows does not look like
 * a number: may decode non-standard tokens like {@code -INF} / {@code +Infinity}
 * (if {@code ALLOW_NON_NUMERIC_NUMBERS} is enabled), otherwise reports an error.
 *
 * @param ch Character that followed the sign
 * @param negative Whether the sign seen was a minus
 * @param hasSign Whether an explicit sign character was consumed
 * @return Token for a non-standard "number", if decoding succeeds (otherwise throws)
 */
protected JsonToken _handleInvalidNumberStart(int ch, final boolean negative, final boolean hasSign)
    throws JacksonException
{
    if (ch == 'I') {
        if (_inputPtr >= _inputEnd) {
            if (!_loadMore()) {
                _reportInvalidEOFInValue(JsonToken.VALUE_NUMBER_INT);
            }
        }
        ch = _inputBuffer[_inputPtr++];
        if (ch == 'N') {
            // "INF" variant; 3 chars of the match already consumed
            String match = negative ? "-INF" :"+INF";
            _matchToken(match, 3);
            if (isEnabled(JsonReadFeature.ALLOW_NON_NUMERIC_NUMBERS)) {
                return resetAsNaN(match, negative ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY);
            }
            _reportError("Non-standard token '"+match+"': enable `JsonReadFeature.ALLOW_NON_NUMERIC_NUMBERS` to allow");
        } else if (ch == 'n') {
            // "Infinity" variant; 3 chars of the match already consumed
            String match = negative ? "-Infinity" :"+Infinity";
            _matchToken(match, 3);
            if (isEnabled(JsonReadFeature.ALLOW_NON_NUMERIC_NUMBERS)) {
                return resetAsNaN(match, negative ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY);
            }
            _reportError("Non-standard token '"+match+"': enable `JsonReadFeature.ALLOW_NON_NUMERIC_NUMBERS` to allow");
        }
    }
    if (!isEnabled(JsonReadFeature.ALLOW_LEADING_PLUS_SIGN_FOR_NUMBERS) && hasSign && !negative) {
        _reportUnexpectedNumberChar('+', "JSON spec does not allow numbers to have plus signs: enable `JsonReadFeature.ALLOW_LEADING_PLUS_SIGN_FOR_NUMBERS` to allow");
    }
    final String message = negative ?
            "expected digit (0-9) to follow minus sign, for valid numeric value" :
            "expected digit (0-9) for valid numeric value";
    _reportUnexpectedNumberChar(ch, message);
    return null; // never gets here, above methods throw
}
/**
 * Method called to ensure that a root-value is followed by a space
 * token.
 *<p>
 * NOTE: caller MUST ensure there is at least one character available;
 * and that input pointer is AT given char (not past)
 *
 * @param ch First character of likely white space to skip
 *
 * @throws JacksonIOException for low-level read issues
 * @throws StreamReadException for decoding problems
 */
private final void _verifyRootSpace(int ch) throws JacksonException
{
    // caller had pushed it back, before calling; reset
    ++_inputPtr;
    switch (ch) {
    case ' ':
    case '\t':
        return;
    case '\r':
        // 29-Oct-2022, tatu: [core#834] requires change here, we MUST NOT
        //    force a read. As such let's simply push back the \r without
        //    further ado; it is enough to know there is valid WS separating
        //    NOTE: may need to revisit handling of plain \n to keep Location
        //    info more uniform. But has to do for now.
        // _skipCR();
        --_inputPtr;
        return;
    case '\n':
        // plain linefeed: update row tracking for location info
        ++_currInputRow;
        _currInputRowStart = _inputPtr;
        return;
    }
    _reportMissingRootWS(ch);
}
/*
/**********************************************************************
/* Internal methods, secondary parsing
/**********************************************************************
*/
/**
 * Decodes a double-quoted property name. Fast path handles names fully
 * contained in the input buffer without escapes; otherwise falls back to
 * {@link #_parseName2}. Names are canonicalized via the symbol table.
 *
 * @return Decoded (and canonicalized) property name
 */
protected final String _parseName() throws JacksonException
{
    // First: let's try to see if we have a simple name: one that does
    // not cross input buffer boundary, and does not contain escape sequences.
    int ptr = _inputPtr;
    int hash = _hashSeed;
    final int[] codes = _icLatin1;
    while (ptr < _inputEnd) {
        int ch = _inputBuffer[ptr];
        if (ch < codes.length && codes[ch] != 0) {
            if (ch == '"') {
                final int start = _inputPtr;
                _inputPtr = ptr+1; // to skip the quote
                return _symbols.findSymbol(_inputBuffer, start, ptr - start, hash);
            }
            // escape or control char: needs slow path
            break;
        }
        hash = (hash * CharsToNameCanonicalizer.HASH_MULT) + ch;
        ++ptr;
    }
    int start = _inputPtr;
    _inputPtr = ptr;
    return _parseName2(start, hash, INT_QUOTE);
}
/**
 * Slow-path name decoding: handles escape sequences and names crossing the
 * input buffer boundary, accumulating characters in the text buffer.
 *
 * @param startPtr Offset of already-scanned prefix in the input buffer
 * @param hash Partial symbol-table hash computed for the prefix
 * @param endChar Character terminating the name (double or single quote)
 * @return Decoded (and canonicalized) property name
 */
private String _parseName2(int startPtr, int hash, int endChar) throws JacksonException
{
    _textBuffer.resetWithShared(_inputBuffer, startPtr, _inputPtr - startPtr);
    /* Output pointers; calls will also ensure that the buffer is
     * not shared and has room for at least one more char.
     */
    char[] outBuf = _textBuffer.getCurrentSegment();
    int outPtr = _textBuffer.getCurrentSegmentSize();
    while (true) {
        if (_inputPtr >= _inputEnd) {
            if (!_loadMore()) {
                _reportInvalidEOF(" in property name", JsonToken.PROPERTY_NAME);
            }
        }
        char c = _inputBuffer[_inputPtr++];
        int i = c;
        if (i <= INT_BACKSLASH) {
            if (i == INT_BACKSLASH) {
                /* Although chars outside of BMP are to be escaped as
                 * an UTF-16 surrogate pair, does that affect decoding?
                 * For now let's assume it does not.
                 */
                c = _decodeEscaped();
            } else if (i <= endChar) {
                if (i == endChar) {
                    // closing quote: name complete
                    break;
                }
                if (i < INT_SPACE) {
                    _throwUnquotedSpace(i, "name");
                }
            }
        }
        hash = (hash * CharsToNameCanonicalizer.HASH_MULT) + c;
        // Ok, let's add char to output:
        outBuf[outPtr++] = c;
        // Need more room?
        if (outPtr >= outBuf.length) {
            outBuf = _textBuffer.finishCurrentSegment();
            outPtr = 0;
        }
    }
    _textBuffer.setCurrentLength(outPtr);
    {
        final TextBuffer tb = _textBuffer;
        final char[] buf = tb.getTextBuffer();
        final int start = tb.getTextOffset();
        return _symbols.findSymbol(buf, start, tb.size(), hash);
    }
}
/**
 * Method called when we see non-white space character other
 * than double quote, when expecting an Object property name.
 * In standard mode will just throw an exception; but
 * in non-standard modes may be able to parse name.
 *
 * @param i First not-yet-decoded character of possible "odd name" to decode
 *
 * @return Name decoded, if allowed and successful
 *
 * @throws JacksonIOException for low-level read issues
 * @throws StreamReadException for decoding problems
 */
protected String _handleOddName(int i) throws JacksonException
{
    // Allow single quotes?
    if (i == '\'' && isEnabled(JsonReadFeature.ALLOW_SINGLE_QUOTES)) {
        return _parseAposName();
    }
    // Allow unquoted names if feature enabled:
    if (!isEnabled(JsonReadFeature.ALLOW_UNQUOTED_PROPERTY_NAMES)) {
        _reportUnexpectedChar(i, "was expecting double-quote to start property name");
    }
    final int[] codes = CharTypes.getInputCodeLatin1JsNames();
    final int maxCode = codes.length;
    // Also: first char must be a valid name char, but NOT be number
    boolean firstOk;
    if (i < maxCode) { // identifier, or a number ([jackson-core#102])
        firstOk = (codes[i] == 0);
    } else {
        firstOk = Character.isJavaIdentifierPart((char) i);
    }
    if (!firstOk) {
        _reportUnexpectedChar(i, "was expecting either valid name character (for unquoted name) or double-quote (for quoted) to start property name");
    }
    int ptr = _inputPtr;
    int hash = _hashSeed;
    final int inputLen = _inputEnd;
    if (ptr < inputLen) {
        do {
            int ch = _inputBuffer[ptr];
            if (ch < maxCode) {
                if (codes[ch] != 0) {
                    // terminator found within buffer: fast path completes
                    final int start = _inputPtr-1; // -1 to bring back first char
                    _inputPtr = ptr;
                    return _symbols.findSymbol(_inputBuffer, start, ptr - start, hash);
                }
            } else if (!Character.isJavaIdentifierPart((char) ch)) {
                final int start = _inputPtr-1; // -1 to bring back first char
                _inputPtr = ptr;
                return _symbols.findSymbol(_inputBuffer, start, ptr - start, hash);
            }
            hash = (hash * CharsToNameCanonicalizer.HASH_MULT) + ch;
            ++ptr;
        } while (ptr < inputLen);
    }
    // name crosses buffer boundary: continue via slow path
    int start = _inputPtr-1;
    _inputPtr = ptr;
    return _handleOddName2(start, hash, codes);
}
/**
 * Decodes a single-quoted ("apostrophe") property name; only called when
 * {@code JsonReadFeature.ALLOW_SINGLE_QUOTES} is enabled.
 *
 * @return Decoded (and canonicalized) property name
 */
protected String _parseAposName() throws JacksonException
{
    // Note: mostly copy of _parseName()
    int ptr = _inputPtr;
    int hash = _hashSeed;
    final int inputLen = _inputEnd;
    if (ptr < inputLen) {
        final int[] codes = _icLatin1;
        final int maxCode = codes.length;
        do {
            int ch = _inputBuffer[ptr];
            if (ch == '\'') {
                // closing apostrophe found within buffer: fast path completes
                int start = _inputPtr;
                _inputPtr = ptr+1; // to skip the quote
                return _symbols.findSymbol(_inputBuffer, start, ptr - start, hash);
            }
            if (ch < maxCode && codes[ch] != 0) {
                // escape or control char: needs slow path
                break;
            }
            hash = (hash * CharsToNameCanonicalizer.HASH_MULT) + ch;
            ++ptr;
        } while (ptr < inputLen);
    }
    int start = _inputPtr;
    _inputPtr = ptr;
    return _parseName2(start, hash, '\'');
}
/**
 * Method for handling cases where first non-space character
 * of an expected value token is not legal for standard JSON content.
 *
 * @param i First undecoded character of possible "odd value" to decode
 *
 * @return Type of value decoded, if allowed and successful
 *
 * @throws JacksonIOException for low-level read issues
 * @throws StreamReadException for decoding problems
 */
protected JsonToken _handleOddValue(int i) throws JacksonException
{
    // Most likely an error, unless we are to allow single-quote-strings
    switch (i) {
    case '\'':
        /* Allow single quotes? Unlike with regular Strings, we'll eagerly parse
         * contents; this so that there's no need to store information on quote char used.
         * Also, no separation to fast/slow parsing; we'll just do
         * one regular (~= slowish) parsing, to keep code simple
         */
        if (isEnabled(JsonReadFeature.ALLOW_SINGLE_QUOTES)) {
            return _handleApos();
        }
        break;
    case ']':
        // 28-Mar-2016: [core#116]: If Feature.ALLOW_MISSING_VALUES is enabled
        //   we may allow "missing values", that is, encountering a trailing
        //   comma or closing marker where value would be expected
        if (!_streamReadContext.inArray()) {
            break;
        }
        // fall through
    case ',':
        // 11-May-2020, tatu: [core#616] No commas in root level
        if (!_streamReadContext.inRoot()) {
            if ((_formatReadFeatures & FEAT_MASK_ALLOW_MISSING) != 0) {
                // push separator back so the caller re-reads it
                --_inputPtr;
                return JsonToken.VALUE_NULL;
            }
        }
        break;
    case 'N':
        _matchToken("NaN", 1);
        if (isEnabled(JsonReadFeature.ALLOW_NON_NUMERIC_NUMBERS)) {
            return resetAsNaN("NaN", Double.NaN);
        }
        _reportError("Non-standard token 'NaN': enable `JsonReadFeature.ALLOW_NON_NUMERIC_NUMBERS` to allow");
        break;
    case 'I':
        _matchToken("Infinity", 1);
        if (isEnabled(JsonReadFeature.ALLOW_NON_NUMERIC_NUMBERS)) {
            return resetAsNaN("Infinity", Double.POSITIVE_INFINITY);
        }
        _reportError("Non-standard token 'Infinity': enable `JsonReadFeature.ALLOW_NON_NUMERIC_NUMBERS` to allow");
        break;
    case '+': // note: '-' is taken as number
        if (_inputPtr >= _inputEnd) {
            if (!_loadMore()) {
                _reportInvalidEOFInValue(JsonToken.VALUE_NUMBER_INT);
            }
        }
        return _handleInvalidNumberStart(_inputBuffer[_inputPtr++], false, true);
    }
    // [core#77] Try to decode most likely token
    if (Character.isJavaIdentifierStart(i)) {
        _reportInvalidToken(""+((char) i), _validJsonTokenList());
    }
    // but if it doesn't look like a token:
    _reportUnexpectedChar(i, "expected a valid value "+_validJsonValueList());
    return null;
}
/**
 * Eagerly decodes a single-quoted String value (non-standard; enabled via
 * {@code JsonReadFeature.ALLOW_SINGLE_QUOTES}), accumulating content into
 * the text buffer.
 *
 * @return {@link JsonToken#VALUE_STRING}
 */
protected JsonToken _handleApos() throws JacksonException
{
    char[] outBuf = _textBuffer.emptyAndGetCurrentSegment();
    int outPtr = _textBuffer.getCurrentSegmentSize();
    while (true) {
        if (_inputPtr >= _inputEnd) {
            if (!_loadMore()) {
                _reportInvalidEOF(": was expecting closing quote for a string value",
                        JsonToken.VALUE_STRING);
            }
        }
        char c = _inputBuffer[_inputPtr++];
        int i = c;
        if (i <= '\\') {
            if (i == '\\') {
                // Although chars outside of BMP are to be escaped as
                // an UTF-16 surrogate pair, does that affect decoding?
                // For now let's assume it does not.
                c = _decodeEscaped();
            } else if (i <= '\'') {
                if (i == '\'') {
                    // closing apostrophe: value complete
                    break;
                }
                if (i < INT_SPACE) {
                    _throwUnquotedSpace(i, "string value");
                }
            }
        }
        // Need more room?
        if (outPtr >= outBuf.length) {
            outBuf = _textBuffer.finishCurrentSegment();
            outPtr = 0;
        }
        // Ok, let's add char to output:
        outBuf[outPtr++] = c;
    }
    _textBuffer.setCurrentLength(outPtr);
    return JsonToken.VALUE_STRING;
}
/**
 * Slow-path continuation of {@link #_handleOddName}: handles unquoted names
 * crossing the input buffer boundary, accumulating into the text buffer.
 *
 * @param startPtr Offset of already-scanned prefix in the input buffer
 * @param hash Partial symbol-table hash computed for the prefix
 * @param codes Latin-1 character classification table for name terminators
 * @return Decoded (and canonicalized) property name
 */
private String _handleOddName2(int startPtr, int hash, int[] codes) throws JacksonException
{
    _textBuffer.resetWithShared(_inputBuffer, startPtr, _inputPtr - startPtr);
    char[] outBuf = _textBuffer.getCurrentSegment();
    int outPtr = _textBuffer.getCurrentSegmentSize();
    final int maxCode = codes.length;
    while (true) {
        if (_inputPtr >= _inputEnd) {
            if (!_loadMore()) { // acceptable for now (will error out later)
                break;
            }
        }
        char c = _inputBuffer[_inputPtr];
        int i = c;
        if (i < maxCode) {
            if (codes[i] != 0) {
                // terminator (colon, space etc): name complete
                break;
            }
        } else if (!Character.isJavaIdentifierPart(c)) {
            break;
        }
        ++_inputPtr;
        hash = (hash * CharsToNameCanonicalizer.HASH_MULT) + i;
        // Ok, let's add char to output:
        outBuf[outPtr++] = c;
        // Need more room?
        if (outPtr >= outBuf.length) {
            outBuf = _textBuffer.finishCurrentSegment();
            outPtr = 0;
        }
    }
    _textBuffer.setCurrentLength(outPtr);
    {
        final TextBuffer tb = _textBuffer;
        final char[] buf = tb.getTextBuffer();
        final int start = tb.getTextOffset();
        return _symbols.findSymbol(buf, start, tb.size(), hash);
    }
}
/**
 * Finishes decoding of a String value whose start was seen earlier (token
 * marked incomplete). Fast path shares the input buffer when the value has
 * no escapes and fits within the current buffer; otherwise content is
 * copied and decoding continues in {@link #_finishString2}.
 */
protected final void _finishString() throws JacksonException
{
    /* First: let's try to see if we have simple String value: one
     * that does not cross input buffer boundary, and does not
     * contain escape sequences.
     */
    int ptr = _inputPtr;
    final int inputLen = _inputEnd;
    if (ptr < inputLen) {
        final int[] codes = _icLatin1;
        final int maxCode = codes.length;
        do {
            int ch = _inputBuffer[ptr];
            if (ch < maxCode && codes[ch] != 0) {
                if (ch == '"') {
                    _textBuffer.resetWithShared(_inputBuffer, _inputPtr, (ptr-_inputPtr));
                    _inputPtr = ptr+1;
                    // Yes, we got it all
                    return;
                }
                // escape or control char: needs slow path
                break;
            }
            ++ptr;
        } while (ptr < inputLen);
    }
    // Either ran out of input, or bumped into an escape sequence...
    _textBuffer.resetWithCopy(_inputBuffer, _inputPtr, (ptr-_inputPtr));
    _inputPtr = ptr;
    _finishString2();
}
/**
 * Slow-path continuation of {@link #_finishString}: decodes the remainder
 * of a String value, handling escape sequences and buffer boundaries,
 * accumulating into the text buffer.
 */
protected void _finishString2() throws JacksonException
{
    char[] outBuf = _textBuffer.getCurrentSegment();
    int outPtr = _textBuffer.getCurrentSegmentSize();
    final int[] codes = _icLatin1;
    final int maxCode = codes.length;
    while (true) {
        if (_inputPtr >= _inputEnd) {
            if (!_loadMore()) {
                _reportInvalidEOF(": was expecting closing quote for a string value",
                        JsonToken.VALUE_STRING);
            }
        }
        char c = _inputBuffer[_inputPtr++];
        int i = c;
        if (i < maxCode && codes[i] != 0) {
            if (i == INT_QUOTE) {
                // closing quote: value complete
                break;
            } else if (i == INT_BACKSLASH) {
                /* Although chars outside of BMP are to be escaped as
                 * an UTF-16 surrogate pair, does that affect decoding?
                 * For now let's assume it does not.
                 */
                c = _decodeEscaped();
            } else if (i < INT_SPACE) {
                _throwUnquotedSpace(i, "string value");
            } // anything else?
        }
        // Need more room?
        if (outPtr >= outBuf.length) {
            outBuf = _textBuffer.finishCurrentSegment();
            outPtr = 0;
        }
        // Ok, let's add char to output:
        outBuf[outPtr++] = c;
    }
    _textBuffer.setCurrentLength(outPtr);
}
/**
 * Method called to skim through rest of unparsed String value,
 * if it is not needed. This can be done bit faster if contents
 * need not be stored for future access.
 *
 * @throws JacksonIOException for low-level read issues
 * @throws StreamReadException for decoding problems
 */
protected final void _skipString() throws JacksonException
{
    _tokenIncomplete = false;
    // use locals for speed; sync back to fields around any call that may
    // reload the buffer
    int inPtr = _inputPtr;
    int inLen = _inputEnd;
    char[] inBuf = _inputBuffer;
    while (true) {
        if (inPtr >= inLen) {
            _inputPtr = inPtr;
            if (!_loadMore()) {
                _reportInvalidEOF(": was expecting closing quote for a string value",
                        JsonToken.VALUE_STRING);
            }
            inPtr = _inputPtr;
            inLen = _inputEnd;
        }
        char c = inBuf[inPtr++];
        int i = c;
        if (i <= INT_BACKSLASH) {
            if (i == INT_BACKSLASH) {
                // Although chars outside of BMP are to be escaped as an UTF-16 surrogate pair,
                // does that affect decoding? For now let's assume it does not.
                _inputPtr = inPtr;
                /*c = */ _decodeEscaped();
                inPtr = _inputPtr;
                inLen = _inputEnd;
            } else if (i <= INT_QUOTE) {
                if (i == INT_QUOTE) {
                    // closing quote: done skipping
                    _inputPtr = inPtr;
                    break;
                }
                if (i < INT_SPACE) {
                    _inputPtr = inPtr;
                    _throwUnquotedSpace(i, "string value");
                }
            }
        }
    }
}
/*
/**********************************************************************
/* Internal methods, other parsing
/**********************************************************************
*/
// We actually need to check the character value here
// (to see if we have \n following \r).
/**
 * Consumes an optional linefeed following a just-seen carriage return
 * (so that a CRLF pair counts as a single line break) and updates row
 * tracking state for location reporting.
 */
protected final void _skipCR() throws JacksonException {
    if (_inputPtr < _inputEnd || _loadMore()) {
        if (_inputBuffer[_inputPtr] == '\n') {
            ++_inputPtr;
        }
    }
    ++_currInputRow;
    _currInputRowStart = _inputPtr;
}
/**
 * Skips the colon (plus at most one leading and one trailing space/tab)
 * that separates a property name from its value, using an inlined fast
 * path when sufficient buffered input is available; defers to
 * {@link #_skipColon2} for buffer boundaries, comments and longer runs
 * of white space.
 *
 * @return First character following the colon and skipped white space
 */
private final int _skipColon() throws JacksonException
{
    if ((_inputPtr + 4) >= _inputEnd) {
        // not enough look-ahead buffered for the fast path
        return _skipColon2(false);
    }
    char c = _inputBuffer[_inputPtr];
    if (c == ':') { // common case, no leading space
        int i = _inputBuffer[++_inputPtr];
        if (i > INT_SPACE) { // nor trailing
            if (i == INT_SLASH || i == INT_HASH) {
                // possible comment start: needs slow-path handling
                return _skipColon2(true);
            }
            ++_inputPtr;
            return i;
        }
        if (i == INT_SPACE || i == INT_TAB) {
            i = _inputBuffer[++_inputPtr];
            if (i > INT_SPACE) {
                if (i == INT_SLASH || i == INT_HASH) {
                    return _skipColon2(true);
                }
                ++_inputPtr;
                return i;
            }
        }
        return _skipColon2(true); // true -> skipped colon
    }
    if (c == ' ' || c == '\t') {
        // at most one leading space/tab handled inline
        c = _inputBuffer[++_inputPtr];
    }
    if (c == ':') {
        int i = _inputBuffer[++_inputPtr];
        if (i > INT_SPACE) {
            if (i == INT_SLASH || i == INT_HASH) {
                return _skipColon2(true);
            }
            ++_inputPtr;
            return i;
        }
        if (i == INT_SPACE || i == INT_TAB) {
            i = _inputBuffer[++_inputPtr];
            if (i > INT_SPACE) {
                if (i == INT_SLASH || i == INT_HASH) {
                    return _skipColon2(true);
                }
                ++_inputPtr;
                return i;
            }
        }
        return _skipColon2(true);
    }
    return _skipColon2(false);
}
/**
 * General (slow-path) variant of colon skipping: handles buffer reloads,
 * Java/YAML comments, and arbitrary amounts of white space both before
 * and after the colon.
 *
 * @param gotColon Whether the colon itself has already been consumed
 * @return First character following the colon and skipped white space
 */
private final int _skipColon2(boolean gotColon) throws JacksonException
{
    while (_inputPtr < _inputEnd || _loadMore()) {
        int i = _inputBuffer[_inputPtr++];
        if (i > INT_SPACE) {
            if (i == INT_SLASH) {
                _skipComment();
                continue;
            }
            if (i == INT_HASH) {
                // only a comment if YAML comments enabled; else falls through
                if (_skipYAMLComment()) {
                    continue;
                }
            }
            if (gotColon) {
                return i;
            }
            if (i != INT_COLON) {
                _reportUnexpectedChar(i, "was expecting a colon to separate property name and value");
            }
            gotColon = true;
            continue;
        }
        if (i < INT_SPACE) {
            // line-break bookkeeping for location reporting
            if (i == INT_LF) {
                ++_currInputRow;
                _currInputRowStart = _inputPtr;
            } else if (i == INT_CR) {
                _skipCR();
            } else if (i != INT_TAB) {
                _reportInvalidSpace(i);
            }
        }
    }
    _reportInvalidEOF(" within/between "+_streamReadContext.typeDesc()+" entries",
            null);
    return -1;
}
// Variant called when we know there's at least 4 more bytes available
/**
 * Fast colon-skipping variant: caller guarantees at least 4 more
 * characters are buffered starting at {@code ptr}, so no bounds checks
 * are needed on the inline path. Falls back to {@link #_skipColon2} for
 * comments or longer white-space runs.
 *
 * @param ptr Current read position within the input buffer
 * @return First character following the colon and skipped white space
 */
private final int _skipColonFast(int ptr) throws JacksonException
{
    int i = _inputBuffer[ptr++];
    if (i == INT_COLON) { // common case, no leading space
        i = _inputBuffer[ptr++];
        if (i > INT_SPACE) { // nor trailing
            if (i != INT_SLASH && i != INT_HASH) {
                _inputPtr = ptr;
                return i;
            }
        } else if (i == INT_SPACE || i == INT_TAB) {
            i = _inputBuffer[ptr++];
            if (i > INT_SPACE) {
                if (i != INT_SLASH && i != INT_HASH) {
                    _inputPtr = ptr;
                    return i;
                }
            }
        }
        // back up one so slow path re-reads the unhandled character
        _inputPtr = ptr-1;
        return _skipColon2(true); // true -> skipped colon
    }
    if (i == INT_SPACE || i == INT_TAB) {
        i = _inputBuffer[ptr++];
    }
    boolean gotColon = (i == INT_COLON);
    if (gotColon) {
        i = _inputBuffer[ptr++];
        if (i > INT_SPACE) {
            if (i != INT_SLASH && i != INT_HASH) {
                _inputPtr = ptr;
                return i;
            }
        } else if (i == INT_SPACE || i == INT_TAB) {
            i = _inputBuffer[ptr++];
            if (i > INT_SPACE) {
                if (i != INT_SLASH && i != INT_HASH) {
                    _inputPtr = ptr;
                    return i;
                }
            }
        }
    }
    _inputPtr = ptr-1;
    return _skipColon2(gotColon);
}
// Primary loop: no reloading, comment handling
/**
 * Verifies that the given character is the comma separating entries in
 * the current Array/Object context, then skips trailing white space up
 * to the next significant character. Comments and buffer-boundary cases
 * are delegated to {@link #_skipAfterComma2}.
 *
 * @param i Character to verify as the separator comma
 * @return First significant character after the comma
 */
private final int _skipComma(int i) throws JacksonException
{
    if (i != INT_COMMA) {
        _reportUnexpectedChar(i, "was expecting comma to separate "+_streamReadContext.typeDesc()+" entries");
    }
    while (_inputPtr < _inputEnd) {
        i = _inputBuffer[_inputPtr++];
        if (i > INT_SPACE) {
            if (i == INT_SLASH || i == INT_HASH) {
                // potential comment: push back and use slow path
                --_inputPtr;
                return _skipAfterComma2();
            }
            return i;
        }
        if (i < INT_SPACE) {
            if (i == INT_LF) {
                ++_currInputRow;
                _currInputRowStart = _inputPtr;
            } else if (i == INT_CR) {
                _skipCR();
            } else if (i != INT_TAB) {
                _reportInvalidSpace(i);
            }
        }
    }
    return _skipAfterComma2();
}
/**
 * Slow path for skipping white space and comments after an entry
 * separator comma; handles buffer reloads, and fails if end-of-input is
 * reached before a significant character.
 *
 * @return First significant character after the comma
 */
private final int _skipAfterComma2() throws JacksonException
{
    while (_inputPtr < _inputEnd || _loadMore()) {
        int i = _inputBuffer[_inputPtr++];
        if (i > INT_SPACE) {
            if (i == INT_SLASH) {
                _skipComment();
                continue;
            }
            if (i == INT_HASH) {
                // only skipped if YAML comments enabled
                if (_skipYAMLComment()) {
                    continue;
                }
            }
            return i;
        }
        if (i < INT_SPACE) {
            if (i == INT_LF) {
                ++_currInputRow;
                _currInputRowStart = _inputPtr;
            } else if (i == INT_CR) {
                _skipCR();
            } else if (i != INT_TAB) {
                _reportInvalidSpace(i);
            }
        }
    }
    throw _constructReadException("Unexpected end-of-input within/between "+_streamReadContext.typeDesc()+" entries");
}
/**
 * Skips white space until the next significant character, or returns the
 * EOF marker if input ends legally here. Comment starts are deferred to
 * {@link #_skipWSOrEnd2}, as is the buffer-reload path.
 *
 * @return Next significant character, or EOF marker from {@code _eofAsNextChar()}
 */
private final int _skipWSOrEnd() throws JacksonException
{
    // Let's handle first character separately since it is likely that
    // it is either non-whitespace; or we have longer run of white space
    if (_inputPtr >= _inputEnd) {
        if (!_loadMore()) {
            return _eofAsNextChar();
        }
    }
    int i = _inputBuffer[_inputPtr++];
    if (i > INT_SPACE) {
        if (i == INT_SLASH || i == INT_HASH) {
            --_inputPtr;
            return _skipWSOrEnd2();
        }
        return i;
    }
    if (i != INT_SPACE) {
        if (i == INT_LF) {
            ++_currInputRow;
            _currInputRowStart = _inputPtr;
        } else if (i == INT_CR) {
            _skipCR();
        } else if (i != INT_TAB && !_isAllowedCtrlCharRS(i)) {
            // RS (record separator) may be allowed between root-level values
            _reportInvalidSpace(i);
        }
    }
    while (_inputPtr < _inputEnd) {
        i = _inputBuffer[_inputPtr++];
        if (i > INT_SPACE) {
            if (i == INT_SLASH || i == INT_HASH) {
                --_inputPtr;
                return _skipWSOrEnd2();
            }
            return i;
        }
        if (i != INT_SPACE) {
            if (i == INT_LF) {
                ++_currInputRow;
                _currInputRowStart = _inputPtr;
            } else if (i == INT_CR) {
                _skipCR();
            } else if (i != INT_TAB && !_isAllowedCtrlCharRS(i)) {
                _reportInvalidSpace(i);
            }
        }
    }
    return _skipWSOrEnd2();
}
/**
 * Slow path of {@link #_skipWSOrEnd}: handles buffer reloads, Java and
 * YAML comments, and arbitrary white-space runs until a significant
 * character or legal end-of-input.
 *
 * @return Next significant character, or EOF marker from {@code _eofAsNextChar()}
 */
private int _skipWSOrEnd2() throws JacksonException
{
    while (true) {
        if (_inputPtr >= _inputEnd) {
            if (!_loadMore()) { // We ran out of input...
                return _eofAsNextChar();
            }
        }
        int i = _inputBuffer[_inputPtr++];
        if (i > INT_SPACE) {
            if (i == INT_SLASH) {
                _skipComment();
                continue;
            }
            if (i == INT_HASH) {
                // only a comment if YAML comments enabled
                if (_skipYAMLComment()) {
                    continue;
                }
            }
            return i;
        } else if (i != INT_SPACE) {
            if (i == INT_LF) {
                ++_currInputRow;
                _currInputRowStart = _inputPtr;
            } else if (i == INT_CR) {
                _skipCR();
            } else if (i != INT_TAB) {
                _reportInvalidSpace(i);
            }
        }
    }
}
/**
 * Handles a comment starting with '/': verifies the feature is enabled,
 * then dispatches on the second character to skip either a line comment
 * ("//") or a C-style block comment.
 */
private void _skipComment() throws JacksonException
{
    if (!isEnabled(JsonReadFeature.ALLOW_JAVA_COMMENTS)) {
        _reportUnexpectedChar('/', "maybe a (non-standard) comment? (not recognized as one since Feature 'ALLOW_COMMENTS' not enabled for parser)");
    }
    // First: check which comment (if either) it is:
    if (_inputPtr >= _inputEnd && !_loadMore()) {
        _reportInvalidEOF(" in a comment", null);
    }
    char c = _inputBuffer[_inputPtr++];
    if (c == '/') {
        _skipLine();
    } else if (c == '*') {
        _skipCComment();
    } else {
        _reportUnexpectedChar(c, "was expecting either '*' or '/' for a comment");
    }
}
/**
 * Skips the body of a C-style block comment, up to and including the
 * closing "*&#47;" marker; maintains row tracking across line breaks and
 * fails on end-of-input within the comment.
 */
private void _skipCComment() throws JacksonException
{
    // Ok: need the matching '*/'
    while ((_inputPtr < _inputEnd) || _loadMore()) {
        int i = _inputBuffer[_inputPtr++];
        if (i <= '*') {
            if (i == '*') { // end?
                if ((_inputPtr >= _inputEnd) && !_loadMore()) {
                    break;
                }
                if (_inputBuffer[_inputPtr] == INT_SLASH) {
                    ++_inputPtr;
                    return;
                }
                continue;
            }
            if (i < INT_SPACE) {
                if (i == INT_LF) {
                    ++_currInputRow;
                    _currInputRowStart = _inputPtr;
                } else if (i == INT_CR) {
                    _skipCR();
                } else if (i != INT_TAB) {
                    _reportInvalidSpace(i);
                }
            }
        }
    }
    _reportInvalidEOF(" in a comment", null);
}
/**
 * Attempts to skip a YAML-style ('#') comment at the current position.
 *
 * @return {@code true} if the feature is enabled and the rest of the line
 *   was skipped as a comment; {@code false} if '#' must be treated as a
 *   regular (unexpected) character
 */
private boolean _skipYAMLComment() throws JacksonException
{
    if (isEnabled(JsonReadFeature.ALLOW_YAML_COMMENTS)) {
        _skipLine();
        return true;
    }
    return false;
}
/**
 * Skips the remainder of the current line (used for "//" and '#'
 * comments); terminates at a line break or end-of-input, updating row
 * tracking for location reporting.
 */
private void _skipLine() throws JacksonException
{
    // Ok: need to find EOF or linefeed
    while ((_inputPtr < _inputEnd) || _loadMore()) {
        int i = _inputBuffer[_inputPtr++];
        if (i < INT_SPACE) {
            if (i == INT_LF) {
                ++_currInputRow;
                _currInputRowStart = _inputPtr;
                break;
            } else if (i == INT_CR) {
                _skipCR();
                break;
            } else if (i != INT_TAB) {
                _reportInvalidSpace(i);
            }
        }
    }
}
/**
 * Decodes a single escape sequence whose leading backslash has already
 * been consumed: simple one-character escapes are mapped directly, and
 * "\\uXXXX" hex escapes are decoded into the corresponding UTF-16 unit.
 * Unrecognized escapes are delegated to
 * {@code _handleUnrecognizedCharacterEscape} (which may accept or fail).
 *
 * @return The decoded character
 */
@Override
protected char _decodeEscaped() throws JacksonException
{
    if (_inputPtr >= _inputEnd) {
        if (!_loadMore()) {
            _reportInvalidEOF(" in character escape sequence", JsonToken.VALUE_STRING);
        }
    }
    char c = _inputBuffer[_inputPtr++];
    switch (c) {
    // First, ones that are mapped
    case 'b':
        return '\b';
    case 't':
        return '\t';
    case 'n':
        return '\n';
    case 'f':
        return '\f';
    case 'r':
        return '\r';
    // And these are to be returned as they are
    case '"':
    case '/':
    case '\\':
        return c;
    case 'u': // and finally hex-escaped
        break;
    default:
        return _handleUnrecognizedCharacterEscape(c);
    }
    // Ok, a hex escape. Need 4 characters
    int value = 0;
    for (int i = 0; i < 4; ++i) {
        if (_inputPtr >= _inputEnd) {
            if (!_loadMore()) {
                _reportInvalidEOF(" in character escape sequence", JsonToken.VALUE_STRING);
            }
        }
        int ch = _inputBuffer[_inputPtr++];
        int digit = CharTypes.charToHex(ch);
        if (digit < 0) {
            _reportUnexpectedChar(ch, "expected a hex-digit for character escape sequence");
        }
        // accumulate 4 hex digits into one 16-bit value
        value = (value << 4) | digit;
    }
    return (char) value;
}
/**
 * Fast-path match for the literal "true" (leading 't' already consumed):
 * succeeds inline when the remaining characters are buffered and followed
 * by a non-identifier character; otherwise defers to {@link #_matchToken}.
 */
private final void _matchTrue() throws JacksonException {
    int ptr = _inputPtr;
    if ((ptr + 3) < _inputEnd) {
        final char[] b = _inputBuffer;
        if (b[ptr] == 'r' && b[++ptr] == 'u' && b[++ptr] == 'e') {
            char c = b[++ptr];
            if (c < '0' || c == ']' || c == '}') { // expected/allowed chars
                _inputPtr = ptr;
                return;
            }
        }
    }
    // buffer boundary, or problem, offline
    _matchToken("true", 1);
}

/**
 * Fast-path match for the literal "false" (leading 'f' already consumed);
 * same strategy as {@link #_matchTrue}.
 */
private final void _matchFalse() throws JacksonException {
    int ptr = _inputPtr;
    if ((ptr + 4) < _inputEnd) {
        final char[] b = _inputBuffer;
        if (b[ptr] == 'a' && b[++ptr] == 'l' && b[++ptr] == 's' && b[++ptr] == 'e') {
            char c = b[++ptr];
            if (c < '0' || c == ']' || c == '}') { // expected/allowed chars
                _inputPtr = ptr;
                return;
            }
        }
    }
    // buffer boundary, or problem, offline
    _matchToken("false", 1);
}

/**
 * Fast-path match for the literal "null" (leading 'n' already consumed);
 * same strategy as {@link #_matchTrue}.
 */
private final void _matchNull() throws JacksonException {
    int ptr = _inputPtr;
    if ((ptr + 3) < _inputEnd) {
        final char[] b = _inputBuffer;
        if (b[ptr] == 'u' && b[++ptr] == 'l' && b[++ptr] == 'l') {
            char c = b[++ptr];
            if (c < '0' || c == ']' || c == '}') { // expected/allowed chars
                _inputPtr = ptr;
                return;
            }
        }
    }
    // buffer boundary, or problem, offline
    _matchToken("null", 1);
}
// Helper method for checking whether input matches expected token
/**
 * Verifies that input matches the rest of the expected literal token,
 * starting at offset {@code i} of {@code matchStr}; also checks that the
 * following character does not continue an identifier (which would make
 * the token invalid). Defers to {@link #_matchToken2} near buffer end.
 *
 * @param matchStr Complete expected token text (e.g. "true")
 * @param i Offset within {@code matchStr} of first still-unmatched char
 */
protected final void _matchToken(String matchStr, int i) throws JacksonException
{
    final int len = matchStr.length();
    if ((_inputPtr + len) >= _inputEnd) {
        _matchToken2(matchStr, i);
        return;
    }
    do {
        if (_inputBuffer[_inputPtr] != matchStr.charAt(i)) {
            _reportInvalidToken(matchStr.substring(0, i));
        }
        ++_inputPtr;
    } while (++i < len);
    int ch = _inputBuffer[_inputPtr];
    if (ch >= '0' && ch != ']' && ch != '}') { // expected/allowed chars
        _checkMatchEnd(matchStr, i, ch);
    }
}
private final void _matchToken2(String matchStr, int i) throws JacksonException
{
final int len = matchStr.length();
do {
if (((_inputPtr >= _inputEnd) && !_loadMore())
|| (_inputBuffer[_inputPtr] != matchStr.charAt(i))) {
_reportInvalidToken(matchStr.substring(0, i));
}
++_inputPtr;
} while (++i < len);
// but let's also ensure we either get EOF, or non-alphanum char...
if (_inputPtr >= _inputEnd && !_loadMore()) {
return;
}
int ch = _inputBuffer[_inputPtr];
if (ch >= '0' && ch != ']' && ch != '}') { // expected/allowed chars
_checkMatchEnd(matchStr, i, ch);
}
}
/**
 * Called when a matched literal token is followed by a suspicious
 * character; fails only if that character could continue a Java
 * identifier (e.g. "truex"), since other characters are harmless.
 */
private final void _checkMatchEnd(String matchStr, int i, int c) throws JacksonException {
    // but actually only alphanums are problematic
    char ch = (char) c;
    if (Character.isJavaIdentifierPart(ch)) {
        _reportInvalidToken(matchStr.substring(0, i));
    }
}
/*
/**********************************************************************
/* Binary access
/**********************************************************************
*/
/**
 * Efficient handling for incremental parsing of base64-encoded
 * textual content. Decodes quartets of base64 characters into triplets
 * of bytes, handling white space between characters, explicit and
 * missing padding, and escaped characters (via
 * {@code _decodeBase64Escape}); terminates on the closing quote.
 *
 * @param b64variant Type of base64 encoding expected in context
 *
 * @return Fully decoded value of base64 content
 *
 * @throws JacksonIOException for low-level read issues
 * @throws StreamReadException for decoding problems
 */
@SuppressWarnings("resource")
protected byte[] _decodeBase64(Base64Variant b64variant) throws JacksonException
{
    ByteArrayBuilder builder = _getByteArrayBuilder();
    //main_loop:
    while (true) {
        // first, we'll skip preceding white space, if any
        char ch;
        do {
            if (_inputPtr >= _inputEnd) {
                _loadMoreGuaranteed();
            }
            ch = _inputBuffer[_inputPtr++];
        } while (ch <= INT_SPACE);
        int bits = b64variant.decodeBase64Char(ch);
        if (bits < 0) {
            if (ch == '"') { // reached the end, fair and square?
                return builder.toByteArray();
            }
            bits = _decodeBase64Escape(b64variant, ch, 0);
            if (bits < 0) { // white space to skip
                continue;
            }
        }
        int decodedData = bits;
        // then second base64 char; can't get padding yet, nor ws
        if (_inputPtr >= _inputEnd) {
            _loadMoreGuaranteed();
        }
        ch = _inputBuffer[_inputPtr++];
        bits = b64variant.decodeBase64Char(ch);
        if (bits < 0) {
            bits = _decodeBase64Escape(b64variant, ch, 1);
        }
        decodedData = (decodedData << 6) | bits;
        // third base64 char; can be padding, but not ws
        if (_inputPtr >= _inputEnd) {
            _loadMoreGuaranteed();
        }
        ch = _inputBuffer[_inputPtr++];
        bits = b64variant.decodeBase64Char(ch);
        // First branch: can get padding (-> 1 byte)
        if (bits < 0) {
            if (bits != Base64Variant.BASE64_VALUE_PADDING) {
                // as per [JACKSON-631], could also just be 'missing' padding
                if (ch == '"') {
                    // 2 chars = 12 bits; drop 4 low dummy bits -> 1 byte
                    decodedData >>= 4;
                    builder.append(decodedData);
                    if (b64variant.requiresPaddingOnRead()) {
                        --_inputPtr; // to keep parser state bit more consistent
                        _handleBase64MissingPadding(b64variant);
                    }
                    return builder.toByteArray();
                }
                bits = _decodeBase64Escape(b64variant, ch, 2);
            }
            if (bits == Base64Variant.BASE64_VALUE_PADDING) {
                // Ok, must get more padding chars, then
                if (_inputPtr >= _inputEnd) {
                    _loadMoreGuaranteed();
                }
                ch = _inputBuffer[_inputPtr++];
                if (!b64variant.usesPaddingChar(ch)) {
                    if (_decodeBase64Escape(b64variant, ch, 3) != Base64Variant.BASE64_VALUE_PADDING) {
                        _reportInvalidBase64Char(b64variant, ch, 3, "expected padding character '"+b64variant.getPaddingChar()+"'");
                    }
                }
                // Got 12 bits, only need 8, need to shift
                decodedData >>= 4;
                builder.append(decodedData);
                continue;
            }
            // otherwise we got escaped other char, to be processed below
        }
        // Nope, 2 or 3 bytes
        decodedData = (decodedData << 6) | bits;
        // fourth and last base64 char; can be padding, but not ws
        if (_inputPtr >= _inputEnd) {
            _loadMoreGuaranteed();
        }
        ch = _inputBuffer[_inputPtr++];
        bits = b64variant.decodeBase64Char(ch);
        if (bits < 0) {
            if (bits != Base64Variant.BASE64_VALUE_PADDING) {
                // as per [JACKSON-631], could also just be 'missing' padding
                if (ch == '"') {
                    // 3 chars = 18 bits; drop 2 low dummy bits -> 2 bytes
                    decodedData >>= 2;
                    builder.appendTwoBytes(decodedData);
                    if (b64variant.requiresPaddingOnRead()) {
                        --_inputPtr; // to keep parser state bit more consistent
                        _handleBase64MissingPadding(b64variant);
                    }
                    return builder.toByteArray();
                }
                bits = _decodeBase64Escape(b64variant, ch, 3);
            }
            if (bits == Base64Variant.BASE64_VALUE_PADDING) {
                // With padding we only get 2 bytes; but we have
                // to shift it a bit so it is identical to triplet
                // case with partial output.
                // 3 chars gives 3x6 == 18 bits, of which 2 are
                // dummies, need to discard:
                decodedData >>= 2;
                builder.appendTwoBytes(decodedData);
                continue;
            }
            // otherwise we got escaped other char, to be processed below
        }
        // otherwise, our triplet is now complete
        decodedData = (decodedData << 6) | bits;
        builder.appendThreeBytes(decodedData);
    }
}
/*
/**********************************************************************
/* Internal methods, location updating
/**********************************************************************
*/
/**
 * Location where the current token starts. For property names this uses
 * separately tracked name-start coordinates, since the main token
 * pointers have already advanced past the name by the time this is asked.
 */
@Override
public TokenStreamLocation currentTokenLocation()
{
    if (_currToken == JsonToken.PROPERTY_NAME) {
        long total = _currInputProcessed + (_nameStartOffset-1);
        return new TokenStreamLocation(_contentReference(),
                -1L, total, _nameStartRow, _nameStartCol);
    }
    return new TokenStreamLocation(_contentReference(),
            -1L, _tokenInputTotal-1, _tokenInputRow, _tokenInputCol);
}

/** Current read location (i.e. just past whatever was last decoded). */
@Override
public TokenStreamLocation currentLocation() {
    final int col = _inputPtr - _currInputRowStart + 1; // 1-based
    return new TokenStreamLocation(_contentReference(),
            -1L, _currInputProcessed + _inputPtr,
            _currInputRow, col);
}

/** Like {@link #currentLocation()} but one character earlier (the char just read). */
@Override // @since 2.17
protected TokenStreamLocation _currentLocationMinusOne() {
    final int prevInputPtr = _inputPtr - 1;
    final int col = prevInputPtr - _currInputRowStart + 1; // 1-based
    return new TokenStreamLocation(_contentReference(),
            -1L, _currInputProcessed + prevInputPtr,
            _currInputRow, col);
}

// Records the current position as the start of the token about to be decoded.
private final void _updateLocation()
{
    int ptr = _inputPtr;
    _tokenInputTotal = _currInputProcessed + ptr;
    _tokenInputRow = _currInputRow;
    _tokenInputCol = ptr - _currInputRowStart;
}

// Records the current position as the start of a property name (kept
// separately; see currentTokenLocation()).
// @since 2.7
private final void _updateNameLocation()
{
    int ptr = _inputPtr;
    _nameStartOffset = ptr;
    _nameStartRow = _currInputRow;
    _nameStartCol = ptr - _currInputRowStart;
}
/*
/**********************************************************************
/* Error reporting
/**********************************************************************
*/
/**
 * Reports an invalid literal token, listing the default set of valid
 * tokens as the expectation.
 */
protected void _reportInvalidToken(String matchedPart) throws JacksonException {
    _reportInvalidToken(matchedPart, _validJsonTokenList());
}

/**
 * Reports an invalid literal token: extends {@code matchedPart} with
 * following identifier characters (up to a configured cap) to show what
 * was apparently attempted, then throws with a location pointing at the
 * token start.
 *
 * @param matchedPart Part of the token matched before the mismatch
 * @param msg Description of what was expected instead
 */
protected void _reportInvalidToken(String matchedPart, String msg) throws JacksonException
{
    /* Let's just try to find what appears to be the token, using
     * regular Java identifier character rules. It's just a heuristic,
     * nothing fancy here.
     */
    // [core#1180]: Construct JsonLocation at token start BEFORE _loadMore() may change buffer state
    final int tokenStartPtr = _inputPtr - matchedPart.length();
    final int col = tokenStartPtr - _currInputRowStart + 1; // 1-based
    final TokenStreamLocation loc = new TokenStreamLocation(_contentReference(),
            -1L, _currInputProcessed + tokenStartPtr,
            _currInputRow, col);
    StringBuilder sb = new StringBuilder(matchedPart);
    while ((_inputPtr < _inputEnd) || _loadMore()) {
        char c = _inputBuffer[_inputPtr];
        if (!Character.isJavaIdentifierPart(c)) {
            break;
        }
        ++_inputPtr;
        sb.append(c);
        // cap token length included in error message
        if (sb.length() >= _ioContext.errorReportConfiguration().getMaxErrorTokenLength()) {
            sb.append("...");
            break;
        }
    }
    final String fullMsg = String.format("Unrecognized token '%s': was expecting %s", sb, msg);
    throw _constructReadException(fullMsg, loc);
}
/*
/**********************************************************************
/* Internal methods, other
/**********************************************************************
*/
/**
 * Handles a closing marker (']' or '}'): verifies it matches the current
 * read context (array vs object), pops the context, and updates the
 * current token accordingly.
 *
 * @param i Character code of the close marker just read
 */
private void _closeScope(int i) throws StreamReadException
{
    if (i == INT_RBRACKET) {
        _updateLocation();
        if (!_streamReadContext.inArray()) {
            // second arg is the marker that would have been acceptable here
            _reportMismatchedEndMarker(i, '}');
        }
        _streamReadContext = _streamReadContext.clearAndGetParent();
        _updateToken(JsonToken.END_ARRAY);
    }
    if (i == INT_RCURLY) {
        _updateLocation();
        if (!_streamReadContext.inObject()) {
            _reportMismatchedEndMarker(i, ']');
        }
        _streamReadContext = _streamReadContext.clearAndGetParent();
        _updateToken(JsonToken.END_OBJECT);
    }
}
}
| ReaderBasedJsonParser |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/handler/InvocationNotifierHandler.java | {
"start": 625,
"end": 2578
} | class ____<T> implements MockHandler<T> {
private final List<InvocationListener> invocationListeners;
private final MockHandler<T> mockHandler;
public InvocationNotifierHandler(MockHandler<T> mockHandler, MockCreationSettings<T> settings) {
this.mockHandler = mockHandler;
this.invocationListeners = settings.getInvocationListeners();
}
@Override
public Object handle(Invocation invocation) throws Throwable {
try {
Object returnedValue = mockHandler.handle(invocation);
notifyMethodCall(invocation, returnedValue);
return returnedValue;
} catch (Throwable t) {
notifyMethodCallException(invocation, t);
throw t;
}
}
private void notifyMethodCall(Invocation invocation, Object returnValue) {
for (InvocationListener listener : invocationListeners) {
try {
listener.reportInvocation(
new NotifiedMethodInvocationReport(invocation, returnValue));
} catch (Throwable listenerThrowable) {
throw invocationListenerThrewException(listener, listenerThrowable);
}
}
}
private void notifyMethodCallException(Invocation invocation, Throwable exception) {
for (InvocationListener listener : invocationListeners) {
try {
listener.reportInvocation(
new NotifiedMethodInvocationReport(invocation, exception));
} catch (Throwable listenerThrowable) {
throw invocationListenerThrewException(listener, listenerThrowable);
}
}
}
@Override
public MockCreationSettings<T> getMockSettings() {
return mockHandler.getMockSettings();
}
@Override
public InvocationContainer getInvocationContainer() {
return mockHandler.getInvocationContainer();
}
}
| InvocationNotifierHandler |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java | {
"start": 9044,
"end": 14480
} | class ____ implements FallbackSyntheticSourceBlockLoader.Reader<BytesRef> {
private final Function<List<T>, List<Object>> formatter;
private GeometriesFallbackSyntheticSourceReader() {
this.formatter = getFormatter(GeometryFormatterFactory.WKB);
}
@Override
public void convertValue(Object value, List<BytesRef> accumulator) {
final List<T> values = new ArrayList<>();
geometryParser.fetchFromSource(value, v -> {
if (v != null) {
values.add(v);
} else if (nullValue != null) {
values.add(nullValue);
}
});
var formatted = formatter.apply(values);
for (var formattedValue : formatted) {
if (formattedValue instanceof byte[] wkb) {
accumulator.add(new BytesRef(wkb));
} else {
throw new IllegalArgumentException(
"Unsupported source type for spatial geometry: " + formattedValue.getClass().getSimpleName()
);
}
}
}
@Override
public void parse(XContentParser parser, List<BytesRef> accumulator) throws IOException {
final List<T> values = new ArrayList<>();
geometryParser.parseFromSource(parser, v -> {
if (v != null) {
values.add(v);
} else if (nullValue != null) {
values.add(nullValue);
}
});
var formatted = formatter.apply(values);
for (var formattedValue : formatted) {
if (formattedValue instanceof byte[] wkb) {
accumulator.add(new BytesRef(wkb));
} else {
throw new IllegalArgumentException(
"Unsupported source type for spatial geometry: " + formattedValue.getClass().getSimpleName()
);
}
}
}
@Override
public void writeToBlock(List<BytesRef> values, BlockLoader.Builder blockBuilder) {
var bytesRefBuilder = (BlockLoader.BytesRefBuilder) blockBuilder;
for (var value : values) {
bytesRefBuilder.appendBytesRef(value);
}
}
}
}
private final Explicit<Boolean> ignoreMalformed;
private final Explicit<Boolean> ignoreZValue;
private final Parser<T> parser;
protected AbstractGeometryFieldMapper(
String simpleName,
MappedFieldType mappedFieldType,
BuilderParams builderParams,
Explicit<Boolean> ignoreMalformed,
Explicit<Boolean> ignoreZValue,
Parser<T> parser
) {
super(simpleName, mappedFieldType, builderParams);
this.ignoreMalformed = ignoreMalformed;
this.ignoreZValue = ignoreZValue;
this.parser = parser;
}
@Override
@SuppressWarnings("unchecked")
public AbstractGeometryFieldType<T> fieldType() {
return (AbstractGeometryFieldType<T>) mappedFieldType;
}
@Override
protected void parseCreateField(DocumentParserContext context) throws IOException {
throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called");
}
/**
* Build an index document using a parsed geometry
* @param context the ParseContext holding the document
* @param geometry the parsed geometry object
*/
protected abstract void index(DocumentParserContext context, T geometry) throws IOException;
@Override
protected boolean supportsParsingObject() {
return true;
}
@Override
public final void parse(DocumentParserContext context) throws IOException {
if (builderParams.hasScript()) {
throw new DocumentParsingException(
context.parser().getTokenLocation(),
"failed to parse field [" + fieldType().name() + "] of type + " + contentType() + "]",
new IllegalArgumentException("Cannot index data directly into a field with a [script] parameter")
);
}
parser.parse(context.parser(), v -> index(context, v), new DefaultMalformedValueHandler((e, b) -> onMalformedValue(context, b, e)));
}
protected void onMalformedValue(DocumentParserContext context, XContentBuilder malformedDataForSyntheticSource, Exception cause)
throws IOException {
if (ignoreMalformed()) {
context.addIgnoredField(fieldType().name());
} else {
throw new DocumentParsingException(
context.parser().getTokenLocation(),
"failed to parse field [" + fieldType().name() + "] of type [" + contentType() + "]",
cause
);
}
}
@Override
public boolean ignoreMalformed() {
return ignoreMalformed.value();
}
public boolean ignoreZValue() {
return ignoreZValue.value();
}
@Override
public final boolean parsesArrayValue() {
return true;
}
}
| GeometriesFallbackSyntheticSourceReader |
java | processing__processing4 | java/src/processing/mode/java/runner/MessageConsumer.java | {
"start": 1421,
"end": 1486
} | interface ____ {
public void message(String s);
}
| MessageConsumer |
java | apache__flink | flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/header/materializedtable/RefreshMaterializedTableHeaders.java | {
"start": 1898,
"end": 3776
} | class ____
implements SqlGatewayMessageHeaders<
RefreshMaterializedTableRequestBody,
RefreshMaterializedTableResponseBody,
RefreshMaterializedTableParameters> {
private static final RefreshMaterializedTableHeaders INSTANCE =
new RefreshMaterializedTableHeaders();
private static final String URL =
"/sessions/:"
+ SessionHandleIdPathParameter.KEY
+ "/materialized-tables/:"
+ MaterializedTableIdentifierPathParameter.KEY
+ "/refresh";
@Override
public HttpMethodWrapper getHttpMethod() {
return HttpMethodWrapper.POST;
}
@Override
public String getTargetRestEndpointURL() {
return URL;
}
@Override
public Class<RefreshMaterializedTableResponseBody> getResponseClass() {
return RefreshMaterializedTableResponseBody.class;
}
@Override
public HttpResponseStatus getResponseStatusCode() {
return HttpResponseStatus.OK;
}
@Override
public String getDescription() {
return "Refresh materialized table";
}
@Override
public Class<RefreshMaterializedTableRequestBody> getRequestClass() {
return RefreshMaterializedTableRequestBody.class;
}
@Override
public RefreshMaterializedTableParameters getUnresolvedMessageParameters() {
return new RefreshMaterializedTableParameters();
}
@Override
public Collection<? extends RestAPIVersion<?>> getSupportedAPIVersions() {
return Collections.singleton(SqlGatewayRestAPIVersion.V3);
}
public static RefreshMaterializedTableHeaders getInstance() {
return INSTANCE;
}
@Override
public String operationId() {
return "refreshMaterializedTable";
}
}
| RefreshMaterializedTableHeaders |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/internal/creation/proxy/ProxyMockMakerTest.java | {
"start": 422,
"end": 3673
} | class ____
extends AbstractMockMakerTest<ProxyMockMaker, ProxyMockMakerTest.SomeInterface> {
public ProxyMockMakerTest() {
super(new ProxyMockMaker(), SomeInterface.class);
}
@Test
public void should_create_mock_from_interface() {
SomeInterface proxy =
mockMaker.createMock(settingsFor(SomeInterface.class), dummyHandler());
Class<?> superClass = proxy.getClass().getSuperclass();
assertThat(superClass).isEqualTo(Proxy.class);
}
@Test
public void should_create_mock_from_interface_with_extra_interface() {
SomeInterface proxy =
mockMaker.createMock(
settingsFor(SomeInterface.class, Serializable.class), dummyHandler());
Class<?> superClass = proxy.getClass().getSuperclass();
assertThat(superClass).isEqualTo(Proxy.class);
assertThat(proxy).isInstanceOf(Serializable.class);
}
@Test
public void should_discover_mockable_input() {
assertThat(mockMaker.isTypeMockable(Number.class).mockable()).isFalse();
assertThat(mockMaker.isTypeMockable(Number.class).nonMockableReason())
.isEqualTo("non-interface");
assertThat(mockMaker.isTypeMockable(SomeInterface.class).mockable()).isTrue();
assertThat(mockMaker.isTypeMockable(Object.class).mockable()).isTrue();
}
@Test
public void can_compute_hash_code() throws Throwable {
SomeInterface proxy =
mockMaker.createMock(settingsFor(SomeInterface.class), dummyHandler());
InvocationHandler handler = Proxy.getInvocationHandler(proxy);
assertThat(handler.invoke(proxy, Object.class.getMethod("hashCode"), null))
.isEqualTo(System.identityHashCode(proxy));
}
@Test
public void can_compute_equality() throws Throwable {
SomeInterface proxy =
mockMaker.createMock(settingsFor(SomeInterface.class), dummyHandler());
InvocationHandler handler = Proxy.getInvocationHandler(proxy);
assertThat(
handler.invoke(
proxy,
Object.class.getMethod("equals", Object.class),
new Object[] {proxy}))
.isEqualTo(true);
assertThat(
handler.invoke(
proxy,
Object.class.getMethod("equals", Object.class),
new Object[] {null}))
.isEqualTo(false);
assertThat(
handler.invoke(
proxy,
Object.class.getMethod("equals", Object.class),
new Object[] {new Object()}))
.isEqualTo(false);
}
@Test
public void can_invoke_toString() throws Throwable {
SomeInterface proxy =
mockMaker.createMock(settingsFor(SomeInterface.class), dummyHandler());
InvocationHandler handler = Proxy.getInvocationHandler(proxy);
assertThat(handler.invoke(proxy, Object.class.getMethod("toString"), null)).isNull();
}
| ProxyMockMakerTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/GuardedByBinderTest.java | {
"start": 3837,
"end": 4245
} | class ____ extends Super {
final Object lock = new Object();
}
"""));
}
@Test
public void namedClass_this() {
assertThat(
bind(
"Test",
"Test.class",
forSourceLines(
"threadsafety/Test.java",
"""
package threadsafety;
| Test |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/AnnotationBeanNameGeneratorTests.java | {
"start": 7017,
"end": 7124
} | class ____ {
}
@Component("myComponent")
@Service("myService")
static | ComponentWithDuplicateIdenticalNames |
java | google__guava | android/guava/src/com/google/common/eventbus/Subscriber.java | {
"start": 1313,
"end": 4423
} | class ____ {
/** Creates a {@code Subscriber} for {@code method} on {@code listener}. */
static Subscriber create(EventBus bus, Object listener, Method method) {
return isDeclaredThreadSafe(method)
? new Subscriber(bus, listener, method)
: new SynchronizedSubscriber(bus, listener, method);
}
/** The event bus this subscriber belongs to. */
@Weak private final EventBus bus;
/** The object with the subscriber method. */
@VisibleForTesting final Object target;
/** Subscriber method. */
private final Method method;
/** Executor to use for dispatching events to this subscriber. */
private final Executor executor;
private Subscriber(EventBus bus, Object target, Method method) {
this.bus = bus;
this.target = checkNotNull(target);
this.method = method;
method.setAccessible(true);
this.executor = bus.executor();
}
/** Dispatches {@code event} to this subscriber using the proper executor. */
final void dispatchEvent(Object event) {
executor.execute(
() -> {
try {
invokeSubscriberMethod(event);
} catch (InvocationTargetException e) {
bus.handleSubscriberException(e.getCause(), context(event));
}
});
}
/**
* Invokes the subscriber method. This method can be overridden to make the invocation
* synchronized.
*/
@VisibleForTesting
void invokeSubscriberMethod(Object event) throws InvocationTargetException {
try {
method.invoke(target, checkNotNull(event));
} catch (IllegalArgumentException e) {
throw new Error("Method rejected target/argument: " + event, e);
} catch (IllegalAccessException e) {
throw new Error("Method became inaccessible: " + event, e);
} catch (InvocationTargetException e) {
if (e.getCause() instanceof Error) {
throw (Error) e.getCause();
}
throw e;
}
}
/** Gets the context for the given event. */
private SubscriberExceptionContext context(Object event) {
return new SubscriberExceptionContext(bus, event, target, method);
}
@Override
public final int hashCode() {
return (31 + method.hashCode()) * 31 + System.identityHashCode(target);
}
@Override
public final boolean equals(@Nullable Object obj) {
if (obj instanceof Subscriber) {
Subscriber that = (Subscriber) obj;
// Use == so that different equal instances will still receive events.
// We only guard against the case that the same object is registered
// multiple times
return target == that.target && method.equals(that.method);
}
return false;
}
/**
* Checks whether {@code method} is thread-safe, as indicated by the presence of the {@link
* AllowConcurrentEvents} annotation.
*/
private static boolean isDeclaredThreadSafe(Method method) {
return method.getAnnotation(AllowConcurrentEvents.class) != null;
}
/**
* Subscriber that synchronizes invocations of a method to ensure that only one thread may enter
* the method at a time.
*/
@VisibleForTesting
static final | Subscriber |
java | grpc__grpc-java | api/src/main/java/io/grpc/ManagedChannelProvider.java | {
"start": 1223,
"end": 1465
} | class ____. If
* exceptions may reasonably occur for implementation-specific reasons, implementations should
* generally handle the exception gracefully and return {@code false} from {@link #isAvailable()}.
*/
@Internal
public abstract | loading |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BulkDeleteUtils.java | {
"start": 1053,
"end": 2271
} | class ____ {
private BulkDeleteUtils() {
}
/**
* Preconditions for bulk delete paths.
* @param paths paths to delete.
* @param pageSize maximum number of paths to delete in a single operation.
* @param basePath base path for the delete operation.
*/
public static void validateBulkDeletePaths(Collection<Path> paths, int pageSize, Path basePath) {
requireNonNull(paths);
checkArgument(paths.size() <= pageSize,
"Number of paths (%d) is larger than the page size (%d)", paths.size(), pageSize);
paths.forEach(p -> {
checkArgument(p.isAbsolute(), "Path %s is not absolute", p);
checkArgument(validatePathIsUnderParent(p, basePath),
"Path %s is not under the base path %s", p, basePath);
});
}
/**
* Check if a given path is the base path or under the base path.
* @param p path to check.
* @param basePath base path.
* @return true if the given path is the base path or under the base path.
*/
public static boolean validatePathIsUnderParent(Path p, Path basePath) {
while (p != null) {
if (p.equals(basePath)) {
return true;
}
p = p.getParent();
}
return false;
}
}
| BulkDeleteUtils |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/EnumUtils.java | {
"start": 20270,
"end": 21022
} | enum ____.
* @since 3.2
*/
public static <E extends Enum<E>> EnumSet<E> processBitVectors(final Class<E> enumClass, final long... values) {
final EnumSet<E> results = EnumSet.noneOf(asEnum(enumClass));
final long[] lvalues = ArrayUtils.clone(Objects.requireNonNull(values, "values"));
ArrayUtils.reverse(lvalues);
stream(enumClass).forEach(constant -> {
final int block = constant.ordinal() / Long.SIZE;
if (block < lvalues.length && (lvalues[block] & 1L << constant.ordinal() % Long.SIZE) != 0) {
results.add(constant);
}
});
return results;
}
/**
* Returns a sequential ordered stream whose elements are the given class' | class |
java | apache__avro | lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftData.java | {
"start": 1980,
"end": 6006
} | class ____ extends GenericData {
static final String THRIFT_TYPE = "thrift";
static final String THRIFT_PROP = "thrift";
private static final ThriftData INSTANCE = new ThriftData();
protected ThriftData() {
}
/** Return the singleton instance. */
public static ThriftData get() {
return INSTANCE;
}
@Override
public DatumReader createDatumReader(Schema schema) {
return new ThriftDatumReader(schema, schema, this);
}
@Override
public DatumWriter createDatumWriter(Schema schema) {
return new ThriftDatumWriter(schema, this);
}
@Override
public void setField(Object r, String n, int pos, Object value) {
setField(r, n, pos, value, getRecordState(r, getSchema(r.getClass())));
}
@Override
public Object getField(Object r, String name, int pos) {
return getField(r, name, pos, getRecordState(r, getSchema(r.getClass())));
}
@Override
protected void setField(Object record, String name, int position, Object value, Object state) {
if (value == null && record instanceof TUnion)
return;
((TBase) record).setFieldValue(((TFieldIdEnum[]) state)[position], value);
}
@Override
protected Object getField(Object record, String name, int pos, Object state) {
TFieldIdEnum f = ((TFieldIdEnum[]) state)[pos];
TBase struct = (TBase) record;
if (struct.isSet(f))
return struct.getFieldValue(f);
return null;
}
private final Map<Schema, TFieldIdEnum[]> fieldCache = new ConcurrentHashMap<>();
@Override
@SuppressWarnings("unchecked")
protected Object getRecordState(Object r, Schema s) {
TFieldIdEnum[] fields = fieldCache.get(s);
if (fields == null) { // cache miss
fields = new TFieldIdEnum[s.getFields().size()];
Class c = r.getClass();
for (TFieldIdEnum f : ((Map<TFieldIdEnum, FieldMetaData>) FieldMetaData
.getStructMetaDataMap((Class<? extends TBase>) c)).keySet())
fields[s.getField(f.getFieldName()).pos()] = f;
fieldCache.put(s, fields); // update cache
}
return fields;
}
@Override
protected String getSchemaName(Object datum) {
// support implicit conversion from thrift's i16
// to avro INT for thrift's optional fields
if (datum instanceof Short)
return Schema.Type.INT.getName();
// support implicit conversion from thrift's byte
// to avro INT for thrift's optional fields
if (datum instanceof Byte)
return Schema.Type.INT.getName();
return super.getSchemaName(datum);
}
@Override
protected boolean isRecord(Object datum) {
return datum instanceof TBase;
}
@Override
protected boolean isEnum(Object datum) {
return datum instanceof TEnum;
}
@Override
protected Schema getEnumSchema(Object datum) {
return getSchema(datum.getClass());
}
@Override
// setFieldValue takes ByteBuffer but getFieldValue returns byte[]
protected boolean isBytes(Object datum) {
if (datum instanceof ByteBuffer)
return true;
if (datum == null)
return false;
Class c = datum.getClass();
return c.isArray() && c.getComponentType() == Byte.TYPE;
}
@Override
public Object newRecord(Object old, Schema schema) {
try {
Class c = ClassUtils.forName(SpecificData.getClassName(schema));
if (c == null)
return super.newRecord(old, schema); // punt to generic
if (c.isInstance(old))
return old; // reuse instance
return c.newInstance(); // create new instance
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
protected Schema getRecordSchema(Object record) {
return getSchema(record.getClass());
}
private final Map<Class, Schema> schemaCache = new ConcurrentHashMap<>();
/** Return a record schema given a thrift generated class. */
@SuppressWarnings("unchecked")
public Schema getSchema(Class c) {
Schema schema = schemaCache.get(c);
if (schema == null) { // cache miss
try {
if (TEnum.class.isAssignableFrom(c)) { // | ThriftData |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/convert/JdbcTypeConverterTest.java | {
"start": 894,
"end": 2052
} | class ____ {
@Test
@JiraKey(value = "HHH-12586")
public void testJava8TimeObjectsUsingJdbcSqlTypeDescriptors(EntityManagerFactoryScope scope) {
// Because some databases do not support millisecond values in timestamps, we clear it here.
// This will serve sufficient for our test to verify that the retrieved values match persisted.
LocalDateTime now = LocalDateTime.now().withNano( 0 );
// persist the record.
Integer rowId = scope.fromTransaction( entityManager -> {
JavaTimeBean javaTime = new JavaTimeBean();
javaTime.setLocalDate( now.toLocalDate() );
javaTime.setLocalTime( now.toLocalTime() );
javaTime.setLocalDateTime( now );
entityManager.persist( javaTime );
return javaTime.getId();
} );
// retrieve the record and verify values.
scope.inTransaction( entityManager -> {
final JavaTimeBean javaTime = entityManager.find( JavaTimeBean.class, rowId );
assertEquals( now.toLocalDate(), javaTime.getLocalDate() );
assertEquals( now.toLocalTime(), javaTime.getLocalTime() );
assertEquals( now, javaTime.getLocalDateTime() );
} );
}
@Entity(name = "JavaTimeBean")
public static | JdbcTypeConverterTest |
java | quarkusio__quarkus | integration-tests/logging-min-level-set/src/main/java/io/quarkus/it/logging/minlevel/set/above/LoggingMinLevelAbove.java | {
"start": 293,
"end": 766
} | class ____ {
static final Logger LOG = Logger.getLogger(LoggingMinLevelAbove.class);
@GET
@Path("/not-info")
@Produces(MediaType.TEXT_PLAIN)
public boolean isNotInfo() {
return !LOG.isInfoEnabled() && !LoggingWitness.loggedInfo("should not print", LOG);
}
@GET
@Path("/warn")
@Produces(MediaType.TEXT_PLAIN)
public boolean isWarn() {
return LoggingWitness.loggedWarn("warn message", LOG);
}
}
| LoggingMinLevelAbove |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/PojoContainingTuple.java | {
"start": 971,
"end": 1567
} | class ____ {
public int someInt;
public String someString = "abc";
public Tuple2<Long, Long> theTuple;
public PojoContainingTuple() {}
public PojoContainingTuple(int i, long l1, long l2) {
someInt = i;
theTuple = new Tuple2<Long, Long>(l1, l2);
}
@Override
public boolean equals(Object obj) {
if (obj instanceof PojoContainingTuple) {
PojoContainingTuple other = (PojoContainingTuple) obj;
return someInt == other.someInt && theTuple.equals(other.theTuple);
}
return false;
}
}
| PojoContainingTuple |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/CompressDecompressTester.java | {
"start": 4598,
"end": 4793
} | interface ____<T extends Compressor, E extends Decompressor> {
ImmutableList<TesterPair<T, E>> filterOnAssumeWhat(
ImmutableList<TesterPair<T, E>> pairs);
}
public | PreAssertionTester |
java | quarkusio__quarkus | integration-tests/main/src/test/java/io/quarkus/it/main/QuarkusTestNestedWithTestProfileTestCase.java | {
"start": 1738,
"end": 2227
} | class ____ implements QuarkusTestProfile {
@Override
public Map<String, String> getConfigOverrides() {
return Collections.singletonMap("quarkus.http.test-port", "" + TEST_PORT_FROM_PROFILE);
}
@Override
public String[] commandLineParameters() {
return new String[] { "OuterProfile" };
}
@Override
public boolean runMainMethod() {
return true;
}
}
public static | OuterProfile |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ChunkEndpointBuilderFactory.java | {
"start": 13849,
"end": 14971
} | class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final ChunkHeaderNameBuilder INSTANCE = new ChunkHeaderNameBuilder();
/**
* A URI for the template resource to use instead of the endpoint
* configured.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code ChunkResourceUri}.
*/
public String chunkResourceUri() {
return "ChunkResourceUri";
}
/**
* The template to use instead of the endpoint configured.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code ChunkTemplate}.
*/
public String chunkTemplate() {
return "ChunkTemplate";
}
}
static ChunkEndpointBuilder endpointBuilder(String componentName, String path) {
| ChunkHeaderNameBuilder |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java | {
"start": 46349,
"end": 48448
} | class ____ implements
EventHandler<JobHistoryEvent> {
private String workflowId;
private String workflowName;
private String workflowNodeName;
private String workflowAdjacencies;
private String workflowTags;
private Boolean assertBoolean;
public JobSubmittedEventHandler(String workflowId, String workflowName,
String workflowNodeName, String workflowAdjacencies,
String workflowTags) {
this.workflowId = workflowId;
this.workflowName = workflowName;
this.workflowNodeName = workflowNodeName;
this.workflowAdjacencies = workflowAdjacencies;
this.workflowTags = workflowTags;
assertBoolean = null;
}
@Override
public void handle(JobHistoryEvent jhEvent) {
if (jhEvent.getType() != EventType.JOB_SUBMITTED) {
return;
}
JobSubmittedEvent jsEvent = (JobSubmittedEvent) jhEvent.getHistoryEvent();
if (!workflowId.equals(jsEvent.getWorkflowId())) {
setAssertValue(false);
return;
}
if (!workflowName.equals(jsEvent.getWorkflowName())) {
setAssertValue(false);
return;
}
if (!workflowNodeName.equals(jsEvent.getWorkflowNodeName())) {
setAssertValue(false);
return;
}
String[] wrkflowAdj = workflowAdjacencies.split(" ");
String[] jswrkflowAdj = jsEvent.getWorkflowAdjacencies().split(" ");
Arrays.sort(wrkflowAdj);
Arrays.sort(jswrkflowAdj);
if (!Arrays.equals(wrkflowAdj, jswrkflowAdj)) {
setAssertValue(false);
return;
}
if (!workflowTags.equals(jsEvent.getWorkflowTags())) {
setAssertValue(false);
return;
}
setAssertValue(true);
}
private synchronized void setAssertValue(Boolean bool) {
assertBoolean = bool;
notify();
}
public synchronized boolean getAssertValue() throws InterruptedException {
while (assertBoolean == null) {
wait();
}
return assertBoolean;
}
}
private static | JobSubmittedEventHandler |
java | apache__hadoop | hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/OSSDataBlocks.java | {
"start": 10020,
"end": 10435
} | class ____ {@link #enterState(DestState, DestState)} to
* manage the state machine.
* @return the stream
* @throws IOException trouble
*/
BlockUploadData startUpload() throws IOException {
LOG.debug("Start datablock[{}] upload", index);
enterState(DestState.Writing, DestState.Upload);
return null;
}
/**
* Enter the closed state.
* @return true if the | calls |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/LazyCollectionDetachWithCollectionInDefaultFetchGroupFalseTest.java | {
"start": 1968,
"end": 3575
} | class ____ {
private static final int CHILDREN_SIZE = 10;
private Long parentID;
@BeforeEach
public void prepare(SessionFactoryScope scope) {
scope.inTransaction( s -> {
Parent parent = new Parent();
parent.setChildren( new ArrayList<>() );
for ( int i = 0; i < CHILDREN_SIZE; i++ ) {
Child child = new Child();
child.parent = parent;
s.persist( child );
}
s.persist( parent );
parentID = parent.id;
} );
}
@Test
public void testDetach(SessionFactoryScope scope) {
scope.inTransaction( s -> {
Parent parent = s.find( Parent.class, parentID );
assertThat( parent, notNullValue() );
assertThat( parent, not( instanceOf( HibernateProxy.class ) ) );
assertFalse( isPropertyInitialized( parent, "children" ) );
checkDirtyTracking( parent );
s.detach( parent );
s.flush();
} );
}
@Test
public void testDetachProxy(SessionFactoryScope scope) {
scope.inTransaction( s -> {
Parent parent = s.getReference( Parent.class, parentID );
checkDirtyTracking( parent );
s.detach( parent );
s.flush();
} );
}
@Test
public void testRefresh(SessionFactoryScope scope) {
scope.inTransaction( s -> {
Parent parent = s.find( Parent.class, parentID );
assertThat( parent, notNullValue() );
assertThat( parent, not( instanceOf( HibernateProxy.class ) ) );
assertFalse( isPropertyInitialized( parent, "children" ) );
checkDirtyTracking( parent );
s.refresh( parent );
s.flush();
} );
}
@Entity(name = "Parent")
@Table(name = "PARENT")
static | LazyCollectionDetachWithCollectionInDefaultFetchGroupFalseTest |
java | quarkusio__quarkus | core/deployment/src/test/java/io/quarkus/deployment/util/JandexUtilTest.java | {
"start": 7664,
"end": 7738
} | interface ____ extends Single<Double> {
}
public | ExtendsSimpleNoParam |
java | spring-projects__spring-security | aspects/src/test/java/org/springframework/security/authorization/method/aspectj/PreAuthorizeAspectTests.java | {
"start": 5179,
"end": 5285
} | interface ____ {
@PreAuthorize("hasRole('Y')")
void securedMethod();
}
static | AnotherSecuredInterface |
java | apache__rocketmq | common/src/main/java/org/apache/rocketmq/common/filter/impl/Operator.java | {
"start": 859,
"end": 2408
} | class ____ extends Op {
public static final Operator LEFTPARENTHESIS = new Operator("(", 30, false);
public static final Operator RIGHTPARENTHESIS = new Operator(")", 30, false);
public static final Operator AND = new Operator("&&", 20, true);
public static final Operator OR = new Operator("||", 15, true);
private int priority;
private boolean compareable;
private Operator(String symbol, int priority, boolean compareable) {
super(symbol);
this.priority = priority;
this.compareable = compareable;
}
public static Operator createOperator(String operator) {
if (LEFTPARENTHESIS.getSymbol().equals(operator))
return LEFTPARENTHESIS;
else if (RIGHTPARENTHESIS.getSymbol().equals(operator))
return RIGHTPARENTHESIS;
else if (AND.getSymbol().equals(operator))
return AND;
else if (OR.getSymbol().equals(operator))
return OR;
else
throw new IllegalArgumentException("unsupport operator " + operator);
}
public int getPriority() {
return priority;
}
public boolean isCompareable() {
return compareable;
}
public int compare(Operator operator) {
if (this.priority > operator.priority)
return 1;
else if (this.priority == operator.priority)
return 0;
else
return -1;
}
public boolean isSpecifiedOp(String operator) {
return this.getSymbol().equals(operator);
}
}
| Operator |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/block/OracleBlockTest8.java | {
"start": 977,
"end": 3361
} | class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = "DECLARE" +
" daily_order_total NUMBER(12,2);" +
" weekly_order_total NUMBER(12,2); " +
" monthly_order_total NUMBER(12,2);" +
"BEGIN" +
" COMMIT; -- end previous transaction\n" +
" SET TRANSACTION READ ONLY NAME 'Calculate Order Totals';" +
"" +
" SELECT SUM (order_total)" +
" INTO daily_order_total" +
" FROM orders" +
" WHERE order_date = SYSDATE;" +
"" +
" SELECT SUM (order_total)" +
" INTO weekly_order_total" +
" FROM orders" +
" WHERE order_date = SYSDATE - 7;" +
"" +
" SELECT SUM (order_total)" +
" INTO monthly_order_total" +
" FROM orders" +
" WHERE order_date = SYSDATE - 30;" +
"" +
" COMMIT; -- ends read-only transaction\n" +
"END;";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
print(statementList);
assertEquals(1, statementList.size());
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
for (SQLStatement statement : statementList) {
statement.accept(visitor);
}
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("orders")));
assertEquals(2, visitor.getColumns().size());
assertEquals(1, visitor.getConditions().size());
assertTrue(visitor.getColumns().contains(new TableStat.Column("orders", "order_total")));
assertTrue(visitor.getColumns().contains(new TableStat.Column("orders", "order_date")));
}
}
| OracleBlockTest8 |
java | apache__dubbo | dubbo-test/dubbo-test-modules/src/test/java/org/apache/dubbo/dependency/FileTest.java | {
"start": 1521,
"end": 28841
} | class ____ {
private static final List<Pattern> ignoredModules = new LinkedList<>();
private static final List<Pattern> ignoredArtifacts = new LinkedList<>();
private static final List<Pattern> ignoredModulesInDubboAll = new LinkedList<>();
private static final List<Pattern> ignoredModulesInDubboAllShade = new LinkedList<>();
static {
ignoredModules.add(Pattern.compile("dubbo-apache-release"));
ignoredModules.add(Pattern.compile("dubbo-all-shaded"));
ignoredModules.add(Pattern.compile("dubbo-dependencies-all"));
ignoredModules.add(Pattern.compile("dubbo-parent"));
ignoredModules.add(Pattern.compile("dubbo-core-spi"));
ignoredModules.add(Pattern.compile("dubbo-demo.*"));
ignoredModules.add(Pattern.compile("dubbo-annotation-processor"));
ignoredModules.add(Pattern.compile("dubbo-config-spring6"));
ignoredModules.add(Pattern.compile("dubbo-spring6-security"));
ignoredModules.add(Pattern.compile("dubbo-spring-boot-3-autoconfigure"));
ignoredModules.add(Pattern.compile("dubbo-plugin-loom.*"));
ignoredModules.add(Pattern.compile("dubbo-mutiny.*"));
ignoredModules.add(Pattern.compile("dubbo-mcp"));
ignoredArtifacts.add(Pattern.compile("dubbo-demo.*"));
ignoredArtifacts.add(Pattern.compile("dubbo-test.*"));
ignoredArtifacts.add(Pattern.compile("dubbo-annotation-processor"));
ignoredModulesInDubboAll.add(Pattern.compile("dubbo"));
ignoredModulesInDubboAll.add(Pattern.compile("dubbo-all-shaded"));
ignoredModulesInDubboAll.add(Pattern.compile("dubbo-bom"));
ignoredModulesInDubboAll.add(Pattern.compile("dubbo-compiler"));
ignoredModulesInDubboAll.add(Pattern.compile("dubbo-dependencies.*"));
ignoredModulesInDubboAll.add(Pattern.compile("dubbo-distribution"));
ignoredModulesInDubboAll.add(Pattern.compile("dubbo-metadata-processor"));
ignoredModulesInDubboAll.add(Pattern.compile("dubbo-native.*"));
ignoredModulesInDubboAll.add(Pattern.compile("dubbo-config-spring6.*"));
ignoredModulesInDubboAll.add(Pattern.compile(".*spring-boot.*"));
ignoredModulesInDubboAll.add(Pattern.compile("dubbo-maven-plugin"));
ignoredModulesInDubboAllShade.add(Pattern.compile("dubbo-spring6-security"));
ignoredModulesInDubboAllShade.add(Pattern.compile("dubbo-plugin-loom"));
ignoredModulesInDubboAllShade.add(Pattern.compile("dubbo-mcp"));
ignoredModulesInDubboAllShade.add(Pattern.compile("dubbo-mutiny"));
}
@Test
void checkDubboBom() throws DocumentException {
File baseFile = getBaseFile();
List<File> poms = new LinkedList<>();
readPoms(baseFile, poms);
SAXReader reader = new SAXReader();
List<String> artifactIds = poms.stream()
.map(f -> {
try {
return reader.read(f);
} catch (DocumentException e) {
throw new RuntimeException(e);
}
})
.map(Document::getRootElement)
.map(doc -> doc.elementText("artifactId"))
.sorted()
.collect(Collectors.toList());
String dubboBomPath = "dubbo-distribution" + File.separator + "dubbo-bom" + File.separator + "pom.xml";
Document dubboBom = reader.read(new File(getBaseFile(), dubboBomPath));
List<String> artifactIdsInDubboBom = dubboBom
.getRootElement()
.element("dependencyManagement")
.element("dependencies")
.elements("dependency")
.stream()
.map(ele -> ele.elementText("artifactId"))
.collect(Collectors.toList());
List<String> expectedArtifactIds = new LinkedList<>(artifactIds);
expectedArtifactIds.removeAll(artifactIdsInDubboBom);
expectedArtifactIds.removeIf(artifactId -> ignoredModules.stream()
.anyMatch(pattern -> pattern.matcher(artifactId).matches()));
Assertions.assertTrue(
expectedArtifactIds.isEmpty(),
"Newly created modules must be added to dubbo-bom. Found modules: " + expectedArtifactIds);
}
@Test
void checkArtifacts() throws DocumentException, IOException {
File baseFile = getBaseFile();
List<File> poms = new LinkedList<>();
readPoms(baseFile, poms);
SAXReader reader = new SAXReader();
List<String> artifactIds = poms.stream()
.map(f -> {
try {
return reader.read(f);
} catch (DocumentException e) {
throw new RuntimeException(e);
}
})
.map(Document::getRootElement)
.map(doc -> doc.elementText("artifactId"))
.sorted()
.collect(Collectors.toList());
List<String> artifactIdsInRoot = IOUtils.readLines(
this.getClass()
.getClassLoader()
.getResource(CommonConstants.DUBBO_VERSIONS_KEY + "/.artifacts")
.openStream(),
StandardCharsets.UTF_8);
artifactIdsInRoot.removeIf(s -> s.startsWith("#"));
List<String> expectedArtifactIds = new LinkedList<>(artifactIds);
expectedArtifactIds.removeAll(artifactIdsInRoot);
expectedArtifactIds.removeIf(artifactId -> ignoredArtifacts.stream()
.anyMatch(pattern -> pattern.matcher(artifactId).matches()));
Assertions.assertTrue(
expectedArtifactIds.isEmpty(),
"Newly created modules must be added to .artifacts (in project root). Found modules: "
+ expectedArtifactIds);
}
@Test
void checkDubboDependenciesAll() throws DocumentException {
File baseFile = getBaseFile();
List<File> poms = new LinkedList<>();
readPoms(baseFile, poms);
SAXReader reader = new SAXReader();
List<String> artifactIds = poms.stream()
.map(f -> {
try {
return reader.read(f);
} catch (DocumentException e) {
throw new RuntimeException(e);
}
})
.map(Document::getRootElement)
.filter(doc -> !Objects.equals("pom", doc.elementText("packaging")))
.map(doc -> doc.elementText("artifactId"))
.sorted()
.collect(Collectors.toList());
String dubboDependenciesAllPath =
"dubbo-test" + File.separator + "dubbo-dependencies-all" + File.separator + "pom.xml";
Document dubboDependenciesAll = reader.read(new File(getBaseFile(), dubboDependenciesAllPath));
List<String> artifactIdsInDubboDependenciesAll =
dubboDependenciesAll.getRootElement().element("dependencies").elements("dependency").stream()
.map(ele -> ele.elementText("artifactId"))
.collect(Collectors.toList());
List<String> expectedArtifactIds = new LinkedList<>(artifactIds);
expectedArtifactIds.removeAll(artifactIdsInDubboDependenciesAll);
expectedArtifactIds.removeIf(artifactId -> ignoredModules.stream()
.anyMatch(pattern -> pattern.matcher(artifactId).matches()));
Assertions.assertTrue(
expectedArtifactIds.isEmpty(),
"Newly created modules must be added to dubbo-dependencies-all. Found modules: " + expectedArtifactIds);
}
@Test
void checkDubboAllDependencies() throws DocumentException {
File baseFile = getBaseFile();
List<File> poms = new LinkedList<>();
readPoms(baseFile, poms);
SAXReader reader = new SAXReader();
List<String> artifactIds = poms.stream()
.map(f -> {
try {
return reader.read(f);
} catch (DocumentException e) {
throw new RuntimeException(e);
}
})
.map(Document::getRootElement)
.map(doc -> doc.elementText("artifactId"))
.sorted()
.collect(Collectors.toList());
Assertions.assertEquals(poms.size(), artifactIds.size());
List<String> deployedArtifactIds = poms.stream()
.map(f -> {
try {
return reader.read(f);
} catch (DocumentException e) {
throw new RuntimeException(e);
}
})
.map(Document::getRootElement)
.filter(doc -> !Objects.equals("pom", doc.elementText("packaging")))
.filter(doc -> Objects.isNull(doc.element("properties"))
|| (!Objects.equals("true", doc.element("properties").elementText("skip_maven_deploy"))
&& !Objects.equals(
"true", doc.element("properties").elementText("maven.deploy.skip"))))
.map(doc -> doc.elementText("artifactId"))
.sorted()
.collect(Collectors.toList());
String dubboAllPath = "dubbo-distribution" + File.separator + "dubbo-all" + File.separator + "pom.xml";
Document dubboAll = reader.read(new File(getBaseFile(), dubboAllPath));
List<String> artifactIdsInDubboAll =
dubboAll.getRootElement().element("dependencies").elements("dependency").stream()
.map(ele -> ele.elementText("artifactId"))
.collect(Collectors.toList());
List<String> expectedArtifactIds = new LinkedList<>(deployedArtifactIds);
expectedArtifactIds.removeAll(artifactIdsInDubboAll);
expectedArtifactIds.removeIf(artifactId -> ignoredModules.stream()
.anyMatch(pattern -> pattern.matcher(artifactId).matches()));
expectedArtifactIds.removeIf(artifactId -> ignoredModulesInDubboAll.stream()
.anyMatch(pattern -> pattern.matcher(artifactId).matches()));
Assertions.assertTrue(
expectedArtifactIds.isEmpty(),
"Newly created modules must be added to dubbo-all(dubbo-distribution" + File.separator + "dubbo-all"
+ File.separator + "pom.xml). Found modules: " + expectedArtifactIds);
List<String> unexpectedArtifactIds = new LinkedList<>(artifactIdsInDubboAll);
unexpectedArtifactIds.removeIf(artifactId -> !artifactIds.contains(artifactId));
unexpectedArtifactIds.removeAll(deployedArtifactIds);
Assertions.assertTrue(
unexpectedArtifactIds.isEmpty(),
"Undeploy dependencies should not be added to dubbo-all(dubbo-distribution" + File.separator
+ "dubbo-all" + File.separator + "pom.xml). Found modules: " + unexpectedArtifactIds);
unexpectedArtifactIds = new LinkedList<>();
for (String artifactId : artifactIdsInDubboAll) {
if (!artifactIds.contains(artifactId)) {
continue;
}
if (ignoredModules.stream()
.anyMatch(pattern -> pattern.matcher(artifactId).matches())) {
unexpectedArtifactIds.add(artifactId);
}
if (ignoredModulesInDubboAll.stream()
.anyMatch(pattern -> pattern.matcher(artifactId).matches())) {
unexpectedArtifactIds.add(artifactId);
}
}
Assertions.assertTrue(
unexpectedArtifactIds.isEmpty(),
"Unexpected dependencies should not be added to dubbo-all(dubbo-distribution" + File.separator
+ "dubbo-all" + File.separator + "pom.xml). Found modules: " + unexpectedArtifactIds);
}
@Test
void checkDubboAllShade() throws DocumentException {
File baseFile = getBaseFile();
List<File> poms = new LinkedList<>();
readPoms(baseFile, poms);
SAXReader reader = new SAXReader();
List<String> artifactIds = poms.stream()
.map(f -> {
try {
return reader.read(f);
} catch (DocumentException e) {
throw new RuntimeException(e);
}
})
.map(Document::getRootElement)
.map(doc -> doc.elementText("artifactId"))
.sorted()
.collect(Collectors.toList());
Assertions.assertEquals(poms.size(), artifactIds.size());
List<String> deployedArtifactIds = poms.stream()
.map(f -> {
try {
return reader.read(f);
} catch (DocumentException e) {
throw new RuntimeException(e);
}
})
.map(Document::getRootElement)
.filter(doc -> Objects.isNull(doc.element("properties"))
|| (!Objects.equals("true", doc.element("properties").elementText("skip_maven_deploy"))
&& !Objects.equals(
"true", doc.element("properties").elementText("maven.deploy.skip"))))
.filter(doc -> !Objects.equals("pom", doc.elementText("packaging")))
.map(doc -> doc.elementText("artifactId"))
.sorted()
.collect(Collectors.toList());
String dubboAllPath = "dubbo-distribution" + File.separator + "dubbo-all" + File.separator + "pom.xml";
Document dubboAll = reader.read(new File(getBaseFile(), dubboAllPath));
List<String> artifactIdsInDubboAll =
dubboAll.getRootElement().element("build").element("plugins").elements("plugin").stream()
.filter(ele -> ele.elementText("artifactId").equals("maven-shade-plugin"))
.map(ele -> ele.element("executions"))
.map(ele -> ele.elements("execution"))
.flatMap(Collection::stream)
.filter(ele -> ele.elementText("phase").equals("package"))
.map(ele -> ele.element("configuration"))
.map(ele -> ele.element("artifactSet"))
.map(ele -> ele.element("includes"))
.map(ele -> ele.elements("include"))
.flatMap(Collection::stream)
.map(Element::getText)
.filter(artifactId -> artifactId.startsWith("org.apache.dubbo:"))
.map(artifactId -> artifactId.substring("org.apache.dubbo:".length()))
.collect(Collectors.toList());
List<String> expectedArtifactIds = new LinkedList<>(deployedArtifactIds);
expectedArtifactIds.removeAll(artifactIdsInDubboAll);
expectedArtifactIds.removeIf(artifactId -> ignoredModules.stream()
.anyMatch(pattern -> pattern.matcher(artifactId).matches()));
expectedArtifactIds.removeIf(artifactId -> ignoredModulesInDubboAll.stream()
.anyMatch(pattern -> pattern.matcher(artifactId).matches()));
Assertions.assertTrue(
expectedArtifactIds.isEmpty(),
"Newly created modules must be added to dubbo-all (dubbo-distribution" + File.separator + "dubbo-all"
+ File.separator + "pom.xml in shade plugin). Found modules: " + expectedArtifactIds);
List<String> unexpectedArtifactIds = new LinkedList<>(artifactIdsInDubboAll);
unexpectedArtifactIds.removeIf(artifactId -> !artifactIds.contains(artifactId));
unexpectedArtifactIds.removeAll(deployedArtifactIds);
Assertions.assertTrue(
unexpectedArtifactIds.isEmpty(),
"Undeploy dependencies should not be added to dubbo-all (dubbo-distribution" + File.separator
+ "dubbo-all" + File.separator + "pom.xml in shade plugin). Found modules: "
+ unexpectedArtifactIds);
unexpectedArtifactIds = new LinkedList<>();
for (String artifactId : artifactIdsInDubboAll) {
if (!artifactIds.contains(artifactId)) {
continue;
}
if (ignoredModulesInDubboAllShade.stream()
.anyMatch(pattern -> pattern.matcher(artifactId).matches())) {
continue;
}
if (ignoredModules.stream()
.anyMatch(pattern -> pattern.matcher(artifactId).matches())) {
unexpectedArtifactIds.add(artifactId);
}
if (ignoredModulesInDubboAll.stream()
.anyMatch(pattern -> pattern.matcher(artifactId).matches())) {
unexpectedArtifactIds.add(artifactId);
}
}
Assertions.assertTrue(
unexpectedArtifactIds.isEmpty(),
"Unexpected dependencies should not be added to dubbo-all (dubbo-distribution" + File.separator
+ "dubbo-all" + File.separator + "pom.xml in shade plugin). Found modules: "
+ unexpectedArtifactIds);
}
// Guards the dubbo-all-shaded distribution pom: every deployed module must be listed
// in its maven-shade-plugin <includes>, and no stale or explicitly-ignored module may
// remain there.
@Test
void checkDubboAllNettyShade() throws DocumentException {
    File baseFile = getBaseFile();
    // Gather every module pom.xml under the repository root.
    List<File> poms = new LinkedList<>();
    readPoms(baseFile, poms);
    SAXReader reader = new SAXReader();
    // artifactId of every module found in the repo.
    List<String> artifactIds = poms.stream()
            .map(f -> {
                try {
                    return reader.read(f);
                } catch (DocumentException e) {
                    throw new RuntimeException(e);
                }
            })
            .map(Document::getRootElement)
            .map(doc -> doc.elementText("artifactId"))
            .sorted()
            .collect(Collectors.toList());
    Assertions.assertEquals(poms.size(), artifactIds.size());
    // artifactIds of modules that are actually deployed: deployment not skipped via
    // skip_maven_deploy / maven.deploy.skip, and not pom-packaged aggregators.
    List<String> deployedArtifactIds = poms.stream()
            .map(f -> {
                try {
                    return reader.read(f);
                } catch (DocumentException e) {
                    throw new RuntimeException(e);
                }
            })
            .map(Document::getRootElement)
            .filter(doc -> Objects.isNull(doc.element("properties"))
                    || (!Objects.equals("true", doc.element("properties").elementText("skip_maven_deploy"))
                            && !Objects.equals(
                                    "true", doc.element("properties").elementText("maven.deploy.skip"))))
            .filter(doc -> !Objects.equals("pom", doc.elementText("packaging")))
            .map(doc -> doc.elementText("artifactId"))
            .sorted()
            .collect(Collectors.toList());
    String dubboAllPath = "dubbo-distribution" + File.separator + "dubbo-all-shaded" + File.separator + "pom.xml";
    Document dubboAll = reader.read(new File(getBaseFile(), dubboAllPath));
    // org.apache.dubbo artifacts included by the shade plugin's package-phase execution.
    List<String> artifactIdsInDubboAll =
            dubboAll.getRootElement().element("build").element("plugins").elements("plugin").stream()
                    .filter(ele -> ele.elementText("artifactId").equals("maven-shade-plugin"))
                    .map(ele -> ele.element("executions"))
                    .map(ele -> ele.elements("execution"))
                    .flatMap(Collection::stream)
                    .filter(ele -> ele.elementText("phase").equals("package"))
                    .map(ele -> ele.element("configuration"))
                    .map(ele -> ele.element("artifactSet"))
                    .map(ele -> ele.element("includes"))
                    .map(ele -> ele.elements("include"))
                    .flatMap(Collection::stream)
                    .map(Element::getText)
                    .filter(artifactId -> artifactId.startsWith("org.apache.dubbo:"))
                    .map(artifactId -> artifactId.substring("org.apache.dubbo:".length()))
                    .collect(Collectors.toList());
    // Check 1: every deployed module that is not on an ignore list must be included.
    List<String> expectedArtifactIds = new LinkedList<>(deployedArtifactIds);
    expectedArtifactIds.removeAll(artifactIdsInDubboAll);
    expectedArtifactIds.removeIf(artifactId -> ignoredModules.stream()
            .anyMatch(pattern -> pattern.matcher(artifactId).matches()));
    expectedArtifactIds.removeIf(artifactId -> ignoredModulesInDubboAll.stream()
            .anyMatch(pattern -> pattern.matcher(artifactId).matches()));
    Assertions.assertTrue(
            expectedArtifactIds.isEmpty(),
            "Newly created modules must be added to dubbo-all-shaded (dubbo-distribution" + File.separator
                    + "dubbo-all-shaded" + File.separator + "pom.xml in shade plugin). Found modules: "
                    + expectedArtifactIds);
    // Check 2: the includes must not reference modules that exist but are never deployed.
    List<String> unexpectedArtifactIds = new LinkedList<>(artifactIdsInDubboAll);
    unexpectedArtifactIds.removeIf(artifactId -> !artifactIds.contains(artifactId));
    unexpectedArtifactIds.removeAll(deployedArtifactIds);
    Assertions.assertTrue(
            unexpectedArtifactIds.isEmpty(),
            "Undeploy dependencies should not be added to dubbo-all-shaded (dubbo-distribution" + File.separator
                    + "dubbo-all-shaded" + File.separator + "pom.xml in shade plugin). Found modules: "
                    + unexpectedArtifactIds);
    // Check 3: the includes must not reference modules matched by an ignore pattern,
    // unless explicitly whitelisted for the shaded distribution.
    unexpectedArtifactIds = new LinkedList<>();
    for (String artifactId : artifactIdsInDubboAll) {
        if (!artifactIds.contains(artifactId)) {
            continue;
        }
        if (ignoredModulesInDubboAllShade.stream()
                .anyMatch(pattern -> pattern.matcher(artifactId).matches())) {
            continue;
        }
        if (ignoredModules.stream()
                .anyMatch(pattern -> pattern.matcher(artifactId).matches())) {
            unexpectedArtifactIds.add(artifactId);
        }
        if (ignoredModulesInDubboAll.stream()
                .anyMatch(pattern -> pattern.matcher(artifactId).matches())) {
            unexpectedArtifactIds.add(artifactId);
        }
    }
    Assertions.assertTrue(
            unexpectedArtifactIds.isEmpty(),
            "Unexpected dependencies should not be added to dubbo-all-shaded (dubbo-distribution" + File.separator
                    + "dubbo-all-shaded" + File.separator + "pom.xml in shade plugin). Found modules: "
                    + unexpectedArtifactIds);
}
@Test
void checkDubboTransform() throws DocumentException {
File baseFile = getBaseFile();
List<String> spis = new LinkedList<>();
readSPI(baseFile, spis);
String dubboAllPath = "dubbo-distribution" + File.separator + "dubbo-all" + File.separator + "pom.xml";
String dubboAllShadedPath =
"dubbo-distribution" + File.separator + "dubbo-all-shaded" + File.separator + "pom.xml";
String dubboCoreSPIPath = "dubbo-distribution" + File.separator + "dubbo-core-spi" + File.separator + "pom.xml";
SAXReader reader = new SAXReader();
Document dubboAll = reader.read(new File(baseFile, dubboAllPath));
Document dubboAllShaded = reader.read(new File(baseFile, dubboAllShadedPath));
Document dubboCoreSPI = reader.read(new File(baseFile, dubboCoreSPIPath));
List<String> transformsInDubboAll =
dubboAll.getRootElement().element("build").element("plugins").elements("plugin").stream()
.filter(ele -> ele.elementText("artifactId").equals("maven-shade-plugin"))
.map(ele -> ele.element("executions"))
.map(ele -> ele.elements("execution"))
.flatMap(Collection::stream)
.filter(ele -> ele.elementText("phase").equals("package"))
.map(ele -> ele.element("configuration"))
.map(ele -> ele.element("transformers"))
.map(ele -> ele.elements("transformer"))
.flatMap(Collection::stream)
.map(ele -> ele.elementText("resource"))
.map(String::trim)
.map(resource -> resource.substring(resource.lastIndexOf("/") + 1))
.collect(Collectors.toList());
List<String> transformsInDubboAllShaded =
dubboAllShaded.getRootElement().element("build").element("plugins").elements("plugin").stream()
.filter(ele -> ele.elementText("artifactId").equals("maven-shade-plugin"))
.map(ele -> ele.element("executions"))
.map(ele -> ele.elements("execution"))
.flatMap(Collection::stream)
.filter(ele -> ele.elementText("phase").equals("package"))
.map(ele -> ele.element("configuration"))
.map(ele -> ele.element("transformers"))
.map(ele -> ele.elements("transformer"))
.flatMap(Collection::stream)
.map(ele -> ele.elementText("resource"))
.map(String::trim)
.map(resource -> resource.substring(resource.lastIndexOf("/") + 1))
.collect(Collectors.toList());
List<String> transformsInDubboCoreSPI =
dubboCoreSPI.getRootElement().element("build").element("plugins").elements("plugin").stream()
.filter(ele -> ele.elementText("artifactId").equals("maven-shade-plugin"))
.map(ele -> ele.element("executions"))
.map(ele -> ele.elements("execution"))
.flatMap(Collection::stream)
.filter(ele -> ele.elementText("phase").equals("package"))
.map(ele -> ele.element("configuration"))
.map(ele -> ele.element("transformers"))
.map(ele -> ele.elements("transformer"))
.flatMap(Collection::stream)
.map(ele -> ele.elementText("resource"))
.map(String::trim)
.map(resource -> resource.substring(resource.lastIndexOf("/") + 1))
.collect(Collectors.toList());
List<String> expectedSpis = new LinkedList<>(spis);
expectedSpis.removeAll(transformsInDubboAll);
Assertions.assertTrue(
expectedSpis.isEmpty(),
"Newly created SPI | FileTest |
java | elastic__elasticsearch | modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentIdQueryBuilder.java | {
"start": 1541,
"end": 8077
} | class ____ extends AbstractQueryBuilder<ParentIdQueryBuilder> {
public static final String NAME = "parent_id";
/**
* The default value for ignore_unmapped.
*/
public static final boolean DEFAULT_IGNORE_UNMAPPED = false;
private static final ParseField ID_FIELD = new ParseField("id");
private static final ParseField TYPE_FIELD = new ParseField("type");
private static final ParseField IGNORE_UNMAPPED_FIELD = new ParseField("ignore_unmapped");
private final String type;
private final String id;
private boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED;
// Builds a parent_id query for the given child relation name and parent document id.
// Both values are mandatory; requireValue throws when either is null.
public ParentIdQueryBuilder(String type, String id) {
    this.type = requireValue(type, "[" + NAME + "] requires '" + TYPE_FIELD.getPreferredName() + "' field");
    this.id = requireValue(id, "[" + NAME + "] requires '" + ID_FIELD.getPreferredName() + "' field");
}
/**
 * Read from a stream (wire deserialization counterpart of {@link #doWriteTo}).
 */
public ParentIdQueryBuilder(StreamInput in) throws IOException {
    super(in);
    // Field order must match doWriteTo exactly.
    type = in.readString();
    id = in.readString();
    ignoreUnmapped = in.readBoolean();
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
    // Serialization order must stay in sync with the StreamInput constructor.
    out.writeString(type);
    out.writeString(id);
    out.writeBoolean(ignoreUnmapped);
}
/** Returns the child relation name this query matches against. */
public String getType() {
    return type;
}
/** Returns the parent document id this query matches against. */
public String getId() {
    return id;
}
/**
 * Sets whether the query builder should ignore unmapped types (and run a
 * {@link MatchNoDocsQuery} in place of this query) or throw an exception if
 * the type is unmapped.
 */
public ParentIdQueryBuilder ignoreUnmapped(boolean ignoreUnmapped) {
    this.ignoreUnmapped = ignoreUnmapped;
    return this; // fluent setter
}
/**
 * Gets whether the query builder will ignore unmapped types (and run a
 * {@link MatchNoDocsQuery} in place of this query) or throw an exception if
 * the type is unmapped.
 */
public boolean ignoreUnmapped() {
    return ignoreUnmapped;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
    builder.startObject(NAME);
    builder.field(TYPE_FIELD.getPreferredName(), type);
    builder.field(ID_FIELD.getPreferredName(), id);
    // Only emit ignore_unmapped when it differs from the default, keeping output minimal.
    if (ignoreUnmapped != DEFAULT_IGNORE_UNMAPPED) {
        builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped);
    }
    boostAndQueryNameToXContent(builder);
    builder.endObject();
}
// Parses the XContent form of a parent_id query, e.g.
// { "parent_id": { "type": "child", "id": "1", "ignore_unmapped": false } }.
public static ParentIdQueryBuilder fromXContent(XContentParser parser) throws IOException {
    float boost = AbstractQueryBuilder.DEFAULT_BOOST;
    String type = null;
    String id = null;
    String queryName = null;
    String currentFieldName = null;
    boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token.isValue()) {
            if (TYPE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                type = parser.text();
            } else if (ID_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                id = parser.text();
            } else if (IGNORE_UNMAPPED_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                ignoreUnmapped = parser.booleanValue();
            } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                boost = parser.floatValue();
            } else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                queryName = parser.text();
            } else {
                // Unknown scalar field inside parent_id.
                throw new ParsingException(parser.getTokenLocation(), "[parent_id] query does not support [" + currentFieldName + "]");
            }
        } else {
            // Arrays and nested objects are not allowed inside parent_id.
            throw new ParsingException(parser.getTokenLocation(), "[parent_id] query does not support [" + currentFieldName + "]");
        }
    }
    // The constructor validates that both type and id were provided.
    ParentIdQueryBuilder queryBuilder = new ParentIdQueryBuilder(type, id);
    queryBuilder.queryName(queryName);
    queryBuilder.boost(boost);
    queryBuilder.ignoreUnmapped(ignoreUnmapped);
    return queryBuilder;
}
@Override
protected Query doToQuery(SearchExecutionContext context) throws IOException {
    // Joining queries are expensive; respect the cluster-wide opt-out setting.
    if (context.allowExpensiveQueries() == false) {
        throw new ElasticsearchException(
            "[joining] queries cannot be executed when '" + ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false."
        );
    }
    Joiner joiner = Joiner.getJoiner(context);
    if (joiner == null) {
        // No join field mapped in this index.
        if (ignoreUnmapped) {
            return new MatchNoDocsQuery();
        } else {
            final String indexName = context.getIndexSettings().getIndex().getName();
            throw new QueryShardException(context, "[" + NAME + "] no join field found for index [" + indexName + "]");
        }
    }
    if (joiner.childTypeExists(type) == false) {
        // The requested child relation is not part of the join mapping.
        if (ignoreUnmapped) {
            return new MatchNoDocsQuery();
        } else {
            throw new QueryShardException(context, "[" + NAME + "] no relation found for child [" + type + "]");
        }
    }
    return new BooleanQuery.Builder().add(new TermQuery(new Term(joiner.parentJoinField(type), id)), BooleanClause.Occur.MUST)
        // Need to take child type into account, otherwise a child doc of different type with the same id could match
        .add(new TermQuery(new Term(joiner.getJoinField(), type)), BooleanClause.Occur.FILTER)
        .build();
}
// Equality on the three fields that define this query; boost/queryName are
// compared by the superclass.
@Override
protected boolean doEquals(ParentIdQueryBuilder that) {
    // Compare the primitive directly: Objects.equals(boolean, boolean) would
    // autobox both operands for no benefit.
    return Objects.equals(type, that.type) && Objects.equals(id, that.id) && ignoreUnmapped == that.ignoreUnmapped;
}
@Override
protected int doHashCode() {
    // Must hash exactly the fields compared in doEquals.
    return Objects.hash(type, id, ignoreUnmapped);
}
@Override
public String getWriteableName() {
    // Registry key used for wire serialization and XContent dispatch.
    return NAME;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
    // Supported on every transport version.
    return TransportVersion.zero();
}
}
| ParentIdQueryBuilder |
java | quarkusio__quarkus | integration-tests/gradle/src/test/java/io/quarkus/gradle/devmode/CompileOnlyExtensionDependencyDevModeTest.java | {
"start": 297,
"end": 997
} | class ____ extends QuarkusDevGradleTestBase {
@Override
protected String projectDirectoryName() {
    // Name of the test project directory used by the Gradle dev-mode harness.
    return "compile-only-extension-dependency";
}
@Override
protected String[] buildArguments() {
    // Clean build, then start dev mode via the 'run' task.
    return new String[] { "clean", "run" };
}
// Verifies live reload: edit the resource source while dev mode is running and
// expect the HTTP response to pick up the change.
protected void testDevMode() throws Exception {
    assertThat(getHttpResponse("/hello")).contains("hello");
    // Replace the hard-coded response with a random token so the assertion
    // can only pass after a successful hot reload.
    final String uuid = UUID.randomUUID().toString();
    replace("src/main/java/org/acme/ExampleResource.java",
            ImmutableMap.of("return \"hello\";", "return \"" + uuid + "\";"));
    assertUpdatedResponseContains("/hello", uuid);
}
}
| CompileOnlyExtensionDependencyDevModeTest |
java | apache__flink | flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/operators/wmassigners/ProcTimeMiniBatchAssignerOperatorTest.java | {
"start": 1421,
"end": 5248
} | class ____ extends WatermarkAssignerOperatorTestBase {
// End-to-end check of ProcTimeMiniBatchAssignerOperator with a 100ms mini-batch
// interval: elements are forwarded unchanged and in order, upstream watermarks are
// ignored, and a mini-batch "barrier" watermark is emitted each time processing
// time crosses a 100ms boundary.
@Test
void testMiniBatchAssignerOperator() throws Exception {
    final ProcTimeMiniBatchAssignerOperator operator =
            new ProcTimeMiniBatchAssignerOperator(100);
    OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
            new OneInputStreamOperatorTestHarness<>(operator);
    long currentTime = 0;
    testHarness.open();
    testHarness.processElement(new StreamRecord<>(GenericRowData.of(1L)));
    testHarness.processElement(new StreamRecord<>(GenericRowData.of(2L)));
    testHarness.processWatermark(new Watermark(2)); // this watermark should be ignored
    testHarness.processElement(new StreamRecord<>(GenericRowData.of(3L)));
    testHarness.processElement(new StreamRecord<>(GenericRowData.of(4L)));
    {
        // Drain output while advancing processing time in 10ms steps:
        // elements 1..4 must appear first (no watermark between them),
        // followed by the first mini-batch watermark at t=100.
        ConcurrentLinkedQueue<Object> output = testHarness.getOutput();
        long currentElement = 1L;
        long lastWatermark = 0L;
        while (true) {
            if (output.size() > 0) {
                Object next = output.poll();
                assertThat(next).isNotNull();
                Tuple2<Long, Long> update =
                        validateElement(next, currentElement, lastWatermark);
                long nextElementValue = update.f0;
                lastWatermark = update.f1;
                if (next instanceof Watermark) {
                    assertThat(lastWatermark).isEqualTo(100);
                    break;
                } else {
                    // Elements arrive strictly in sequence with no watermark yet.
                    assertThat(nextElementValue - 1).isEqualTo(currentElement);
                    currentElement += 1;
                    assertThat(lastWatermark).isEqualTo(0);
                }
            } else {
                // Nothing emitted yet: tick processing time forward.
                currentTime = currentTime + 10;
                testHarness.setProcessingTime(currentTime);
            }
        }
        output.clear();
    }
    testHarness.processElement(new StreamRecord<>(GenericRowData.of(4L)));
    testHarness.processElement(new StreamRecord<>(GenericRowData.of(5L)));
    testHarness.processElement(new StreamRecord<>(GenericRowData.of(6L)));
    testHarness.processElement(new StreamRecord<>(GenericRowData.of(7L)));
    testHarness.processElement(new StreamRecord<>(GenericRowData.of(8L)));
    {
        // Same pattern for the second batch: elements 4..8, then the t=200 watermark.
        ConcurrentLinkedQueue<Object> output = testHarness.getOutput();
        long currentElement = 4L;
        long lastWatermark = 100L;
        while (true) {
            if (output.size() > 0) {
                Object next = output.poll();
                assertThat(next).isNotNull();
                Tuple2<Long, Long> update =
                        validateElement(next, currentElement, lastWatermark);
                long nextElementValue = update.f0;
                lastWatermark = update.f1;
                if (next instanceof Watermark) {
                    assertThat(lastWatermark).isEqualTo(200);
                    break;
                } else {
                    assertThat(nextElementValue - 1).isEqualTo(currentElement);
                    currentElement += 1;
                    assertThat(lastWatermark).isEqualTo(100);
                }
            } else {
                currentTime = currentTime + 10;
                testHarness.setProcessingTime(currentTime);
            }
        }
        output.clear();
    }
    // A Long.MAX_VALUE watermark signals end of input and must be forwarded as-is.
    testHarness.processWatermark(new Watermark(Long.MAX_VALUE));
    assertThat(((Watermark) testHarness.getOutput().poll()).getTimestamp())
            .isEqualTo(Long.MAX_VALUE);
}
}
| ProcTimeMiniBatchAssignerOperatorTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/bug/Issue2049.java | {
"start": 467,
"end": 1064
} | class ____ extends TestCase {
// Regression test for issue #2049: comma-join table aliases must survive
// parsing of a MySQL SELECT over two tables.
public void test_0() throws Exception {
    final String sql = "select * from emp a,dmp b;";
    List<SQLStatement> statements = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
    SQLSelectStatement select = (SQLSelectStatement) statements.get(0);
    // A comma join is parsed as a SQLJoinTableSource in the FROM clause.
    SQLJoinTableSource join =
            (SQLJoinTableSource) select.getSelect().getQueryBlock().getFrom();
    assertEquals("a", join.getLeft().getAlias());
    assertEquals("b", join.getRight().getAlias());
}
}
| Issue2049 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.