language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-projects__spring-framework | spring-orm/src/main/java/org/springframework/orm/jpa/AbstractEntityManagerFactoryBean.java | {
"start": 3526,
"end": 4213
} | class ____ implements the
* {@link org.springframework.dao.support.PersistenceExceptionTranslator}
* interface, as autodetected by Spring's
* {@link org.springframework.dao.annotation.PersistenceExceptionTranslationPostProcessor},
* for AOP-based translation of native exceptions to Spring DataAccessExceptions.
* Hence, the presence of, for example, LocalEntityManagerFactoryBean automatically enables
* a PersistenceExceptionTranslationPostProcessor to translate JPA exceptions.
*
* @author Juergen Hoeller
* @author Rod Johnson
* @since 2.0
* @see LocalEntityManagerFactoryBean
* @see LocalContainerEntityManagerFactoryBean
*/
@SuppressWarnings("serial")
public abstract | also |
java | junit-team__junit5 | junit-jupiter-api/src/testFixtures/java/org/junit/jupiter/api/extension/ExtensionContextParameterResolver.java | {
"start": 364,
"end": 865
} | class ____ implements ParameterResolver {
@Override
public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
throws ParameterResolutionException {
return ExtensionContext.class.equals(parameterContext.getParameter().getType());
}
@Override
public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
throws ParameterResolutionException {
return extensionContext;
}
}
| ExtensionContextParameterResolver |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderCryptoExtension.java | {
"start": 18067,
"end": 19991
} | class ____
extends CachingKeyProvider
implements KeyProviderCryptoExtension.CryptoExtension {
private KeyProvider kp;
private KeyVersion kv;
private EncryptedKeyVersion ekv;
public DummyCachingCryptoExtensionKeyProvider(KeyProvider keyProvider,
long keyTimeoutMillis,
long currKeyTimeoutMillis) {
super(keyProvider, keyTimeoutMillis, currKeyTimeoutMillis);
conf = new Configuration();
try {
this.kp = new UserProvider.Factory().createProvider(
new URI("user:///"), conf);
this.kv = new KeyVersion(ENCRYPTION_KEY_NAME,
"dummyCachingFakeKey@1", new byte[16]);
this.ekv = new EncryptedKeyVersion(ENCRYPTION_KEY_NAME,
"dummyCachingFakeKey@1", new byte[16], kv);
} catch (URISyntaxException e) {
fail(e.getMessage());
} catch (IOException e) {
fail(e.getMessage());
}
}
@Override
public void warmUpEncryptedKeys(String... keyNames) throws IOException {
}
@Override
public void drain(String keyName) {
}
@Override
public EncryptedKeyVersion generateEncryptedKey(String encryptionKeyName)
throws IOException, GeneralSecurityException {
return this.ekv;
}
@Override
public KeyVersion decryptEncryptedKey(
EncryptedKeyVersion encryptedKeyVersion)
throws IOException, GeneralSecurityException {
return kv;
}
@Override
public EncryptedKeyVersion reencryptEncryptedKey(EncryptedKeyVersion ekv)
throws IOException, GeneralSecurityException {
return ekv;
}
@Override
public void reencryptEncryptedKeys(List<EncryptedKeyVersion> ekvs)
throws IOException, GeneralSecurityException {
}
}
}
| DummyCachingCryptoExtensionKeyProvider |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/beans/factory/xml/support/CustomNamespaceHandlerTests.java | {
"start": 3199,
"end": 7231
} | class ____ {
private static final Class<?> CLASS = CustomNamespaceHandlerTests.class;
private static final String CLASSNAME = CLASS.getSimpleName();
private static final String FQ_PATH = "org/springframework/beans/factory/xml/support";
private static final String NS_PROPS = format("%s/%s.properties", FQ_PATH, CLASSNAME);
private static final String NS_XML = format("%s/%s-context.xml", FQ_PATH, CLASSNAME);
private static final String TEST_XSD = format("%s/%s.xsd", FQ_PATH, CLASSNAME);
private GenericApplicationContext beanFactory;
@BeforeEach
void setUp() {
NamespaceHandlerResolver resolver = new DefaultNamespaceHandlerResolver(CLASS.getClassLoader(), NS_PROPS);
this.beanFactory = new GenericApplicationContext();
XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(this.beanFactory);
reader.setNamespaceHandlerResolver(resolver);
reader.setValidationMode(XmlBeanDefinitionReader.VALIDATION_XSD);
reader.setEntityResolver(new DummySchemaResolver());
reader.loadBeanDefinitions(getResource());
this.beanFactory.refresh();
}
@Test
void testSimpleParser() {
TestBean bean = (TestBean) this.beanFactory.getBean("testBean");
assertTestBean(bean);
}
@Test
void testSimpleDecorator() {
TestBean bean = (TestBean) this.beanFactory.getBean("customisedTestBean");
assertTestBean(bean);
}
@Test
void testProxyingDecorator() {
ITestBean bean = (ITestBean) this.beanFactory.getBean("debuggingTestBean");
assertTestBean(bean);
assertThat(AopUtils.isAopProxy(bean)).isTrue();
Advisor[] advisors = ((Advised) bean).getAdvisors();
assertThat(advisors).as("Incorrect number of advisors").hasSize(1);
assertThat(advisors[0].getAdvice().getClass()).as("Incorrect advice class").isEqualTo(DebugInterceptor.class);
}
@Test
void testProxyingDecoratorNoInstance() {
String[] beanNames = this.beanFactory.getBeanNamesForType(ApplicationListener.class);
assertThat(Arrays.asList(beanNames)).contains("debuggingTestBeanNoInstance");
assertThat(this.beanFactory.getType("debuggingTestBeanNoInstance")).isEqualTo(ApplicationListener.class);
assertThatExceptionOfType(BeanCreationException.class).isThrownBy(() ->
this.beanFactory.getBean("debuggingTestBeanNoInstance"))
.havingRootCause()
.isInstanceOf(BeanInstantiationException.class);
}
@Test
void testChainedDecorators() {
ITestBean bean = (ITestBean) this.beanFactory.getBean("chainedTestBean");
assertTestBean(bean);
assertThat(AopUtils.isAopProxy(bean)).isTrue();
Advisor[] advisors = ((Advised) bean).getAdvisors();
assertThat(advisors).as("Incorrect number of advisors").hasSize(2);
assertThat(advisors[0].getAdvice().getClass()).as("Incorrect advice class").isEqualTo(DebugInterceptor.class);
assertThat(advisors[1].getAdvice().getClass()).as("Incorrect advice class").isEqualTo(NopInterceptor.class);
}
@Test
void testDecorationViaAttribute() {
BeanDefinition beanDefinition = this.beanFactory.getBeanDefinition("decorateWithAttribute");
assertThat(beanDefinition.getAttribute("objectName")).isEqualTo("foo");
}
@Test // SPR-2728
public void testCustomElementNestedWithinUtilList() {
List<?> things = (List<?>) this.beanFactory.getBean("list.of.things");
assertThat(things).isNotNull();
assertThat(things).hasSize(2);
}
@Test // SPR-2728
public void testCustomElementNestedWithinUtilSet() {
Set<?> things = (Set<?>) this.beanFactory.getBean("set.of.things");
assertThat(things).isNotNull();
assertThat(things).hasSize(2);
}
@Test // SPR-2728
public void testCustomElementNestedWithinUtilMap() {
Map<?, ?> things = (Map<?, ?>) this.beanFactory.getBean("map.of.things");
assertThat(things).isNotNull();
assertThat(things).hasSize(2);
}
private void assertTestBean(ITestBean bean) {
assertThat(bean.getName()).as("Invalid name").isEqualTo("Rob Harrop");
assertThat(bean.getAge()).as("Invalid age").isEqualTo(23);
}
private Resource getResource() {
return new ClassPathResource(NS_XML);
}
private final | CustomNamespaceHandlerTests |
java | mapstruct__mapstruct | integrationtest/src/test/resources/sealedSubclassTest/src/main/java/org/mapstruct/itest/sealedsubclass/CarDto.java | {
"start": 206,
"end": 413
} | class ____ extends VehicleDto {
private boolean manual;
public boolean isManual() {
return manual;
}
public void setManual(boolean manual) {
this.manual = manual;
}
}
| CarDto |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/MetadataUtils.java | {
"start": 339,
"end": 1314
} | class ____ {
public static final String RESERVED_PREFIX = "_";
public static final String RESERVED_METADATA_KEY = RESERVED_PREFIX + "reserved";
public static final String DEPRECATED_METADATA_KEY = RESERVED_PREFIX + "deprecated";
public static final String DEPRECATED_REASON_METADATA_KEY = RESERVED_PREFIX + "deprecated_reason";
public static final Map<String, Object> DEFAULT_RESERVED_METADATA = Map.of(RESERVED_METADATA_KEY, true);
private MetadataUtils() {}
public static boolean containsReservedMetadata(Map<String, Object> metadata) {
for (String key : metadata.keySet()) {
if (key.startsWith(RESERVED_PREFIX)) {
return true;
}
}
return false;
}
public static Map<String, Object> getDeprecatedReservedMetadata(String reason) {
return Map.of(RESERVED_METADATA_KEY, true, DEPRECATED_METADATA_KEY, true, DEPRECATED_REASON_METADATA_KEY, reason);
}
}
| MetadataUtils |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/threadsafety/ThreadSafety.java | {
"start": 30876,
"end": 33616
} | enum ____ are identified by the enclosing constant
// declaration
return sym.owner.getSimpleName().toString();
}
// anonymous classes have an empty name, but a recognizable superclass or interface
// e.g. refer to `new Runnable() { ... }` as "Runnable"
Type superType = state.getTypes().supertype(sym.type);
if (state.getTypes().isSameType(superType, state.getSymtab().objectType)) {
superType = Iterables.getFirst(state.getTypes().interfaces(sym.type), superType);
}
return superType.tsym.getSimpleName().toString();
}
public Violation checkInstantiation(
Collection<TypeVariableSymbol> typeParameters, Collection<Type> typeArguments) {
return Streams.zip(
typeParameters.stream(),
typeArguments.stream(),
(sym, type) -> checkInstantiation(sym, ImmutableList.of(type)))
.filter(Violation::isPresent)
.findFirst()
.orElse(Violation.absent());
}
/** Checks that any thread-safe type parameters are instantiated with thread-safe types. */
public Violation checkInstantiation(
TypeVariableSymbol typeParameter, Collection<Type> instantiations) {
if (!hasThreadSafeTypeParameterAnnotation(typeParameter)) {
return Violation.absent();
}
for (Type instantiation : instantiations) {
Violation info =
isThreadSafeType(
/* allowContainerTypeParameters= */ true,
/* containerTypeParameters= */ ImmutableSet.of(),
instantiation);
if (info.isPresent()) {
return info.plus(
String.format(
"instantiation of '%s' is %s", typeParameter, purpose.mutableOrNotThreadSafe()));
}
}
return Violation.absent();
}
/** Checks the instantiation of any thread-safe type parameters in the current invocation. */
public Violation checkInvocation(Type methodType, Symbol symbol) {
if (methodType == null) {
return Violation.absent();
}
List<TypeVariableSymbol> typeParameters = symbol.getTypeParameters();
if (typeParameters.stream().noneMatch(this::hasThreadSafeTypeParameterAnnotation)) {
// fast path
return Violation.absent();
}
ImmutableListMultimap<TypeVariableSymbol, Type> instantiation =
ASTHelpers.getTypeSubstitution(methodType, symbol);
for (TypeVariableSymbol typeParameter : typeParameters) {
Violation violation = checkInstantiation(typeParameter, instantiation.get(typeParameter));
if (violation.isPresent()) {
return violation;
}
}
return Violation.absent();
}
private static final Supplier<Name> CONTAINEROF =
VisitorState.memoize(state -> state.getName("containerOf"));
}
| constants |
java | google__guice | core/test/com/google/inject/spi/InjectionPointTest.java | {
"start": 1777,
"end": 1936
} | class ____ extends TestCase {
public @Inject @Named("a") String foo;
public @Inject void bar(@Named("b") String param) {}
public static | InjectionPointTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/OneToManySubSelectFetchAndInheritanceTest.java | {
"start": 6737,
"end": 7083
} | class ____ {
@Id
@GeneratedValue
Integer id;
String name;
@ManyToOne
Parent parent;
public SomeOther() {
}
public SomeOther(String name) {
this.name = name;
}
public Integer getId() {
return id;
}
public String getName() {
return name;
}
public Parent getParent() {
return parent;
}
}
}
| SomeOther |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/collection/dereferenced/UnversionedCascadeDereferencedCollectionTest.java | {
"start": 1165,
"end": 11188
} | class ____ extends AbstractDereferencedCollectionTest {
@Test
@JiraKey(value = "HHH-9777")
public void testMergeNullCollection(SessionFactoryScope scope) {
UnversionedCascadeOne unversionedCascadeOne = new UnversionedCascadeOne();
scope.inTransaction(
session -> {
assertNull( unversionedCascadeOne.getManies() );
session.persist( unversionedCascadeOne );
assertNull( unversionedCascadeOne.getManies() );
EntityEntry eeOne = getEntityEntry( session, unversionedCascadeOne );
assertNull( eeOne.getLoadedValue( "manies" ) );
session.flush();
assertNull( unversionedCascadeOne.getManies() );
assertNull( eeOne.getLoadedValue( "manies" ) );
}
);
final String role = UnversionedCascadeOne.class.getName() + ".manies";
scope.inTransaction(
session -> {
UnversionedCascadeOne one = session.merge( unversionedCascadeOne );
assertThat( one.getManies().size() ).isEqualTo( 0 );
EntityEntry eeOne = getEntityEntry( session, one );
AbstractPersistentCollection maniesEEOneStateOrig = (AbstractPersistentCollection) eeOne.getLoadedValue(
"manies" );
assertNotNull( maniesEEOneStateOrig );
// Ensure maniesEEOneStateOrig has role, key, and session properly defined (even though one.manies == null)
assertEquals( role, maniesEEOneStateOrig.getRole() );
assertEquals( one.getId(), maniesEEOneStateOrig.getKey() );
assertSame( session, maniesEEOneStateOrig.getSession() );
// Ensure there is a CollectionEntry for maniesEEOneStateOrig and that the role, persister, and key are set properly.
CollectionEntry ceManiesOrig = getCollectionEntry( session, maniesEEOneStateOrig );
assertNotNull( ceManiesOrig );
assertEquals( role, ceManiesOrig.getRole() );
assertSame(
scope.getSessionFactory().getRuntimeMetamodels()
.getMappingMetamodel()
.getCollectionDescriptor(role),
ceManiesOrig.getLoadedPersister()
);
assertEquals( one.getId(), ceManiesOrig.getKey() );
session.flush();
// Ensure the same EntityEntry is being used.
assertSame( eeOne, getEntityEntry( session, one ) );
assertThat( one.getManies().size() ).isEqualTo( 0 );
}
);
}
@Test
@JiraKey(value = "HHH-9777")
public void testGetAndNullifyCollection(SessionFactoryScope scope) {
UnversionedCascadeOne unversionedCascadeOne = new UnversionedCascadeOne();
scope.inTransaction(
session -> {
assertNull( unversionedCascadeOne.getManies() );
session.persist( unversionedCascadeOne );
assertNull( unversionedCascadeOne.getManies() );
EntityEntry eeOne = getEntityEntry( session, unversionedCascadeOne );
assertNull( eeOne.getLoadedValue( "manies" ) );
session.flush();
assertNull( unversionedCascadeOne.getManies() );
assertNull( eeOne.getLoadedValue( "manies" ) );
}
);
final String role = UnversionedCascadeOne.class.getName() + ".manies";
scope.inTransaction(
session -> {
UnversionedCascadeOne one = session.get(
UnversionedCascadeOne.class,
unversionedCascadeOne.getId()
);
// When returned by Session.get(), one.getManies() will return a PersistentCollection;
// the EntityEntry loaded state should contain the same PersistentCollection.
EntityEntry eeOne = getEntityEntry( session, one );
assertNotNull( one.getManies() );
AbstractPersistentCollection maniesEEOneStateOrig = (AbstractPersistentCollection) eeOne.getLoadedValue(
"manies" );
assertSame( one.getManies(), maniesEEOneStateOrig );
// Ensure maniesEEOneStateOrig has role, key, and session properly defined (even though one.manies == null)
assertEquals( role, maniesEEOneStateOrig.getRole() );
assertEquals( one.getId(), maniesEEOneStateOrig.getKey() );
assertSame( session, maniesEEOneStateOrig.getSession() );
// Ensure there is a CollectionEntry for maniesEEOneStateOrig and that the role, persister, and key are set properly.
CollectionEntry ceManies = getCollectionEntry( session, maniesEEOneStateOrig );
assertNotNull( ceManies );
assertEquals( role, ceManies.getRole() );
assertSame(
scope.getSessionFactory().getRuntimeMetamodels()
.getMappingMetamodel()
.getCollectionDescriptor(role),
ceManies.getLoadedPersister()
);
assertEquals( one.getId(), ceManies.getKey() );
// nullify collection
one.setManies( null );
session.flush();
// Ensure the same EntityEntry is being used.
assertSame( eeOne, getEntityEntry( session, one ) );
// Ensure one.getManies() is still null.
assertNull( one.getManies() );
// Ensure CollectionEntry for maniesEEOneStateOrig is no longer in the PersistenceContext.
assertNull( getCollectionEntry( session, maniesEEOneStateOrig ) );
// Ensure the original CollectionEntry has role, persister, and key set to null.
assertNull( ceManies.getRole() );
assertNull( ceManies.getLoadedPersister() );
assertNull( ceManies.getKey() );
// Ensure the PersistentCollection (that was previously returned by eeOne.getLoadedState())
// has key and role set to null.
assertNull( maniesEEOneStateOrig.getKey() );
assertNull( maniesEEOneStateOrig.getRole() );
// Ensure eeOne.getLoadedState() returns null for collection after flush.
assertNull( eeOne.getLoadedValue( "manies" ) );
// Ensure the session in maniesEEOneStateOrig has been unset.
assertNull( maniesEEOneStateOrig.getSession() );
}
);
}
@Test
@JiraKey(value = "HHH-9777")
public void testGetAndReplaceCollection(SessionFactoryScope scope) {
UnversionedCascadeOne unversionedCascadeOne = new UnversionedCascadeOne();
scope.inTransaction(
session -> {
assertNull( unversionedCascadeOne.getManies() );
session.persist( unversionedCascadeOne );
assertNull( unversionedCascadeOne.getManies() );
EntityEntry eeOne = getEntityEntry( session, unversionedCascadeOne );
assertNull( eeOne.getLoadedValue( "manies" ) );
session.flush();
assertNull( unversionedCascadeOne.getManies() );
assertNull( eeOne.getLoadedValue( "manies" ) );
}
);
final String role = UnversionedCascadeOne.class.getName() + ".manies";
scope.inTransaction(
session -> {
UnversionedCascadeOne one = session.get(
UnversionedCascadeOne.class,
unversionedCascadeOne.getId()
);
// When returned by Session.get(), one.getManies() will return a PersistentCollection;
// the EntityEntry loaded state should contain the same PersistentCollection.
EntityEntry eeOne = getEntityEntry( session, one );
assertNotNull( one.getManies() );
AbstractPersistentCollection maniesEEOneStateOrig = (AbstractPersistentCollection) eeOne.getLoadedValue(
"manies" );
assertSame( one.getManies(), maniesEEOneStateOrig );
// Ensure maniesEEOneStateOrig has role, key, and session properly defined (even though one.manies == null)
assertEquals( role, maniesEEOneStateOrig.getRole() );
assertEquals( one.getId(), maniesEEOneStateOrig.getKey() );
assertSame( session, maniesEEOneStateOrig.getSession() );
// Ensure there is a CollectionEntry for maniesEEOneStateOrig and that the role, persister, and key are set properly.
CollectionEntry ceManiesOrig = getCollectionEntry( session, maniesEEOneStateOrig );
assertNotNull( ceManiesOrig );
assertEquals( role, ceManiesOrig.getRole() );
assertSame(
scope.getSessionFactory().getRuntimeMetamodels()
.getMappingMetamodel()
.getCollectionDescriptor(role),
ceManiesOrig.getLoadedPersister()
);
assertEquals( one.getId(), ceManiesOrig.getKey() );
// replace collection
one.setManies( new HashSet<>() );
session.flush();
// Ensure the same EntityEntry is being used.
assertSame( eeOne, getEntityEntry( session, one ) );
// Ensure CollectionEntry for maniesEEOneStateOrig is no longer in the PersistenceContext.
assertNull( getCollectionEntry( session, maniesEEOneStateOrig ) );
// Ensure the original CollectionEntry has role, persister, and key set to null.
assertNull( ceManiesOrig.getRole() );
assertNull( ceManiesOrig.getLoadedPersister() );
assertNull( ceManiesOrig.getKey() );
// Ensure the PersistentCollection (that was previously returned by eeOne.getLoadedState())
// has key and role set to null.
assertNull( maniesEEOneStateOrig.getKey() );
assertNull( maniesEEOneStateOrig.getRole() );
// one.getManies() should be "wrapped" by a PersistentCollection now; role, key, and session should be set properly.
assertTrue( PersistentCollection.class.isInstance( one.getManies() ) );
assertEquals( role, ( (PersistentCollection) one.getManies() ).getRole() );
assertEquals( one.getId(), ( (PersistentCollection) one.getManies() ).getKey() );
assertSame( session, ( (AbstractPersistentCollection) one.getManies() ).getSession() );
// Ensure eeOne.getLoadedState() contains the new collection.
assertSame( one.getManies(), eeOne.getLoadedValue( "manies" ) );
// Ensure there is a new CollectionEntry for the new collection and that role, persister, and key are set properly.
CollectionEntry ceManiesAfterReplace = getCollectionEntry(
session,
(PersistentCollection) one.getManies()
);
assertNotNull( ceManiesAfterReplace );
assertEquals( role, ceManiesAfterReplace.getRole() );
assertSame(
scope.getSessionFactory().getRuntimeMetamodels()
.getMappingMetamodel()
.getCollectionDescriptor(role),
ceManiesAfterReplace.getLoadedPersister()
);
assertEquals( one.getId(), ceManiesAfterReplace.getKey() );
// Ensure the session in maniesEEOneStateOrig has been unset.
assertNull( maniesEEOneStateOrig.getSession() );
}
);
}
}
| UnversionedCascadeDereferencedCollectionTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ReferenceEqualityTest.java | {
"start": 5287,
"end": 5598
} | class ____ {
boolean f(Optional<Integer> a, Optional<Integer> b) {
return a == b || (a.equals(b));
}
}
""")
.addOutputLines(
"out/Test.java",
"""
import com.google.common.base.Optional;
| Test |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportAddIndexBlockAction.java | {
"start": 2320,
"end": 5999
} | class ____ extends TransportMasterNodeAction<AddIndexBlockRequest, AddIndexBlockResponse> {
public static final ActionType<AddIndexBlockResponse> TYPE = new ActionType<>("indices:admin/block/add");
private static final Logger logger = LogManager.getLogger(TransportAddIndexBlockAction.class);
private final MetadataIndexStateService indexStateService;
private final ProjectResolver projectResolver;
private final IndexNameExpressionResolver indexNameExpressionResolver;
private final DestructiveOperations destructiveOperations;
@Inject
public TransportAddIndexBlockAction(
TransportService transportService,
ClusterService clusterService,
ThreadPool threadPool,
MetadataIndexStateService indexStateService,
ActionFilters actionFilters,
ProjectResolver projectResolver,
IndexNameExpressionResolver indexNameExpressionResolver,
DestructiveOperations destructiveOperations
) {
super(
TYPE.name(),
transportService,
clusterService,
threadPool,
actionFilters,
AddIndexBlockRequest::new,
AddIndexBlockResponse::new,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
this.indexStateService = indexStateService;
this.projectResolver = projectResolver;
this.indexNameExpressionResolver = indexNameExpressionResolver;
this.destructiveOperations = destructiveOperations;
}
@Override
protected void doExecute(Task task, AddIndexBlockRequest request, ActionListener<AddIndexBlockResponse> listener) {
destructiveOperations.failDestructive(request.indices());
super.doExecute(task, request, listener);
}
@Override
protected ClusterBlockException checkBlock(AddIndexBlockRequest request, ClusterState state) {
final ProjectMetadata projectMetadata = projectResolver.getProjectMetadata(state);
if (request.getBlock().getBlock().levels().contains(ClusterBlockLevel.METADATA_WRITE)
&& state.blocks().global(projectMetadata.id(), ClusterBlockLevel.METADATA_WRITE).isEmpty()) {
return null;
}
return state.blocks()
.indicesBlockedException(
projectMetadata.id(),
ClusterBlockLevel.METADATA_WRITE,
indexNameExpressionResolver.concreteIndexNames(projectMetadata, request)
);
}
@Override
protected void masterOperation(
final Task task,
final AddIndexBlockRequest request,
final ClusterState state,
final ActionListener<AddIndexBlockResponse> listener
) throws Exception {
final Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request);
if (concreteIndices == null || concreteIndices.length == 0) {
listener.onResponse(new AddIndexBlockResponse(true, false, Collections.emptyList()));
return;
}
indexStateService.addIndexBlock(
new AddIndexBlockClusterStateUpdateRequest(
request.masterNodeTimeout(),
request.ackTimeout(),
projectResolver.getProjectId(),
request.getBlock(),
request.markVerified(),
task.getId(),
concreteIndices
),
listener.delegateResponse((delegatedListener, t) -> {
logger.debug(() -> "failed to mark indices as readonly [" + Arrays.toString(concreteIndices) + "]", t);
delegatedListener.onFailure(t);
})
);
}
}
| TransportAddIndexBlockAction |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/language/XPathFunctionTest.java | {
"start": 1273,
"end": 3909
} | class ____ extends ContextTestSupport {
protected MockEndpoint x;
protected MockEndpoint y;
protected MockEndpoint z;
protected MockEndpoint end;
@Test
public void testCheckHeader() throws Exception {
String body = "<one/>";
x.expectedBodiesReceived(body);
// The SpringChoiceTest.java can't setup the header by Spring configure
// file
// x.expectedHeaderReceived("name", "a");
expectsMessageCount(0, y, z);
sendMessage("bar", body);
assertMockEndpointsSatisfied();
}
@Test
public void testCheckBody() throws Exception {
String body = "<two/>";
y.expectedBodiesReceived(body);
expectsMessageCount(0, x, z);
sendMessage("cheese", body);
assertMockEndpointsSatisfied();
}
@Test
public void testSetXpathProperty() throws Exception {
String body
= "<soapenv:Body xmlns:ns=\"http://myNamespace\" xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\">"
+ "<ns:Addresses> <Address>address1</Address>"
+ " <Address>address2</Address> <Address>address3</Address>"
+ " <Address>address4</Address> </ns:Addresses> </soapenv:Body>";
end.reset();
end.expectedMessageCount(1);
template.sendBody("direct:setProperty", body);
assertMockEndpointsSatisfied();
Exchange exchange = end.getExchanges().get(0);
NodeList nodeList = exchange.getProperty("Addresses", NodeList.class);
assertNotNull(nodeList, "The node list should not be null");
}
protected void sendMessage(final Object headerValue, final Object body) {
template.sendBodyAndHeader("direct:start", body, "foo", headerValue);
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
x = getMockEndpoint("mock:x");
y = getMockEndpoint("mock:y");
z = getMockEndpoint("mock:z");
end = getMockEndpoint("mock:end");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// START SNIPPET: ex
from("direct:start").choice().when().xpath("in:header('foo') = 'bar'").to("mock:x").when()
.xpath("in:body() = '<two/>'").to("mock:y").otherwise().to("mock:z");
// END SNIPPET: ex
from("direct:setProperty").setProperty("Addresses").xpath("//Address", NodeList.class).to("mock:end");
}
};
}
}
| XPathFunctionTest |
java | elastic__elasticsearch | x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDoc.java | {
"start": 1723,
"end": 8072
} | class ____ extends MonitoringDoc {
private static final ToXContent.MapParams CLUSTER_STATS_PARAMS = new ToXContent.MapParams(
Collections.singletonMap(
"metric",
ClusterState.Metric.VERSION + "," + ClusterState.Metric.MASTER_NODE + "," + ClusterState.Metric.NODES
)
);
public static final String TYPE = "cluster_stats";
protected static final String SETTING_DISPLAY_NAME = "cluster.metadata.display_name";
private final String clusterName;
private final String version;
private final License license;
private final boolean apmIndicesExist;
private final List<XPackFeatureUsage> usages;
private final ClusterStatsResponse clusterStats;
private final ClusterState clusterState;
private final ClusterHealthStatus status;
private final boolean clusterNeedsTLSEnabled;
ClusterStatsMonitoringDoc(
final String cluster,
final long timestamp,
final long intervalMillis,
final MonitoringDoc.Node node,
final String clusterName,
final String version,
final ClusterHealthStatus status,
@Nullable final License license,
final boolean apmIndicesExist,
@Nullable final List<XPackFeatureUsage> usages,
@Nullable final ClusterStatsResponse clusterStats,
@Nullable final ClusterState clusterState,
final boolean clusterNeedsTLSEnabled
) {
super(cluster, timestamp, intervalMillis, node, MonitoredSystem.ES, TYPE, null);
this.clusterName = Objects.requireNonNull(clusterName);
this.version = Objects.requireNonNull(version);
this.status = Objects.requireNonNull(status);
this.license = license;
this.apmIndicesExist = apmIndicesExist;
this.usages = usages;
this.clusterStats = clusterStats;
this.clusterState = clusterState;
this.clusterNeedsTLSEnabled = clusterNeedsTLSEnabled;
}
String getClusterName() {
return clusterName;
}
String getVersion() {
return version;
}
License getLicense() {
return license;
}
boolean getAPMIndicesExist() {
return apmIndicesExist;
}
List<XPackFeatureUsage> getUsages() {
return usages;
}
ClusterStatsResponse getClusterStats() {
return clusterStats;
}
ClusterState getClusterState() {
return clusterState;
}
ClusterHealthStatus getStatus() {
return status;
}
boolean getClusterNeedsTLSEnabled() {
return clusterNeedsTLSEnabled;
}
String getClusterDisplayName() {
Metadata metadata = this.clusterState.getMetadata();
if (metadata == null) {
return null;
}
return metadata.settings().get(SETTING_DISPLAY_NAME);
}
@Override
protected void innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field("cluster_name", clusterName);
builder.field("version", version);
if (license != null) {
builder.startObject("license");
{
Map<String, String> extraParams = Map.of(License.REST_VIEW_MODE, "true");
params = new ToXContent.DelegatingMapParams(extraParams, params);
license.toInnerXContent(builder, params);
if (clusterNeedsTLSEnabled) {
builder.field("cluster_needs_tls", true);
}
}
builder.endObject();
}
if (clusterStats != null) {
builder.startObject("cluster_stats");
{
clusterStats.toXContent(builder, params);
}
builder.endObject();
}
if (clusterState != null) {
builder.startObject("cluster_state");
{
builder.field("nodes_hash", nodesHash(clusterState.nodes()));
builder.field("status", status.name().toLowerCase(Locale.ROOT));
// we need the whole doc in memory anyway so no need to preserve chunking here; moreover CLUSTER_STATS_PARAMS doesn't
// include anything heavy so this should be fine.
ChunkedToXContent.wrapAsToXContent(clusterState).toXContent(builder, CLUSTER_STATS_PARAMS);
}
builder.endObject();
}
String displayName = getClusterDisplayName();
if (displayName != null) {
builder.startObject("cluster_settings");
{
builder.startObject("cluster");
{
builder.startObject("metadata");
{
builder.field("display_name", displayName);
}
builder.endObject();
}
builder.endObject();
}
builder.endObject();
}
builder.startObject("stack_stats");
{
// in the future, it may be useful to pass in an object that represents APM (and others), but for now this
// is good enough
builder.startObject("apm");
{
builder.field("found", apmIndicesExist);
}
builder.endObject();
if (usages != null) {
builder.startObject("xpack");
for (final XPackFeatureUsage usage : usages) {
builder.field(usage.name(), usage);
}
builder.endObject();
}
}
builder.endObject();
}
/**
* Create a simple hash value that can be used to determine if the nodes listing has changed since the last report.
*
* @param nodes All nodes in the cluster state.
* @return A hash code value whose value can be used to determine if the node listing has changed (including node restarts).
*/
public static int nodesHash(final DiscoveryNodes nodes) {
final StringBuilder temp = new StringBuilder();
// adds the Ephemeral ID (as opposed to the Persistent UUID) to catch node restarts, which is critical for 1 node clusters
for (final DiscoveryNode node : nodes) {
temp.append(node.getEphemeralId());
}
return temp.toString().hashCode();
}
}
| ClusterStatsMonitoringDoc |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/producer/dependent/DeclaringBeanTest.java | {
"start": 774,
"end": 2302
} | class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(ListProducer.class, StringProducer.class,
LongProducer.class);
@SuppressWarnings("serial")
@Test
public void testDependendDestroyedProducerMethod() {
TypeLiteral<List<String>> literal = new TypeLiteral<List<String>>() {
};
assertFalse(ListProducer.DESTROYED.get());
List<String> list1 = Arc.container().instance(literal).get();
// @Dependent contextual instance created to receive a producer method is destroyed when the invocation completes
assertTrue(ListProducer.DESTROYED.get());
assertNotEquals(list1, Arc.container().instance(literal).get());
}
@Test
public void testDependendDestroyedProducerField() {
assertFalse(StringProducer.DESTROYED.get());
String string1 = Arc.container().instance(String.class).get();
// @Dependent contextual instance created to receive a producer method is destroyed when the invocation completes
assertTrue(StringProducer.DESTROYED.get());
assertNotEquals(string1, Arc.container().instance(String.class).get());
}
@Test
public void testSingletonNotDestroyed() {
assertFalse(LongProducer.DESTROYED.get());
Long long1 = Arc.container().instance(Long.class).get();
assertFalse(LongProducer.DESTROYED.get());
assertEquals(long1, Arc.container().instance(Long.class).get());
}
@Dependent
static | DeclaringBeanTest |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/convert/converters/MultiValuesConverterFactory.java | {
"start": 19016,
"end": 22691
} | class ____ extends AbstractConverterFromMultiValues<Map> {
public MultiValuesToMapConverter(ConversionService conversionService) {
super(conversionService);
}
@Override
protected Optional<Map> retrieveSeparatedValue(ArgumentConversionContext<Map> conversionContext,
String name,
ConvertibleMultiValues<String> parameters,
String defaultValue,
Character delimiter
) {
Map<String, String> values = getSeparatedMapParameters(parameters, name, defaultValue, delimiter);
return convertValues(conversionContext, values);
}
@Override
protected Optional<Map> retrieveMultiValue(ArgumentConversionContext<Map> conversionContext,
String name,
ConvertibleMultiValues<String> parameters
) {
Map<String, String> values = getMultiMapParameters(parameters);
return convertValues(conversionContext, values);
}
@Override
protected Optional<Map> retrieveDeepObjectValue(ArgumentConversionContext<Map> conversionContext,
String name,
ConvertibleMultiValues<String> parameters
) {
Map<String, String> values = getDeepObjectMapParameters(parameters, name);
return convertValues(conversionContext, values);
}
private Optional<Map> convertValues(ArgumentConversionContext<Map> context, Map<String, String> values) {
// There is no option to convert between maps
if (!context.getArgument().getType().isAssignableFrom(values.getClass())) {
return Optional.empty();
}
Argument<?>[] typeArguments = context.getTypeParameters();
Argument<?> keyArgument = typeArguments.length > 0 ? typeArguments[0] : Argument.OBJECT_ARGUMENT;
Argument<?> valueArgument = typeArguments.length > 1 ? typeArguments[1] : Argument.OBJECT_ARGUMENT;
Map convertedValues;
// Convert all the values
if (keyArgument.getType().isAssignableFrom(String.class) &&
valueArgument.getType().isAssignableFrom(String.class)) {
convertedValues = values;
} else {
ArgumentConversionContext<?> keyContext = ConversionContext.of(keyArgument);
ArgumentConversionContext<?> valueContext = ConversionContext.of(valueArgument);
convertedValues = new HashMap();
for (Map.Entry<String, String> entry: values.entrySet()) {
Object value = conversionService.convert(entry.getValue(), valueContext).orElse(null);
if (value == null) {
continue;
}
Object key = conversionService.convert(entry.getKey(), keyContext).orElse(null);
if (key == null) {
continue;
}
convertedValues.put(key, value);
}
}
// Convert the collection itself
return Optional.of(convertedValues);
}
}
/**
* A converter to convert from {@link ConvertibleMultiValues} to a POJO {@link Object}.
*/
public static | MultiValuesToMapConverter |
java | spring-projects__spring-boot | module/spring-boot-jackson/src/main/java/org/springframework/boot/jackson/autoconfigure/JacksonAutoConfiguration.java | {
"start": 11046,
"end": 12132
} | class ____ implements RuntimeHintsRegistrar {
@Override
public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
if (ClassUtils.isPresent("tools.jackson.databind.PropertyNamingStrategy", classLoader)) {
registerPropertyNamingStrategyHints(hints.reflection());
}
}
/**
* Register hints for the {@code configurePropertyNamingStrategyField} method to
* use.
* @param hints reflection hints
*/
private void registerPropertyNamingStrategyHints(ReflectionHints hints) {
registerPropertyNamingStrategyHints(hints, PropertyNamingStrategies.class);
}
private void registerPropertyNamingStrategyHints(ReflectionHints hints, Class<?> type) {
Stream.of(type.getDeclaredFields())
.filter(this::isPropertyNamingStrategyField)
.forEach(hints::registerField);
}
private boolean isPropertyNamingStrategyField(Field candidate) {
return ReflectionUtils.isPublicStaticFinal(candidate)
&& candidate.getType().isAssignableFrom(PropertyNamingStrategy.class);
}
}
abstract static | JacksonAutoConfigurationRuntimeHints |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/StringValueSerializer.java | {
"start": 1248,
"end": 3694
} | class ____ extends TypeSerializerSingleton<StringValue> {
private static final long serialVersionUID = 1L;
private static final int HIGH_BIT = 0x1 << 7;
public static final StringValueSerializer INSTANCE = new StringValueSerializer();
@Override
public boolean isImmutableType() {
return false;
}
@Override
public StringValue createInstance() {
return new StringValue();
}
@Override
public StringValue copy(StringValue from) {
return copy(from, new StringValue());
}
@Override
public StringValue copy(StringValue from, StringValue reuse) {
reuse.setValue(from);
return reuse;
}
@Override
public int getLength() {
return -1;
}
@Override
public void serialize(StringValue record, DataOutputView target) throws IOException {
record.write(target);
}
@Override
public StringValue deserialize(DataInputView source) throws IOException {
return deserialize(new StringValue(), source);
}
@Override
public StringValue deserialize(StringValue reuse, DataInputView source) throws IOException {
reuse.read(source);
return reuse;
}
@Override
public void copy(DataInputView source, DataOutputView target) throws IOException {
int len = source.readUnsignedByte();
target.writeByte(len);
if (len >= HIGH_BIT) {
int shift = 7;
int curr;
len = len & 0x7f;
while ((curr = source.readUnsignedByte()) >= HIGH_BIT) {
target.writeByte(curr);
len |= (curr & 0x7f) << shift;
shift += 7;
}
target.writeByte(curr);
len |= curr << shift;
}
for (int i = 0; i < len; i++) {
int c = source.readUnsignedByte();
target.writeByte(c);
while (c >= HIGH_BIT) {
c = source.readUnsignedByte();
target.writeByte(c);
}
}
}
@Override
public TypeSerializerSnapshot<StringValue> snapshotConfiguration() {
return new StringValueSerializerSnapshot();
}
// ------------------------------------------------------------------------
/** Serializer configuration snapshot for compatibility and format evolution. */
@SuppressWarnings("WeakerAccess")
public static final | StringValueSerializer |
java | apache__kafka | server-common/src/main/java/org/apache/kafka/timeline/SnapshotRegistry.java | {
"start": 1651,
"end": 2451
} | class ____ implements Iterator<Snapshot> {
Snapshot cur;
Snapshot result = null;
SnapshotIterator(Snapshot start) {
cur = start;
}
@Override
public boolean hasNext() {
return cur != head;
}
@Override
public Snapshot next() {
result = cur;
cur = cur.next();
return result;
}
@Override
public void remove() {
if (result == null) {
throw new IllegalStateException();
}
deleteSnapshot(result);
result = null;
}
}
/**
* Iterate through the list of snapshots in reverse order of creation, such that
* the newest snapshot is first.
*/
| SnapshotIterator |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/runtime/src/main/java/io/quarkus/resteasy/runtime/AuthenticationCompletionExceptionMapper.java | {
"start": 304,
"end": 897
} | class ____ implements ExceptionMapper<AuthenticationCompletionException> {
private static final Logger log = Logger.getLogger(AuthenticationCompletionExceptionMapper.class.getName());
@Override
public Response toResponse(AuthenticationCompletionException ex) {
log.debug("Authentication has failed, returning HTTP status 401");
if (LaunchMode.current().isDev() && ex.getMessage() != null) {
return Response.status(401).entity(ex.getMessage()).build();
}
return Response.status(401).build();
}
}
| AuthenticationCompletionExceptionMapper |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/ClassUtils.java | {
"start": 41268,
"end": 41520
} | class ____ the package name or an empty string.
* @since 2.4
*/
public static String getShortCanonicalName(final String canonicalName) {
return getShortClassName(getCanonicalName(canonicalName));
}
/**
* Gets the | without |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java | {
"start": 57445,
"end": 62163
} | class ____ extends Writer {
private int noBufferedRecords = 0;
private DataOutputBuffer keyLenBuffer = new DataOutputBuffer();
private DataOutputBuffer keyBuffer = new DataOutputBuffer();
private DataOutputBuffer valLenBuffer = new DataOutputBuffer();
private DataOutputBuffer valBuffer = new DataOutputBuffer();
private final int compressionBlockSize;
BlockCompressWriter(Configuration conf,
Option... options) throws IOException {
super(conf, options);
compressionBlockSize =
conf.getInt(IO_SEQFILE_COMPRESS_BLOCKSIZE_KEY,
IO_SEQFILE_COMPRESS_BLOCKSIZE_DEFAULT
);
keySerializer.close();
keySerializer.open(keyBuffer);
uncompressedValSerializer.close();
uncompressedValSerializer.open(valBuffer);
}
/** Workhorse to check and write out compressed data/lengths */
private synchronized
void writeBuffer(DataOutputBuffer uncompressedDataBuffer)
throws IOException {
deflateFilter.resetState();
buffer.reset();
deflateOut.write(uncompressedDataBuffer.getData(), 0,
uncompressedDataBuffer.getLength());
deflateOut.flush();
deflateFilter.finish();
WritableUtils.writeVInt(out, buffer.getLength());
out.write(buffer.getData(), 0, buffer.getLength());
}
/** Compress and flush contents to dfs */
@Override
public synchronized void sync() throws IOException {
if (noBufferedRecords > 0) {
super.sync();
// No. of records
WritableUtils.writeVInt(out, noBufferedRecords);
// Write 'keys' and lengths
writeBuffer(keyLenBuffer);
writeBuffer(keyBuffer);
// Write 'values' and lengths
writeBuffer(valLenBuffer);
writeBuffer(valBuffer);
// Flush the file-stream
out.flush();
// Reset internal states
keyLenBuffer.reset();
keyBuffer.reset();
valLenBuffer.reset();
valBuffer.reset();
noBufferedRecords = 0;
}
}
/** Close the file. */
@Override
public synchronized void close() throws IOException {
if (out != null) {
sync();
}
super.close();
}
/** Append a key/value pair. */
@Override
@SuppressWarnings("unchecked")
public synchronized void append(Object key, Object val)
throws IOException {
if (key.getClass() != keyClass)
throw new IOException("wrong key class: "+key+" is not "+keyClass);
if (val.getClass() != valClass)
throw new IOException("wrong value class: "+val+" is not "+valClass);
// Save key/value into respective buffers
int oldKeyLength = keyBuffer.getLength();
keySerializer.serialize(key);
int keyLength = keyBuffer.getLength() - oldKeyLength;
if (keyLength < 0)
throw new IOException("negative length keys not allowed: " + key);
WritableUtils.writeVInt(keyLenBuffer, keyLength);
int oldValLength = valBuffer.getLength();
uncompressedValSerializer.serialize(val);
int valLength = valBuffer.getLength() - oldValLength;
WritableUtils.writeVInt(valLenBuffer, valLength);
// Added another key/value pair
++noBufferedRecords;
// Compress and flush?
int currentBlockSize = keyBuffer.getLength() + valBuffer.getLength();
if (currentBlockSize >= compressionBlockSize) {
sync();
}
}
/** Append a key/value pair. */
@Override
public synchronized void appendRaw(byte[] keyData, int keyOffset,
int keyLength, ValueBytes val) throws IOException {
if (keyLength < 0)
throw new IOException("negative length keys not allowed");
int valLength = val.getSize();
// Save key/value data in relevant buffers
WritableUtils.writeVInt(keyLenBuffer, keyLength);
keyBuffer.write(keyData, keyOffset, keyLength);
WritableUtils.writeVInt(valLenBuffer, valLength);
val.writeUncompressedBytes(valBuffer);
// Added another key/value pair
++noBufferedRecords;
// Compress and flush?
int currentBlockSize = keyBuffer.getLength() + valBuffer.getLength();
if (currentBlockSize >= compressionBlockSize) {
sync();
}
}
} // BlockCompressionWriter
/** Get the configured buffer size */
private static int getBufferSize(Configuration conf) {
return conf.getInt(IO_FILE_BUFFER_SIZE_KEY, IO_FILE_BUFFER_SIZE_DEFAULT);
}
/** Reads key/value pairs from a sequence-format file. */
public static | BlockCompressWriter |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionBypassCircuitBreakerOnReplicaIT.java | {
"start": 2141,
"end": 2491
} | class ____ extends ReplicationRequest<Request> {
public Request(ShardId shardId) {
super(shardId);
}
public Request(StreamInput in) throws IOException {
super(in);
}
@Override
public String toString() {
return "test-request";
}
}
public static | Request |
java | elastic__elasticsearch | x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamTask.java | {
"start": 971,
"end": 6667
} | class ____ extends AllocatedPersistentTask {
public static final String TASK_NAME = "reindex-data-stream";
private final ProjectId projectId;
private final ClusterService clusterService;
private final long persistentTaskStartTime;
private final int initialTotalIndices;
private final int initialTotalIndicesToBeUpgraded;
private boolean isCompleteLocally = false;
private volatile Exception exception;
private final Set<String> inProgress = Collections.synchronizedSet(new HashSet<>());
private final AtomicInteger pending = new AtomicInteger();
private final List<Tuple<String, Exception>> errors = Collections.synchronizedList(new ArrayList<>());
private final RunOnce completeTask;
@SuppressWarnings("this-escape")
public ReindexDataStreamTask(
ProjectId projectId,
ClusterService clusterService,
long persistentTaskStartTime,
int initialTotalIndices,
int initialTotalIndicesToBeUpgraded,
long id,
String type,
String action,
String description,
TaskId parentTask,
Map<String, String> headers
) {
super(id, type, action, description, parentTask, headers);
this.projectId = projectId;
this.clusterService = clusterService;
this.persistentTaskStartTime = persistentTaskStartTime;
this.initialTotalIndices = initialTotalIndices;
this.initialTotalIndicesToBeUpgraded = initialTotalIndicesToBeUpgraded;
this.pending.set(initialTotalIndicesToBeUpgraded);
this.completeTask = new RunOnce(() -> {
if (exception == null) {
markAsCompleted();
} else {
markAsFailed(exception);
}
});
}
@Override
public ReindexDataStreamStatus getStatus() {
int totalIndices = initialTotalIndices;
int totalIndicesToBeUpgraded = initialTotalIndicesToBeUpgraded;
final var projectMetadata = clusterService.state().metadata().getProject(projectId);
PersistentTasksCustomMetadata.PersistentTask<?> persistentTask = projectMetadata == null
? null
: PersistentTasksCustomMetadata.getTaskWithId(projectMetadata, getPersistentTaskId());
boolean isComplete;
if (persistentTask != null) {
ReindexDataStreamPersistentTaskState state = (ReindexDataStreamPersistentTaskState) persistentTask.getState();
if (state != null) {
isComplete = state.isComplete();
if (state.totalIndices() != null && state.totalIndicesToBeUpgraded() != null) {
totalIndices = Math.toIntExact(state.totalIndices());
totalIndicesToBeUpgraded = Math.toIntExact(state.totalIndicesToBeUpgraded());
}
} else {
isComplete = false;
}
} else {
isComplete = false;
}
return new ReindexDataStreamStatus(
persistentTaskStartTime,
totalIndices,
totalIndicesToBeUpgraded,
isComplete,
exception,
inProgress,
pending.get(),
errors
);
}
public void allReindexesCompleted(ThreadPool threadPool, TimeValue timeToLive) {
isCompleteLocally = true;
if (isCancelled()) {
completeTask.run();
} else {
threadPool.schedule(completeTask, timeToLive, threadPool.generic());
}
}
public void taskFailed(ThreadPool threadPool, TimeValue timeToLive, Exception e) {
this.exception = e;
allReindexesCompleted(threadPool, timeToLive);
}
public void reindexSucceeded(String index) {
inProgress.remove(index);
}
public void reindexFailed(String index, Exception error) {
this.errors.add(Tuple.tuple(index, error));
inProgress.remove(index);
}
public void incrementInProgressIndicesCount(String index) {
inProgress.add(index);
pending.decrementAndGet();
}
private boolean isCompleteInClusterState() {
final var projectMetadata = clusterService.state().metadata().getProject(projectId);
PersistentTasksCustomMetadata.PersistentTask<?> persistentTask = projectMetadata == null
? null
: PersistentTasksCustomMetadata.getTaskWithId(projectMetadata, getPersistentTaskId());
if (persistentTask != null) {
ReindexDataStreamPersistentTaskState state = (ReindexDataStreamPersistentTaskState) persistentTask.getState();
if (state != null) {
return state.isComplete();
} else {
return false;
}
} else {
return false;
}
}
public void setPendingIndicesCount(int size) {
pending.set(size);
}
@Override
public void onCancelled() {
/*
* If the task is complete, but just waiting for its scheduled removal, we go ahead and call markAsCompleted/markAsFailed
* immediately. This results in the running task being removed from the task manager. If the task is not complete, then one of
* allReindexesCompleted or taskFailed will be called in the future, resulting in the same thing.
* We check both the cluster state and isCompleteLocally -- it is possible (especially in tests) that hte cluster state
* update has not happened in between when allReindexesCompleted was called and when this is called.
*/
if (isCompleteInClusterState() || isCompleteLocally) {
completeTask.run();
}
}
}
| ReindexDataStreamTask |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/inject/writer/AbstractClassWriterOutputVisitor.java | {
"start": 1149,
"end": 5506
} | class ____ implements ClassWriterOutputVisitor {
private final Map<String, Set<String>> serviceDescriptors = new LinkedHashMap<>();
private final boolean isWriteOnFinish;
/**
* Default constructor.
* @param isWriteOnFinish Is this the eclipse compiler
*/
protected AbstractClassWriterOutputVisitor(boolean isWriteOnFinish) {
this.isWriteOnFinish = isWriteOnFinish;
}
/**
* Compatibility constructor.
*/
public AbstractClassWriterOutputVisitor() {
this.isWriteOnFinish = false;
}
@Override
public final Map<String, Set<String>> getServiceEntries() {
return serviceDescriptors;
}
@Override
public final void visitServiceDescriptor(String type, String classname) {
if (StringUtils.isNotEmpty(type) && StringUtils.isNotEmpty(classname)) {
serviceDescriptors.computeIfAbsent(type, s -> new LinkedHashSet<>()).add(classname);
}
}
@Override
public final void finish() {
// for Java, we only write out service entries for the Eclipse compiler because
// for javac we support incremental compilation via ServiceDescriptionProcessor
// this approach doesn't work in Eclipse.
// see https://bugs.eclipse.org/bugs/show_bug.cgi?id=567116
// If the above issue is fixed then this workaround can be removed
// for Groovy writing service entries is also required as ServiceDescriptionProcessor
// is not triggered. See DirectoryClassWriterOutputVisitor
if (isWriteOnFinish) {
Map<String, Set<String>> serviceEntries = getServiceEntries();
writeServiceEntries(serviceEntries);
}
}
/**
* Writes the service entries.
*
* @param serviceEntries The service entries
* @param originatingElements The originating elements
*/
public void writeServiceEntries(Map<String, Set<String>> serviceEntries, Element... originatingElements) {
for (Map.Entry<String, Set<String>> entry : serviceEntries.entrySet()) {
String serviceName = entry.getKey();
Set<String> serviceTypes = new TreeSet<>(entry.getValue());
Optional<GeneratedFile> serviceFile = visitMetaInfFile("services/" + serviceName, originatingElements);
if (serviceFile.isPresent()) {
GeneratedFile generatedFile = serviceFile.get();
// add the existing definitions
try (BufferedReader bufferedReader = new BufferedReader(generatedFile.openReader())) {
String line = bufferedReader.readLine();
while (line != null) {
serviceTypes.add(line);
line = bufferedReader.readLine();
}
} catch (FileNotFoundException | java.nio.file.NoSuchFileException x) {
// doesn't exist
} catch (IOException x) {
Throwable cause = x.getCause();
if (isNotEclipseNotFound(cause)) {
throw new ClassGenerationException("Failed to load existing service definition files: " + x, x);
}
} catch (Throwable e) {
// horrible hack to support Eclipse
if (isNotEclipseNotFound(e)) {
throw new ClassGenerationException("Failed to load existing service definition files: " + e, e);
}
}
// write out new definitions
try (BufferedWriter writer = new BufferedWriter(generatedFile.openWriter())) {
for (String serviceType : serviceTypes) {
writer.write(serviceType);
writer.newLine();
}
} catch (IOException x) {
throw new ClassGenerationException("Failed to open writer for service definition files: " + x);
}
}
}
}
private boolean isNotEclipseNotFound(Throwable e) {
if (isWriteOnFinish) {
return false;
}
String message = e.getMessage();
return !message.contains("does not exist") || !e.getClass().getName().startsWith("org.eclipse");
}
}
| AbstractClassWriterOutputVisitor |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/pool/DruidDataSourceTest_autocommit4.java | {
"start": 437,
"end": 1412
} | class ____ extends TestCase {
private DruidDataSource dataSource;
protected void setUp() throws Exception {
dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:xxx");
dataSource.setTestOnBorrow(false);
dataSource.setDefaultAutoCommit(true);
dataSource.setInitialSize(1);
dataSource.getProxyFilters().add(new FilterAdapter() {
public ConnectionProxy connection_connect(FilterChain chain, Properties info) throws SQLException {
ConnectionProxy conn = chain.connection_connect(info);
conn.setAutoCommit(true);
return conn;
}
});
}
protected void tearDown() throws Exception {
dataSource.close();
}
public void test_autoCommit() throws Exception {
Connection conn = dataSource.getConnection();
assertTrue(conn.getAutoCommit());
conn.close();
}
}
| DruidDataSourceTest_autocommit4 |
java | apache__kafka | connect/runtime/src/test/resources/test-plugins/bad-packaging/test/plugins/NoDefaultConstructorConnector.java | {
"start": 1206,
"end": 1794
} | class ____ extends SinkConnector {
public NoDefaultConstructorConnector(int ignored) {
}
@Override
public String version() {
return null;
}
@Override
public void start(Map<String, String> props) {
}
@Override
public Class<? extends Task> taskClass() {
return null;
}
@Override
public List<Map<String, String>> taskConfigs(int maxTasks) {
return null;
}
@Override
public void stop() {
}
@Override
public ConfigDef config() {
return null;
}
}
| NoDefaultConstructorConnector |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableSkipLastTimed.java | {
"start": 1015,
"end": 1764
} | class ____<T> extends AbstractFlowableWithUpstream<T, T> {
final long time;
final TimeUnit unit;
final Scheduler scheduler;
final int bufferSize;
final boolean delayError;
public FlowableSkipLastTimed(Flowable<T> source, long time, TimeUnit unit, Scheduler scheduler, int bufferSize, boolean delayError) {
super(source);
this.time = time;
this.unit = unit;
this.scheduler = scheduler;
this.bufferSize = bufferSize;
this.delayError = delayError;
}
@Override
protected void subscribeActual(Subscriber<? super T> s) {
source.subscribe(new SkipLastTimedSubscriber<>(s, time, unit, scheduler, bufferSize, delayError));
}
static final | FlowableSkipLastTimed |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/common/convert/StringToCharArrayConverter.java | {
"start": 924,
"end": 1009
} | class ____ convert {@link String} to <code>char[]</code>
*
* @since 2.7.6
*/
public | to |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/over/frame/RowUnboundedPrecedingOverFrame.java | {
"start": 1386,
"end": 2507
} | class ____ extends UnboundedPrecedingOverFrame {
private long rightBound;
/** Index of the right bound input row. */
private long inputRightIndex = 0;
public RowUnboundedPrecedingOverFrame(
GeneratedAggsHandleFunction aggsHandleFunction, long rightBound) {
super(aggsHandleFunction);
this.rightBound = rightBound;
}
@Override
public void prepare(ResettableExternalBuffer rows) throws Exception {
super.prepare(rows);
inputRightIndex = 0;
}
@Override
public RowData process(int index, RowData current) throws Exception {
boolean bufferUpdated = index == 0;
// Add all rows to the aggregates util right bound.
while (nextRow != null && inputRightIndex <= index + rightBound) {
processor.accumulate(nextRow);
nextRow = OverWindowFrame.getNextOrNull(inputIterator);
inputRightIndex += 1;
bufferUpdated = true;
}
if (bufferUpdated) {
accValue = processor.getValue();
}
return accValue;
}
}
| RowUnboundedPrecedingOverFrame |
java | spring-projects__spring-boot | core/spring-boot-test/src/test/java/org/springframework/boot/test/context/PropertyMappingContextCustomizerFactoryTests.java | {
"start": 5471,
"end": 5547
} | class ____ {
}
@AttributeMappingAnnotation("Other")
static | AttributeMapping |
java | apache__camel | components/camel-ai/camel-tensorflow-serving/src/generated/java/org/apache/camel/component/tensorflow/serving/TensorFlowServingConfigurationConfigurer.java | {
"start": 756,
"end": 3733
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.component.tensorflow.serving.TensorFlowServingConfiguration target = (org.apache.camel.component.tensorflow.serving.TensorFlowServingConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "credentials": target.setCredentials(property(camelContext, io.grpc.ChannelCredentials.class, value)); return true;
case "modelname":
case "modelName": target.setModelName(property(camelContext, java.lang.String.class, value)); return true;
case "modelversion":
case "modelVersion": target.setModelVersion(property(camelContext, java.lang.Long.class, value)); return true;
case "modelversionlabel":
case "modelVersionLabel": target.setModelVersionLabel(property(camelContext, java.lang.String.class, value)); return true;
case "signaturename":
case "signatureName": target.setSignatureName(property(camelContext, java.lang.String.class, value)); return true;
case "target": target.setTarget(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "credentials": return io.grpc.ChannelCredentials.class;
case "modelname":
case "modelName": return java.lang.String.class;
case "modelversion":
case "modelVersion": return java.lang.Long.class;
case "modelversionlabel":
case "modelVersionLabel": return java.lang.String.class;
case "signaturename":
case "signatureName": return java.lang.String.class;
case "target": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.component.tensorflow.serving.TensorFlowServingConfiguration target = (org.apache.camel.component.tensorflow.serving.TensorFlowServingConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "credentials": return target.getCredentials();
case "modelname":
case "modelName": return target.getModelName();
case "modelversion":
case "modelVersion": return target.getModelVersion();
case "modelversionlabel":
case "modelVersionLabel": return target.getModelVersionLabel();
case "signaturename":
case "signatureName": return target.getSignatureName();
case "target": return target.getTarget();
default: return null;
}
}
}
| TensorFlowServingConfigurationConfigurer |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/format/support/FormattingConversionService.java | {
"start": 5070,
"end": 5799
} | class ____ the <T> generic type?");
return fieldType;
}
@SuppressWarnings("unchecked")
static Class<? extends Annotation> getAnnotationType(AnnotationFormatterFactory<? extends Annotation> factory) {
Class<? extends Annotation> annotationType = (Class<? extends Annotation>)
GenericTypeResolver.resolveTypeArgument(factory.getClass(), AnnotationFormatterFactory.class);
if (annotationType == null) {
throw new IllegalArgumentException("Unable to extract parameterized Annotation type argument from " +
"AnnotationFormatterFactory [" + factory.getClass().getName() +
"]; does the factory parameterize the <A extends Annotation> generic type?");
}
return annotationType;
}
private static | parameterize |
java | apache__flink | flink-libraries/flink-state-processing-api/src/test/java/org/apache/flink/state/api/input/UnionStateInputFormatTest.java | {
"start": 2167,
"end": 4036
} | class ____ {
private static ListStateDescriptor<Integer> descriptor =
new ListStateDescriptor<>("state", Types.INT);
@Test
public void testReadUnionOperatorState() throws Exception {
try (OneInputStreamOperatorTestHarness<Integer, Void> testHarness = getTestHarness()) {
testHarness.open();
testHarness.processElement(1, 0);
testHarness.processElement(2, 0);
testHarness.processElement(3, 0);
OperatorSubtaskState subtaskState = testHarness.snapshot(0, 0);
OperatorState state =
new OperatorState(null, null, OperatorIDGenerator.fromUid("uid"), 1, 4);
state.putState(0, subtaskState);
OperatorStateInputSplit split =
new OperatorStateInputSplit(subtaskState.getManagedOperatorState(), 0);
UnionStateInputFormat<Integer> format =
new UnionStateInputFormat<>(
state, new Configuration(), null, descriptor, new ExecutionConfig());
format.setRuntimeContext(new MockStreamingRuntimeContext(1, 0));
format.open(split);
List<Integer> results = new ArrayList<>();
while (!format.reachedEnd()) {
results.add(format.nextRecord(0));
}
results.sort(Comparator.naturalOrder());
Assert.assertEquals(
"Failed to read correct list state from state backend",
Arrays.asList(1, 2, 3),
results);
}
}
private OneInputStreamOperatorTestHarness<Integer, Void> getTestHarness() throws Exception {
return new OneInputStreamOperatorTestHarness<>(
new StreamFlatMap<>(new StatefulFunction()), IntSerializer.INSTANCE);
}
static | UnionStateInputFormatTest |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java | {
"start": 27890,
"end": 28196
} | class ____ {",
" private Baz() {}",
"",
" public abstract int buh();",
"}");
Compilation compilation =
javac().withProcessors(new AutoValueProcessor()).compile(javaFileObject);
assertThat(compilation)
.hadErrorContaining("@AutoValue | Baz |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/window/groupwindow/triggers/ProcessingTimeTriggers.java | {
"start": 6038,
"end": 6488
} | class ____ implements ReduceFunction<Long> {
private static final long serialVersionUID = 1L;
@Override
public Long reduce(Long value1, Long value2) throws Exception {
return Math.min(value1, value2);
}
}
}
/**
* A {@link Trigger} that fires once the current system time passes the end of the window to
* which a pane belongs.
*/
public static final | Min |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/elementCollection/CollectionElementConversionTest.java | {
"start": 1238,
"end": 2264
} | class ____ {
@Test
public void testElementCollectionConversion(SessionFactoryScope scope) {
scope.inTransaction(
(session) -> {
Customer customer = new Customer();
customer.id = 1;
customer.set = new HashSet<>();
customer.set.add(Color.RED);
customer.set.add(Color.GREEN);
customer.set.add(Color.BLUE);
customer.map = new HashMap<>();
customer.map.put(Color.RED, Status.INACTIVE);
customer.map.put(Color.GREEN, Status.ACTIVE);
customer.map.put(Color.BLUE, Status.PENDING);
session.persist(customer);
}
);
scope.inTransaction(
(session) -> {
final Customer customer = session.find( Customer.class, 1 );
assertEquals( customer.set, customer.set );
assertEquals( customer.map, customer.map );
}
);
}
@AfterEach
public void dropTestData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Entity( name = "Customer" )
@Table(name = "Customer")
public static | CollectionElementConversionTest |
java | grpc__grpc-java | xds/src/generated/thirdparty/grpc/io/envoyproxy/envoy/service/load_stats/v3/LoadReportingServiceGrpc.java | {
"start": 14518,
"end": 15098
} | class ____
extends io.grpc.stub.AbstractFutureStub<LoadReportingServiceFutureStub> {
private LoadReportingServiceFutureStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected LoadReportingServiceFutureStub build(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new LoadReportingServiceFutureStub(channel, callOptions);
}
}
private static final int METHODID_STREAM_LOAD_STATS = 0;
private static final | LoadReportingServiceFutureStub |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/InjectedConstructorAnnotationsTest.java | {
"start": 2362,
"end": 3050
} | class ____ {
@TestBindingAnnotation
@Inject(optional = true)
// BUG: Diagnostic contains:
public TestClass3() {}
}
}
""")
.doTest();
}
@Test
public void negativeCase() {
compilationHelper
.addSourceLines(
"InjectedConstructorAnnotationsNegativeCases.java",
"""
package com.google.errorprone.bugpatterns.inject.testdata;
import com.google.inject.BindingAnnotation;
import com.google.inject.Inject;
/** A negative test case for InjectedConstructorAnnotation. */
public | TestClass3 |
java | google__gson | gson/src/test/java/com/google/gson/functional/Java17RecordTest.java | {
"start": 1730,
"end": 16655
} | class ____ {
private final Gson gson = new Gson();
@Test
public void testFirstNameIsChosenForSerialization() {
RecordWithCustomNames target = new RecordWithCustomNames("v1", "v2");
// Ensure name1 occurs exactly once, and name2 and name3 don't appear
assertThat(gson.toJson(target)).isEqualTo("{\"name\":\"v1\",\"name1\":\"v2\"}");
}
@Test
public void testMultipleNamesDeserializedCorrectly() {
assertThat(gson.fromJson("{'name':'v1'}", RecordWithCustomNames.class).a).isEqualTo("v1");
// Both name1 and name2 gets deserialized to b
assertThat(gson.fromJson("{'name': 'v1', 'name1':'v11'}", RecordWithCustomNames.class).b)
.isEqualTo("v11");
assertThat(gson.fromJson("{'name': 'v1', 'name2':'v2'}", RecordWithCustomNames.class).b)
.isEqualTo("v2");
assertThat(gson.fromJson("{'name': 'v1', 'name3':'v3'}", RecordWithCustomNames.class).b)
.isEqualTo("v3");
}
@Test
public void testMultipleNamesInTheSameString() {
// The last value takes precedence
assertThat(
gson.fromJson(
"{'name': 'foo', 'name1':'v1','name2':'v2','name3':'v3'}",
RecordWithCustomNames.class)
.b)
.isEqualTo("v3");
}
private record RecordWithCustomNames(
@SerializedName("name") String a,
@SerializedName(
value = "name1",
alternate = {"name2", "name3"})
String b) {}
@Test
public void testSerializedNameOnAccessor() {
record LocalRecord(int i) {
@SerializedName("a")
@Override
@SuppressWarnings("UnusedMethod")
public int i() {
return i;
}
}
var exception = assertThrows(JsonIOException.class, () -> gson.getAdapter(LocalRecord.class));
assertThat(exception)
.hasMessageThat()
.isEqualTo(
"@SerializedName on method '" + LocalRecord.class.getName() + "#i()' is not supported");
}
@Test
public void testFieldNamingStrategy() {
record LocalRecord(int i) {}
Gson gson = new GsonBuilder().setFieldNamingStrategy(f -> f.getName() + "-custom").create();
assertThat(gson.toJson(new LocalRecord(1))).isEqualTo("{\"i-custom\":1}");
assertThat(gson.fromJson("{\"i-custom\":2}", LocalRecord.class)).isEqualTo(new LocalRecord(2));
}
@Test
public void testUnknownJsonProperty() {
record LocalRecord(int i) {}
// Unknown property 'x' should be ignored
assertThat(gson.fromJson("{\"i\":1,\"x\":2}", LocalRecord.class)).isEqualTo(new LocalRecord(1));
}
@Test
public void testDuplicateJsonProperties() {
record LocalRecord(Integer a, Integer b) {}
String json = "{\"a\":null,\"a\":2,\"b\":1,\"b\":null}";
// Should use value of last occurrence
assertThat(gson.fromJson(json, LocalRecord.class)).isEqualTo(new LocalRecord(2, null));
}
@Test
public void testConstructorRuns() {
record LocalRecord(String s) {
LocalRecord {
s = "custom-" + s;
}
}
LocalRecord deserialized = gson.fromJson("{\"s\": null}", LocalRecord.class);
assertThat(deserialized).isEqualTo(new LocalRecord(null));
assertThat(deserialized.s()).isEqualTo("custom-null");
}
/** Tests behavior when the canonical constructor throws an exception */
@Test
public void testThrowingConstructor() {
record LocalRecord(String s) {
@SuppressWarnings("StaticAssignmentOfThrowable")
static final RuntimeException thrownException = new RuntimeException("Custom exception");
@SuppressWarnings("unused")
LocalRecord {
throw thrownException;
}
}
// TODO: Adjust this once Gson throws more specific exception type
var e =
assertThrows(
RuntimeException.class, () -> gson.fromJson("{\"s\":\"value\"}", LocalRecord.class));
assertThat(e)
.hasMessageThat()
.isEqualTo(
"Failed to invoke constructor '"
+ LocalRecord.class.getName()
+ "(String)' with args [value]");
assertThat(e).hasCauseThat().isSameInstanceAs(LocalRecord.thrownException);
}
@Test
public void testAccessorIsCalled() {
record LocalRecord(String s) {
@Override
@SuppressWarnings("UnusedMethod")
public String s() {
return "accessor-value";
}
}
assertThat(gson.toJson(new LocalRecord(null))).isEqualTo("{\"s\":\"accessor-value\"}");
}
/** Tests behavior when a record accessor method throws an exception */
@Test
public void testThrowingAccessor() {
record LocalRecord(String s) {
@SuppressWarnings("StaticAssignmentOfThrowable")
static final RuntimeException thrownException = new RuntimeException("Custom exception");
@Override
@SuppressWarnings("UnusedMethod")
public String s() {
throw thrownException;
}
}
var e = assertThrows(JsonIOException.class, () -> gson.toJson(new LocalRecord("a")));
assertThat(e)
.hasMessageThat()
.isEqualTo("Accessor method '" + LocalRecord.class.getName() + "#s()' threw exception");
assertThat(e).hasCauseThat().isSameInstanceAs(LocalRecord.thrownException);
}
/** Tests behavior for a record without components */
@Test
public void testEmptyRecord() {
record EmptyRecord() {}
assertThat(gson.toJson(new EmptyRecord())).isEqualTo("{}");
assertThat(gson.fromJson("{}", EmptyRecord.class)).isEqualTo(new EmptyRecord());
}
/**
* Tests behavior when {@code null} is serialized / deserialized as record value; basically makes
* sure the adapter is 'null-safe'
*/
@Test
public void testRecordNull() throws IOException {
record LocalRecord(int i) {}
TypeAdapter<LocalRecord> adapter = gson.getAdapter(LocalRecord.class);
assertThat(adapter.toJson(null)).isEqualTo("null");
assertThat(adapter.fromJson("null")).isNull();
}
@Test
public void testPrimitiveDefaultValues() {
RecordWithPrimitives expected =
new RecordWithPrimitives("s", (byte) 0, (short) 0, 0, 0, 0, 0, '\0', false);
assertThat(gson.fromJson("{'aString': 's'}", RecordWithPrimitives.class)).isEqualTo(expected);
}
@Test
public void testPrimitiveJsonNullValue() {
String s = "{'aString': 's', 'aByte': null, 'aShort': 0}";
var e =
assertThrows(JsonParseException.class, () -> gson.fromJson(s, RecordWithPrimitives.class));
assertThat(e)
.hasMessageThat()
.isEqualTo(
"null is not allowed as value for record component 'aByte' of primitive type; at path"
+ " $.aByte");
}
/**
* Tests behavior when JSON contains non-null value, but custom adapter returns null for primitive
* component
*/
@Test
public void testPrimitiveAdapterNullValue() {
Gson gson =
new GsonBuilder()
.registerTypeAdapter(
byte.class,
new TypeAdapter<Byte>() {
@Override
public Byte read(JsonReader in) throws IOException {
in.skipValue();
// Always return null
return null;
}
@Override
public void write(JsonWriter out, Byte value) {
throw new AssertionError("not needed for test");
}
})
.create();
String s = "{'aString': 's', 'aByte': 0}";
var exception =
assertThrows(JsonParseException.class, () -> gson.fromJson(s, RecordWithPrimitives.class));
assertThat(exception)
.hasMessageThat()
.isEqualTo(
"null is not allowed as value for record component 'aByte' of primitive type; at path"
+ " $.aByte");
}
private record RecordWithPrimitives(
String aString,
byte aByte,
short aShort,
int anInt,
long aLong,
float aFloat,
double aDouble,
char aChar,
boolean aBoolean) {}
/** Tests behavior when value of Object component is missing; should default to null */
@Test
public void testObjectDefaultValue() {
record LocalRecord(String s, int i) {}
assertThat(gson.fromJson("{\"i\":1}", LocalRecord.class)).isEqualTo(new LocalRecord(null, 1));
}
/**
* Tests serialization of a record with {@code static} field.
*
* <p>Important: It is not documented that this is officially supported; this test just checks the
* current behavior.
*/
@Test
public void testStaticFieldSerialization() {
// By default Gson should ignore static fields
assertThat(gson.toJson(new RecordWithStaticField())).isEqualTo("{}");
Gson gson =
new GsonBuilder()
// Include static fields
.excludeFieldsWithModifiers(0)
.create();
String json = gson.toJson(new RecordWithStaticField());
assertThat(json).isEqualTo("{\"s\":\"initial\"}");
}
/**
* Tests deserialization of a record with {@code static} field.
*
* <p>Important: It is not documented that this is officially supported; this test just checks the
* current behavior.
*/
@Test
public void testStaticFieldDeserialization() {
// By default Gson should ignore static fields
RecordWithStaticField unused = gson.fromJson("{\"s\":\"custom\"}", RecordWithStaticField.class);
assertThat(RecordWithStaticField.s).isEqualTo("initial");
Gson gson =
new GsonBuilder()
// Include static fields
.excludeFieldsWithModifiers(0)
.create();
String oldValue = RecordWithStaticField.s;
try {
RecordWithStaticField obj = gson.fromJson("{\"s\":\"custom\"}", RecordWithStaticField.class);
assertThat(obj).isNotNull();
// Currently record deserialization always ignores static fields
assertThat(RecordWithStaticField.s).isEqualTo("initial");
} finally {
RecordWithStaticField.s = oldValue;
}
}
private record RecordWithStaticField() {
@SuppressWarnings("NonFinalStaticField")
static String s = "initial";
}
@Test
public void testExposeAnnotation() {
record RecordWithExpose(@Expose int a, int b) {}
Gson gson = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().create();
String json = gson.toJson(new RecordWithExpose(1, 2));
assertThat(json).isEqualTo("{\"a\":1}");
}
@Test
public void testFieldExclusionStrategy() {
record LocalRecord(int a, int b, double c) {}
Gson gson =
new GsonBuilder()
.setExclusionStrategies(
new ExclusionStrategy() {
@Override
public boolean shouldSkipField(FieldAttributes f) {
return f.getName().equals("a");
}
@Override
public boolean shouldSkipClass(Class<?> clazz) {
return clazz == double.class;
}
})
.create();
assertThat(gson.toJson(new LocalRecord(1, 2, 3.0))).isEqualTo("{\"b\":2}");
}
@Test
public void testJsonAdapterAnnotation() {
record Adapter() implements JsonSerializer<String>, JsonDeserializer<String> {
@Override
public String deserialize(
JsonElement json, Type typeOfT, JsonDeserializationContext context) {
return "deserializer-" + json.getAsString();
}
@Override
public JsonElement serialize(String src, Type typeOfSrc, JsonSerializationContext context) {
return new JsonPrimitive("serializer-" + src);
}
}
record LocalRecord(@JsonAdapter(Adapter.class) String s) {}
assertThat(gson.toJson(new LocalRecord("a"))).isEqualTo("{\"s\":\"serializer-a\"}");
assertThat(gson.fromJson("{\"s\":\"a\"}", LocalRecord.class))
.isEqualTo(new LocalRecord("deserializer-a"));
}
@Test
public void testClassReflectionFilter() {
record Allowed(int a) {}
record Blocked(int b) {}
Gson gson =
new GsonBuilder()
.addReflectionAccessFilter(
c -> c == Allowed.class ? FilterResult.ALLOW : FilterResult.BLOCK_ALL)
.create();
String json = gson.toJson(new Allowed(1));
assertThat(json).isEqualTo("{\"a\":1}");
var exception = assertThrows(JsonIOException.class, () -> gson.toJson(new Blocked(1)));
assertThat(exception)
.hasMessageThat()
.isEqualTo(
"ReflectionAccessFilter does not permit using reflection for class "
+ Blocked.class.getName()
+ ". Register a TypeAdapter for this type or adjust the access filter.");
}
@Test
public void testReflectionFilterBlockInaccessible() {
Gson gson =
new GsonBuilder().addReflectionAccessFilter(c -> FilterResult.BLOCK_INACCESSIBLE).create();
var exception = assertThrows(JsonIOException.class, () -> gson.toJson(new PrivateRecord(1)));
assertThat(exception)
.hasMessageThat()
.isEqualTo(
"Constructor 'com.google.gson.functional.Java17RecordTest$PrivateRecord(int)' is not"
+ " accessible and ReflectionAccessFilter does not permit making it accessible."
+ " Register a TypeAdapter for the declaring type, adjust the access filter or"
+ " increase the visibility of the element and its declaring type.");
exception = assertThrows(JsonIOException.class, () -> gson.fromJson("{}", PrivateRecord.class));
assertThat(exception)
.hasMessageThat()
.isEqualTo(
"Constructor 'com.google.gson.functional.Java17RecordTest$PrivateRecord(int)' is not"
+ " accessible and ReflectionAccessFilter does not permit making it accessible."
+ " Register a TypeAdapter for the declaring type, adjust the access filter or"
+ " increase the visibility of the element and its declaring type.");
assertThat(gson.toJson(new PublicRecord(1))).isEqualTo("{\"i\":1}");
assertThat(gson.fromJson("{\"i\":2}", PublicRecord.class)).isEqualTo(new PublicRecord(2));
}
private record PrivateRecord(int i) {}
public record PublicRecord(int i) {}
/**
* Tests behavior when {@code java.lang.Record} is used as type for serialization and
* deserialization.
*/
@Test
public void testRecordBaseClass() {
record LocalRecord(int i) {}
assertThat(gson.toJson(new LocalRecord(1), Record.class)).isEqualTo("{}");
var exception = assertThrows(JsonIOException.class, () -> gson.fromJson("{}", Record.class));
assertThat(exception)
.hasMessageThat()
.isEqualTo(
"Abstract classes can't be instantiated! Adjust the R8 configuration or register an"
+ " InstanceCreator or a TypeAdapter for this type. Class name: java.lang.Record\n"
+ "See https://github.com/google/gson/blob/main/Troubleshooting.md#r8-abstract-class");
}
}
| Java17RecordTest |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/elementcollection/House.java | {
"start": 348,
"end": 655
} | class ____ {
private Map<String, Room> roomsByName;
@ElementCollection(targetClass = Room.class)
@MapKeyColumn(name = "room_name")
public Map<String, Room> getRoomsByName() {
return roomsByName;
}
public void setRoomsByName(Map<String, Room> roomsByName) {
this.roomsByName = roomsByName;
}
}
| House |
java | apache__camel | components/camel-cxf/camel-cxf-spring-soap/src/test/java/org/apache/camel/component/cxf/ssl/SslTest.java | {
"start": 1616,
"end": 4347
} | class ____ extends CamelSpringTestSupport {
protected static final String GREET_ME_OPERATION = "greetMe";
protected static final String TEST_MESSAGE = "Hello World!";
protected static final String JAXWS_SERVER_ADDRESS
= "https://localhost:" + CXFTestSupport.getPort1() + "/CxfSslTest/SoapContext/SoapPort";
@AfterAll
public static void cleanUp() {
//System.clearProperty("cxf.config.file");
}
@BeforeAll
public static void startService() {
//System.getProperties().put("cxf.config.file", "/org/apache/camel/component/cxf/CxfSslContext.xml");
//Greeter implementor = new GreeterImpl();
//Endpoint.publish(JAXWS_SERVER_ADDRESS, implementor);
}
@Test
public void testInvokingTrustRoute() throws Exception {
Exchange reply = sendJaxWsMessage("direct:trust");
if (reply.isFailed()) {
Exception exception = reply.getException();
String msg = exception.getMessage();
if (msg.contains("socket reset for TTL")) {
// ignore flaky test on JDK11
return;
}
}
assertFalse(reply.isFailed(), "We expect no exception here");
}
@Test
public void testInvokingNoTrustRoute() throws Exception {
Exchange reply = sendJaxWsMessage("direct:noTrust");
assertTrue(reply.isFailed(), "We expect the exception here");
Throwable e = reply.getException().getCause();
assertEquals("javax.net.ssl.SSLHandshakeException", e.getClass().getCanonicalName());
}
@Test
public void testInvokingWrongTrustRoute() throws Exception {
Exchange reply = sendJaxWsMessage("direct:wrongTrust");
assertTrue(reply.isFailed(), "We expect the exception here");
Throwable e = reply.getException().getCause();
assertEquals("javax.net.ssl.SSLHandshakeException", e.getClass().getCanonicalName());
}
protected Exchange sendJaxWsMessage(String endpointUri) throws InterruptedException {
Exchange exchange = template.send(endpointUri, new Processor() {
public void process(final Exchange exchange) {
final List<String> params = new ArrayList<>();
params.add(TEST_MESSAGE);
exchange.getIn().setBody(params);
exchange.getIn().setHeader(CxfConstants.OPERATION_NAME, GREET_ME_OPERATION);
}
});
return exchange;
}
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
// we can put the http conduit configuration here
return new ClassPathXmlApplicationContext("org/apache/camel/component/cxf/CxfSslContext.xml");
}
}
| SslTest |
java | micronaut-projects__micronaut-core | inject-java/src/main/java/io/micronaut/annotation/processing/AnnotationsElement.java | {
"start": 1298,
"end": 3173
} | class ____ implements Element {
private final TypeMirror typeMirror;
public AnnotationsElement(TypeMirror clazz) {
this.typeMirror = clazz;
}
@Override
public TypeMirror asType() {
return typeMirror;
}
@Override
public ElementKind getKind() {
return ElementKind.OTHER;
}
@Override
public Set<Modifier> getModifiers() {
return Collections.emptySet();
}
@Override
public Name getSimpleName() {
throw notSupportedMethod();
}
@Override
public Element getEnclosingElement() {
return null;
}
@Override
public List<? extends Element> getEnclosedElements() {
return Collections.emptyList();
}
@Override
public List<? extends AnnotationMirror> getAnnotationMirrors() {
return typeMirror.getAnnotationMirrors();
}
@Override
public <A extends Annotation> A getAnnotation(Class<A> annotationType) {
return typeMirror.getAnnotation(annotationType);
}
@Override
public <A extends Annotation> A[] getAnnotationsByType(Class<A> annotationType) {
return typeMirror.getAnnotationsByType(annotationType);
}
@Override
public <R, P> R accept(ElementVisitor<R, P> v, P p) {
return v.visit(this);
}
private static IllegalStateException notSupportedMethod() {
return new IllegalStateException("Not supported method");
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
AnnotationsElement that = (AnnotationsElement) o;
return Objects.equals(typeMirror, that.typeMirror);
}
@Override
public int hashCode() {
return typeMirror.hashCode();
}
}
| AnnotationsElement |
java | square__retrofit | retrofit-adapters/rxjava3/src/test/java/retrofit2/adapter/rxjava3/SingleThrowingTest.java | {
"start": 1708,
"end": 8600
} | interface ____ {
@GET("/")
Single<String> body();
@GET("/")
Single<Response<String>> response();
@GET("/")
Single<Result<String>> result();
}
private Service service;
@Before
public void setUp() {
Retrofit retrofit =
new Retrofit.Builder()
.baseUrl(server.url("/"))
.addConverterFactory(new StringConverterFactory())
.addCallAdapterFactory(RxJava3CallAdapterFactory.createSynchronous())
.build();
service = retrofit.create(Service.class);
}
@Test
public void bodyThrowingInOnSuccessDeliveredToPlugin() {
server.enqueue(new MockResponse());
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
RxJavaPlugins.setErrorHandler(
throwable -> {
if (!throwableRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
});
RecordingSingleObserver<String> observer = subscriberRule.create();
final RuntimeException e = new RuntimeException();
service
.body()
.subscribe(
new ForwardingObserver<String>(observer) {
@Override
public void onSuccess(String value) {
throw e;
}
});
assertThat(throwableRef.get()).hasCauseThat().isSameInstanceAs(e);
}
@Test
public void bodyThrowingInOnErrorDeliveredToPlugin() {
server.enqueue(new MockResponse().setResponseCode(404));
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
RxJavaPlugins.setErrorHandler(
throwable -> {
if (!throwableRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
});
RecordingSingleObserver<String> observer = subscriberRule.create();
final AtomicReference<Throwable> errorRef = new AtomicReference<>();
final RuntimeException e = new RuntimeException();
service
.body()
.subscribe(
new ForwardingObserver<String>(observer) {
@Override
public void onError(Throwable throwable) {
if (!errorRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
throw e;
}
});
//noinspection ThrowableResultOfMethodCallIgnored
CompositeException composite = (CompositeException) throwableRef.get();
assertThat(composite.getExceptions()).containsExactly(errorRef.get(), e);
}
@Test
public void responseThrowingInOnSuccessDeliveredToPlugin() {
server.enqueue(new MockResponse());
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
RxJavaPlugins.setErrorHandler(
throwable -> {
if (!throwableRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
});
RecordingSingleObserver<Response<String>> observer = subscriberRule.create();
final RuntimeException e = new RuntimeException();
service
.response()
.subscribe(
new ForwardingObserver<Response<String>>(observer) {
@Override
public void onSuccess(Response<String> value) {
throw e;
}
});
assertThat(throwableRef.get()).hasCauseThat().isSameInstanceAs(e);
}
@Test
public void responseThrowingInOnErrorDeliveredToPlugin() {
server.enqueue(new MockResponse().setSocketPolicy(DISCONNECT_AFTER_REQUEST));
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
RxJavaPlugins.setErrorHandler(
throwable -> {
if (!throwableRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
});
RecordingSingleObserver<Response<String>> observer = subscriberRule.create();
final AtomicReference<Throwable> errorRef = new AtomicReference<>();
final RuntimeException e = new RuntimeException();
service
.response()
.subscribe(
new ForwardingObserver<Response<String>>(observer) {
@Override
public void onError(Throwable throwable) {
if (!errorRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
throw e;
}
});
//noinspection ThrowableResultOfMethodCallIgnored
CompositeException composite = (CompositeException) throwableRef.get();
assertThat(composite.getExceptions()).containsExactly(errorRef.get(), e);
}
@Test
public void resultThrowingInOnSuccessDeliveredToPlugin() {
server.enqueue(new MockResponse());
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
RxJavaPlugins.setErrorHandler(
throwable -> {
if (!throwableRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
});
RecordingSingleObserver<Result<String>> observer = subscriberRule.create();
final RuntimeException e = new RuntimeException();
service
.result()
.subscribe(
new ForwardingObserver<Result<String>>(observer) {
@Override
public void onSuccess(Result<String> value) {
throw e;
}
});
assertThat(throwableRef.get()).hasCauseThat().isSameInstanceAs(e);
}
@Ignore("Single's contract is onNext|onError so we have no way of triggering this case")
@Test
public void resultThrowingInOnErrorDeliveredToPlugin() {
server.enqueue(new MockResponse());
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
RxJavaPlugins.setErrorHandler(
throwable -> {
if (!throwableRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
});
RecordingSingleObserver<Result<String>> observer = subscriberRule.create();
final RuntimeException first = new RuntimeException();
final RuntimeException second = new RuntimeException();
service
.result()
.subscribe(
new ForwardingObserver<Result<String>>(observer) {
@Override
public void onSuccess(Result<String> value) {
// The only way to trigger onError for Result is if onSuccess throws.
throw first;
}
@Override
public void onError(Throwable throwable) {
throw second;
}
});
//noinspection ThrowableResultOfMethodCallIgnored
CompositeException composite = (CompositeException) throwableRef.get();
assertThat(composite.getExceptions()).containsExactly(first, second);
}
private abstract static | Service |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/metrics/JmxReporterTest.java | {
"start": 1490,
"end": 8820
} | class ____ {
@Test
public void testJmxRegistration() throws Exception {
Metrics metrics = new Metrics();
MBeanServer server = ManagementFactory.getPlatformMBeanServer();
try {
JmxReporter reporter = new JmxReporter();
metrics.addReporter(reporter);
assertFalse(server.isRegistered(new ObjectName(":type=grp1")));
Sensor sensor = metrics.sensor("kafka.requests");
sensor.add(metrics.metricName("pack.bean1.avg", "grp1"), new Avg());
sensor.add(metrics.metricName("pack.bean2.total", "grp2"), new CumulativeSum());
assertTrue(server.isRegistered(new ObjectName(":type=grp1")));
assertEquals(Double.NaN, server.getAttribute(new ObjectName(":type=grp1"), "pack.bean1.avg"));
assertTrue(server.isRegistered(new ObjectName(":type=grp2")));
assertEquals(0.0, server.getAttribute(new ObjectName(":type=grp2"), "pack.bean2.total"));
MetricName metricName = metrics.metricName("pack.bean1.avg", "grp1");
String mBeanName = JmxReporter.getMBeanName("", metricName);
assertTrue(reporter.containsMbean(mBeanName));
metrics.removeMetric(metricName);
assertFalse(reporter.containsMbean(mBeanName));
assertFalse(server.isRegistered(new ObjectName(":type=grp1")));
assertTrue(server.isRegistered(new ObjectName(":type=grp2")));
assertEquals(0.0, server.getAttribute(new ObjectName(":type=grp2"), "pack.bean2.total"));
metricName = metrics.metricName("pack.bean2.total", "grp2");
metrics.removeMetric(metricName);
assertFalse(reporter.containsMbean(mBeanName));
assertFalse(server.isRegistered(new ObjectName(":type=grp1")));
assertFalse(server.isRegistered(new ObjectName(":type=grp2")));
} finally {
metrics.close();
}
}
@Test
public void testJmxRegistrationSanitization() throws Exception {
Metrics metrics = new Metrics();
MBeanServer server = ManagementFactory.getPlatformMBeanServer();
try {
metrics.addReporter(new JmxReporter());
Sensor sensor = metrics.sensor("kafka.requests");
sensor.add(metrics.metricName("name", "group", "desc", "id", "foo*"), new CumulativeSum());
sensor.add(metrics.metricName("name", "group", "desc", "id", "foo+"), new CumulativeSum());
sensor.add(metrics.metricName("name", "group", "desc", "id", "foo?"), new CumulativeSum());
sensor.add(metrics.metricName("name", "group", "desc", "id", "foo:"), new CumulativeSum());
sensor.add(metrics.metricName("name", "group", "desc", "id", "foo%"), new CumulativeSum());
assertTrue(server.isRegistered(new ObjectName(":type=group,id=\"foo\\*\"")));
assertEquals(0.0, server.getAttribute(new ObjectName(":type=group,id=\"foo\\*\""), "name"));
assertTrue(server.isRegistered(new ObjectName(":type=group,id=\"foo+\"")));
assertEquals(0.0, server.getAttribute(new ObjectName(":type=group,id=\"foo+\""), "name"));
assertTrue(server.isRegistered(new ObjectName(":type=group,id=\"foo\\?\"")));
assertEquals(0.0, server.getAttribute(new ObjectName(":type=group,id=\"foo\\?\""), "name"));
assertTrue(server.isRegistered(new ObjectName(":type=group,id=\"foo:\"")));
assertEquals(0.0, server.getAttribute(new ObjectName(":type=group,id=\"foo:\""), "name"));
assertTrue(server.isRegistered(new ObjectName(":type=group,id=foo%")));
assertEquals(0.0, server.getAttribute(new ObjectName(":type=group,id=foo%"), "name"));
metrics.removeMetric(metrics.metricName("name", "group", "desc", "id", "foo*"));
metrics.removeMetric(metrics.metricName("name", "group", "desc", "id", "foo+"));
metrics.removeMetric(metrics.metricName("name", "group", "desc", "id", "foo?"));
metrics.removeMetric(metrics.metricName("name", "group", "desc", "id", "foo:"));
metrics.removeMetric(metrics.metricName("name", "group", "desc", "id", "foo%"));
assertFalse(server.isRegistered(new ObjectName(":type=group,id=\"foo\\*\"")));
assertFalse(server.isRegistered(new ObjectName(":type=group,id=foo+")));
assertFalse(server.isRegistered(new ObjectName(":type=group,id=\"foo\\?\"")));
assertFalse(server.isRegistered(new ObjectName(":type=group,id=\"foo:\"")));
assertFalse(server.isRegistered(new ObjectName(":type=group,id=foo%")));
} finally {
metrics.close();
}
}
@Test
public void testPredicateAndDynamicReload() throws Exception {
Metrics metrics = new Metrics();
MBeanServer server = ManagementFactory.getPlatformMBeanServer();
Map<String, String> configs = new HashMap<>();
configs.put(JmxReporter.EXCLUDE_CONFIG,
JmxReporter.getMBeanName("", metrics.metricName("pack.bean2.total", "grp2")));
try {
JmxReporter reporter = new JmxReporter();
reporter.configure(configs);
metrics.addReporter(reporter);
Sensor sensor = metrics.sensor("kafka.requests");
sensor.add(metrics.metricName("pack.bean2.avg", "grp1"), new Avg());
sensor.add(metrics.metricName("pack.bean2.total", "grp2"), new CumulativeSum());
sensor.record();
assertTrue(server.isRegistered(new ObjectName(":type=grp1")));
assertEquals(1.0, server.getAttribute(new ObjectName(":type=grp1"), "pack.bean2.avg"));
assertFalse(server.isRegistered(new ObjectName(":type=grp2")));
sensor.record();
configs.put(JmxReporter.EXCLUDE_CONFIG,
JmxReporter.getMBeanName("", metrics.metricName("pack.bean2.avg", "grp1")));
reporter.reconfigure(configs);
assertFalse(server.isRegistered(new ObjectName(":type=grp1")));
assertTrue(server.isRegistered(new ObjectName(":type=grp2")));
assertEquals(2.0, server.getAttribute(new ObjectName(":type=grp2"), "pack.bean2.total"));
metrics.removeMetric(metrics.metricName("pack.bean2.total", "grp2"));
assertFalse(server.isRegistered(new ObjectName(":type=grp2")));
} finally {
metrics.close();
}
}
@Test
public void testJmxPrefix() throws Exception {
JmxReporter reporter = new JmxReporter();
MetricsContext metricsContext = new KafkaMetricsContext("kafka.server");
MetricConfig metricConfig = new MetricConfig();
Metrics metrics = new Metrics(metricConfig, new ArrayList<>(Collections.singletonList(reporter)), Time.SYSTEM, metricsContext);
MBeanServer server = ManagementFactory.getPlatformMBeanServer();
try {
Sensor sensor = metrics.sensor("kafka.requests");
sensor.add(metrics.metricName("pack.bean1.avg", "grp1"), new Avg());
assertEquals("kafka.server", server.getObjectInstance(new ObjectName("kafka.server:type=grp1")).getObjectName().getDomain());
} finally {
metrics.close();
}
}
}
| JmxReporterTest |
java | apache__kafka | connect/runtime/src/test/java/org/apache/kafka/connect/util/TableTest.java | {
"start": 1085,
"end": 1765
} | class ____ {
@Test
public void basicOperations() {
Table<String, Integer, String> table = new Table<>();
table.put("foo", 5, "bar");
table.put("foo", 6, "baz");
assertEquals("bar", table.get("foo", 5));
assertEquals("baz", table.get("foo", 6));
Map<Integer, String> row = table.row("foo");
assertEquals("bar", row.get(5));
assertEquals("baz", row.get(6));
assertEquals("bar", table.remove("foo", 5));
assertNull(table.get("foo", 5));
assertEquals("baz", table.remove("foo", 6));
assertNull(table.get("foo", 6));
assertTrue(table.row("foo").isEmpty());
}
}
| TableTest |
java | apache__camel | components/camel-spring-parent/camel-spring-ws/src/test/java/org/apache/camel/component/spring/ws/filter/impl/BasicMessageFilterTest.java | {
"start": 1467,
"end": 7163
} | class ____ extends ExchangeTestSupport {
private BasicMessageFilter filter;
private SoapMessage message;
@BeforeEach
public void before() {
filter = new BasicMessageFilter();
SaajSoapMessageFactory saajSoapMessageFactory = new SaajSoapMessageFactory();
saajSoapMessageFactory.afterPropertiesSet();
message = saajSoapMessageFactory.createWebServiceMessage();
}
@Test
public void testNulls() throws Exception {
filter.filterConsumer(null, null);
filter.filterProducer(null, null);
}
@Test
public void testNullsWithExchange() throws Exception {
filter.filterConsumer(exchange, null);
filter.filterProducer(exchange, null);
}
@Test
public void nonSoapMessageShouldBeSkipped() throws Exception {
DomPoxMessage domPoxMessage = new DomPoxMessageFactory().createWebServiceMessage();
filter.filterConsumer(exchange, domPoxMessage);
filter.filterProducer(exchange, domPoxMessage);
}
@Test
public void withoutHeader() throws Exception {
exchange.getIn().getHeaders().clear();
exchange.getOut().getHeaders().clear();
if (exchange.getIn(AttachmentMessage.class).hasAttachments()) {
exchange.getIn(AttachmentMessage.class).getAttachments().clear();
}
if (exchange.getOut(AttachmentMessage.class).hasAttachments()) {
exchange.getOut(AttachmentMessage.class).getAttachments().clear();
}
filter.filterProducer(exchange, message);
filter.filterConsumer(exchange, message);
Assertions.assertThat(message.getAttachments()).isEmpty();
Assertions.assertThat(message.getSoapHeader().examineAllHeaderElements()).isEmpty();
Assertions.assertThat(message.getSoapHeader().getAllAttributes()).isEmpty();
}
@Test
public void removeCamelInternalHeaderAttributes() throws Exception {
exchange.getOut().getHeaders().put(SpringWebserviceConstants.SPRING_WS_SOAP_ACTION, "mustBeRemoved");
exchange.getOut().getHeaders().put(SpringWebserviceConstants.SPRING_WS_ADDRESSING_ACTION, "mustBeRemoved");
exchange.getOut().getHeaders().put(SpringWebserviceConstants.SPRING_WS_ADDRESSING_PRODUCER_FAULT_TO, "mustBeRemoved");
exchange.getOut().getHeaders().put(SpringWebserviceConstants.SPRING_WS_ADDRESSING_PRODUCER_REPLY_TO, "mustBeRemoved");
exchange.getOut().getHeaders().put(SpringWebserviceConstants.SPRING_WS_ADDRESSING_CONSUMER_FAULT_ACTION,
"mustBeRemoved");
exchange.getOut().getHeaders().put(SpringWebserviceConstants.SPRING_WS_ADDRESSING_CONSUMER_OUTPUT_ACTION,
"mustBeRemoved");
exchange.getOut().getHeaders().put(SpringWebserviceConstants.SPRING_WS_ENDPOINT_URI, "mustBeRemoved");
exchange.getOut().getHeaders().put("breadcrumbId", "mustBeRemoved");
filter.filterConsumer(exchange, message);
Assertions.assertThat(message.getAttachments()).isEmpty();
Assertions.assertThat(message.getSoapHeader().examineAllHeaderElements()).isEmpty();
Assertions.assertThat(message.getSoapHeader().getAllAttributes()).isEmpty();
}
@Test
public void consumerWithHeader() throws Exception {
exchange.getOut().getHeaders().put("headerAttributeKey", "testAttributeValue");
exchange.getOut().getHeaders().put("headerAttributeElement", new QName("http://shouldBeInHeader", "myElement"));
filter.filterConsumer(exchange, message);
Assertions.assertThat(message.getAttachments()).isEmpty();
Assertions.assertThat(message.getSoapHeader().examineAllHeaderElements()).isNotEmpty().hasSize(1);
Assertions.assertThat(message.getSoapHeader().getAllAttributes()).isNotEmpty().hasSize(1);
}
@Test
public void producerWithHeader() throws Exception {
// foo is already in the header.in from the parent ExchangeTestSupport
exchange.getIn().getHeaders().put("headerAttributeKey", "testAttributeValue");
exchange.getIn().getHeaders().put("headerAttributeElement", new QName("http://shouldBeInHeader", "myElement"));
filter.filterProducer(exchange, message);
Assertions.assertThat(message.getAttachments()).isEmpty();
Assertions.assertThat(message.getSoapHeader().examineAllHeaderElements()).isNotEmpty().hasSize(1);
Assertions.assertThat(message.getSoapHeader().getAllAttributes()).isNotEmpty().hasSize(2);
}
@Test
public void withoutAttachment() throws Exception {
filter.filterConsumer(exchange, message);
filter.filterProducer(exchange, message);
Assertions.assertThat(message.getAttachments()).isEmpty();
}
@Test
public void producerWithAttachment() throws Exception {
exchange.getIn(AttachmentMessage.class).addAttachment("testAttachment",
new DataHandler(this.getClass().getResource("/sampleAttachment.txt")));
filter.filterProducer(exchange, message);
Assertions.assertThat(message.getAttachments()).isNotNull().isNotEmpty();
Assertions.assertThat(message.getAttachment("testAttachment")).isNotNull();
}
@Test
public void consumerWithAttachment() throws Exception {
exchange.getMessage(AttachmentMessage.class).addAttachment("testAttachment",
new DataHandler(this.getClass().getResource("/sampleAttachment.txt")));
filter.filterConsumer(exchange, message);
Assertions.assertThat(message.getAttachments()).isNotNull().isNotEmpty();
Assertions.assertThat(message.getAttachment("testAttachment")).isNotNull();
}
}
| BasicMessageFilterTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java | {
"start": 1582,
"end": 3505
} | class ____ implements Writable {
private final Class<? extends Writable> valueClass;
private Writable[] values;
public ArrayWritable(Class<? extends Writable> valueClass) {
if (valueClass == null) {
throw new IllegalArgumentException("null valueClass");
}
this.valueClass = valueClass;
}
public ArrayWritable(Class<? extends Writable> valueClass, Writable[] values) {
this(valueClass);
this.values = values;
}
public ArrayWritable(String[] strings) {
this(Text.class, new Writable[strings.length]);
for (int i = 0; i < strings.length; i++) {
values[i] = new UTF8(strings[i]);
}
}
public Class<? extends Writable> getValueClass() {
return valueClass;
}
public String[] toStrings() {
String[] strings = new String[values.length];
for (int i = 0; i < values.length; i++) {
strings[i] = values[i].toString();
}
return strings;
}
public Object toArray() {
return Arrays.copyOf(values, values.length);
}
public void set(Writable[] values) {
this.values = values;
}
public Writable[] get() {
return values;
}
@Override
public void readFields(DataInput in) throws IOException {
values = new Writable[in.readInt()]; // construct values
for (int i = 0; i < values.length; i++) {
Writable value = WritableFactories.newInstance(valueClass);
value.readFields(in); // read a value
values[i] = value; // store it in values
}
}
@Override
public void write(DataOutput out) throws IOException {
out.writeInt(values.length); // write values
for (int i = 0; i < values.length; i++) {
values[i].write(out);
}
}
@Override
public String toString() {
return "ArrayWritable [valueClass=" + valueClass + ", values="
+ Arrays.toString(values) + "]";
}
}
| ArrayWritable |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/context/visitor/ContextConfigurerVisitor.java | {
"start": 3078,
"end": 3216
} | class ____ no constructor arguments annotated with @ContextConfigurer, which sole role is configuring the application context.");
}
}
| with |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/WindowedSerdes.java | {
"start": 2140,
"end": 2752
} | class ____<T> extends Serdes.WrapperSerde<Windowed<T>> {
// Default constructor needed for reflection object creation
public SessionWindowedSerde() {
super(new SessionWindowedSerializer<>(), new SessionWindowedDeserializer<>());
}
public SessionWindowedSerde(final Serde<T> inner) {
super(new SessionWindowedSerializer<>(inner.serializer()), new SessionWindowedDeserializer<>(inner.deserializer()));
}
}
/**
* Construct a {@code TimeWindowedSerde} object to deserialize changelog topic
* for the specified inner | SessionWindowedSerde |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java | {
"start": 10159,
"end": 10328
} | class ____ with the snapshot functionality please refer to the
* documentation of the package {@link org.elasticsearch.repositories.blobstore}.
*/
public abstract | interact |
java | apache__camel | components/camel-solr/src/main/java/org/apache/camel/component/solr/SolrEndpoint.java | {
"start": 2152,
"end": 7305
} | class ____ extends DefaultEndpoint implements EndpointServiceLocation {
private static final Logger LOG = LoggerFactory.getLogger(SolrEndpoint.class);
@UriParam
private final SolrConfiguration configuration;
private SolrClient solrClient;
public SolrEndpoint(String uri, SolrComponent component, SolrConfiguration configuration) {
super(uri, component);
this.configuration = configuration;
}
public SolrConfiguration getConfiguration() {
return configuration;
}
@Override
public Producer createProducer() {
return new SolrProducer(this, configuration);
}
@Override
public Consumer createConsumer(Processor processor) {
throw new UnsupportedOperationException("Cannot consume from a Solr endpoint: " + getEndpointUri());
}
@Override
public String getServiceUrl() {
return configuration.getSolrBaseUrl();
}
@Override
public String getServiceProtocol() {
return "solr";
}
@Override
protected void doStart() throws Exception {
super.doStart();
// preconfigured solr client
if (solrClient == null && configuration.getSolrClient() == null) {
// create solr client from config
solrClient = createSolrClient();
LOG.info("Starting SolrClient: {}",
getSolrClientInfoString(solrClient, isProcessAsync(solrClient, configuration), this.getEndpointUri()));
}
}
@Override
protected void doShutdown() throws Exception {
// stop solr client when created (not pre-configured)
if (solrClient != null) {
LOG.info("Stopping SolrClient: {}",
getSolrClientInfoString(solrClient, isProcessAsync(solrClient, configuration), this.getEndpointUri()));
IOHelper.close(solrClient);
solrClient = null;
}
}
public SolrClient getSolrClient() {
if (configuration.getSolrClient() != null) {
return configuration.getSolrClient();
}
return solrClient;
}
public static String getSolrClientInfoString(SolrClient solrClient, boolean async, String endpointUri) {
return String.format(
"%s %s (async=%s; endpoint=%s)",
solrClient.getClass().getSimpleName(),
solrClient instanceof HttpJdkSolrClient httpJdkSolrClient
? "@ " + httpJdkSolrClient.getBaseURL()
: "",
async,
URISupport.sanitizeUri(endpointUri));
}
public SolrClient createSolrClient() {
final HttpJdkSolrClient.Builder builder = new HttpJdkSolrClient.Builder(configuration.getSolrBaseUrl());
builder.withConnectionTimeout(configuration.getConnectionTimeout(), TimeUnit.MILLISECONDS);
builder.withRequestTimeout(configuration.getRequestTimeout(), TimeUnit.MILLISECONDS);
if (ObjectHelper.isNotEmpty(configuration.getUsername()) && ObjectHelper.isNotEmpty(configuration.getPassword())) {
builder.withBasicAuthCredentials(configuration.getUsername(), configuration.getPassword());
}
if (ObjectHelper.isNotEmpty(configuration.getCertificatePath())) {
builder.withSSLContext(createSslContextFromCa(getCamelContext(), configuration.getCertificatePath()));
}
if (configuration.getCollection() != null) {
builder.withDefaultCollection(configuration.getCollection());
}
return builder.build();
}
protected static boolean isProcessAsync(SolrClient solrClient, SolrConfiguration configuration) {
if (!(solrClient instanceof HttpSolrClientBase)) {
return false;
}
return configuration.isAsync();
}
/**
* An SSL context based on the self-signed CA, so that using this SSL Context allows to connect to the solr instance
*
* @return a customized SSL Context
*/
private static SSLContext createSslContextFromCa(CamelContext camelContext, String certificatePath) {
try {
CertificateFactory factory = CertificateFactory.getInstance("X.509");
InputStream resolveMandatoryResourceAsInputStream
= ResourceHelper.resolveMandatoryResourceAsInputStream(
camelContext, certificatePath);
Certificate trustedCa = factory.generateCertificate(resolveMandatoryResourceAsInputStream);
KeyStore trustStore = KeyStore.getInstance("pkcs12");
trustStore.load(null, null);
trustStore.setCertificateEntry("ca", trustedCa);
final SSLContext sslContext = SSLContext.getInstance("TLSv1.3");
TrustManagerFactory trustManagerFactory
= TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
trustManagerFactory.init(trustStore);
sslContext.init(null, trustManagerFactory.getTrustManagers(), null);
return sslContext;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
| SolrEndpoint |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/SqlDataTypes.java | {
"start": 2558,
"end": 25812
} | class ____ {
// tag::noformat
// date-only, time-only
public static final DataType DATE = new DataType("DATE", null, Long.BYTES, false, false, true);
public static final DataType TIME = new DataType("TIME", null, Long.BYTES, false, false, true);
// interval
public static final DataType INTERVAL_YEAR =
new DataType("INTERVAL_YEAR", null, Integer.BYTES, false, false, false);
public static final DataType INTERVAL_MONTH =
new DataType("INTERVAL_MONTH", null, Integer.BYTES, false, false, false);
public static final DataType INTERVAL_DAY =
new DataType("INTERVAL_DAY", null, Long.BYTES, false, false, false);
public static final DataType INTERVAL_HOUR =
new DataType("INTERVAL_HOUR", null, Long.BYTES, false, false, false);
public static final DataType INTERVAL_MINUTE =
new DataType("INTERVAL_MINUTE", null, Long.BYTES, false, false, false);
public static final DataType INTERVAL_SECOND =
new DataType("INTERVAL_SECOND", null, Long.BYTES, false, false, false);
public static final DataType INTERVAL_YEAR_TO_MONTH =
new DataType("INTERVAL_YEAR_TO_MONTH", null, Integer.BYTES, false, false, false);
public static final DataType INTERVAL_DAY_TO_HOUR =
new DataType("INTERVAL_DAY_TO_HOUR", null, Long.BYTES, false, false, false);
public static final DataType INTERVAL_DAY_TO_MINUTE =
new DataType("INTERVAL_DAY_TO_MINUTE", null, Long.BYTES, false, false, false);
public static final DataType INTERVAL_DAY_TO_SECOND =
new DataType("INTERVAL_DAY_TO_SECOND", null, Long.BYTES, false, false, false);
public static final DataType INTERVAL_HOUR_TO_MINUTE =
new DataType("INTERVAL_HOUR_TO_MINUTE", null, Long.BYTES, false, false, false);
public static final DataType INTERVAL_HOUR_TO_SECOND =
new DataType("INTERVAL_HOUR_TO_SECOND", null, Long.BYTES, false, false, false);
public static final DataType INTERVAL_MINUTE_TO_SECOND =
new DataType("INTERVAL_MINUTE_TO_SECOND", null, Long.BYTES, false, false, false);
// geo
public static final DataType GEO_SHAPE = new DataType("geo_shape", Integer.MAX_VALUE, false, false, false);
public static final DataType GEO_POINT = new DataType("geo_point", Double.BYTES * 2, false, false, false);
public static final DataType SHAPE = new DataType("shape", Integer.MAX_VALUE, false, false, false);
// end::noformat
private static final Map<String, DataType> ODBC_TO_ES = new HashMap<>(mapSize(38));
static {
// Numeric
ODBC_TO_ES.put("SQL_BIT", BOOLEAN);
ODBC_TO_ES.put("SQL_TINYINT", BYTE);
ODBC_TO_ES.put("SQL_SMALLINT", SHORT);
ODBC_TO_ES.put("SQL_INTEGER", INTEGER);
ODBC_TO_ES.put("SQL_BIGINT", LONG);
ODBC_TO_ES.put("SQL_UBIGINT", UNSIGNED_LONG);
ODBC_TO_ES.put("SQL_REAL", FLOAT);
ODBC_TO_ES.put("SQL_FLOAT", DOUBLE);
ODBC_TO_ES.put("SQL_DOUBLE", DOUBLE);
ODBC_TO_ES.put("SQL_DECIMAL", DOUBLE);
ODBC_TO_ES.put("SQL_NUMERIC", DOUBLE);
// String
ODBC_TO_ES.put("SQL_GUID", KEYWORD);
ODBC_TO_ES.put("SQL_CHAR", KEYWORD);
ODBC_TO_ES.put("SQL_WCHAR", KEYWORD);
ODBC_TO_ES.put("SQL_VARCHAR", TEXT);
ODBC_TO_ES.put("SQL_WVARCHAR", TEXT);
ODBC_TO_ES.put("SQL_LONGVARCHAR", TEXT);
ODBC_TO_ES.put("SQL_WLONGVARCHAR", TEXT);
// Binary
ODBC_TO_ES.put("SQL_BINARY", BINARY);
ODBC_TO_ES.put("SQL_VARBINARY", BINARY);
ODBC_TO_ES.put("SQL_LONGVARBINARY", BINARY);
// Date
ODBC_TO_ES.put("SQL_DATE", DATE);
ODBC_TO_ES.put("SQL_TIME", TIME);
ODBC_TO_ES.put("SQL_TIMESTAMP", DATETIME);
// Intervals
ODBC_TO_ES.put("SQL_INTERVAL_YEAR", INTERVAL_YEAR);
ODBC_TO_ES.put("SQL_INTERVAL_MONTH", INTERVAL_MONTH);
ODBC_TO_ES.put("SQL_INTERVAL_DAY", INTERVAL_DAY);
ODBC_TO_ES.put("SQL_INTERVAL_HOUR", INTERVAL_HOUR);
ODBC_TO_ES.put("SQL_INTERVAL_MINUTE", INTERVAL_MINUTE);
ODBC_TO_ES.put("SQL_INTERVAL_SECOND", INTERVAL_SECOND);
ODBC_TO_ES.put("SQL_INTERVAL_YEAR_TO_MONTH", INTERVAL_YEAR_TO_MONTH);
ODBC_TO_ES.put("SQL_INTERVAL_DAY_TO_HOUR", INTERVAL_DAY_TO_HOUR);
ODBC_TO_ES.put("SQL_INTERVAL_DAY_TO_MINUTE", INTERVAL_DAY_TO_MINUTE);
ODBC_TO_ES.put("SQL_INTERVAL_DAY_TO_SECOND", INTERVAL_DAY_TO_SECOND);
ODBC_TO_ES.put("SQL_INTERVAL_HOUR_TO_MINUTE", INTERVAL_HOUR_TO_MINUTE);
ODBC_TO_ES.put("SQL_INTERVAL_HOUR_TO_SECOND", INTERVAL_HOUR_TO_SECOND);
ODBC_TO_ES.put("SQL_INTERVAL_MINUTE_TO_SECOND", INTERVAL_MINUTE_TO_SECOND);
}
private static final Collection<DataType> TYPES = Stream.concat(
DataTypes.types().stream(),
Stream.of(
DATE,
TIME,
INTERVAL_YEAR,
INTERVAL_MONTH,
INTERVAL_DAY,
INTERVAL_HOUR,
INTERVAL_MINUTE,
INTERVAL_SECOND,
INTERVAL_YEAR_TO_MONTH,
INTERVAL_DAY_TO_HOUR,
INTERVAL_DAY_TO_MINUTE,
INTERVAL_DAY_TO_SECOND,
INTERVAL_HOUR_TO_MINUTE,
INTERVAL_HOUR_TO_SECOND,
INTERVAL_MINUTE_TO_SECOND,
GEO_SHAPE,
GEO_POINT,
SHAPE
)
).sorted(Comparator.comparing(DataType::typeName)).toList();
private static final Map<String, DataType> NAME_TO_TYPE = TYPES.stream().collect(toUnmodifiableMap(DataType::typeName, t -> t));
private static final Map<String, DataType> ES_TO_TYPE;
static {
Map<String, DataType> map = TYPES.stream().filter(e -> e.esType() != null).collect(Collectors.toMap(DataType::esType, t -> t));
map.put("date_nanos", DATETIME);
ES_TO_TYPE = Collections.unmodifiableMap(map);
}
private static final Map<String, DataType> SQL_TO_ES;
static {
Map<String, DataType> sqlToEs = new HashMap<>(mapSize(45));
// first add ES types
for (DataType type : SqlDataTypes.types()) {
if (type != OBJECT && type != NESTED) {
sqlToEs.put(type.typeName().toUpperCase(Locale.ROOT), type);
}
}
// reuse the ODBC definition (without SQL_)
// note that this will override existing types in particular FLOAT
for (Entry<String, DataType> entry : ODBC_TO_ES.entrySet()) {
sqlToEs.put(entry.getKey().substring(4), entry.getValue());
}
// special ones
sqlToEs.put("BOOL", BOOLEAN);
sqlToEs.put("INT", INTEGER);
sqlToEs.put("STRING", KEYWORD);
SQL_TO_ES = unmodifiableMap(sqlToEs);
}
private SqlDataTypes() {}
public static Collection<DataType> types() {
return TYPES;
}
public static DataType fromTypeName(String name) {
return NAME_TO_TYPE.get(name.toLowerCase(Locale.ROOT));
}
public static DataType fromEs(String name) {
DataType type = ES_TO_TYPE.get(name);
return type != null ? type : UNSUPPORTED;
}
public static DataType fromJava(Object value) {
DataType type = DataTypes.fromJava(value);
if (type != null) {
return type;
}
if (value instanceof OffsetTime) {
return TIME;
}
if (value instanceof Interval) {
return ((Interval<?>) value).dataType();
}
if (value instanceof GeoShape) {
return GEO_SHAPE;
}
if (value instanceof Version) {
return VERSION;
}
return null;
}
public static boolean isNullOrInterval(DataType type) {
return type == NULL || isInterval(type);
}
public static boolean isInterval(DataType dataType) {
return isYearMonthInterval(dataType) || isDayTimeInterval(dataType);
}
public static boolean isYearMonthInterval(DataType dataType) {
return dataType == INTERVAL_YEAR || dataType == INTERVAL_MONTH || dataType == INTERVAL_YEAR_TO_MONTH;
}
public static boolean isDayTimeInterval(DataType dataType) {
return dataType == INTERVAL_DAY
|| dataType == INTERVAL_HOUR
|| dataType == INTERVAL_MINUTE
|| dataType == INTERVAL_SECOND
|| dataType == INTERVAL_DAY_TO_HOUR
|| dataType == INTERVAL_DAY_TO_MINUTE
|| dataType == INTERVAL_DAY_TO_SECOND
|| dataType == INTERVAL_HOUR_TO_MINUTE
|| dataType == INTERVAL_HOUR_TO_SECOND
|| dataType == INTERVAL_MINUTE_TO_SECOND;
}
public static boolean isDateBased(DataType type) {
return isDateTime(type) || type == DATE;
}
public static boolean isTimeBased(DataType type) {
return type == TIME;
}
public static boolean isDateOrTimeBased(DataType type) {
return isDateBased(type) || isTimeBased(type);
}
public static boolean isDateOrIntervalBased(DataType type) {
return isDateBased(type) || isInterval(type);
}
public static boolean isGeo(DataType type) {
return type == GEO_POINT || type == GEO_SHAPE || type == SHAPE;
}
public static String format(DataType type) {
return isDateOrTimeBased(type) ? "strict_date_optional_time_nanos" : null;
}
public static boolean isFromDocValuesOnly(DataType dataType) {
return dataType == KEYWORD // because of ignore_above. Extracting this from _source wouldn't make sense
|| dataType == DATE // because of date formats
|| dataType == DATETIME
|| dataType == SCALED_FLOAT // because of scaling_factor
|| dataType == GEO_POINT
|| dataType == SHAPE;
}
public static boolean areCompatible(DataType left, DataType right) {
if (left == right) {
return true;
} else {
return (left == NULL || right == NULL)
|| (DataTypes.isString(left) && DataTypes.isString(right))
|| (left.isNumeric() && right.isNumeric())
|| (isDateBased(left) && isDateBased(right))
|| (isInterval(left) && isDateBased(right))
|| (isDateBased(left) && isInterval(right))
|| (isInterval(left) && isInterval(right) && Intervals.compatibleInterval(left, right) != null);
}
}
public static DataType fromOdbcType(String odbcType) {
return ODBC_TO_ES.get(odbcType);
}
public static DataType fromSqlOrEsType(String typeName) {
return SQL_TO_ES.get(typeName.toUpperCase(Locale.ROOT));
}
public static SQLType sqlType(DataType dataType) {
if (dataType == UNSUPPORTED) {
return JDBCType.OTHER;
}
if (dataType == NULL) {
return JDBCType.NULL;
}
if (dataType == BOOLEAN) {
return JDBCType.BOOLEAN;
}
if (dataType == BYTE) {
return JDBCType.TINYINT;
}
if (dataType == SHORT) {
return JDBCType.SMALLINT;
}
if (dataType == INTEGER) {
return JDBCType.INTEGER;
}
if (dataType == LONG) {
return JDBCType.BIGINT;
}
if (dataType == UNSIGNED_LONG) {
return JDBCType.NUMERIC;
}
if (dataType == DOUBLE) {
return JDBCType.DOUBLE;
}
if (dataType == FLOAT) {
return JDBCType.REAL;
}
if (dataType == HALF_FLOAT) {
return JDBCType.FLOAT;
}
if (dataType == SCALED_FLOAT) {
return JDBCType.DOUBLE;
}
if (dataType == KEYWORD) {
return JDBCType.VARCHAR;
}
if (dataType == TEXT) {
return JDBCType.VARCHAR;
}
if (isDateTime(dataType)) {
return JDBCType.TIMESTAMP;
}
if (dataType == IP) {
return JDBCType.VARCHAR;
}
if (dataType == VERSION) {
return JDBCType.VARCHAR;
}
if (dataType == BINARY) {
return JDBCType.BINARY;
}
if (dataType == OBJECT) {
return JDBCType.STRUCT;
}
if (dataType == NESTED) {
return JDBCType.STRUCT;
}
//
// SQL specific
//
if (dataType == DATE) {
return JDBCType.DATE;
}
if (dataType == TIME) {
return JDBCType.TIME;
}
if (dataType == GEO_SHAPE) {
return ExtTypes.GEOMETRY;
}
if (dataType == GEO_POINT) {
return ExtTypes.GEOMETRY;
}
if (dataType == SHAPE) {
return ExtTypes.GEOMETRY;
}
if (dataType == INTERVAL_YEAR) {
return ExtTypes.INTERVAL_YEAR;
}
if (dataType == INTERVAL_MONTH) {
return ExtTypes.INTERVAL_MONTH;
}
if (dataType == INTERVAL_DAY) {
return ExtTypes.INTERVAL_DAY;
}
if (dataType == INTERVAL_HOUR) {
return ExtTypes.INTERVAL_HOUR;
}
if (dataType == INTERVAL_MINUTE) {
return ExtTypes.INTERVAL_MINUTE;
}
if (dataType == INTERVAL_SECOND) {
return ExtTypes.INTERVAL_SECOND;
}
if (dataType == INTERVAL_YEAR_TO_MONTH) {
return ExtTypes.INTERVAL_YEAR_TO_MONTH;
}
if (dataType == INTERVAL_DAY_TO_HOUR) {
return ExtTypes.INTERVAL_DAY_TO_HOUR;
}
if (dataType == INTERVAL_DAY_TO_MINUTE) {
return ExtTypes.INTERVAL_DAY_TO_MINUTE;
}
if (dataType == INTERVAL_DAY_TO_SECOND) {
return ExtTypes.INTERVAL_DAY_TO_SECOND;
}
if (dataType == INTERVAL_HOUR_TO_MINUTE) {
return ExtTypes.INTERVAL_HOUR_TO_MINUTE;
}
if (dataType == INTERVAL_HOUR_TO_SECOND) {
return ExtTypes.INTERVAL_HOUR_TO_SECOND;
}
if (dataType == INTERVAL_MINUTE_TO_SECOND) {
return ExtTypes.INTERVAL_MINUTE_TO_SECOND;
}
return null;
}
/**
* Returns the precision of the field
* <p>
* Precision is the specified column size. For numeric data, this is the maximum precision. For character
* data, this is the length in characters. For datetime datatypes, this is the length in characters of the
* String representation (assuming the maximum allowed defaultPrecision of the fractional seconds component).
*/
public static int defaultPrecision(DataType dataType) {
if (dataType == UNSUPPORTED) {
return dataType.size();
}
if (dataType == NULL) {
return dataType.size();
}
if (dataType == BOOLEAN) {
return dataType.size();
}
if (dataType == BYTE) {
return 3;
}
if (dataType == SHORT) {
return 5;
}
if (dataType == INTEGER) {
return 10;
}
if (dataType == LONG) {
return 19;
}
if (dataType == UNSIGNED_LONG) {
return 20;
}
if (dataType == DOUBLE) {
return 15;
}
if (dataType == FLOAT) {
return 7;
}
if (dataType == HALF_FLOAT) {
return 3;
}
if (dataType == SCALED_FLOAT) {
return 15;
}
if (dataType == KEYWORD) {
return 15;
}
if (dataType == TEXT) {
return 32766;
}
if (isDateTime(dataType)) {
return 9;
}
if (dataType == IP) {
return dataType.size();
}
if (dataType == VERSION) {
return dataType.size();
}
if (dataType == BINARY) {
return dataType.size();
}
if (dataType == OBJECT) {
return dataType.size();
}
if (dataType == NESTED) {
return dataType.size();
}
//
// SQL specific
//
// since ODBC and JDBC interpret precision for Date as display size
// the precision is 23 (number of chars in ISO8601 with millis) + 6 chars for the timezone (e.g.: +05:00)
// see https://github.com/elastic/elasticsearch/issues/30386#issuecomment-386807288
if (dataType == DATE) {
return 3;
}
if (dataType == TIME) {
return 9;
}
if (dataType == GEO_SHAPE) {
return dataType.size();
}
if (dataType == GEO_POINT) {
return Integer.MAX_VALUE;
}
if (dataType == SHAPE) {
return dataType.size();
}
if (dataType == INTERVAL_YEAR) {
return 7;
}
if (dataType == INTERVAL_MONTH) {
return 7;
}
if (dataType == INTERVAL_DAY) {
return 23;
}
if (dataType == INTERVAL_HOUR) {
return 23;
}
if (dataType == INTERVAL_MINUTE) {
return 23;
}
if (dataType == INTERVAL_SECOND) {
return 23;
}
if (dataType == INTERVAL_YEAR_TO_MONTH) {
return 7;
}
if (dataType == INTERVAL_DAY_TO_HOUR) {
return 23;
}
if (dataType == INTERVAL_DAY_TO_MINUTE) {
return 23;
}
if (dataType == INTERVAL_DAY_TO_SECOND) {
return 23;
}
if (dataType == INTERVAL_HOUR_TO_MINUTE) {
return 23;
}
if (dataType == INTERVAL_HOUR_TO_SECOND) {
return 23;
}
if (dataType == INTERVAL_MINUTE_TO_SECOND) {
return 23;
}
return 0;
}
public static int displaySize(DataType dataType) {
if (dataType == UNSUPPORTED) {
return dataType.size();
}
if (dataType == NULL) {
return dataType.size();
}
if (dataType == BOOLEAN) {
return dataType.size();
}
if (dataType == BYTE) {
return 5;
}
if (dataType == SHORT) {
return 6;
}
if (dataType == INTEGER) {
return 11;
}
if (dataType == LONG || dataType == UNSIGNED_LONG) {
return 20;
}
if (dataType == DOUBLE) {
return 25;
}
if (dataType == FLOAT) {
return 15;
}
if (dataType == HALF_FLOAT) {
return 25;
}
if (dataType == SCALED_FLOAT) {
return 25;
}
if (dataType == KEYWORD) {
return 32766;
}
if (dataType == TEXT) {
return dataType.size();
}
if (isDateTime(dataType)) {
return 34;
}
if (dataType == IP) {
return dataType.size();
}
if (dataType == VERSION) {
return dataType.size();
}
if (dataType == BINARY) {
return dataType.size();
}
if (dataType == OBJECT) {
return dataType.size();
}
if (dataType == NESTED) {
return dataType.size();
}
//
// SQL specific
//
if (dataType == DATE) {
return 29;
}
if (dataType == TIME) {
return 24;
}
if (dataType == GEO_SHAPE) {
return dataType.size();
}
if (dataType == GEO_POINT) {
// 2 doubles + len("POINT( )")
return 25 * 2 + 8;
}
if (dataType == SHAPE) {
return dataType.size();
}
if (SqlDataTypes.isInterval(dataType)) {
return defaultPrecision(dataType);
}
return 0;
}
//
// Metadata methods, mainly for ODBC.
// As these are fairly obscure and limited in use, there is no point to promote them as a full type methods
// hence why they appear here as utility methods.
//
// https://docs.microsoft.com/en-us/sql/relational-databases/native-client-odbc-date-time/metadata-catalog
// https://github.com/elastic/elasticsearch/issues/30386
public static Integer metaSqlDataType(DataType t) {
if (isDateTime(t)) {
// ODBC SQL_DATETME
return Integer.valueOf(9);
}
// this is safe since the vendor SQL types are short despite the return value
return sqlType(t).getVendorTypeNumber();
}
// https://github.com/elastic/elasticsearch/issues/30386
// https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function
public static Integer metaSqlDateTimeSub(DataType t) {
if (isDateTime(t)) {
// ODBC SQL_CODE_TIMESTAMP
return Integer.valueOf(3);
} else if (t == DATE) {
// ODBC SQL_CODE_DATE
return Integer.valueOf(1);
} else if (t == TIME) {
// ODBC SQL_CODE_TIME
return Integer.valueOf(2);
}
// ODBC null
return 0;
}
public static Short metaSqlMinimumScale(DataType t) {
return metaSqlSameScale(t);
}
public static Short metaSqlMaximumScale(DataType t) {
return metaSqlSameScale(t);
}
// https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/decimal-digits
// https://github.com/elastic/elasticsearch/issues/40357
// since the scale is fixed, minimum and maximum should return the same value
// hence why this method exists
private static Short metaSqlSameScale(DataType t) {
// TODO: return info for SCALED_FLOATS (should be based on field not type)
if (t.isInteger()) {
return Short.valueOf((short) 0);
}
if (isDateTime(t) || t == TIME || t.isRational()) {
return Short.valueOf((short) defaultPrecision(t));
}
return null;
}
// https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function
public static Integer metaSqlRadix(DataType t) {
// RADIX - Determines how numbers returned by COLUMN_SIZE and DECIMAL_DIGITS should be interpreted.
// 10 means they represent the number of decimal digits allowed for the column.
// 2 means they represent the number of bits allowed for the column.
// null means radix is not applicable for the given type.
return t.isInteger() ? Integer.valueOf(10) : (t.isRational() ? Integer.valueOf(2) : null);
}
// https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function#comments
// https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/column-size
public static Integer precision(DataType t) {
if (t.isNumeric()) {
return defaultPrecision(t);
}
return displaySize(t);
}
}
| SqlDataTypes |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/override/mappedsuperclass/SubclassWithUuidAsId.java | {
"start": 551,
"end": 721
} | class ____ extends MappedSuperClassWithUuidAsBasic {
@Id
@Access(AccessType.PROPERTY)
@Override
public Long getUid() {
return super.getUid();
}
}
| SubclassWithUuidAsId |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1400/Issue1492.java | {
"start": 226,
"end": 1132
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
DubboResponse resp = new DubboResponse();
// test for JSONObject
JSONObject obj = new JSONObject();
obj.put("key1","value1");
obj.put("key2","value2");
resp.setData(obj);
String str = JSON.toJSONString(resp);
System.out.println(str);
DubboResponse resp1 = JSON.parseObject(str, DubboResponse.class);
assertEquals(str, JSON.toJSONString(resp1));
// test for JSONArray
JSONArray arr = new JSONArray();
arr.add("key1");
arr.add("key2");
resp.setData(arr);
String str2 = JSON.toJSONString(resp);
System.out.println(str2);
DubboResponse resp2 = JSON.parseObject(str2, DubboResponse.class);
assertEquals(str2, JSON.toJSONString(resp2));
}
public static final | Issue1492 |
java | google__dagger | javatests/dagger/internal/codegen/FrameworkTypeMapperTest.java | {
"start": 1305,
"end": 2241
} | class ____ {
@Test public void forProvider() {
FrameworkTypeMapper mapper = FrameworkTypeMapper.FOR_PROVIDER;
assertThat(mapper.getFrameworkType(INSTANCE)).isEqualTo(FrameworkType.PROVIDER);
assertThat(mapper.getFrameworkType(LAZY)).isEqualTo(FrameworkType.PROVIDER);
assertThat(mapper.getFrameworkType(PROVIDER)).isEqualTo(FrameworkType.PROVIDER);
}
@Test public void forProducer() {
FrameworkTypeMapper mapper = FrameworkTypeMapper.FOR_PRODUCER;
assertThat(mapper.getFrameworkType(INSTANCE)).isEqualTo(FrameworkType.PRODUCER_NODE);
assertThat(mapper.getFrameworkType(LAZY)).isEqualTo(FrameworkType.PROVIDER);
assertThat(mapper.getFrameworkType(PROVIDER)).isEqualTo(FrameworkType.PROVIDER);
assertThat(mapper.getFrameworkType(PRODUCER)).isEqualTo(FrameworkType.PRODUCER_NODE);
assertThat(mapper.getFrameworkType(PRODUCED)).isEqualTo(FrameworkType.PRODUCER_NODE);
}
}
| FrameworkTypeMapperTest |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/ParallelMapTest.java | {
"start": 844,
"end": 2015
} | class ____ {
@Test
public void parallelism() {
ParallelFlux<Integer> source = Flux.range(1, 4).parallel(3);
ParallelMap<Integer, String> test = new ParallelMap<>(source, i -> "" + i);
assertThat(test.parallelism()).isEqualTo(source.parallelism())
.isEqualTo(3);
}
@Test
public void scanOperator() {
ParallelFlux<Integer> source = Flux.range(1, 4).parallel(3);
ParallelMap<Integer, String> test = new ParallelMap<>(source, i -> "" + i);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(source);
assertThat(test.scan(Scannable.Attr.PREFETCH)).isEqualTo(-1);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
@Test
public void conditional() {
Flux<Integer> source = Flux.range(1, 1_000);
for (int i = 1; i < 33; i++) {
Flux<Integer> result = ParallelFlux.from(source, i)
.map(v -> v + 1)
.filter(t -> true)
.sequential();
StepVerifier.create(result)
.expectNextCount(1_000)
.verifyComplete();
}
}
}
| ParallelMapTest |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/datasource/DelegatingDataSource.java | {
"start": 1182,
"end": 1438
} | class ____ meant to be subclassed, with subclasses overriding only
* those methods (such as {@link #getConnection()}) that should not simply
* delegate to the target DataSource.
*
* @author Juergen Hoeller
* @since 1.1
* @see #getConnection
*/
public | is |
java | apache__flink | flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarListInfo.java | {
"start": 1306,
"end": 2633
} | class ____ implements ResponseBody {
public static final String JAR_LIST_FIELD_ADDRESS = "address";
public static final String JAR_LIST_FIELD_FILES = "files";
@JsonProperty(JAR_LIST_FIELD_ADDRESS)
private String address;
@JsonProperty(JAR_LIST_FIELD_FILES)
public List<JarFileInfo> jarFileList;
@JsonCreator
public JarListInfo(
@JsonProperty(JAR_LIST_FIELD_ADDRESS) String address,
@JsonProperty(JAR_LIST_FIELD_FILES) List<JarFileInfo> jarFileList) {
this.address = checkNotNull(address);
this.jarFileList = checkNotNull(jarFileList);
}
@Override
public int hashCode() {
return Objects.hash(address, jarFileList);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (null == o || this.getClass() != o.getClass()) {
return false;
}
JarListInfo that = (JarListInfo) o;
return Objects.equals(address, that.address)
&& Objects.equals(jarFileList, that.jarFileList);
}
// ---------------------------------------------------------------------------------
// Static helper classes
// ---------------------------------------------------------------------------------
/** Nested | JarListInfo |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/async/utils/BaseClass.java | {
"start": 1079,
"end": 1539
} | interface ____ designed to abstract the common operations that need
* to be performed in a time-consuming manner, such as processing a list
* of items or applying a method that involves I/O operations. By defining
* these methods in an interface, it allows for both synchronous and
* asynchronous implementations, providing flexibility and the ability to
* improve performance without changing the external API.
* </p>
*
* <p>
* Implementations of this | is |
java | apache__flink | flink-state-backends/flink-statebackend-changelog/src/main/java/org/apache/flink/state/changelog/StateChangeOperation.java | {
"start": 1149,
"end": 2409
} | enum ____ {
/** Scope: key + namespace. */
CLEAR((byte) 0),
/** Scope: key + namespace. */
SET((byte) 1),
/** Scope: key + namespace. */
SET_INTERNAL((byte) 2),
/** Scope: key + namespace. */
ADD((byte) 3),
/** Scope: key + namespace, also affecting other (source) namespaces. */
MERGE_NS((byte) 4),
/** Scope: key + namespace + element (e.g. user list append). */
ADD_ELEMENT((byte) 5),
/** Scope: key + namespace + element (e.g. user map key put). */
ADD_OR_UPDATE_ELEMENT((byte) 6),
/** Scope: key + namespace + element (e.g. user map remove or iterator remove). */
REMOVE_ELEMENT((byte) 7),
/** State metadata (name, serializers, etc.). */
METADATA((byte) 8);
private final byte code;
StateChangeOperation(byte code) {
this.code = code;
}
private static final Map<Byte, StateChangeOperation> BY_CODES =
Arrays.stream(StateChangeOperation.values())
.collect(Collectors.toMap(o -> o.code, Function.identity()));
public static StateChangeOperation byCode(byte opCode) {
return checkNotNull(BY_CODES.get(opCode), Byte.toString(opCode));
}
public byte getCode() {
return code;
}
}
| StateChangeOperation |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/StatementSwitchToExpressionSwitchTest.java | {
"start": 111678,
"end": 112406
} | class ____ {
int[] x;
public Test(int foo) {
x = null;
}
public int[] foo(Suit suit) {
switch (suit) {
case HEART:
throw new RuntimeException();
case DIAMOND:
x[6] <<= (((x[6] + 1) * (x[6] * x[5]) << 1));
break;
case SPADE:
throw new RuntimeException();
default:
throw new NullPointerException();
}
return x;
}
}
""")
.addOutputLines(
"Test.java",
"""
| Test |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_element_with_InstanceOfAssertFactory_Test.java | {
"start": 1687,
"end": 3015
} | class ____ {
private final Iterable<Object> iterable = asList(0.0, "string", 42);
@Test
void should_fail_if_iterable_is_empty() {
// GIVEN
Iterable<String> iterable = emptyList();
// WHEN
var assertionError = expectAssertionError(() -> assertThat(iterable).element(1, STRING));
// THEN
then(assertionError).hasMessage(actualIsEmpty());
}
@Test
void should_fail_throwing_npe_if_assert_factory_is_null() {
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(iterable).element(1, null));
// THEN
then(thrown).isInstanceOf(NullPointerException.class)
.hasMessage(shouldNotBeNull("instanceOfAssertFactory").create());
}
@Test
void should_pass_allowing_type_narrowed_assertions_if_element_is_an_instance_of_the_factory_type() {
// WHEN
AbstractStringAssert<?> result = assertThat(iterable).element(1, STRING);
// THEN
result.startsWith("str");
}
@Test
void should_fail_if_last_element_is_not_an_instance_of_the_factory_type() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat(iterable).element(1, INTEGER));
// THEN
then(assertionError).hasMessageContainingAll("Expecting actual:", "to be an instance of:", "but was instance of:");
}
}
| IterableAssert_element_with_InstanceOfAssertFactory_Test |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/floatarray/FloatArrayAssert_usingDefaultComparator_Test.java | {
"start": 1166,
"end": 1797
} | class ____ extends FloatArrayAssertBaseTest {
private FloatArrays arraysBefore;
@BeforeEach
void before() {
arraysBefore = getArrays(assertions);
}
@Override
protected FloatArrayAssert invoke_api_method() {
return assertions.usingComparator(alwaysEqual())
.usingDefaultComparator();
}
@Override
protected void verify_internal_effects() {
assertThat(getObjects(assertions).getComparator()).isNull();
assertThat(getObjects(assertions)).isSameAs(Objects.instance());
assertThat(getArrays(assertions)).isSameAs(arraysBefore);
}
}
| FloatArrayAssert_usingDefaultComparator_Test |
java | google__error-prone | core/src/test/java/com/google/errorprone/refaster/TemplatingTest.java | {
"start": 17192,
"end": 17694
} | class ____ {",
" public String example() {",
" return new String(\"foo\");",
" }",
"}");
assertThat(UTemplater.createTemplate(context, getMethodDeclaration("example")))
.isEqualTo(
ExpressionTemplate.create(
UNewClass.create(UClassIdent.create("java.lang.String"), ULiteral.stringLit("foo")),
UClassType.create("java.lang.String")));
}
@Test
public void forLoop() {
compile(
" | ConstructorExample |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/actuate/AbstractGatewayControllerEndpoint.java | {
"start": 3224,
"end": 12929
} | class ____ implements ApplicationEventPublisherAware {
private static final Log log = LogFactory.getLog(GatewayControllerEndpoint.class);
protected RouteDefinitionLocator routeDefinitionLocator;
protected List<GlobalFilter> globalFilters;
// TODO change casing in next major release
protected List<GatewayFilterFactory> GatewayFilters;
protected List<RoutePredicateFactory> routePredicates;
protected RouteDefinitionWriter routeDefinitionWriter;
protected RouteLocator routeLocator;
protected ApplicationEventPublisher publisher;
protected WebEndpointProperties webEndpointProperties;
private final SimpleMetadataReaderFactory simpleMetadataReaderFactory = new SimpleMetadataReaderFactory();
public AbstractGatewayControllerEndpoint(RouteDefinitionLocator routeDefinitionLocator,
List<GlobalFilter> globalFilters, List<GatewayFilterFactory> gatewayFilters,
List<RoutePredicateFactory> routePredicates, RouteDefinitionWriter routeDefinitionWriter,
RouteLocator routeLocator, WebEndpointProperties webEndpointProperties) {
this.routeDefinitionLocator = routeDefinitionLocator;
this.globalFilters = globalFilters;
this.GatewayFilters = gatewayFilters;
this.routePredicates = routePredicates;
this.routeDefinitionWriter = routeDefinitionWriter;
this.routeLocator = routeLocator;
this.webEndpointProperties = webEndpointProperties;
}
@GetMapping("/")
Mono<List<GatewayEndpointInfo>> getEndpoints() {
List<GatewayEndpointInfo> endpoints = mergeEndpoints(
getAvailableEndpointsForClass(AbstractGatewayControllerEndpoint.class.getName()),
getAvailableEndpointsForClass(GatewayControllerEndpoint.class.getName()));
return Flux.fromIterable(endpoints)
.map(p -> p)
.flatMap(path -> this.routeLocator.getRoutes()
.map(r -> generateHref(r, path))
.distinct()
.collectList()
.flatMapMany(Flux::fromIterable))
.distinct() // Ensure overall uniqueness
.collectList();
}
private List<GatewayEndpointInfo> mergeEndpoints(List<GatewayEndpointInfo> listA, List<GatewayEndpointInfo> listB) {
Map<String, List<String>> mergedMap = new HashMap<>();
Stream.concat(listA.stream(), listB.stream())
.forEach(e -> mergedMap.computeIfAbsent(e.getHref(), k -> new ArrayList<>())
.addAll(Arrays.asList(e.getMethods())));
return mergedMap.entrySet()
.stream()
.map(entry -> new GatewayEndpointInfo(entry.getKey(), entry.getValue()))
.collect(Collectors.toList());
}
private List<GatewayEndpointInfo> getAvailableEndpointsForClass(String className) {
try {
MetadataReader metadataReader = simpleMetadataReaderFactory.getMetadataReader(className);
Set<MethodMetadata> annotatedMethods = metadataReader.getAnnotationMetadata()
.getAnnotatedMethods(RequestMapping.class.getName());
String gatewayActuatorPath = webEndpointProperties.getBasePath() + "/gateway";
return annotatedMethods.stream()
.map(method -> new GatewayEndpointInfo(gatewayActuatorPath
+ ((String[]) method.getAnnotationAttributes(RequestMapping.class.getName()).get("path"))[0],
((RequestMethod[]) method.getAnnotationAttributes(RequestMapping.class.getName())
.get("method"))[0].name()))
.collect(Collectors.toList());
}
catch (IOException exception) {
log.warn(exception.getMessage());
throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, exception.getMessage());
}
}
private GatewayEndpointInfo generateHref(Route r, GatewayEndpointInfo path) {
return new GatewayEndpointInfo(path.getHref().replace("{id}", r.getId()), Arrays.asList(path.getMethods()));
}
@Override
public void setApplicationEventPublisher(ApplicationEventPublisher publisher) {
this.publisher = publisher;
}
// TODO: Add uncommited or new but not active routes endpoint
@PostMapping("/refresh")
public Mono<Void> refresh(@RequestParam(value = "metadata", required = false) List<String> byMetadata) {
publishRefreshEvent(byMetadata);
return Mono.empty();
}
private void publishRefreshEvent(List<String> byMetadata) {
RefreshRoutesEvent event;
if (!CollectionUtils.isEmpty(byMetadata)) {
event = new RefreshRoutesEvent(this, convertToMap(byMetadata));
}
else {
event = new RefreshRoutesEvent(this);
}
this.publisher.publishEvent(event);
}
private Map<String, Object> convertToMap(List<String> byMetadata) {
return byMetadata.stream()
.map(keyValueStr -> keyValueStr.split(":"))
.collect(Collectors.toMap(kv -> kv[0], kv -> kv.length > 1 ? kv[1] : null));
}
@GetMapping("/globalfilters")
public Mono<HashMap<String, Object>> globalfilters() {
return getNamesToOrders(this.globalFilters);
}
@GetMapping("/routefilters")
public Mono<HashMap<String, Object>> routefilers() {
return getNamesToOrders(this.GatewayFilters);
}
@GetMapping("/routepredicates")
public Mono<HashMap<String, Object>> routepredicates() {
return getNamesToOrders(this.routePredicates);
}
private <T> Mono<HashMap<String, Object>> getNamesToOrders(List<T> list) {
return Flux.fromIterable(list).reduce(new HashMap<>(), this::putItem);
}
private HashMap<String, Object> putItem(HashMap<String, Object> map, Object o) {
Integer order = null;
if (o instanceof Ordered) {
order = ((Ordered) o).getOrder();
}
// filters.put(o.getClass().getName(), order);
map.put(o.toString(), order);
return map;
}
/*
* http POST :8080/admin/gateway/routes/apiaddreqhead uri=http://httpbin.org:80
* predicates:='["Host=**.apiaddrequestheader.org", "Path=/headers"]'
* filters:='["AddRequestHeader=X-Request-ApiFoo, ApiBar"]'
*/
@PostMapping("/routes/{id}")
@SuppressWarnings("unchecked")
public Mono<ResponseEntity<Object>> save(@PathVariable String id, @RequestBody RouteDefinition route) {
return Mono.just(route)
.doOnNext(this::validateRouteDefinition)
.flatMap(routeDefinition -> this.routeDefinitionWriter.save(Mono.just(routeDefinition).map(r -> {
r.setId(id);
log.debug("Saving route: " + route);
return r;
})).then(Mono.defer(() -> Mono.just(ResponseEntity.created(URI.create("/routes/" + id)).build()))))
.switchIfEmpty(Mono.defer(() -> Mono.just(ResponseEntity.badRequest().build())));
}
@PostMapping("/routes")
@SuppressWarnings("unchecked")
public Mono<ResponseEntity<Object>> save(@RequestBody List<RouteDefinition> routes) {
routes.stream().forEach(routeDef -> {
validateRouteDefinition(routeDef);
validateRouteId(routeDef);
});
return Flux.fromIterable(routes)
.flatMap(routeDefinition -> this.routeDefinitionWriter.save(Mono.just(routeDefinition).map(r -> {
log.debug("Saving route: " + routeDefinition);
return r;
})))
.then(Mono.defer(() -> Mono.just(ResponseEntity.ok().build())))
.switchIfEmpty(Mono.defer(() -> Mono.just(ResponseEntity.badRequest().build())));
}
private void validateRouteId(RouteDefinition routeDefinition) {
if (routeDefinition.getId() == null) {
handleError("Saving multiple routes require specifying the ID for every route");
}
}
private void validateRouteDefinition(RouteDefinition routeDefinition) {
Set<String> unavailableFilterDefinitions = routeDefinition.getFilters()
.stream()
.filter(rd -> !isAvailable(rd))
.map(FilterDefinition::getName)
.collect(Collectors.toSet());
Set<String> unavailablePredicatesDefinitions = routeDefinition.getPredicates()
.stream()
.filter(rd -> !isAvailable(rd))
.map(PredicateDefinition::getName)
.collect(Collectors.toSet());
if (!unavailableFilterDefinitions.isEmpty()) {
handleUnavailableDefinition(FilterDefinition.class.getSimpleName(), unavailableFilterDefinitions);
}
else if (!unavailablePredicatesDefinitions.isEmpty()) {
handleUnavailableDefinition(PredicateDefinition.class.getSimpleName(), unavailablePredicatesDefinitions);
}
validateRouteUri(routeDefinition.getUri());
}
private void validateRouteUri(URI uri) {
if (uri == null) {
handleError("The URI can not be empty");
}
if (!StringUtils.hasText(uri.getScheme())) {
handleError(String.format("The URI format [%s] is incorrect, scheme can not be empty", uri));
}
}
private void handleUnavailableDefinition(String simpleName, Set<String> unavailableDefinitions) {
final String errorMessage = String.format("Invalid %s: %s", simpleName, unavailableDefinitions);
log.warn(errorMessage);
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, errorMessage);
}
private void handleError(String errorMessage) {
log.warn(errorMessage);
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, errorMessage);
}
private boolean isAvailable(FilterDefinition filterDefinition) {
return GatewayFilters.stream()
.anyMatch(gatewayFilterFactory -> filterDefinition.getName().equals(gatewayFilterFactory.name()));
}
private boolean isAvailable(PredicateDefinition predicateDefinition) {
return routePredicates.stream()
.anyMatch(routePredicate -> predicateDefinition.getName().equals(routePredicate.name()));
}
@DeleteMapping("/routes/{id}")
public Mono<ResponseEntity<Object>> delete(@PathVariable String id) {
return this.routeDefinitionWriter.delete(Mono.just(id)).then(Mono.defer(() -> {
publisher.publishEvent(new RouteDeletedEvent(this, id));
return Mono.just(ResponseEntity.ok().build());
})).onErrorResume(t -> t instanceof NotFoundException, t -> Mono.just(ResponseEntity.notFound().build()));
}
@GetMapping("/routes/{id}/combinedfilters")
public Mono<HashMap<String, Object>> combinedfilters(@PathVariable String id) {
// TODO: missing global filters
return this.routeLocator.getRoutes()
.filter(route -> route.getId().equals(id))
.reduce(new HashMap<>(), this::putItem);
}
}
| AbstractGatewayControllerEndpoint |
java | spring-projects__spring-security | oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/endpoint/OAuth2ClientCredentialsGrantRequest.java | {
"start": 1546,
"end": 2902
} | class ____ extends AbstractOAuth2AuthorizationGrantRequest {
/**
* Constructs an {@code OAuth2ClientCredentialsGrantRequest} using the provided
* parameters.
* @param clientRegistration the client registration
*/
public OAuth2ClientCredentialsGrantRequest(ClientRegistration clientRegistration) {
super(AuthorizationGrantType.CLIENT_CREDENTIALS, clientRegistration);
Assert.isTrue(AuthorizationGrantType.CLIENT_CREDENTIALS.equals(clientRegistration.getAuthorizationGrantType()),
"clientRegistration.authorizationGrantType must be AuthorizationGrantType.CLIENT_CREDENTIALS");
}
/**
* Populate default parameters for the Client Credentials Grant.
* @param grantRequest the authorization grant request
* @return a {@link MultiValueMap} of the parameters used in the OAuth 2.0 Access
* Token Request body
*/
static MultiValueMap<String, String> defaultParameters(OAuth2ClientCredentialsGrantRequest grantRequest) {
ClientRegistration clientRegistration = grantRequest.getClientRegistration();
MultiValueMap<String, String> parameters = new LinkedMultiValueMap<>();
if (!CollectionUtils.isEmpty(clientRegistration.getScopes())) {
parameters.set(OAuth2ParameterNames.SCOPE,
StringUtils.collectionToDelimitedString(clientRegistration.getScopes(), " "));
}
return parameters;
}
}
| OAuth2ClientCredentialsGrantRequest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/metamodel/mapping/internal/EntityRowIdMappingImpl.java | {
"start": 1385,
"end": 7039
} | class ____ implements EntityRowIdMapping {
private final String rowIdName;
private final EntityMappingType declaringType;
private final String tableExpression;
private final BasicType<Object> rowIdType;
public EntityRowIdMappingImpl(String rowIdName, String tableExpression, EntityMappingType declaringType) {
this.rowIdName = rowIdName;
this.tableExpression = tableExpression;
this.declaringType = declaringType;
final SessionFactoryImplementor factory = declaringType.getEntityPersister().getFactory();
this.rowIdType = factory.getTypeConfiguration().getBasicTypeRegistry()
.resolve( Object.class, factory.getJdbcServices().getDialect().rowIdSqlType() );
}
@Override
public String getRowIdName() {
return rowIdName;
}
@Override
public MappingType getPartMappingType() {
return rowIdType;
}
@Override
public JavaType<?> getJavaType() {
return rowIdType.getJavaTypeDescriptor();
}
@Override
public String getPartName() {
return rowIdName;
}
@Override
public NavigableRole getNavigableRole() {
return null;
}
@Override
public EntityMappingType findContainingEntityMapping() {
return declaringType;
}
@Override
public boolean hasPartitionedSelectionMapping() {
return false;
}
@Override
public <T> DomainResult<T> createDomainResult(
NavigablePath navigablePath,
TableGroup tableGroup,
String resultVariable,
DomainResultCreationState creationState) {
final var sqlAstCreationState = creationState.getSqlAstCreationState();
final var sqlExpressionResolver = sqlAstCreationState.getSqlExpressionResolver();
final var columnTableReference = tableGroup.resolveTableReference( navigablePath, tableExpression );
final var sqlSelection = sqlExpressionResolver.resolveSqlSelection(
sqlExpressionResolver.resolveSqlExpression( columnTableReference, this ),
rowIdType.getJdbcJavaType(),
null,
sqlAstCreationState.getCreationContext().getTypeConfiguration()
);
return new BasicResult<>(
sqlSelection.getValuesArrayPosition(),
resultVariable,
rowIdType,
navigablePath,
false,
!sqlSelection.isVirtual()
);
}
@Override
public JdbcMapping getJdbcMapping(int index) {
if ( index != 0 ) {
throw new IndexOutOfBoundsException( index );
}
return getJdbcMapping();
}
@Override
public JdbcMapping getSingleJdbcMapping() {
return getJdbcMapping();
}
@Override
public Object disassemble(Object value, SharedSessionContractImplementor session) {
return rowIdType.disassemble( value, session );
}
@Override
public void addToCacheKey(MutableCacheKeyBuilder cacheKey, Object value, SharedSessionContractImplementor session) {
rowIdType.addToCacheKey( cacheKey, value, session );
}
@Override
public <X, Y> int forEachDisassembledJdbcValue(
Object value,
int offset,
X x,
Y y,
JdbcValuesBiConsumer<X, Y> valuesConsumer,
SharedSessionContractImplementor session) {
return rowIdType.forEachDisassembledJdbcValue( value, offset, x, y, valuesConsumer, session );
}
@Override
public int forEachJdbcType(int offset, IndexedConsumer<JdbcMapping> action) {
action.accept( offset, getJdbcMapping() );
return getJdbcTypeCount();
}
@Override
public void applySqlSelections(
NavigablePath navigablePath, TableGroup tableGroup, DomainResultCreationState creationState) {
}
@Override
public void applySqlSelections(
NavigablePath navigablePath,
TableGroup tableGroup,
DomainResultCreationState creationState,
BiConsumer<SqlSelection, JdbcMapping> selectionConsumer) {
}
@Override
public <X, Y> int breakDownJdbcValues(
Object domainValue,
int offset,
X x,
Y y,
JdbcValueBiConsumer<X, Y> valueConsumer,
SharedSessionContractImplementor session) {
valueConsumer.consume( offset, x, y, domainValue, this );
return getJdbcTypeCount();
}
@Override
public String getContainingTableExpression() {
return tableExpression;
}
@Override
public String getSelectionExpression() {
return rowIdName;
}
@Override
public @Nullable String getCustomReadExpression() {
return null;
}
@Override
public @Nullable String getCustomWriteExpression() {
return null;
}
@Override
public @Nullable String getColumnDefinition() {
return null;
}
@Override
public @Nullable Long getLength() {
return null;
}
@Override
public @Nullable Integer getArrayLength() {
return null;
}
@Override
public @Nullable Integer getPrecision() {
return null;
}
@Override
public @Nullable Integer getScale() {
return null;
}
@Override
public @Nullable Integer getTemporalPrecision() {
return null;
}
@Override
public boolean isFormula() {
return false;
}
@Override
public boolean isNullable() {
return false;
}
@Override
public boolean isInsertable() {
return false;
}
@Override
public boolean isUpdateable() {
return false;
}
@Override
public boolean isPartitioned() {
return false;
}
@Override
public JdbcMapping getJdbcMapping() {
return rowIdType.getJdbcMapping();
}
@Override
public MappingType getMappedType() {
return rowIdType;
}
@Override
public String getFetchableName() {
return rowIdName;
}
@Override
public int getFetchableKey() {
throw new UnsupportedOperationException();
}
@Override
public FetchOptions getMappedFetchOptions() {
throw new UnsupportedOperationException();
}
@Override
public Fetch generateFetch(
FetchParent fetchParent,
NavigablePath fetchablePath,
FetchTiming fetchTiming,
boolean selected,
String resultVariable,
DomainResultCreationState creationState) {
throw new UnsupportedOperationException();
}
}
| EntityRowIdMappingImpl |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/rerank/VoyageAIRerankModel.java | {
"start": 1451,
"end": 4803
} | class ____ extends VoyageAIModel {
public static VoyageAIRerankModel of(VoyageAIRerankModel model, Map<String, Object> taskSettings) {
var requestTaskSettings = VoyageAIRerankTaskSettings.fromMap(taskSettings);
return new VoyageAIRerankModel(model, VoyageAIRerankTaskSettings.of(model.getTaskSettings(), requestTaskSettings));
}
public VoyageAIRerankModel(
String inferenceId,
String service,
Map<String, Object> serviceSettings,
Map<String, Object> taskSettings,
@Nullable Map<String, Object> secrets,
ConfigurationParseContext context
) {
this(
inferenceId,
service,
VoyageAIRerankServiceSettings.fromMap(serviceSettings, context),
VoyageAIRerankTaskSettings.fromMap(taskSettings),
DefaultSecretSettings.fromMap(secrets),
buildUri(VoyageAIService.NAME, VoyageAIRerankModel::buildRequestUri)
);
}
public static URI buildRequestUri() throws URISyntaxException {
return new URIBuilder().setScheme("https")
.setHost(HOST)
.setPathSegments(VoyageAIUtils.VERSION_1, VoyageAIUtils.RERANK_PATH)
.build();
}
// should only be used for testing
VoyageAIRerankModel(
String inferenceId,
String service,
String url,
VoyageAIRerankServiceSettings serviceSettings,
VoyageAIRerankTaskSettings taskSettings,
@Nullable DefaultSecretSettings secretSettings
) {
this(inferenceId, service, serviceSettings, taskSettings, secretSettings, ServiceUtils.createUri(url));
}
private VoyageAIRerankModel(
String inferenceId,
String service,
VoyageAIRerankServiceSettings serviceSettings,
VoyageAIRerankTaskSettings taskSettings,
@Nullable DefaultSecretSettings secretSettings,
URI uri
) {
super(
new ModelConfigurations(inferenceId, TaskType.RERANK, service, serviceSettings, taskSettings),
new ModelSecrets(secretSettings),
secretSettings,
serviceSettings.getCommonSettings(),
uri
);
}
private VoyageAIRerankModel(VoyageAIRerankModel model, VoyageAIRerankTaskSettings taskSettings) {
super(model, taskSettings);
}
@Override
public VoyageAIRerankServiceSettings getServiceSettings() {
return (VoyageAIRerankServiceSettings) super.getServiceSettings();
}
@Override
public VoyageAIRerankTaskSettings getTaskSettings() {
return (VoyageAIRerankTaskSettings) super.getTaskSettings();
}
@Override
public DefaultSecretSettings getSecretSettings() {
return (DefaultSecretSettings) super.getSecretSettings();
}
/**
* Accepts a visitor to create an executable action. The returned action will not return documents in the response.
* @param visitor Interface for creating {@link ExecutableAction} instances for Voyage AI models.
* @param taskSettings Settings in the request to override the model's defaults
* @return the rerank action
*/
@Override
public ExecutableAction accept(VoyageAIActionVisitor visitor, Map<String, Object> taskSettings) {
return visitor.create(this, taskSettings);
}
}
| VoyageAIRerankModel |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/support/ManagedMap.java | {
"start": 3188,
"end": 4154
} | class ____) to be used for this map.
*/
public @Nullable String getValueTypeName() {
return this.valueTypeName;
}
/**
* Set whether merging should be enabled for this collection,
* in case of a 'parent' collection value being present.
*/
public void setMergeEnabled(boolean mergeEnabled) {
this.mergeEnabled = mergeEnabled;
}
@Override
public boolean isMergeEnabled() {
return this.mergeEnabled;
}
@Override
@SuppressWarnings("unchecked")
public Object merge(@Nullable Object parent) {
if (!this.mergeEnabled) {
throw new IllegalStateException("Not allowed to merge when the 'mergeEnabled' property is set to 'false'");
}
if (parent == null) {
return this;
}
if (!(parent instanceof Map)) {
throw new IllegalArgumentException("Cannot merge with object of type [" + parent.getClass() + "]");
}
Map<K, V> merged = new ManagedMap<>();
merged.putAll((Map<K, V>) parent);
merged.putAll(this);
return merged;
}
}
| name |
java | apache__flink | flink-formats/flink-csv/src/test/java/org/apache/flink/formats/csv/CsvFileCompactionITCase.java | {
"start": 972,
"end": 1114
} | class ____ extends FileCompactionITCaseBase {
@Override
protected String format() {
return "csv";
}
}
| CsvFileCompactionITCase |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/asyncprocessing/AbstractStateIteratorTest.java | {
"start": 1713,
"end": 7769
} | class ____ {
@Test
@SuppressWarnings({"unchecked", "rawtypes"})
public void testPartialLoading() {
TestIteratorStateExecutor stateExecutor = new TestIteratorStateExecutor(100, 3);
StateExecutionController aec =
new StateExecutionController(
new SyncMailboxExecutor(),
(a, b) -> {},
stateExecutor,
new DeclarationManager(),
EpochManager.ParallelMode.SERIAL_BETWEEN_EPOCH,
1,
100,
1000,
1,
null,
null);
stateExecutor.bindAec(aec);
RecordContext<String> recordContext = aec.buildContext("1", "key1");
aec.setCurrentContext(recordContext);
AtomicInteger processed = new AtomicInteger();
aec.handleRequest(null, StateRequestType.MAP_ITER, null)
.thenAccept(
(iter) -> {
assertThat(iter).isInstanceOf(StateIterator.class);
((StateIterator<Integer>) iter)
.onNext(
(item) -> {
assertThat(item)
.isEqualTo(processed.getAndIncrement());
})
.thenAccept(
(v) -> {
assertThat(processed.get()).isEqualTo(100);
});
});
aec.drainInflightRecords(0);
}
@Test
@SuppressWarnings({"unchecked", "rawtypes"})
public void testPartialLoadingWithReturnValue() {
TestIteratorStateExecutor stateExecutor = new TestIteratorStateExecutor(100, 3);
StateExecutionController aec =
new StateExecutionController(
new SyncMailboxExecutor(),
(a, b) -> {},
stateExecutor,
new DeclarationManager(),
EpochManager.ParallelMode.SERIAL_BETWEEN_EPOCH,
1,
100,
1000,
1,
null,
null);
stateExecutor.bindAec(aec);
RecordContext<String> recordContext = aec.buildContext("1", "key1");
aec.setCurrentContext(recordContext);
AtomicInteger processed = new AtomicInteger();
aec.handleRequest(null, StateRequestType.MAP_ITER, null)
.thenAccept(
(iter) -> {
assertThat(iter).isInstanceOf(StateIterator.class);
((StateIterator<Integer>) iter)
.onNext(
(item) -> {
assertThat(item)
.isEqualTo(processed.getAndIncrement());
return StateFutureUtils.completedFuture(
String.valueOf(item));
})
.thenAccept(
(strings) -> {
assertThat(processed.get()).isEqualTo(100);
int validate = 0;
for (String item : strings) {
assertThat(item)
.isEqualTo(String.valueOf(validate++));
}
});
});
aec.drainInflightRecords(0);
}
@Test
@SuppressWarnings({"unchecked", "rawtypes"})
public void testPartialLoadingWithConversionToIterable() {
TestIteratorStateExecutor stateExecutor = new TestIteratorStateExecutor(100, 3);
StateExecutionController aec =
new StateExecutionController(
new SyncMailboxExecutor(),
(a, b) -> {},
stateExecutor,
new DeclarationManager(),
EpochManager.ParallelMode.SERIAL_BETWEEN_EPOCH,
1,
100,
1000,
1,
null,
null);
stateExecutor.bindAec(aec);
RecordContext<String> recordContext = aec.buildContext("1", "key1");
aec.setCurrentContext(recordContext);
AtomicInteger processed = new AtomicInteger();
StateFutureUtils.toIterable(aec.handleRequest(null, StateRequestType.MAP_ITER, null))
.thenAccept(
(iter) -> {
assertThat(iter instanceof Iterable);
((Iterable<Integer>) iter)
.forEach(
item -> {
assertThat(item)
.isEqualTo(processed.getAndIncrement());
});
assertThat(processed.get()).isEqualTo(100);
});
aec.drainInflightRecords(0);
}
/**
* A brief implementation of {@link StateExecutor}, to illustrate the interaction between AEC
* and StateExecutor.
*/
@SuppressWarnings({"rawtypes"})
static | AbstractStateIteratorTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/CriteriaTreatedJoinInSubqueryTest.java | {
"start": 8034,
"end": 8436
} | class ____ extends MyEntity2 {
@ManyToOne
@JoinColumn( name = "ref3" )
private MyEntity3 ref3;
private String stringProp;
public MyEntity3 getRef3() {
return ref3;
}
public void setRef3(MyEntity3 ref3) {
this.ref3 = ref3;
}
public void setStringProp(String stringProp) {
this.stringProp = stringProp;
}
}
@Entity( name = "MyOtherSubEntity2" )
public static | MySubEntity2 |
java | spring-projects__spring-boot | module/spring-boot-data-jpa/src/test/java/org/springframework/boot/data/jpa/autoconfigure/DataJpaRepositoriesWithEnversRevisionAutoConfigurationTests.java | {
"start": 1009,
"end": 1378
} | class ____
extends AbstractDataJpaRepositoriesAutoConfigurationTests {
@Test
void autoConfigurationShouldSucceedWithRevisionRepository() {
this.contextRunner.withUserConfiguration(RevisionRepositoryConfiguration.class)
.run((context) -> assertThat(context).hasSingleBean(CountryRepository.class));
}
}
| DataJpaRepositoriesWithEnversRevisionAutoConfigurationTests |
java | spring-projects__spring-security | ldap/src/test/java/org/springframework/security/ldap/userdetails/LdapAuthorityTests.java | {
"start": 1023,
"end": 2556
} | class ____ {
public static final String DN = "cn=filip,ou=Users,dc=test,dc=com";
LdapAuthority authority;
@BeforeEach
public void setUp() {
Map<String, List<String>> attributes = new HashMap<>();
attributes.put(SpringSecurityLdapTemplate.DN_KEY, Arrays.asList(DN));
attributes.put("mail", Arrays.asList("filip@ldap.test.org", "filip@ldap.test2.org"));
this.authority = new LdapAuthority("testRole", DN, attributes);
}
@Test
public void testGetDn() {
assertThat(this.authority.getDn()).isEqualTo(DN);
assertThat(this.authority.getAttributeValues(SpringSecurityLdapTemplate.DN_KEY)).isNotNull();
assertThat(this.authority.getAttributeValues(SpringSecurityLdapTemplate.DN_KEY)).hasSize(1);
assertThat(this.authority.getFirstAttributeValue(SpringSecurityLdapTemplate.DN_KEY)).isEqualTo(DN);
}
@Test
public void testGetAttributes() {
assertThat(this.authority.getAttributes()).isNotNull();
assertThat(this.authority.getAttributeValues("mail")).isNotNull();
assertThat(this.authority.getAttributeValues("mail")).hasSize(2);
assertThat(this.authority.getFirstAttributeValue("mail")).isEqualTo("filip@ldap.test.org");
assertThat(this.authority.getAttributeValues("mail").get(0)).isEqualTo("filip@ldap.test.org");
assertThat(this.authority.getAttributeValues("mail").get(1)).isEqualTo("filip@ldap.test2.org");
}
@Test
public void testGetAuthority() {
assertThat(this.authority.getAuthority()).isNotNull();
assertThat(this.authority.getAuthority()).isEqualTo("testRole");
}
}
| LdapAuthorityTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ReturnValueIgnoredTest.java | {
"start": 27858,
"end": 28134
} | interface ____)
" }",
"}")
.doTest();
}
@Test
public void collectionToArray() {
compilationHelper
.addSourceLines(
"Test.java",
"import com.google.common.collect.ImmutableList;",
"final | method |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/health/node/FetchHealthInfoCacheActionTests.java | {
"start": 2096,
"end": 6569
} | class ____ extends ESTestCase {
private static ThreadPool threadPool;
private ClusterService clusterService;
private TransportService transportService;
private DiscoveryNode localNode;
private DiscoveryNode[] allNodes;
@BeforeClass
public static void beforeClass() {
threadPool = new TestThreadPool("FetchHealthInfoCacheAction");
}
@Before
@Override
public void setUp() throws Exception {
super.setUp();
CapturingTransport transport = new CapturingTransport();
clusterService = createClusterService(threadPool);
transportService = transport.createTransportService(
clusterService.getSettings(),
threadPool,
TransportService.NOOP_TRANSPORT_INTERCEPTOR,
x -> clusterService.localNode(),
null,
Collections.emptySet()
);
transportService.start();
transportService.acceptIncomingRequests();
int totalNodes = randomIntBetween(1, 200);
allNodes = new DiscoveryNode[totalNodes];
localNode = DiscoveryNodeUtils.builder("local_node")
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.DATA_ROLE))
.build();
allNodes[0] = localNode;
for (int i = 0; i < totalNodes - 1; i++) {
DiscoveryNode remoteNode = DiscoveryNodeUtils.builder("remote_node" + i)
.roles(Set.of(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.DATA_ROLE))
.build();
allNodes[i + 1] = remoteNode;
}
}
@After
public void tearDown() throws Exception {
super.tearDown();
clusterService.close();
transportService.close();
}
@AfterClass
public static void afterClass() {
ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
threadPool = null;
}
public void testAction() throws ExecutionException, InterruptedException {
FetchHealthInfoCacheAction.Request request = new FetchHealthInfoCacheAction.Request();
PlainActionFuture<FetchHealthInfoCacheAction.Response> listener = new PlainActionFuture<>();
setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, localNode, allNodes));
HealthInfoCache healthInfoCache = getTestHealthInfoCache();
final FetchHealthInfoCacheAction.Response expectedResponse = new FetchHealthInfoCacheAction.Response(
healthInfoCache.getHealthInfo()
);
ActionTestUtils.execute(
new FetchHealthInfoCacheAction.TransportAction(
transportService,
clusterService,
threadPool,
new ActionFilters(Set.of()),
healthInfoCache
),
null,
request,
listener
);
FetchHealthInfoCacheAction.Response actualResponse = listener.get();
assertThat(actualResponse, equalTo(expectedResponse));
assertThat(actualResponse.getHealthInfo(), equalTo(expectedResponse.getHealthInfo()));
}
private HealthInfoCache getTestHealthInfoCache() {
HealthInfoCache healthInfoCache = HealthInfoCache.create(clusterService);
for (DiscoveryNode allNode : allNodes) {
String nodeId = allNode.getId();
healthInfoCache.updateNodeHealth(
nodeId,
new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values())),
randomDslHealthInfo(),
randomRepoHealthInfo(),
FileSettingsService.FileSettingsHealthInfo.INDETERMINATE
);
}
return healthInfoCache;
}
public void testResponseSerialization() {
var healthInfo = getTestHealthInfoCache().getHealthInfo();
FetchHealthInfoCacheAction.Response response = new FetchHealthInfoCacheAction.Response(healthInfo);
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
response,
responseWritable -> copyWriteable(responseWritable, writableRegistry(), FetchHealthInfoCacheAction.Response::new),
this::mutateResponse
);
}
private FetchHealthInfoCacheAction.Response mutateResponse(FetchHealthInfoCacheAction.Response originalResponse) {
return new FetchHealthInfoCacheAction.Response(mutateHealthInfo(originalResponse.getHealthInfo()));
}
}
| FetchHealthInfoCacheActionTests |
java | quarkusio__quarkus | extensions/devservices/runtime/src/main/java/io/quarkus/devservice/runtime/config/DevServicesOverrideConfigSource.java | {
"start": 525,
"end": 928
} | class ____ implements ConfigSource {
private final LaunchMode launchMode;
public DevServicesOverrideConfigSource(LaunchMode launchMode) {
this.launchMode = launchMode;
}
@Override
public Set<String> getPropertyNames() {
// We could make this more efficient by not invoking the supplier on the other end, but it would need a more complex | DevServicesOverrideConfigSource |
java | google__dagger | javatests/artifacts/hilt-android/simple/app/src/main/java/dagger/hilt/android/simple/ModelModule.java | {
"start": 858,
"end": 984
} | class ____ {
@Provides
@Model
static String provideModel() {
return MODEL;
}
private ModelModule() {}
}
| ModelModule |
java | apache__hadoop | hadoop-cloud-storage-project/hadoop-tos/src/test/java/org/apache/hadoop/fs/tosfs/util/TestIterables.java | {
"start": 1341,
"end": 5096
} | class ____ {
@Test
public void testTransform() {
List<Integer> list = Arrays.asList(1, 2, 3, 4, 5);
Function<Integer, Integer> transform = i -> i + 10;
Iterator<Integer> iter = Iterables.transform(list, transform).iterator();
for (int i = 0; i < 5; i++) {
assertTrue(iter.hasNext());
int value = iter.next();
assertEquals(10 + i + 1, value);
}
assertFalse(iter.hasNext());
}
@Test
public void testTransformEmptyIterable() {
List<Integer> list = Arrays.asList();
Function<Integer, Integer> transform = i -> i + 10;
Iterator<Integer> iter = Iterables.transform(list, transform).iterator();
assertFalse(iter.hasNext());
}
@Test
public void testFilter() {
// Filter odd elements.
List<Integer> list = Arrays.asList(1, 2, 3, 4, 5);
Predicate<Integer> filter = i -> (i % 2) == 0;
Iterator<Integer> iter = Iterables.filter(list, filter).iterator();
for (int i = 0; i < 2; i++) {
assertTrue(iter.hasNext());
int value = iter.next();
assertEquals((i + 1) * 2, value);
}
assertFalse(iter.hasNext());
// Ignore all elements.
filter = i -> false;
iter = Iterables.filter(list, filter).iterator();
assertFalse(iter.hasNext());
}
@Test
public void testFilterEmptyIterable() {
List<Integer> list = Arrays.asList();
Predicate<Integer> filter = i -> (i % 2) == 0;
Iterator<Integer> iter = Iterables.filter(list, filter).iterator();
assertFalse(iter.hasNext());
}
// Full iterators.
@Test
public void testConcatFullIterators() {
List<Integer> expectedList = new ArrayList<>();
List<Iterable<Integer>> iterList = new ArrayList<>();
for (int i = 0; i < 10; i++) {
List<Integer> list = new ArrayList<>();
for (int j = 0; j < 10; j++) {
list.add(i * 10 + j);
expectedList.add(i * 10 + j);
}
iterList.add(list);
}
verifyConcat(expectedList.iterator(), iterList);
}
// Empty iterators.
@Test
public void testConcatEmptyIterators() {
List<Integer> expectedList = new ArrayList<>();
List<Iterable<Integer>> iterList = new ArrayList<>();
for (int i = 0; i < 10; i++) {
iterList.add(Collections.emptyList());
}
verifyConcat(expectedList.iterator(), iterList);
}
// Mix full and empty iterators.
@Test
public void testConcatMixFullAndEmptyIterators() {
List<Integer> expectedList = new ArrayList<>();
List<Iterable<Integer>> iterList = new ArrayList<>();
for (int i = 0; i < 10; i++) {
List<Integer> list = new ArrayList<>();
for (int j = 0; j < 10; j++) {
list.add(i * 10 + j);
expectedList.add(i * 10 + j);
}
iterList.add(list);
iterList.add(Collections.emptyList());
iterList.add(Collections.emptyList());
}
verifyConcat(expectedList.iterator(), iterList);
}
// Invalid iterators.
@Test
public void testConcatNullMetaIterator() {
assertThrows(NullPointerException.class, () -> verifyConcat(Collections.emptyIterator(), null),
"Expect null verification error.");
}
// Concat null iterators.
@Test
public void testConcatNullElementIterators() {
List<Iterable<Integer>> list = new ArrayList<>();
for (int i = 0; i < 3; i++) {
list.add(() -> null);
}
verifyConcat(Collections.emptyIterator(), list);
}
private <T> void verifyConcat(Iterator<T> expectedValues, Iterable<Iterable<T>> metaIter) {
Iterator<T> iter = Iterables.concat(metaIter).iterator();
while (expectedValues.hasNext()) {
assertTrue(iter.hasNext());
T v1 = expectedValues.next();
T v2 = iter.next();
assertEquals(v1, v2);
}
assertFalse(iter.hasNext());
}
}
| TestIterables |
java | apache__camel | core/camel-xml-io/src/main/java/org/apache/camel/xml/in/BaseParser.java | {
"start": 21203,
"end": 22609
} | interface ____<T> {
void accept(T definition, String value) throws IOException, XmlPullParserException;
}
protected boolean matchNamespace(String ns, boolean optional) {
return matchNamespace(ns, namespace, secondaryNamespaces, optional);
}
protected static boolean matchNamespace(String ns, String namespace, Set<String> secondaryNamespaces, boolean optional) {
if (optional && ns.isEmpty()) {
return true;
}
if (Objects.equals(ns, namespace)) {
return true;
}
if (DEFAULT_NAMESPACE.equals(ns) && namespace.isEmpty()) {
return true;
}
if (DEFAULT_NAMESPACE.equals(namespace) && ns.isEmpty()) {
return true;
}
// xml-io should be backwards compatible with spring namespace
if (DEFAULT_NAMESPACE.equals(ns) && namespace.equals(SPRING_NAMESPACE)) {
return true;
}
if (SPRING_NAMESPACE.equals(ns) && namespace.equals(DEFAULT_NAMESPACE)) {
return true;
}
if (secondaryNamespaces != null) {
for (String second : secondaryNamespaces) {
if (Objects.equals(ns, second)) {
return true;
}
}
}
return false;
}
protected static String sanitizeUri(String uri) {
return uri;
}
}
| ValueHandler |
java | google__dagger | javatests/dagger/hilt/android/processor/internal/customtestapplication/CustomTestApplicationProcessorTest.java | {
"start": 12319,
"end": 12702
} | class ____ {}"))
.compile(
subject -> {
subject.hasErrorContaining(
"@CustomTestApplication does not support application classes (or super classes)"
+ " with @Inject constructors. Found test.BaseApplication with @Inject"
+ " constructors [BaseApplication()]");
});
}
}
| HiltTest |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/ansi/Ansi8BitColorTests.java | {
"start": 939,
"end": 2258
} | class ____ {
@Test
void toStringWhenForegroundAddsCorrectPrefix() {
assertThat(Ansi8BitColor.foreground(208)).hasToString("38;5;208");
}
@Test
void toStringWhenBackgroundAddsCorrectPrefix() {
assertThat(Ansi8BitColor.background(208)).hasToString("48;5;208");
}
@Test
void foregroundWhenOutsideBoundsThrowsException() {
assertThatIllegalArgumentException().isThrownBy(() -> Ansi8BitColor.foreground(-1))
.withMessage("'code' must be between 0 and 255");
assertThatIllegalArgumentException().isThrownBy(() -> Ansi8BitColor.foreground(256))
.withMessage("'code' must be between 0 and 255");
}
@Test
void backgroundWhenOutsideBoundsThrowsException() {
assertThatIllegalArgumentException().isThrownBy(() -> Ansi8BitColor.background(-1))
.withMessage("'code' must be between 0 and 255");
assertThatIllegalArgumentException().isThrownBy(() -> Ansi8BitColor.background(256))
.withMessage("'code' must be between 0 and 255");
}
@Test
void equalsAndHashCode() {
Ansi8BitColor one = Ansi8BitColor.foreground(123);
Ansi8BitColor two = Ansi8BitColor.foreground(123);
Ansi8BitColor three = Ansi8BitColor.background(123);
assertThat(one).hasSameHashCodeAs(two);
assertThat(one).isEqualTo(one).isEqualTo(two).isNotEqualTo(three).isNotNull().isNotEqualTo("foo");
}
}
| Ansi8BitColorTests |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/completable/CompletableSubscribeOnTest.java | {
"start": 1139,
"end": 2201
} | class ____ extends RxJavaTest {
@Test
public void normal() {
List<Throwable> list = TestHelper.trackPluginErrors();
try {
TestScheduler scheduler = new TestScheduler();
TestObserver<Void> to = Completable.complete()
.subscribeOn(scheduler)
.test();
scheduler.advanceTimeBy(1, TimeUnit.SECONDS);
to.assertResult();
assertTrue(list.toString(), list.isEmpty());
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void dispose() {
TestHelper.checkDisposed(PublishSubject.create().ignoreElements().subscribeOn(new TestScheduler()));
}
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeCompletable(new Function<Completable, CompletableSource>() {
@Override
public CompletableSource apply(Completable c) throws Exception {
return c.subscribeOn(Schedulers.single());
}
});
}
}
| CompletableSubscribeOnTest |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/Wasbs.java | {
"start": 1324,
"end": 1610
} | class ____ extends DelegateToFileSystem {
Wasbs(final URI theUri, final Configuration conf) throws IOException,
URISyntaxException {
super(theUri, new NativeAzureFileSystem(), conf, "wasbs", false);
}
@Override
public int getUriDefaultPort() {
return -1;
}
}
| Wasbs |
java | quarkusio__quarkus | extensions/funqy/funqy-server-common/runtime/src/main/java/io/quarkus/funqy/runtime/query/BaseCollectionReader.java | {
"start": 284,
"end": 2348
} | class ____ extends BaseObjectReader implements BaseObjectReader.ValueSetter {
protected Function<String, Object> valueExtractor;
protected QueryPropertySetter setter;
public BaseCollectionReader(Type genericType, QueryObjectMapper mapper) {
if (genericType == null) {
valueExtractor = mapper.extractor(String.class);
return;
}
if (genericType instanceof ParameterizedType) {
Type valueType = ((ParameterizedType) genericType).getActualTypeArguments()[0];
if (valueType != null) {
Class<Object> rawType = Reflections.getRawType(valueType);
valueExtractor = mapper.extractor(valueType);
if (valueExtractor == null) {
setter = mapper.setterFor(rawType, valueType);
}
} else {
valueExtractor = mapper.extractor(String.class);
}
} else {
Class<Object> rawType = Reflections.getRawType(genericType);
valueExtractor = mapper.extractor(rawType);
if (valueExtractor == null) {
setter = mapper.setterFor(rawType, genericType);
}
}
}
@Override
public Function<String, Object> getExtractor() {
return valueExtractor;
}
@Override
public QueryPropertySetter getSetter() {
return setter;
}
@Override
ValueSetter getValueSetter(String propName) {
return this;
}
@Override
public void setValue(Object target, String propName, Object value) {
((Collection) target).add(value);
}
@Override
public void setValue(Object target, String name, String value, Map<String, List<Object>> paramToObject) {
if (valueExtractor != null) {
if (name != null)
return; // ignore query parameter
((Collection) target).add(valueExtractor.apply(value));
} else {
super.setValue(target, name, value, paramToObject);
}
}
}
| BaseCollectionReader |
java | apache__rocketmq | store/src/test/java/org/apache/rocketmq/store/queue/RocksDBConsumeQueueTest.java | {
"start": 1538,
"end": 3225
} | class ____ extends QueueTestBase {
@Test
public void testIterator() throws Exception {
if (MixAll.isMac()) {
return;
}
DefaultMessageStore messageStore = mock(DefaultMessageStore.class);
RocksDBConsumeQueueStore rocksDBConsumeQueueStore = mock(RocksDBConsumeQueueStore.class);
when(messageStore.getQueueStore()).thenReturn(rocksDBConsumeQueueStore);
when(rocksDBConsumeQueueStore.getMaxOffsetInQueue(anyString(), anyInt())).thenReturn(10000L);
when(rocksDBConsumeQueueStore.get(anyString(), anyInt(), anyLong())).then(new Answer<ByteBuffer>() {
@Override
public ByteBuffer answer(InvocationOnMock mock) throws Throwable {
long startIndex = mock.getArgument(2);
final ByteBuffer byteBuffer = ByteBuffer.allocate(CQ_UNIT_SIZE);
long phyOffset = startIndex * 10;
byteBuffer.putLong(phyOffset);
byteBuffer.putInt(1);
byteBuffer.putLong(0);
byteBuffer.putLong(0);
byteBuffer.flip();
return byteBuffer;
}
});
RocksDBConsumeQueue consumeQueue = new RocksDBConsumeQueue(messageStore.getMessageStoreConfig(), rocksDBConsumeQueueStore, "topic", 0);
ReferredIterator<CqUnit> it = consumeQueue.iterateFrom(9000);
for (int i = 0; i < 1000; i++) {
assertTrue(it.hasNext());
CqUnit next = it.next();
assertEquals(9000 + i, next.getQueueOffset());
assertEquals(10 * (9000 + i), next.getPos());
}
assertFalse(it.hasNext());
}
} | RocksDBConsumeQueueTest |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/TestingResultPartitionProvider.java | {
"start": 5326,
"end": 5640
} | interface ____ {
ResultSubpartitionView createSubpartitionView(
ResultPartitionID partitionId,
ResultSubpartitionIndexSet indexSet,
BufferAvailabilityListener availabilityListener)
throws IOException;
}
/** Testing | CreateSubpartitionView |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/csrf/CsrfCapabilityMissingValidationFailureTest.java | {
"start": 511,
"end": 1037
} | class ____ {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap
.create(JavaArchive.class).addClass(CsrfConfig.class))
.assertException(throwable -> assertThat(throwable)
.hasMessageContaining("Please add an extension that provides a CSRF prevention feature"));
@Test
public void runTest() {
Assertions.fail("This test should not run");
}
public static | CsrfCapabilityMissingValidationFailureTest |
java | google__guava | android/guava-tests/benchmark/com/google/common/util/concurrent/MonitorBasedPriorityBlockingQueue.java | {
"start": 9689,
"end": 9961
} | class ____ method
@Override
public E take() throws InterruptedException {
Monitor monitor = this.monitor;
monitor.enterWhen(notEmpty);
try {
return q.poll();
} finally {
monitor.leave();
}
}
@CanIgnoreReturnValue // pushed down from | to |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/ServiceUtils.java | {
"start": 19594,
"end": 20021
} | class ____ {
public void terminate(int exitCode) {
// Sleep for 5 seconds in hope that the state can be recorded in ATS.
// in case there's a client polling the comp state, it can be notified.
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
log.info("Interrupted on sleep while exiting.", e);
}
ExitUtil.terminate(exitCode);
}
}
}
| ProcessTerminationHandler |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/DupSetterTest3.java | {
"start": 304,
"end": 565
} | class ____ extends TestCase {
public void testEnum() {
VO enumTest = new VO();
enumTest.status = 3;
String json = JSONObject.toJSONString(enumTest);
JSONObject.parseObject(json, VO.class);
}
public static | DupSetterTest3 |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/LabeledGlobalFailureHandler.java | {
"start": 1411,
"end": 1858
} | interface ____ {
/**
* An adapted version of {@link GlobalFailureHandler} that handles and associates global
* failures with enricher labels.
*
* @param cause A cause that describes the global failure.
* @param failureLabels Labels providing an additional context about the failure.
*/
void handleGlobalFailure(Throwable cause, CompletableFuture<Map<String, String>> failureLabels);
}
| LabeledGlobalFailureHandler |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/time/DateFormatters.java | {
"start": 1811,
"end": 97924
} | class ____ {
private static DateFormatter newDateFormatter(String format, DateTimeFormatter formatter) {
return new JavaDateFormatter(format, new JavaTimeDateTimePrinter(formatter), new JavaTimeDateTimeParser(formatter));
}
private static DateFormatter newDateFormatter(String format, DateTimeFormatter printer, DateTimeFormatter... parsers) {
return new JavaDateFormatter(
format,
new JavaTimeDateTimePrinter(printer),
Stream.of(parsers).map(JavaTimeDateTimeParser::new).toArray(DateTimeParser[]::new)
);
}
public static final WeekFields WEEK_FIELDS_ROOT = WeekFields.ISO;
private static final DateTimeFormatter TIME_ZONE_FORMATTER_NO_COLON = new DateTimeFormatterBuilder().appendOffset("+HHmm", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
private static final DateTimeFormatter STRICT_YEAR_MONTH_DAY_PRINTER = new DateTimeFormatterBuilder().appendValue(
ChronoField.YEAR,
4,
9,
SignStyle.EXCEEDS_PAD
)
.optionalStart()
.appendLiteral("-")
.appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendLiteral('-')
.appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalEnd()
.optionalEnd()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
private static final DateTimeFormatter STRICT_YEAR_MONTH_DAY_FORMATTER = new DateTimeFormatterBuilder().appendValue(
ChronoField.YEAR,
4,
4,
SignStyle.EXCEEDS_PAD
)
.optionalStart()
.appendLiteral("-")
.appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendLiteral('-')
.appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalEnd()
.optionalEnd()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
private static final DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder().appendValue(
HOUR_OF_DAY,
2,
2,
SignStyle.NOT_NEGATIVE
)
.appendLiteral(':')
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendLiteral(':')
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_PRINTER = new DateTimeFormatterBuilder().append(
STRICT_YEAR_MONTH_DAY_PRINTER
)
.appendLiteral('T')
.optionalStart()
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendLiteral(':')
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendLiteral(':')
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendFraction(NANO_OF_SECOND, 3, 3, true)
.optionalEnd()
.optionalEnd()
.optionalStart()
.appendOffset("+HH:MM", "Z")
.optionalEnd()
.optionalEnd()
.optionalEnd()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/**
* Returns a generic ISO datetime parser where the date is mandatory and the time is optional.
*/
private static final DateFormatter STRICT_DATE_OPTIONAL_TIME = new JavaDateFormatter(
"strict_date_optional_time",
new JavaTimeDateTimePrinter(STRICT_DATE_OPTIONAL_TIME_PRINTER),
new Iso8601DateTimeParser(Set.of(), false, null, DecimalSeparator.BOTH, TimezonePresence.OPTIONAL).withLocale(Locale.ROOT)
);
private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS = new DateTimeFormatterBuilder().append(
STRICT_YEAR_MONTH_DAY_PRINTER
)
.appendLiteral('T')
.optionalStart()
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendLiteral(':')
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendLiteral(':')
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendFraction(NANO_OF_SECOND, 3, 9, true)
.optionalEnd()
.optionalEnd()
.optionalStart()
.appendOffset("+HH:MM", "Z")
.optionalEnd()
.optionalEnd()
.optionalEnd()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/**
* Returns a generic ISO datetime parser where the date is mandatory and the time is optional with nanosecond resolution.
*/
private static final DateFormatter STRICT_DATE_OPTIONAL_TIME_NANOS = new JavaDateFormatter(
"strict_date_optional_time_nanos",
new JavaTimeDateTimePrinter(STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS),
new Iso8601DateTimeParser(
Set.of(HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE),
true,
null,
DecimalSeparator.BOTH,
TimezonePresence.OPTIONAL
).withLocale(Locale.ROOT)
);
/**
* Returns a ISO 8601 compatible date time formatter and parser.
* This is not fully compatible to the existing spec, which would require far more edge cases, but merely compatible with the
* existing legacy joda time ISO date formatter
*/
private static final DateFormatter ISO_8601 = new JavaDateFormatter(
"iso8601",
new JavaTimeDateTimePrinter(STRICT_DATE_OPTIONAL_TIME_PRINTER),
new Iso8601DateTimeParser(Set.of(), false, null, DecimalSeparator.BOTH, TimezonePresence.OPTIONAL).withLocale(Locale.ROOT)
);
/////////////////////////////////////////
//
// BEGIN basic time formatters
//
// these formatters to not have any splitting characters between hours, minutes, seconds, milliseconds
// this means they have to be strict with the exception of the last element
//
/////////////////////////////////////////
private static final DateTimeFormatter BASIC_TIME_NO_MILLIS_BASE = new DateTimeFormatterBuilder().appendValue(
HOUR_OF_DAY,
2,
2,
SignStyle.NOT_NEGATIVE
)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/*
* Returns a basic formatter for a two digit hour of day, two digit minute
* of hour, two digit second of minute, and time zone offset (HHmmssZ).
*/
private static final DateFormatter BASIC_TIME_NO_MILLIS = newDateFormatter(
"basic_time_no_millis",
new DateTimeFormatterBuilder().append(BASIC_TIME_NO_MILLIS_BASE)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(BASIC_TIME_NO_MILLIS_BASE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(BASIC_TIME_NO_MILLIS_BASE)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
private static final DateTimeFormatter BASIC_TIME_FORMATTER = new DateTimeFormatterBuilder().appendValue(
HOUR_OF_DAY,
2,
2,
SignStyle.NOT_NEGATIVE
)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.appendFraction(NANO_OF_SECOND, 1, 9, true)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
private static final DateTimeFormatter BASIC_TIME_PRINTER = new DateTimeFormatterBuilder().appendValue(
HOUR_OF_DAY,
2,
2,
SignStyle.NOT_NEGATIVE
)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.appendFraction(NANO_OF_SECOND, 3, 3, true)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/*
* Returns a basic formatter for a two digit hour of day, two digit minute
* of hour, two digit second of minute, three digit millis, and time zone
* offset (HHmmss.SSSZ).
*/
private static final DateFormatter BASIC_TIME = newDateFormatter(
"basic_time",
new DateTimeFormatterBuilder().append(BASIC_TIME_PRINTER)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(BASIC_TIME_FORMATTER)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(BASIC_TIME_FORMATTER)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
private static final DateTimeFormatter BASIC_T_TIME_PRINTER = new DateTimeFormatterBuilder().appendLiteral("T")
.append(BASIC_TIME_PRINTER)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
private static final DateTimeFormatter BASIC_T_TIME_FORMATTER = new DateTimeFormatterBuilder().appendLiteral("T")
.append(BASIC_TIME_FORMATTER)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/*
 * Returns a basic formatter for a two digit hour of day, two digit minute
 * of hour, two digit second of minute, three digit millis, and time zone
 * offset prefixed by 'T' ('T'HHmmss.SSSZ).
 */
private static final DateFormatter BASIC_T_TIME = newDateFormatter(
"basic_t_time",
// Printer: 'T'HHmmss.SSS plus a colon-separated offset (or 'Z').
new DateTimeFormatterBuilder().append(BASIC_T_TIME_PRINTER)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
// Parser variant accepting a zone id or colon-separated offset.
new DateTimeFormatterBuilder().append(BASIC_T_TIME_FORMATTER)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
// Parser variant accepting a no-colon offset.
// Fix: this parser previously omitted the STRICT resolver style that every
// sibling parser in this file applies, silently falling back to the SMART
// default and resolving borderline field values differently from the other
// two parsers of the very same "basic_t_time" format.
new DateTimeFormatterBuilder().append(BASIC_T_TIME_FORMATTER)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a basic formatter for a two digit hour of day, two digit minute
 * of hour, two digit second of minute, and time zone offset prefixed by 'T'
 * ('T'HHmmssZ).
 */
private static final DateFormatter BASIC_T_TIME_NO_MILLIS = newDateFormatter(
"basic_t_time_no_millis",
new DateTimeFormatterBuilder().appendLiteral("T")
.append(BASIC_TIME_NO_MILLIS_BASE)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().appendLiteral("T")
.append(BASIC_TIME_NO_MILLIS_BASE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().appendLiteral("T")
.append(BASIC_TIME_NO_MILLIS_BASE)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
// Print-side basic date (uuuuMMdd); year may print wider than 4 digits (4-10).
private static final DateTimeFormatter BASIC_YEAR_MONTH_DAY_PRINTER = new DateTimeFormatterBuilder().appendValue(
ChronoField.YEAR,
4,
10,
SignStyle.NORMAL
)
.appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
// Parse-side basic date (uuuuMMdd) with the year fixed at exactly 4 digits.
private static final DateTimeFormatter BASIC_YEAR_MONTH_DAY_FORMATTER = new DateTimeFormatterBuilder().appendValue(
ChronoField.YEAR,
4,
4,
SignStyle.NORMAL
)
.appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
// Parse-side basic date-time without a zone: basic date, 'T', basic time.
private static final DateTimeFormatter BASIC_DATE_TIME_FORMATTER = new DateTimeFormatterBuilder().append(BASIC_YEAR_MONTH_DAY_FORMATTER)
.append(BASIC_T_TIME_FORMATTER)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
// Print-side basic date-time without a zone (fixed 3 fraction digits).
private static final DateTimeFormatter BASIC_DATE_TIME_PRINTER = new DateTimeFormatterBuilder().append(BASIC_YEAR_MONTH_DAY_PRINTER)
.append(BASIC_T_TIME_PRINTER)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/*
 * Returns a basic formatter that combines a basic date and time, separated
 * by a 'T' (uuuuMMdd'T'HHmmss.SSSZ).
 */
private static final DateFormatter BASIC_DATE_TIME = newDateFormatter(
"basic_date_time",
new DateTimeFormatterBuilder().append(BASIC_DATE_TIME_PRINTER)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(BASIC_DATE_TIME_FORMATTER)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(BASIC_DATE_TIME_FORMATTER)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
// Parse-side basic date followed by a literal 'T' (uuuuMMdd'T').
private static final DateTimeFormatter BASIC_DATE_T = new DateTimeFormatterBuilder().append(BASIC_YEAR_MONTH_DAY_FORMATTER)
.appendLiteral("T")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
// Print-side basic date followed by a literal 'T' (uuuuMMdd'T').
private static final DateTimeFormatter BASIC_DATE_T_PRINTER = new DateTimeFormatterBuilder().append(BASIC_YEAR_MONTH_DAY_PRINTER)
.appendLiteral("T")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/*
 * Returns a basic formatter that combines a basic date and time without millis,
 * separated by a 'T' (uuuuMMdd'T'HHmmssZ).
 */
private static final DateFormatter BASIC_DATE_TIME_NO_MILLIS = newDateFormatter(
"basic_date_time_no_millis",
new DateTimeFormatterBuilder().append(BASIC_DATE_T_PRINTER)
.append(BASIC_TIME_NO_MILLIS_BASE)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(BASIC_DATE_T)
.append(BASIC_TIME_NO_MILLIS_BASE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(BASIC_DATE_T)
.append(BASIC_TIME_NO_MILLIS_BASE)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter for a full ordinal date, using a four
 * digit year and three digit dayOfYear (uuuuDDD).
 */
private static final DateFormatter BASIC_ORDINAL_DATE = newDateFormatter(
"basic_ordinal_date",
DateTimeFormatter.ofPattern("uuuuDDD", Locale.ROOT)
);
/*
 * Returns a formatter for a full ordinal date and time, using a four
 * digit year and three digit dayOfYear (uuuuDDD'T'HHmmss.SSSZ).
 */
private static final DateFormatter BASIC_ORDINAL_DATE_TIME = newDateFormatter(
"basic_ordinal_date_time",
// Fix: use "uuuu" (proleptic year) instead of "yyyy" (year-of-era). The
// javadoc above, BASIC_ORDINAL_DATE and BASIC_ORDINAL_DATE_TIME_NO_MILLIS
// all use "uuuu", and under ResolverStyle.STRICT a year-of-era parsed
// without an accompanying era cannot be resolved into a date, so "yyyy"
// made strict parsing of this format inconsistent with its siblings.
new DateTimeFormatterBuilder().appendPattern("uuuuDDD")
.append(BASIC_T_TIME_PRINTER)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().appendPattern("uuuuDDD")
.append(BASIC_T_TIME_FORMATTER)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().appendPattern("uuuuDDD")
.append(BASIC_T_TIME_FORMATTER)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter for a full ordinal date and time without millis,
 * using a four digit year and three digit dayOfYear (uuuuDDD'T'HHmmssZ).
 */
private static final DateFormatter BASIC_ORDINAL_DATE_TIME_NO_MILLIS = newDateFormatter(
"basic_ordinal_date_time_no_millis",
new DateTimeFormatterBuilder().appendPattern("uuuuDDD")
.appendLiteral("T")
.append(BASIC_TIME_NO_MILLIS_BASE)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().appendPattern("uuuuDDD")
.appendLiteral("T")
.append(BASIC_TIME_NO_MILLIS_BASE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().appendPattern("uuuuDDD")
.appendLiteral("T")
.append(BASIC_TIME_NO_MILLIS_BASE)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
// Non-strict basic week date: unpadded week-based year, literal 'W', one or
// two digit week of week-based year, and a one digit day of week.
private static final DateTimeFormatter BASIC_WEEK_DATE_FORMATTER = new DateTimeFormatterBuilder().appendValue(IsoFields.WEEK_BASED_YEAR)
.appendLiteral("W")
.appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 1, 2, SignStyle.NEVER)
.appendValue(ChronoField.DAY_OF_WEEK)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/////////////////////////////////////////
//
// END basic time formatters
//
/////////////////////////////////////////
/////////////////////////////////////////
//
// start strict formatters
//
/////////////////////////////////////////
// Parse-side strict basic week date: exactly four digit week-based year,
// 'W', one or two digit week, one digit day of week.
private static final DateTimeFormatter STRICT_BASIC_WEEK_DATE_FORMATTER = new DateTimeFormatterBuilder().parseStrict()
.appendValue(IsoFields.WEEK_BASED_YEAR, 4)
.appendLiteral("W")
.appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 1, 2, SignStyle.NEVER)
.appendValue(ChronoField.DAY_OF_WEEK)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
// Print-side strict basic week date: week-based year may print 4-10 digits,
// the week is always zero-padded to two digits.
private static final DateTimeFormatter STRICT_BASIC_WEEK_DATE_PRINTER = new DateTimeFormatterBuilder().parseStrict()
.appendValue(IsoFields.WEEK_BASED_YEAR, 4, 10, SignStyle.NORMAL)
.appendLiteral("W")
.appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 2, 2, SignStyle.NEVER)
.appendValue(ChronoField.DAY_OF_WEEK)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/*
 * Returns a basic formatter for a full date as four digit weekyear, two
 * digit week of weekyear, and one digit day of week (YYYY'W'wwe).
 */
private static final DateFormatter STRICT_BASIC_WEEK_DATE = newDateFormatter(
"strict_basic_week_date",
STRICT_BASIC_WEEK_DATE_PRINTER,
STRICT_BASIC_WEEK_DATE_FORMATTER
);
/*
 * Returns a basic formatter that combines a basic weekyear date and time
 * without millis, separated by a 'T' (YYYY'W'wwe'T'HHmmssX).
 */
private static final DateFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS = newDateFormatter(
"strict_basic_week_date_time_no_millis",
new DateTimeFormatterBuilder().append(STRICT_BASIC_WEEK_DATE_PRINTER)
.appendLiteral("T")
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(STRICT_BASIC_WEEK_DATE_PRINTER)
.appendLiteral("T")
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(STRICT_BASIC_WEEK_DATE_PRINTER)
.appendLiteral("T")
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a basic formatter that combines a basic weekyear date and time,
 * separated by a 'T' (YYYY'W'wwe'T'HHmmss.SSSX).
 */
private static final DateFormatter STRICT_BASIC_WEEK_DATE_TIME = newDateFormatter(
"strict_basic_week_date_time",
new DateTimeFormatterBuilder().append(STRICT_BASIC_WEEK_DATE_PRINTER)
.append(DateTimeFormatter.ofPattern("'T'HHmmss.SSSX", Locale.ROOT))
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(STRICT_BASIC_WEEK_DATE_FORMATTER)
.appendLiteral("T")
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.appendFraction(NANO_OF_SECOND, 1, 9, true)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(STRICT_BASIC_WEEK_DATE_FORMATTER)
.appendLiteral("T")
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.appendFraction(NANO_OF_SECOND, 1, 9, true)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
/*
* An ISO date formatter that formats or parses a date without an offset, such as '2011-12-03'.
*/
private static final DateFormatter STRICT_DATE = newDateFormatter(
"strict_date",
DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT).withLocale(Locale.ROOT)
);
/*
* A date formatter that formats or parses a date plus an hour without an offset, such as '2011-12-03T01'.
*/
private static final DateFormatter STRICT_DATE_HOUR = newDateFormatter(
"strict_date_hour",
DateTimeFormatter.ofPattern("uuuu-MM-dd'T'HH", Locale.ROOT)
);
/*
* A date formatter that formats or parses a date plus an hour/minute without an offset, such as '2011-12-03T01:10'.
*/
private static final DateFormatter STRICT_DATE_HOUR_MINUTE = newDateFormatter(
"strict_date_hour_minute",
DateTimeFormatter.ofPattern("uuuu-MM-dd'T'HH:mm", Locale.ROOT)
);
/*
* A strict date formatter that formats or parses a date without an offset, such as '2011-12-03'.
*/
private static final DateFormatter STRICT_YEAR_MONTH_DAY = newDateFormatter("strict_year_month_day", STRICT_YEAR_MONTH_DAY_FORMATTER);
/*
* A strict formatter that formats or parses a year and a month, such as '2011-12'.
*/
private static final DateFormatter STRICT_YEAR_MONTH = new JavaDateFormatter(
"strict_year_month",
new JavaTimeDateTimePrinter(
new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR, 4, 4, SignStyle.EXCEEDS_PAD)
.appendLiteral("-")
.appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
),
new Iso8601DateTimeParser(Set.of(MONTH_OF_YEAR), false, MONTH_OF_YEAR, DecimalSeparator.BOTH, TimezonePresence.FORBIDDEN)
.withLocale(Locale.ROOT)
);
/*
* A strict formatter that formats or parses a year, such as '2011'.
*/
private static final DateFormatter STRICT_YEAR = new JavaDateFormatter(
"strict_year",
new JavaTimeDateTimePrinter(
new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR, 4, 4, SignStyle.EXCEEDS_PAD)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
),
new Iso8601DateTimeParser(Set.of(), false, ChronoField.YEAR, DecimalSeparator.BOTH, TimezonePresence.FORBIDDEN).withLocale(
Locale.ROOT
)
);
/*
* A strict formatter that formats or parses a hour, minute and second, such as '09:43:25'.
*/
private static final DateFormatter STRICT_HOUR_MINUTE_SECOND = newDateFormatter(
"strict_hour_minute_second",
STRICT_HOUR_MINUTE_SECOND_FORMATTER
);
private static final DateTimeFormatter STRICT_DATE_PRINTER = new DateTimeFormatterBuilder().append(STRICT_YEAR_MONTH_DAY_FORMATTER)
.appendLiteral('T')
.append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
.appendFraction(NANO_OF_SECOND, 3, 9, true)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
private static final DateTimeFormatter STRICT_DATE_FORMATTER = new DateTimeFormatterBuilder().append(STRICT_YEAR_MONTH_DAY_FORMATTER)
.appendLiteral('T')
.append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
.optionalStart()
.appendFraction(NANO_OF_SECOND, 1, 9, true)
.optionalEnd()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/*
* Returns a formatter that combines a full date and time, separated by a 'T'
* (uuuu-MM-dd'T'HH:mm:ss.SSSZZ).
*/
private static final DateFormatter STRICT_DATE_TIME = new JavaDateFormatter(
"strict_date_time",
new JavaTimeDateTimePrinter(STRICT_DATE_PRINTER),
new Iso8601DateTimeParser(
Set.of(MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE),
false,
null,
DecimalSeparator.DOT,
TimezonePresence.MANDATORY
).withLocale(Locale.ROOT)
);
private static final DateTimeFormatter STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE = new DateTimeFormatterBuilder().appendValue(
ChronoField.YEAR,
4,
4,
SignStyle.EXCEEDS_PAD
)
.appendLiteral('-')
.appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE)
.appendLiteral('T')
.append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/*
* Returns a formatter for a full ordinal date and time without millis,
* using a four digit year and three digit dayOfYear (uuuu-DDD'T'HH:mm:ssZZ).
*/
private static final DateFormatter STRICT_ORDINAL_DATE_TIME_NO_MILLIS = newDateFormatter(
"strict_ordinal_date_time_no_millis",
new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
private static final DateTimeFormatter STRICT_DATE_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder().append(
STRICT_YEAR_MONTH_DAY_FORMATTER
).appendLiteral('T').append(STRICT_HOUR_MINUTE_SECOND_FORMATTER).toFormatter(Locale.ROOT).withResolverStyle(ResolverStyle.STRICT);
/*
* Returns a formatter that combines a full date and time without millis,
* separated by a 'T' (uuuu-MM-dd'T'HH:mm:ssZZ).
*/
private static final DateFormatter STRICT_DATE_TIME_NO_MILLIS = new JavaDateFormatter(
"strict_date_time_no_millis",
new JavaTimeDateTimePrinter(
new DateTimeFormatterBuilder().append(STRICT_DATE_TIME_NO_MILLIS_FORMATTER)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
),
new Iso8601DateTimeParser(
Set.of(MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE),
false,
SECOND_OF_MINUTE,
DecimalSeparator.BOTH,
TimezonePresence.MANDATORY
).withLocale(Locale.ROOT)
);
// NOTE: this is not a strict formatter to retain the joda time based behaviour, even though it's named like this
private static final DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER = new DateTimeFormatterBuilder().append(
STRICT_HOUR_MINUTE_SECOND_FORMATTER
).appendFraction(NANO_OF_SECOND, 1, 9, true).toFormatter(Locale.ROOT).withResolverStyle(ResolverStyle.STRICT);
private static final DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER = new DateTimeFormatterBuilder().append(
STRICT_HOUR_MINUTE_SECOND_FORMATTER
).appendFraction(NANO_OF_SECOND, 3, 3, true).toFormatter(Locale.ROOT).withResolverStyle(ResolverStyle.STRICT);
/*
* Returns a formatter for a two digit hour of day, two digit minute of
* hour, two digit second of minute, and three digit fraction of
* second (HH:mm:ss.SSS).
*
* NOTE: this is not a strict formatter to retain the joda time based behaviour,
* even though it's named like this
*/
private static final DateFormatter STRICT_HOUR_MINUTE_SECOND_MILLIS = newDateFormatter(
"strict_hour_minute_second_millis",
STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER,
STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER
);
private static final DateFormatter STRICT_HOUR_MINUTE_SECOND_FRACTION = newDateFormatter(
"strict_hour_minute_second_fraction",
STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER,
STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER
);
/*
* Returns a formatter that combines a full date, two digit hour of day,
* two digit minute of hour, two digit second of minute, and three digit
* fraction of second (uuuu-MM-dd'T'HH:mm:ss.SSS).
*/
private static final DateFormatter STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION = new JavaDateFormatter(
"strict_date_hour_minute_second_fraction",
new JavaTimeDateTimePrinter(
new DateTimeFormatterBuilder().append(STRICT_YEAR_MONTH_DAY_FORMATTER)
.appendLiteral("T")
.append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
),
new Iso8601DateTimeParser(
Set.of(MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE, NANO_OF_SECOND),
false,
null,
DecimalSeparator.DOT,
TimezonePresence.FORBIDDEN
).withLocale(Locale.ROOT)
);
private static final DateFormatter STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS = new JavaDateFormatter(
"strict_date_hour_minute_second_millis",
new JavaTimeDateTimePrinter(
new DateTimeFormatterBuilder().append(STRICT_YEAR_MONTH_DAY_FORMATTER)
.appendLiteral("T")
.append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
),
new Iso8601DateTimeParser(
Set.of(MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE, NANO_OF_SECOND),
false,
null,
DecimalSeparator.DOT,
TimezonePresence.FORBIDDEN
).withLocale(Locale.ROOT)
);
/*
* Returns a formatter for a two digit hour of day. (HH)
*/
private static final DateFormatter STRICT_HOUR = newDateFormatter("strict_hour", DateTimeFormatter.ofPattern("HH", Locale.ROOT));
/*
* Returns a formatter for a two digit hour of day and two digit minute of
* hour. (HH:mm)
*/
private static final DateFormatter STRICT_HOUR_MINUTE = newDateFormatter(
"strict_hour_minute",
DateTimeFormatter.ofPattern("HH:mm", Locale.ROOT)
);
private static final DateTimeFormatter STRICT_ORDINAL_DATE_TIME_PRINTER = new DateTimeFormatterBuilder().appendValue(
ChronoField.YEAR,
4,
4,
SignStyle.EXCEEDS_PAD
)
.appendLiteral('-')
.appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE)
.appendLiteral('T')
.appendPattern("HH:mm")
.optionalStart()
.appendLiteral(':')
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.appendFraction(NANO_OF_SECOND, 3, 9, true)
.optionalEnd()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
private static final DateTimeFormatter STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE = new DateTimeFormatterBuilder().appendValue(
ChronoField.YEAR,
4,
4,
SignStyle.EXCEEDS_PAD
)
.appendLiteral('-')
.appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE)
.appendLiteral('T')
.appendPattern("HH:mm")
.optionalStart()
.appendLiteral(':')
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.appendFraction(NANO_OF_SECOND, 1, 9, true)
.optionalEnd()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/*
* Returns a formatter for a full ordinal date and time, using a four
* digit year and three digit dayOfYear (uuuu-DDD'T'HH:mm:ss.SSSZZ).
*/
private static final DateFormatter STRICT_ORDINAL_DATE_TIME = newDateFormatter(
"strict_ordinal_date_time",
new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_PRINTER)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
// Note: milliseconds parsing is not strict, others are
private static final DateTimeFormatter STRICT_TIME_FORMATTER_BASE = new DateTimeFormatterBuilder().appendValue(
HOUR_OF_DAY,
2,
2,
SignStyle.NOT_NEGATIVE
)
.appendLiteral(':')
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendLiteral(':')
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.appendFraction(NANO_OF_SECOND, 1, 9, true)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
private static final DateTimeFormatter STRICT_TIME_PRINTER = new DateTimeFormatterBuilder().appendValue(
HOUR_OF_DAY,
2,
2,
SignStyle.NOT_NEGATIVE
)
.appendLiteral(':')
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendLiteral(':')
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.appendFraction(NANO_OF_SECOND, 3, 3, true)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/*
* Returns a formatter for a two digit hour of day, two digit minute of
* hour, two digit second of minute, three digit fraction of second, and
* time zone offset (HH:mm:ss.SSSZZ).
*/
private static final DateFormatter STRICT_TIME = newDateFormatter(
"strict_time",
new DateTimeFormatterBuilder().append(STRICT_TIME_PRINTER)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(STRICT_TIME_FORMATTER_BASE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(STRICT_TIME_FORMATTER_BASE)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
/*
* Returns a formatter for a two digit hour of day, two digit minute of
* hour, two digit second of minute, three digit fraction of second, and
* time zone offset prefixed by 'T' ('T'HH:mm:ss.SSSZZ).
*/
private static final DateFormatter STRICT_T_TIME = newDateFormatter(
"strict_t_time",
new DateTimeFormatterBuilder().appendLiteral('T')
.append(STRICT_TIME_PRINTER)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().appendLiteral('T')
.append(STRICT_TIME_FORMATTER_BASE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().appendLiteral('T')
.append(STRICT_TIME_FORMATTER_BASE)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
private static final DateTimeFormatter STRICT_TIME_NO_MILLIS_BASE = new DateTimeFormatterBuilder().appendValue(
HOUR_OF_DAY,
2,
2,
SignStyle.NOT_NEGATIVE
)
.appendLiteral(':')
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendLiteral(':')
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/*
* Returns a formatter for a two digit hour of day, two digit minute of
* hour, two digit second of minute, and time zone offset (HH:mm:ssZZ).
*/
private static final DateFormatter STRICT_TIME_NO_MILLIS = newDateFormatter(
"strict_time_no_millis",
new DateTimeFormatterBuilder().append(STRICT_TIME_NO_MILLIS_BASE)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(STRICT_TIME_NO_MILLIS_BASE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(STRICT_TIME_NO_MILLIS_BASE)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
/*
* Returns a formatter for a two digit hour of day, two digit minute of
* hour, two digit second of minute, and time zone offset prefixed
* by 'T' ('T'HH:mm:ssZZ).
*/
private static final DateFormatter STRICT_T_TIME_NO_MILLIS = newDateFormatter(
"strict_t_time_no_millis",
new DateTimeFormatterBuilder().appendLiteral("T")
.append(STRICT_TIME_NO_MILLIS_BASE)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().appendLiteral("T")
.append(STRICT_TIME_NO_MILLIS_BASE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().appendLiteral("T")
.append(STRICT_TIME_NO_MILLIS_BASE)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
private static final DateTimeFormatter ISO_WEEK_DATE = new DateTimeFormatterBuilder().parseCaseInsensitive()
.appendValue(IsoFields.WEEK_BASED_YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
.appendLiteral("-W")
.appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 2)
.appendLiteral('-')
.appendValue(DAY_OF_WEEK, 1)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
private static final DateTimeFormatter ISO_WEEK_DATE_T = new DateTimeFormatterBuilder().append(ISO_WEEK_DATE)
.appendLiteral('T')
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/*
* Returns a formatter for a full date as four digit weekyear, two digit
* week of weekyear, and one digit day of week (YYYY-'W'ww-e).
*/
private static final DateFormatter STRICT_WEEK_DATE = newDateFormatter("strict_week_date", ISO_WEEK_DATE);
/*
* Returns a formatter that combines a full weekyear date and time without millis,
* separated by a 'T' (YYYY-'W'ww-e'T'HH:mm:ssZZ).
*/
private static final DateFormatter STRICT_WEEK_DATE_TIME_NO_MILLIS = newDateFormatter(
"strict_week_date_time_no_millis",
new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T)
.append(STRICT_TIME_NO_MILLIS_BASE)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T)
.append(STRICT_TIME_NO_MILLIS_BASE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T)
.append(STRICT_TIME_NO_MILLIS_BASE)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
/*
* Returns a formatter that combines a full weekyear date and time,
* separated by a 'T' (YYYY-'W'ww-e'T'HH:mm:ss.SSSZZ).
*/
private static final DateFormatter STRICT_WEEK_DATE_TIME = newDateFormatter(
"strict_week_date_time",
new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T)
.append(STRICT_TIME_PRINTER)
.appendOffset("+HH:MM", "Z")
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T)
.append(STRICT_TIME_FORMATTER_BASE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT),
new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T)
.append(STRICT_TIME_FORMATTER_BASE)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
/*
* Returns a formatter for a four digit weekyear
*/
private static final DateFormatter STRICT_WEEKYEAR = newDateFormatter(
"strict_weekyear",
new DateTimeFormatterBuilder().appendValue(WEEK_FIELDS_ROOT.weekBasedYear(), 4, 10, SignStyle.EXCEEDS_PAD)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
private static final DateTimeFormatter STRICT_WEEKYEAR_WEEK_FORMATTER = new DateTimeFormatterBuilder().appendValue(
WEEK_FIELDS_ROOT.weekBasedYear(),
4,
10,
SignStyle.EXCEEDS_PAD
)
.appendLiteral("-W")
.appendValue(WEEK_FIELDS_ROOT.weekOfWeekBasedYear(), 2, 2, SignStyle.NOT_NEGATIVE)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/*
* Returns a formatter for a four digit weekyear and two digit week of
* weekyear. (YYYY-'W'ww)
*/
private static final DateFormatter STRICT_WEEKYEAR_WEEK = newDateFormatter("strict_weekyear_week", STRICT_WEEKYEAR_WEEK_FORMATTER);
/*
* Returns a formatter for a four digit weekyear, two digit week of
* weekyear, and one digit day of week. (YYYY-'W'ww-e)
*/
private static final DateFormatter STRICT_WEEKYEAR_WEEK_DAY = newDateFormatter(
"strict_weekyear_week_day",
new DateTimeFormatterBuilder().append(STRICT_WEEKYEAR_WEEK_FORMATTER)
.appendLiteral("-")
.appendValue(WEEK_FIELDS_ROOT.dayOfWeek())
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
);
/*
* Returns a formatter that combines a full date, two digit hour of day,
* two digit minute of hour, and two digit second of
* minute. (uuuu-MM-dd'T'HH:mm:ss)
*/
private static final DateFormatter STRICT_DATE_HOUR_MINUTE_SECOND = new JavaDateFormatter(
"strict_date_hour_minute_second",
new JavaTimeDateTimePrinter(DateTimeFormatter.ofPattern("uuuu-MM-dd'T'HH:mm:ss", Locale.ROOT)),
new Iso8601DateTimeParser(
Set.of(MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE),
false,
SECOND_OF_MINUTE,
DecimalSeparator.BOTH,
TimezonePresence.FORBIDDEN
).withLocale(Locale.ROOT)
);
/*
* A basic formatter for a full date as four digit year, two digit
* month of year, and two digit day of month (uuuuMMdd).
*/
private static final DateFormatter BASIC_DATE = newDateFormatter(
"basic_date",
new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR, 4, 10, SignStyle.NORMAL)
.appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
.withZone(ZoneOffset.UTC),
new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR, 1, 4, SignStyle.NORMAL)
.appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NOT_NEGATIVE)
.appendValue(DAY_OF_MONTH, 1, 2, SignStyle.NOT_NEGATIVE)
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT)
.withZone(ZoneOffset.UTC)
);
private static final DateTimeFormatter STRICT_ORDINAL_DATE_FORMATTER = new DateTimeFormatterBuilder().parseCaseInsensitive()
.appendValue(ChronoField.YEAR, 4, 4, SignStyle.EXCEEDS_PAD)
.appendLiteral('-')
.appendValue(DAY_OF_YEAR, 3)
.optionalStart()
.toFormatter(Locale.ROOT)
.withResolverStyle(ResolverStyle.STRICT);
/*
* Returns a formatter for a full ordinal date, using a four
* digit year and three digit dayOfYear (uuuu-DDD).
*/
private static final DateFormatter STRICT_ORDINAL_DATE = newDateFormatter("strict_ordinal_date", STRICT_ORDINAL_DATE_FORMATTER);
/////////////////////////////////////////
//
// end strict formatters
//
/////////////////////////////////////////
/////////////////////////////////////////
//
// start lenient formatters
//
/////////////////////////////////////////
// Lenient date: a 1-9 digit year with an optional "-M[M]" and, inside that, an optional "-d[d]".
private static final DateTimeFormatter DATE_FORMATTER = new DateTimeFormatterBuilder().appendValue(
    ChronoField.YEAR,
    1,
    9,
    SignStyle.NORMAL
)
    .optionalStart()
    .appendLiteral('-')
    .appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NOT_NEGATIVE)
    .optionalStart()
    .appendLiteral('-')
    .appendValue(DAY_OF_MONTH, 1, 2, SignStyle.NOT_NEGATIVE)
    .optionalEnd()
    .optionalEnd()
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
// Lenient hour and minute of day (H:m), each field 1-2 digits.
private static final DateTimeFormatter HOUR_MINUTE_FORMATTER = new DateTimeFormatterBuilder().appendValue(
    HOUR_OF_DAY,
    1,
    2,
    SignStyle.NOT_NEGATIVE
)
    .appendLiteral(':')
    .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE)
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
/*
 * A date formatter with optional time, being very lenient; the printed format is
 * uuuu-MM-dd'T'HH:mm:ss.SSSZ
 */
private static final DateFormatter DATE_OPTIONAL_TIME = newDateFormatter(
    "date_optional_time",
    STRICT_DATE_OPTIONAL_TIME_PRINTER,
    new DateTimeFormatterBuilder().append(DATE_FORMATTER)
        .optionalStart()
        .appendLiteral('T')
        .optionalStart()
        .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE)
        .optionalStart()
        .appendLiteral(':')
        .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE)
        .optionalStart()
        .appendLiteral(':')
        .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE)
        .optionalEnd()
        // fraction of second with '.' as the decimal separator
        .optionalStart()
        .appendFraction(NANO_OF_SECOND, 1, 9, true)
        .optionalEnd()
        // fraction of second with ',' as the decimal separator (ISO 8601 alternative)
        .optionalStart()
        .appendLiteral(',')
        .appendFraction(NANO_OF_SECOND, 1, 9, false)
        .optionalEnd()
        .optionalStart()
        .appendZoneOrOffsetId()
        .optionalEnd()
        .optionalStart()
        .appendOffset("+HHmm", "Z")
        .optionalEnd()
        .optionalEnd()
        .optionalEnd()
        .optionalEnd()
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
// Lenient hour:minute:second (H:m:s).
private static final DateTimeFormatter HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder().append(HOUR_MINUTE_FORMATTER)
    .appendLiteral(":")
    .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE)
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
// Lenient hour:minute:second with a 1-3 digit fraction of second (millis precision).
private static final DateTimeFormatter HOUR_MINUTE_SECOND_MILLIS_FORMATTER = new DateTimeFormatterBuilder().appendValue(
    HOUR_OF_DAY,
    1,
    2,
    SignStyle.NOT_NEGATIVE
)
    .appendLiteral(':')
    .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE)
    .appendLiteral(':')
    .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE)
    .appendFraction(NANO_OF_SECOND, 1, 3, true)
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
// Lenient hour:minute:second with a 1-9 digit fraction of second (nanos precision).
private static final DateTimeFormatter HOUR_MINUTE_SECOND_FRACTION_FORMATTER = new DateTimeFormatterBuilder().appendValue(
    HOUR_OF_DAY,
    1,
    2,
    SignStyle.NOT_NEGATIVE
)
    .appendLiteral(':')
    .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE)
    .appendLiteral(':')
    .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE)
    .appendFraction(NANO_OF_SECOND, 1, 9, true)
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
// Lenient ordinal date parser: 4-10 digit year, '-', 1-3 digit day of year.
private static final DateTimeFormatter ORDINAL_DATE_FORMATTER = new DateTimeFormatterBuilder().appendValue(
    ChronoField.YEAR,
    4,
    10,
    SignStyle.EXCEEDS_PAD
)
    .appendLiteral('-')
    .appendValue(DAY_OF_YEAR, 1, 3, SignStyle.NOT_NEGATIVE)
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
// Ordinal date printer: day of year is always printed with three digits.
private static final DateTimeFormatter ORDINAL_DATE_PRINTER = new DateTimeFormatterBuilder().appendValue(
    ChronoField.YEAR,
    4,
    10,
    SignStyle.EXCEEDS_PAD
)
    .appendLiteral('-')
    .appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE)
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
/*
 * Returns a formatter for a full ordinal date, using a four
 * digit year and three digit dayOfYear (uuuu-DDD).
 */
private static final DateFormatter ORDINAL_DATE = newDateFormatter("ordinal_date", ORDINAL_DATE_PRINTER, ORDINAL_DATE_FORMATTER);
// Lenient time without a fraction (H:m:s).
private static final DateTimeFormatter TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder().appendValue(
    HOUR_OF_DAY,
    1,
    2,
    SignStyle.NOT_NEGATIVE
)
    .appendLiteral(':')
    .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE)
    .appendLiteral(':')
    .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE)
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
// Same as TIME_NO_MILLIS_FORMATTER, prefixed with a 'T' literal.
private static final DateTimeFormatter T_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder().appendLiteral("T")
    .append(TIME_NO_MILLIS_FORMATTER)
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
// Time with a 1-9 digit fraction of second (H:m:s.S..S); no zone handling here.
private static final DateTimeFormatter TIME_PREFIX = new DateTimeFormatterBuilder().append(TIME_NO_MILLIS_FORMATTER)
    .appendFraction(NANO_OF_SECOND, 1, 9, true)
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
// ISO week date: week-based year, literal "-W", week of week-based year, '-', day of week.
private static final DateTimeFormatter WEEK_DATE_FORMATTER = new DateTimeFormatterBuilder().appendValue(
    IsoFields.WEEK_BASED_YEAR,
    4,
    10,
    SignStyle.EXCEEDS_PAD
)
    .appendLiteral("-W")
    .appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 1, 2, SignStyle.NOT_NEGATIVE)
    .appendLiteral('-')
    .appendValue(DAY_OF_WEEK, 1)
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
/*
 * Returns a formatter for a four digit weekyear. (YYYY)
 * Note: appendValue without an explicit width accepts a variable number of digits.
 */
private static final DateFormatter WEEKYEAR = newDateFormatter(
    "weekyear",
    new DateTimeFormatterBuilder().appendValue(WEEK_FIELDS_ROOT.weekBasedYear())
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter for a four digit year. (uuuu)
 * Note: appendValue without an explicit width accepts a variable number of digits.
 */
private static final DateFormatter YEAR = newDateFormatter(
    "year",
    new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR).toFormatter(Locale.ROOT).withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter that combines a full date and two digit hour of
 * day. (uuuu-MM-dd'T'HH)
 */
private static final DateFormatter DATE_HOUR = newDateFormatter(
    "date_hour",
    DateTimeFormatter.ofPattern("uuuu-MM-dd'T'HH", Locale.ROOT),
    new DateTimeFormatterBuilder().append(DATE_FORMATTER)
        .appendLiteral("T")
        .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter that combines a full date, two digit hour of day,
 * two digit minute of hour, two digit second of minute, and three digit
 * fraction of second (uuuu-MM-dd'T'HH:mm:ss.SSS).
 */
private static final DateFormatter DATE_HOUR_MINUTE_SECOND_MILLIS = newDateFormatter(
    "date_hour_minute_second_millis",
    new DateTimeFormatterBuilder().append(STRICT_YEAR_MONTH_DAY_FORMATTER)
        .appendLiteral("T")
        .append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(DATE_FORMATTER)
        .appendLiteral("T")
        .append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter that combines a full date, two digit hour of day,
 * two digit minute of hour, two digit second of minute, and a fraction
 * of second of up to nine digits when parsing; printing uses millis
 * precision (uuuu-MM-dd'T'HH:mm:ss.SSS).
 */
private static final DateFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = newDateFormatter(
    "date_hour_minute_second_fraction",
    new DateTimeFormatterBuilder().append(STRICT_YEAR_MONTH_DAY_FORMATTER)
        .appendLiteral("T")
        .append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(DATE_FORMATTER)
        .appendLiteral("T")
        .append(HOUR_MINUTE_SECOND_FRACTION_FORMATTER)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter that combines a full date, two digit hour of day,
 * and two digit minute of hour. (uuuu-MM-dd'T'HH:mm)
 */
private static final DateFormatter DATE_HOUR_MINUTE = newDateFormatter(
    "date_hour_minute",
    DateTimeFormatter.ofPattern("uuuu-MM-dd'T'HH:mm", Locale.ROOT),
    new DateTimeFormatterBuilder().append(DATE_FORMATTER)
        .appendLiteral("T")
        .append(HOUR_MINUTE_FORMATTER)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter that combines a full date, two digit hour of day,
 * two digit minute of hour, and two digit second of
 * minute. (uuuu-MM-dd'T'HH:mm:ss)
 */
private static final DateFormatter DATE_HOUR_MINUTE_SECOND = newDateFormatter(
    "date_hour_minute_second",
    DateTimeFormatter.ofPattern("uuuu-MM-dd'T'HH:mm:ss", Locale.ROOT),
    new DateTimeFormatterBuilder().append(DATE_FORMATTER)
        .appendLiteral("T")
        .append(HOUR_MINUTE_SECOND_FORMATTER)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
// Lenient date 'T' hour:minute, with optional ":s[s]" and optional fraction; no zone.
private static final DateTimeFormatter DATE_TIME_FORMATTER = new DateTimeFormatterBuilder().append(DATE_FORMATTER)
    .appendLiteral('T')
    .append(HOUR_MINUTE_FORMATTER)
    .optionalStart()
    .appendLiteral(':')
    .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE)
    .appendFraction(NANO_OF_SECOND, 1, 9, true)
    .optionalEnd()
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
/*
 * Returns a formatter that combines a full date and time, separated by a 'T'
 * (uuuu-MM-dd'T'HH:mm:ss.SSSZZ).
 */
private static final DateFormatter DATE_TIME = newDateFormatter(
    "date_time",
    STRICT_DATE_OPTIONAL_TIME_PRINTER,
    // parser variant with a colon-separated offset ("+HH:MM" or "Z")
    new DateTimeFormatterBuilder().append(DATE_TIME_FORMATTER)
        .appendOffset("+HH:MM", "Z")
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    // parser variant with a non-colon timezone
    new DateTimeFormatterBuilder().append(DATE_TIME_FORMATTER)
        .append(TIME_ZONE_FORMATTER_NO_COLON)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a basic formatter for a full date as four digit weekyear, two
 * digit week of weekyear, and one digit day of week (YYYY'W'wwe).
 */
private static final DateFormatter BASIC_WEEK_DATE = newDateFormatter(
    "basic_week_date",
    STRICT_BASIC_WEEK_DATE_PRINTER,
    BASIC_WEEK_DATE_FORMATTER
);
/*
 * Returns a formatter for a full date as four digit year, two digit month
 * of year, and two digit day of month (uuuu-MM-dd).
 */
private static final DateFormatter DATE = newDateFormatter(
    "date",
    DateTimeFormatter.ISO_LOCAL_DATE.withLocale(Locale.ROOT).withResolverStyle(ResolverStyle.STRICT),
    DATE_FORMATTER
);
// only the printer, nothing optional here: ISO date 'T' HH:mm:ss plus a zone id
private static final DateTimeFormatter DATE_TIME_NO_MILLIS_PRINTER = new DateTimeFormatterBuilder().append(
    DateTimeFormatter.ISO_LOCAL_DATE.withLocale(Locale.ROOT).withResolverStyle(ResolverStyle.LENIENT)
)
    .appendLiteral('T')
    .appendPattern("HH:mm")
    .appendLiteral(':')
    .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
    .appendZoneId()
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
// Lenient date 'T' hour:minute with optional seconds; no fraction, no zone.
private static final DateTimeFormatter DATE_TIME_PREFIX = new DateTimeFormatterBuilder().append(DATE_FORMATTER)
    .appendLiteral('T')
    .append(HOUR_MINUTE_FORMATTER)
    .optionalStart()
    .appendLiteral(':')
    .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE)
    .optionalEnd()
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
/*
 * Returns a formatter that combines a full date and a time without millis,
 * separated by a 'T', with an optional timezone (uuuu-MM-dd'T'HH:mm:ssZ).
 */
private static final DateFormatter DATE_TIME_NO_MILLIS = newDateFormatter(
    "date_time_no_millis",
    DATE_TIME_NO_MILLIS_PRINTER,
    // parser variants: colon offset, non-colon timezone, then the same two with the zone optional
    new DateTimeFormatterBuilder().append(DATE_TIME_PREFIX)
        .appendOffset("+HH:MM", "Z")
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(DATE_TIME_PREFIX)
        .append(TIME_ZONE_FORMATTER_NO_COLON)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(DATE_TIME_PREFIX)
        .optionalStart()
        .appendZoneOrOffsetId()
        .optionalEnd()
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(DATE_TIME_PREFIX)
        .optionalStart()
        .append(TIME_ZONE_FORMATTER_NO_COLON)
        .optionalEnd()
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter for a two digit hour of day, two digit minute of
 * hour, two digit second of minute, and three digit fraction of
 * second (HH:mm:ss.SSS).
 */
private static final DateFormatter HOUR_MINUTE_SECOND_MILLIS = newDateFormatter(
    "hour_minute_second_millis",
    STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER,
    HOUR_MINUTE_SECOND_MILLIS_FORMATTER
);
/*
 * Returns a formatter for a two digit hour of day, two digit minute of
 * hour, two digit second of minute, and a fraction of second of up to
 * nine digits when parsing; printing uses millis precision (HH:mm:ss.SSS).
 */
private static final DateFormatter HOUR_MINUTE_SECOND_FRACTION = newDateFormatter(
    "hour_minute_second_fraction",
    STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER,
    HOUR_MINUTE_SECOND_FRACTION_FORMATTER
);
/*
 * Returns a formatter for a two digit hour of day and two digit minute of
 * hour. (HH:mm)
 */
private static final DateFormatter HOUR_MINUTE = newDateFormatter(
    "hour_minute",
    DateTimeFormatter.ofPattern("HH:mm", Locale.ROOT),
    HOUR_MINUTE_FORMATTER
);
/*
 * A strict formatter that formats or parses an hour, minute and second, such as '09:43:25'.
 */
private static final DateFormatter HOUR_MINUTE_SECOND = newDateFormatter(
    "hour_minute_second",
    STRICT_HOUR_MINUTE_SECOND_FORMATTER,
    new DateTimeFormatterBuilder().append(HOUR_MINUTE_FORMATTER)
        .appendLiteral(":")
        .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter for a two digit hour of day. (HH)
 */
private static final DateFormatter HOUR = newDateFormatter(
    "hour",
    DateTimeFormatter.ofPattern("HH", Locale.ROOT),
    new DateTimeFormatterBuilder().appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
// Lenient ordinal date 'T' hour:minute, with optional seconds and fraction; no zone.
private static final DateTimeFormatter ORDINAL_DATE_TIME_FORMATTER_BASE = new DateTimeFormatterBuilder().append(ORDINAL_DATE_FORMATTER)
    .appendLiteral('T')
    .append(HOUR_MINUTE_FORMATTER)
    .optionalStart()
    .appendLiteral(':')
    .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE)
    .appendFraction(NANO_OF_SECOND, 1, 9, true)
    .optionalEnd()
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
/*
 * Returns a formatter for a full ordinal date and time, using a four
 * digit year and three digit dayOfYear (uuuu-DDD'T'HH:mm:ss.SSSZZ).
 */
private static final DateFormatter ORDINAL_DATE_TIME = newDateFormatter(
    "ordinal_date_time",
    // printer with a colon-separated offset
    new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_PRINTER)
        .appendOffset("+HH:MM", "Z")
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    // parser variants: zone/offset id, or non-colon timezone
    new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_FORMATTER_BASE)
        .appendZoneOrOffsetId()
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_FORMATTER_BASE)
        .append(TIME_ZONE_FORMATTER_NO_COLON)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
// Lenient ordinal date 'T' hour:minute:second; no fraction, no zone.
private static final DateTimeFormatter ORDINAL_DATE_TIME_NO_MILLIS_BASE = new DateTimeFormatterBuilder().append(ORDINAL_DATE_FORMATTER)
    .appendLiteral('T')
    .append(HOUR_MINUTE_SECOND_FORMATTER)
    .toFormatter(Locale.ROOT)
    .withResolverStyle(ResolverStyle.STRICT);
/*
 * Returns a formatter for a full ordinal date and time without millis,
 * using a four digit year and three digit dayOfYear (uuuu-DDD'T'HH:mm:ssZZ).
 */
private static final DateFormatter ORDINAL_DATE_TIME_NO_MILLIS = newDateFormatter(
    "ordinal_date_time_no_millis",
    new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE)
        .appendOffset("+HH:MM", "Z")
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_NO_MILLIS_BASE)
        .appendZoneOrOffsetId()
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_NO_MILLIS_BASE)
        .append(TIME_ZONE_FORMATTER_NO_COLON)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter that combines a full weekyear date and time,
 * separated by a 'T' (YYYY-'W'ww-e'T'HH:mm:ss.SSSZZ).
 */
private static final DateFormatter WEEK_DATE_TIME = newDateFormatter(
    "week_date_time",
    // printer with a colon-separated offset
    new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T)
        .append(STRICT_TIME_PRINTER)
        .appendOffset("+HH:MM", "Z")
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    // parser variants: zone/offset id, or non-colon timezone
    new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER)
        .appendLiteral("T")
        .append(TIME_PREFIX)
        .appendZoneOrOffsetId()
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER)
        .appendLiteral("T")
        .append(TIME_PREFIX)
        .append(TIME_ZONE_FORMATTER_NO_COLON)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter that combines a full weekyear date and time,
 * separated by a 'T' (YYYY-'W'ww-e'T'HH:mm:ssZZ).
 */
private static final DateFormatter WEEK_DATE_TIME_NO_MILLIS = newDateFormatter(
    "week_date_time_no_millis",
    new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T)
        .append(STRICT_TIME_NO_MILLIS_BASE)
        .appendOffset("+HH:MM", "Z")
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER)
        .append(T_TIME_NO_MILLIS_FORMATTER)
        .appendZoneOrOffsetId()
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER)
        .append(T_TIME_NO_MILLIS_FORMATTER)
        .append(TIME_ZONE_FORMATTER_NO_COLON)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a basic formatter that combines a basic weekyear date and time,
 * separated by a 'T' (YYYY'W'wwe'T'HHmmss.SSSX).
 */
private static final DateFormatter BASIC_WEEK_DATE_TIME = newDateFormatter(
    "basic_week_date_time",
    new DateTimeFormatterBuilder().append(STRICT_BASIC_WEEK_DATE_PRINTER)
        .append(DateTimeFormatter.ofPattern("'T'HHmmss.SSSX", Locale.ROOT))
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER)
        .append(BASIC_T_TIME_FORMATTER)
        .appendZoneOrOffsetId()
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER)
        .append(BASIC_T_TIME_FORMATTER)
        .append(TIME_ZONE_FORMATTER_NO_COLON)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a basic formatter that combines a basic weekyear date and time,
 * separated by a 'T' (YYYY'W'wwe'T'HHmmssX).
 */
private static final DateFormatter BASIC_WEEK_DATE_TIME_NO_MILLIS = newDateFormatter(
    "basic_week_date_time_no_millis",
    new DateTimeFormatterBuilder().append(STRICT_BASIC_WEEK_DATE_PRINTER)
        .append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT))
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER)
        .appendLiteral("T")
        .append(BASIC_TIME_NO_MILLIS_BASE)
        .appendZoneOrOffsetId()
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER)
        .appendLiteral("T")
        .append(BASIC_TIME_NO_MILLIS_BASE)
        .append(TIME_ZONE_FORMATTER_NO_COLON)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter for a two digit hour of day, two digit minute of
 * hour, two digit second of minute, three digit fraction of second, and
 * time zone offset (HH:mm:ss.SSSZZ).
 */
private static final DateFormatter TIME = newDateFormatter(
    "time",
    new DateTimeFormatterBuilder().append(STRICT_TIME_PRINTER)
        .appendOffset("+HH:MM", "Z")
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(TIME_PREFIX)
        .appendZoneOrOffsetId()
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(TIME_PREFIX)
        .append(TIME_ZONE_FORMATTER_NO_COLON)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter for a two digit hour of day, two digit minute of
 * hour, two digit second of minute, and time zone offset (HH:mm:ssZZ).
 */
private static final DateFormatter TIME_NO_MILLIS = newDateFormatter(
    "time_no_millis",
    new DateTimeFormatterBuilder().append(STRICT_TIME_NO_MILLIS_BASE)
        .appendOffset("+HH:MM", "Z")
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(TIME_NO_MILLIS_FORMATTER)
        .appendZoneOrOffsetId()
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(TIME_NO_MILLIS_FORMATTER)
        .append(TIME_ZONE_FORMATTER_NO_COLON)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter for a two digit hour of day, two digit minute of
 * hour, two digit second of minute, three digit fraction of second, and
 * time zone offset prefixed by 'T' ('T'HH:mm:ss.SSSZZ).
 */
private static final DateFormatter T_TIME = newDateFormatter(
    "t_time",
    new DateTimeFormatterBuilder().appendLiteral('T')
        .append(STRICT_TIME_PRINTER)
        .appendOffset("+HH:MM", "Z")
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().appendLiteral("T")
        .append(TIME_PREFIX)
        .appendZoneOrOffsetId()
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().appendLiteral("T")
        .append(TIME_PREFIX)
        .append(TIME_ZONE_FORMATTER_NO_COLON)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter for a two digit hour of day, two digit minute of
 * hour, two digit second of minute, and time zone offset prefixed
 * by 'T' ('T'HH:mm:ssZZ).
 */
private static final DateFormatter T_TIME_NO_MILLIS = newDateFormatter(
    "t_time_no_millis",
    new DateTimeFormatterBuilder().appendLiteral("T")
        .append(STRICT_TIME_NO_MILLIS_BASE)
        .appendOffset("+HH:MM", "Z")
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(T_TIME_NO_MILLIS_FORMATTER)
        .appendZoneOrOffsetId()
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().append(T_TIME_NO_MILLIS_FORMATTER)
        .append(TIME_ZONE_FORMATTER_NO_COLON)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * A strict formatter that formats or parses a year and a month, such as '2011-12'.
 */
private static final DateFormatter YEAR_MONTH = newDateFormatter(
    "year_month",
    // printer: zero-padded two digit month
    new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
        .appendLiteral("-")
        .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    // parser: variable-width year and month
    new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR)
        .appendLiteral("-")
        .appendValue(MONTH_OF_YEAR)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * A strict date formatter that formats or parses a date without an offset, such as '2011-12-03'.
 */
private static final DateFormatter YEAR_MONTH_DAY = newDateFormatter(
    "year_month_day",
    STRICT_YEAR_MONTH_DAY_FORMATTER,
    new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR)
        .appendLiteral("-")
        .appendValue(MONTH_OF_YEAR)
        .appendLiteral("-")
        .appendValue(DAY_OF_MONTH)
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter for a full date as four digit weekyear, two digit
 * week of weekyear, and one digit day of week (YYYY-'W'ww-e).
 */
private static final DateFormatter WEEK_DATE = newDateFormatter("week_date", ISO_WEEK_DATE, WEEK_DATE_FORMATTER);
/*
 * Returns a formatter for a four digit weekyear and two digit week of
 * weekyear. (YYYY-'W'ww)
 */
private static final DateFormatter WEEKYEAR_WEEK = newDateFormatter(
    "weekyear_week",
    STRICT_WEEKYEAR_WEEK_FORMATTER,
    new DateTimeFormatterBuilder().appendValue(WEEK_FIELDS_ROOT.weekBasedYear())
        .appendLiteral("-W")
        .appendValue(WEEK_FIELDS_ROOT.weekOfWeekBasedYear())
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/*
 * Returns a formatter for a four digit weekyear, two digit week of
 * weekyear, and one digit day of week. (YYYY-'W'ww-e)
 */
private static final DateFormatter WEEKYEAR_WEEK_DAY = newDateFormatter(
    "weekyear_week_day",
    new DateTimeFormatterBuilder().append(STRICT_WEEKYEAR_WEEK_FORMATTER)
        .appendLiteral("-")
        .appendValue(WEEK_FIELDS_ROOT.dayOfWeek())
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT),
    new DateTimeFormatterBuilder().appendValue(WEEK_FIELDS_ROOT.weekBasedYear())
        .appendLiteral("-W")
        .appendValue(WEEK_FIELDS_ROOT.weekOfWeekBasedYear())
        .appendLiteral("-")
        .appendValue(WEEK_FIELDS_ROOT.dayOfWeek())
        .toFormatter(Locale.ROOT)
        .withResolverStyle(ResolverStyle.STRICT)
);
/////////////////////////////////////////
//
// end lenient formatters
//
/////////////////////////////////////////
/**
 * Resolves a date format name or pattern string to a {@link DateFormatter}.
 * <p>
 * The input is trimmed and then matched against each built-in format name via
 * {@code FormatNames.matches}; the first matching name wins. If no built-in
 * name matches, the input is treated as a {@link DateTimeFormatterBuilder}
 * pattern and compiled into a new strict formatter.
 *
 * @param input a built-in format name (e.g. {@code "basic_date"}) or a pattern string
 * @return the corresponding built-in formatter, or a new formatter compiled from the pattern
 * @throws IllegalArgumentException if the input is null/empty, or is not a built-in name
 *         and cannot be compiled as a pattern
 */
static DateFormatter forPattern(String input) {
    if (Strings.hasLength(input)) {
        input = input.trim();
    }
    if (input == null || input.isEmpty()) {
        throw new IllegalArgumentException("No date pattern provided");
    }
    if (FormatNames.ISO8601.matches(input)) {
        return ISO_8601;
    } else if (FormatNames.BASIC_DATE.matches(input)) {
        return BASIC_DATE;
    } else if (FormatNames.BASIC_DATE_TIME.matches(input)) {
        return BASIC_DATE_TIME;
    } else if (FormatNames.BASIC_DATE_TIME_NO_MILLIS.matches(input)) {
        return BASIC_DATE_TIME_NO_MILLIS;
    } else if (FormatNames.BASIC_ORDINAL_DATE.matches(input)) {
        return BASIC_ORDINAL_DATE;
    } else if (FormatNames.BASIC_ORDINAL_DATE_TIME.matches(input)) {
        return BASIC_ORDINAL_DATE_TIME;
    } else if (FormatNames.BASIC_ORDINAL_DATE_TIME_NO_MILLIS.matches(input)) {
        return BASIC_ORDINAL_DATE_TIME_NO_MILLIS;
    } else if (FormatNames.BASIC_TIME.matches(input)) {
        return BASIC_TIME;
    } else if (FormatNames.BASIC_TIME_NO_MILLIS.matches(input)) {
        return BASIC_TIME_NO_MILLIS;
    } else if (FormatNames.BASIC_T_TIME.matches(input)) {
        return BASIC_T_TIME;
    } else if (FormatNames.BASIC_T_TIME_NO_MILLIS.matches(input)) {
        return BASIC_T_TIME_NO_MILLIS;
    } else if (FormatNames.BASIC_WEEK_DATE.matches(input)) {
        return BASIC_WEEK_DATE;
    } else if (FormatNames.BASIC_WEEK_DATE_TIME.matches(input)) {
        return BASIC_WEEK_DATE_TIME;
    } else if (FormatNames.BASIC_WEEK_DATE_TIME_NO_MILLIS.matches(input)) {
        return BASIC_WEEK_DATE_TIME_NO_MILLIS;
    } else if (FormatNames.DATE.matches(input)) {
        return DATE;
    } else if (FormatNames.DATE_HOUR.matches(input)) {
        return DATE_HOUR;
    } else if (FormatNames.DATE_HOUR_MINUTE.matches(input)) {
        return DATE_HOUR_MINUTE;
    } else if (FormatNames.DATE_HOUR_MINUTE_SECOND.matches(input)) {
        return DATE_HOUR_MINUTE_SECOND;
    } else if (FormatNames.DATE_HOUR_MINUTE_SECOND_FRACTION.matches(input)) {
        return DATE_HOUR_MINUTE_SECOND_FRACTION;
    } else if (FormatNames.DATE_HOUR_MINUTE_SECOND_MILLIS.matches(input)) {
        return DATE_HOUR_MINUTE_SECOND_MILLIS;
    } else if (FormatNames.DATE_OPTIONAL_TIME.matches(input)) {
        return DATE_OPTIONAL_TIME;
    } else if (FormatNames.DATE_TIME.matches(input)) {
        return DATE_TIME;
    } else if (FormatNames.DATE_TIME_NO_MILLIS.matches(input)) {
        return DATE_TIME_NO_MILLIS;
    } else if (FormatNames.HOUR.matches(input)) {
        return HOUR;
    } else if (FormatNames.HOUR_MINUTE.matches(input)) {
        return HOUR_MINUTE;
    } else if (FormatNames.HOUR_MINUTE_SECOND.matches(input)) {
        return HOUR_MINUTE_SECOND;
    } else if (FormatNames.HOUR_MINUTE_SECOND_FRACTION.matches(input)) {
        return HOUR_MINUTE_SECOND_FRACTION;
    } else if (FormatNames.HOUR_MINUTE_SECOND_MILLIS.matches(input)) {
        return HOUR_MINUTE_SECOND_MILLIS;
    } else if (FormatNames.ORDINAL_DATE.matches(input)) {
        return ORDINAL_DATE;
    } else if (FormatNames.ORDINAL_DATE_TIME.matches(input)) {
        return ORDINAL_DATE_TIME;
    } else if (FormatNames.ORDINAL_DATE_TIME_NO_MILLIS.matches(input)) {
        return ORDINAL_DATE_TIME_NO_MILLIS;
    } else if (FormatNames.TIME.matches(input)) {
        return TIME;
    } else if (FormatNames.TIME_NO_MILLIS.matches(input)) {
        return TIME_NO_MILLIS;
    } else if (FormatNames.T_TIME.matches(input)) {
        return T_TIME;
    } else if (FormatNames.T_TIME_NO_MILLIS.matches(input)) {
        return T_TIME_NO_MILLIS;
    } else if (FormatNames.WEEK_DATE.matches(input)) {
        return WEEK_DATE;
    } else if (FormatNames.WEEK_DATE_TIME.matches(input)) {
        return WEEK_DATE_TIME;
    } else if (FormatNames.WEEK_DATE_TIME_NO_MILLIS.matches(input)) {
        return WEEK_DATE_TIME_NO_MILLIS;
    } else if (FormatNames.WEEKYEAR.matches(input)) {
        return WEEKYEAR;
    } else if (FormatNames.WEEK_YEAR_WEEK.matches(input)) {
        return WEEKYEAR_WEEK;
    } else if (FormatNames.WEEKYEAR_WEEK_DAY.matches(input)) {
        return WEEKYEAR_WEEK_DAY;
    } else if (FormatNames.YEAR.matches(input)) {
        return YEAR;
    } else if (FormatNames.YEAR_MONTH.matches(input)) {
        return YEAR_MONTH;
    } else if (FormatNames.YEAR_MONTH_DAY.matches(input)) {
        return YEAR_MONTH_DAY;
    } else if (FormatNames.EPOCH_SECOND.matches(input)) {
        return EpochTime.SECONDS_FORMATTER;
    } else if (FormatNames.EPOCH_MILLIS.matches(input)) {
        return EpochTime.MILLIS_FORMATTER;
        // strict date formats here, must be at least 4 digits for year and two for months and two for day
    } else if (FormatNames.STRICT_BASIC_WEEK_DATE.matches(input)) {
        return STRICT_BASIC_WEEK_DATE;
    } else if (FormatNames.STRICT_BASIC_WEEK_DATE_TIME.matches(input)) {
        return STRICT_BASIC_WEEK_DATE_TIME;
    } else if (FormatNames.STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS.matches(input)) {
        return STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS;
    } else if (FormatNames.STRICT_DATE.matches(input)) {
        return STRICT_DATE;
    } else if (FormatNames.STRICT_DATE_HOUR.matches(input)) {
        return STRICT_DATE_HOUR;
    } else if (FormatNames.STRICT_DATE_HOUR_MINUTE.matches(input)) {
        return STRICT_DATE_HOUR_MINUTE;
    } else if (FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND.matches(input)) {
        return STRICT_DATE_HOUR_MINUTE_SECOND;
    } else if (FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION.matches(input)) {
        return STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION;
    } else if (FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS.matches(input)) {
        return STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS;
    } else if (FormatNames.STRICT_DATE_OPTIONAL_TIME.matches(input)) {
        return STRICT_DATE_OPTIONAL_TIME;
    } else if (FormatNames.STRICT_DATE_OPTIONAL_TIME_NANOS.matches(input)) {
        return STRICT_DATE_OPTIONAL_TIME_NANOS;
    } else if (FormatNames.STRICT_DATE_TIME.matches(input)) {
        return STRICT_DATE_TIME;
    } else if (FormatNames.STRICT_DATE_TIME_NO_MILLIS.matches(input)) {
        return STRICT_DATE_TIME_NO_MILLIS;
    } else if (FormatNames.STRICT_HOUR.matches(input)) {
        return STRICT_HOUR;
    } else if (FormatNames.STRICT_HOUR_MINUTE.matches(input)) {
        return STRICT_HOUR_MINUTE;
    } else if (FormatNames.STRICT_HOUR_MINUTE_SECOND.matches(input)) {
        return STRICT_HOUR_MINUTE_SECOND;
    } else if (FormatNames.STRICT_HOUR_MINUTE_SECOND_FRACTION.matches(input)) {
        return STRICT_HOUR_MINUTE_SECOND_FRACTION;
    } else if (FormatNames.STRICT_HOUR_MINUTE_SECOND_MILLIS.matches(input)) {
        return STRICT_HOUR_MINUTE_SECOND_MILLIS;
    } else if (FormatNames.STRICT_ORDINAL_DATE.matches(input)) {
        return STRICT_ORDINAL_DATE;
    } else if (FormatNames.STRICT_ORDINAL_DATE_TIME.matches(input)) {
        return STRICT_ORDINAL_DATE_TIME;
    } else if (FormatNames.STRICT_ORDINAL_DATE_TIME_NO_MILLIS.matches(input)) {
        return STRICT_ORDINAL_DATE_TIME_NO_MILLIS;
    } else if (FormatNames.STRICT_TIME.matches(input)) {
        return STRICT_TIME;
    } else if (FormatNames.STRICT_TIME_NO_MILLIS.matches(input)) {
        return STRICT_TIME_NO_MILLIS;
    } else if (FormatNames.STRICT_T_TIME.matches(input)) {
        return STRICT_T_TIME;
    } else if (FormatNames.STRICT_T_TIME_NO_MILLIS.matches(input)) {
        return STRICT_T_TIME_NO_MILLIS;
    } else if (FormatNames.STRICT_WEEK_DATE.matches(input)) {
        return STRICT_WEEK_DATE;
    } else if (FormatNames.STRICT_WEEK_DATE_TIME.matches(input)) {
        return STRICT_WEEK_DATE_TIME;
    } else if (FormatNames.STRICT_WEEK_DATE_TIME_NO_MILLIS.matches(input)) {
        return STRICT_WEEK_DATE_TIME_NO_MILLIS;
    } else if (FormatNames.STRICT_WEEKYEAR.matches(input)) {
        return STRICT_WEEKYEAR;
    } else if (FormatNames.STRICT_WEEKYEAR_WEEK.matches(input)) {
        return STRICT_WEEKYEAR_WEEK;
    } else if (FormatNames.STRICT_WEEKYEAR_WEEK_DAY.matches(input)) {
        return STRICT_WEEKYEAR_WEEK_DAY;
    } else if (FormatNames.STRICT_YEAR.matches(input)) {
        return STRICT_YEAR;
    } else if (FormatNames.STRICT_YEAR_MONTH.matches(input)) {
        return STRICT_YEAR_MONTH;
    } else if (FormatNames.STRICT_YEAR_MONTH_DAY.matches(input)) {
        return STRICT_YEAR_MONTH_DAY;
    } else {
        // not a known name: compile the input as a custom pattern
        try {
            return newDateFormatter(
                input,
                new DateTimeFormatterBuilder().appendPattern(input).toFormatter(Locale.ROOT).withResolverStyle(ResolverStyle.STRICT)
            );
        } catch (IllegalArgumentException | ClassCastException e) {
            // ClassCastException catches this bug https://bugs.openjdk.org/browse/JDK-8193877
            throw new IllegalArgumentException("Invalid format: [" + input + "]: " + e.getMessage(), e);
        }
    }
}
private static final LocalDate LOCALDATE_EPOCH = LocalDate.of(1970, 1, 1);
/**
* Convert a temporal accessor to a zoned date time object - as performant as possible.
* The .from() methods from the JDK are throwing exceptions when for example ZonedDateTime.from(accessor)
* or Instant.from(accessor). This results in a huge performance penalty and should be prevented
* This method prevents exceptions by querying the accessor for certain capabilities
* and then act on it accordingly
*
* This action assumes that we can reliably fall back to some defaults if not all parts of a
* zoned date time are set
*
* - If a zoned date time is passed, it is returned
* - If no timezone is found, ZoneOffset.UTC is used
* - If we find a time and a date, converting to a ZonedDateTime is straight forward,
* no defaults will be applied
* - If an accessor only containing of seconds and nanos is found (like epoch_millis/second)
* an Instant is created out of that, that becomes a ZonedDateTime with a time zone
* - If no time is given, the start of the day is used
* - If no month of the year is found, the first day of the year is used
* - If an iso based weekyear is found, but not week is specified, the first monday
* of the new year is chosen (retaining BWC with joda time)
* - If an iso based weekyear is found and an iso based weekyear week, the start
* of the day is used
*
* @param accessor The accessor returned from a parser
*
* @return The converted zoned date time
*/
public static ZonedDateTime from(TemporalAccessor accessor) {
return from(accessor, Locale.ROOT, ZoneOffset.UTC);
}
public static ZonedDateTime from(TemporalAccessor accessor, Locale locale) {
return from(accessor, locale, ZoneOffset.UTC);
}
public static ZonedDateTime from(TemporalAccessor accessor, Locale locale, ZoneId defaultZone) {
if (accessor instanceof ZonedDateTime) {
return (ZonedDateTime) accessor;
}
ZoneId zoneId = accessor.query(TemporalQueries.zone());
if (zoneId == null) {
zoneId = defaultZone;
}
LocalDate localDate = accessor.query(LOCAL_DATE_QUERY);
LocalTime localTime = accessor.query(TemporalQueries.localTime());
boolean isLocalDateSet = localDate != null;
boolean isLocalTimeSet = localTime != null;
// the first two cases are the most common, so this allows us to exit early when parsing dates
WeekFields localeWeekFields;
if (isLocalDateSet && isLocalTimeSet) {
return of(localDate, localTime, zoneId);
} else if (accessor.isSupported(ChronoField.INSTANT_SECONDS) && accessor.isSupported(NANO_OF_SECOND)) {
return Instant.from(accessor).atZone(zoneId);
} else if (isLocalDateSet) {
return localDate.atStartOfDay(zoneId);
} else if (isLocalTimeSet) {
return of(getLocalDate(accessor, locale), localTime, zoneId);
} else if (accessor.isSupported(ChronoField.YEAR) || accessor.isSupported(ChronoField.YEAR_OF_ERA)) {
if (accessor.isSupported(MONTH_OF_YEAR)) {
return getFirstOfMonth(accessor).atStartOfDay(zoneId);
} else {
int year = getYear(accessor);
return Year.of(year).atDay(1).atStartOfDay(zoneId);
}
} else if (accessor.isSupported(MONTH_OF_YEAR)) {
// missing year, falling back to the epoch and then filling
return getLocalDate(accessor, locale).atStartOfDay(zoneId);
} else if (accessor.isSupported(WeekFields.ISO.weekBasedYear())) {
return localDateFromWeekBasedDate(accessor, locale, WeekFields.ISO).atStartOfDay(zoneId);
} else if (accessor.isSupported((localeWeekFields = WeekFields.of(locale)).weekBasedYear())) {
return localDateFromWeekBasedDate(accessor, locale, localeWeekFields).atStartOfDay(zoneId);
}
// we should not reach this piece of code, everything being parsed we should be able to
// convert to a zoned date time! If not, we have to extend the above methods
throw new IllegalArgumentException("temporal accessor [" + accessor + "] cannot be converted to zoned date time");
}
private static LocalDate localDateFromWeekBasedDate(TemporalAccessor accessor, Locale locale, WeekFields weekFields) {
if (accessor.isSupported(weekFields.weekOfWeekBasedYear())) {
return LocalDate.ofEpochDay(0)
.with(weekFields.weekBasedYear(), accessor.get(weekFields.weekBasedYear()))
.with(weekFields.weekOfWeekBasedYear(), accessor.get(weekFields.weekOfWeekBasedYear()))
.with(TemporalAdjusters.previousOrSame(weekFields.getFirstDayOfWeek()));
} else {
return LocalDate.ofEpochDay(0)
.with(weekFields.weekBasedYear(), accessor.get(weekFields.weekBasedYear()))
.with(TemporalAdjusters.previousOrSame(weekFields.getFirstDayOfWeek()));
}
}
/**
* extending the java.time.temporal.TemporalQueries.LOCAL_DATE implementation to also create local dates
* when YearOfEra was used instead of Year.
* This is to make it compatible with Joda behaviour
*/
static final TemporalQuery<LocalDate> LOCAL_DATE_QUERY = new TemporalQuery<LocalDate>() {
@Override
public LocalDate queryFrom(TemporalAccessor temporal) {
if (temporal.isSupported(ChronoField.EPOCH_DAY)) {
return LocalDate.ofEpochDay(temporal.getLong(ChronoField.EPOCH_DAY));
} else if (temporal.isSupported(ChronoField.YEAR_OF_ERA) || temporal.isSupported(ChronoField.YEAR)) {
int year = getYear(temporal);
if (temporal.isSupported(ChronoField.MONTH_OF_YEAR) && temporal.isSupported(ChronoField.DAY_OF_MONTH)) {
return LocalDate.of(year, temporal.get(ChronoField.MONTH_OF_YEAR), temporal.get(ChronoField.DAY_OF_MONTH));
} else if (temporal.isSupported(DAY_OF_YEAR)) {
return LocalDate.ofYearDay(year, temporal.get(DAY_OF_YEAR));
}
}
return null;
}
@Override
public String toString() {
return "LocalDate";
}
};
private static LocalDate getLocalDate(TemporalAccessor accessor, Locale locale) {
WeekFields localeWeekFields;
if (accessor.isSupported(WeekFields.ISO.weekBasedYear())) {
return localDateFromWeekBasedDate(accessor, locale, WeekFields.ISO);
} else if (accessor.isSupported((localeWeekFields = WeekFields.of(locale)).weekBasedYear())) {
return localDateFromWeekBasedDate(accessor, locale, localeWeekFields);
} else if (accessor.isSupported(MONTH_OF_YEAR)) {
int year = getYear(accessor);
if (accessor.isSupported(DAY_OF_MONTH)) {
return LocalDate.of(year, accessor.get(MONTH_OF_YEAR), accessor.get(DAY_OF_MONTH));
} else {
return LocalDate.of(year, accessor.get(MONTH_OF_YEAR), 1);
}
}
return LOCALDATE_EPOCH;
}
private static int getYear(TemporalAccessor accessor) {
if (accessor.isSupported(ChronoField.YEAR)) {
return accessor.get(ChronoField.YEAR);
}
if (accessor.isSupported(ChronoField.YEAR_OF_ERA)) {
return accessor.get(ChronoField.YEAR_OF_ERA);
}
return 1970;
}
@SuppressForbidden(reason = "ZonedDateTime.of is fine here")
private static ZonedDateTime of(LocalDate localDate, LocalTime localTime, ZoneId zoneId) {
return ZonedDateTime.of(localDate, localTime, zoneId);
}
@SuppressForbidden(reason = "LocalDate.of is fine here")
private static LocalDate getFirstOfMonth(TemporalAccessor accessor) {
return LocalDate.of(getYear(accessor), accessor.get(MONTH_OF_YEAR), 1);
}
}
| DateFormatters |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/parsetools/JsonParserTest.java | {
"start": 1458,
"end": 21322
} | class ____ {
@Test
public void testParseEmptyObject() {
JsonParser parser = JsonParser.newParser();
AtomicInteger status = new AtomicInteger();
parser.handler(event -> {
assertNull(event.fieldName());
assertNull(event.value());
switch (status.getAndIncrement()) {
case 0:
Assert.assertEquals(JsonEventType.START_OBJECT, event.type());
break;
case 1:
assertEquals(JsonEventType.END_OBJECT, event.type());
break;
default:
fail();
}
});
AtomicInteger count = new AtomicInteger();
parser.endHandler(v -> count.incrementAndGet());
parser.handle(Buffer.buffer("{}"));
assertEquals(2, status.get());
assertEquals(0, count.get());
parser.end();
assertEquals(1, count.get());
assertEquals(2, status.get());
try {
parser.end();
fail();
} catch (IllegalStateException ignore) {
// expected
}
}
@Test
public void testParseEmptyArray() {
JsonParser parser = JsonParser.newParser();
AtomicInteger status = new AtomicInteger();
parser.handler(event -> {
assertNull(event.fieldName());
assertNull(event.value());
switch (status.getAndIncrement()) {
case 0:
assertEquals(JsonEventType.START_ARRAY, event.type());
break;
case 1:
assertEquals(JsonEventType.END_ARRAY, event.type());
break;
default:
fail();
}
});
parser.handle(Buffer.buffer("[]"));
assertEquals(2, status.get());
}
@Test
public void parseUnfinishedThrowingException() {
StringBuilder events = new StringBuilder();
JsonParser parser = JsonParser.newParser();
parser.handler(e -> events.append("json,"));
parser.endHandler(v -> events.append("end,"));
parser.handle(Buffer.buffer("{\"un\":\"finished\""));
try {
parser.end();
fail();
} catch (DecodeException expected) {
}
assertEquals("json,json,", events.toString());
}
@Test
public void parseUnfinishedExceptionHandler() {
StringBuilder events = new StringBuilder();
JsonParser parser = JsonParser.newParser();
parser.handler(e -> events.append("json,"));
parser.endHandler(v -> events.append("end,"));
parser.exceptionHandler(e -> events.append("exception,"));
parser.handle(Buffer.buffer("{\"un\":\"finished\""));
parser.end();
assertEquals("json,json,exception,end,", events.toString());
}
@Test
public void testParseWithErrors() {
Buffer data = Buffer.buffer("{\"foo\":\"foo_value\"},{\"bar\":\"bar_value\"},{\"juu\":\"juu_value\"}");
JsonParser parser = JsonParser.newParser();
List<JsonObject> objects = new ArrayList<>();
List<Throwable> errors = new ArrayList<>();
AtomicInteger endCount = new AtomicInteger();
parser.objectValueMode()
.handler(event -> objects.add(event.objectValue()))
.exceptionHandler(errors::add)
.endHandler(v -> endCount.incrementAndGet());
parser.write(data);
assertEquals(3, objects.size());
List<JsonObject> expected = Arrays.asList(
new JsonObject().put("foo", "foo_value"),
new JsonObject().put("bar", "bar_value"),
new JsonObject().put("juu", "juu_value")
);
assertEquals(expected, objects);
assertEquals(2, errors.size());
assertEquals(0, endCount.get());
objects.clear();
errors.clear();
parser.end();
assertEquals(Collections.emptyList(), objects);
assertEquals(Collections.emptyList(), errors);
assertEquals(1, endCount.get());
}
@Test
public void testParseObjectValue() {
JsonParser parser = JsonParser.newParser();
AtomicInteger status = new AtomicInteger();
parser.objectValueMode();
JsonObject expected = new JsonObject()
.put("number", 3)
.put("floating", 3.5d)
.put("true", true)
.put("false", false)
.put("string", "s")
.put("object", new JsonObject().put("foo", "bar"))
.put("array", new JsonArray().add(0).add(1).add(2))
.putNull("null")
.put("bytes", new byte[]{1, 2, 3});
parser.handler(event -> {
assertEquals(0, status.getAndIncrement());
assertEquals(JsonEventType.VALUE, event.type());
assertEquals(expected, event.value());
});
parser.handle(expected.toBuffer());
assertEquals(1, status.get());
}
@Test
public void testParseArrayValue() {
JsonParser parser = JsonParser.newParser();
AtomicInteger status = new AtomicInteger();
parser.arrayValueMode();
JsonArray expected = new JsonArray()
.add(3)
.add(3.5d)
.add(true)
.add(false)
.add("s")
.addNull()
.add(new JsonObject().put("foo", "bar"))
.add(new JsonArray().add(0).add(1).add(2))
.add(new byte[]{1, 2, 3});
parser.handler(event -> {
assertEquals(expected, event.value());
assertEquals(0, status.getAndIncrement());
});
parser.handle(expected.toBuffer());
assertEquals(1, status.get());
}
private void assertThrowCCE(JsonEvent event, Consumer<JsonEvent>... checks) {
for (Consumer<JsonEvent> check : checks) {
try {
check.accept(event);
fail();
} catch (ClassCastException ignore) {
// Expected
}
}
}
@Test
public void testStringValue() {
testValue("\"bar\"", event -> {
assertEquals("bar", event.value());
assertFalse(event.isArray());
assertFalse(event.isObject());
assertFalse(event.isNumber());
assertFalse(event.isNull());
assertFalse(event.isBoolean());
assertTrue(event.isString());
assertEquals("bar", event.stringValue());
assertThrowCCE(event,
JsonEvent::integerValue,
JsonEvent::longValue,
JsonEvent::floatValue,
JsonEvent::doubleValue,
JsonEvent::booleanValue,
JsonEvent::objectValue,
JsonEvent::arrayValue);
try {
event.instantValue();
fail();
} catch (DateTimeParseException ignore) {
// Expected
}
});
}
@Test
public void testInstantValue() {
Instant value = Instant.now();
String encoded = ISO_INSTANT.format(value);
testValue('"' + encoded + '"', event -> {
assertEquals(encoded, event.value());
assertFalse(event.isArray());
assertFalse(event.isObject());
assertFalse(event.isNumber());
assertFalse(event.isNull());
assertFalse(event.isBoolean());
assertTrue(event.isString());
assertEquals(encoded, event.stringValue());
assertEquals(value, event.instantValue());
assertThrowCCE(event,
JsonEvent::integerValue,
JsonEvent::longValue,
JsonEvent::floatValue,
JsonEvent::doubleValue,
JsonEvent::booleanValue,
JsonEvent::objectValue,
JsonEvent::arrayValue);
});
}
@Test
public void testBinaryValue() {
byte[] value = TestUtils.randomByteArray(10);
String encoded = Base64.getUrlEncoder().withoutPadding().encodeToString(value);
testValue('"' + encoded + '"', event -> {
assertEquals(encoded, event.value());
assertFalse(event.isArray());
assertFalse(event.isObject());
assertFalse(event.isNumber());
assertFalse(event.isNull());
assertFalse(event.isBoolean());
assertTrue(event.isString());
assertEquals(encoded, event.stringValue());
assertEquals(Buffer.buffer(value), event.binaryValue());
assertThrowCCE(event,
JsonEvent::integerValue,
JsonEvent::longValue,
JsonEvent::floatValue,
JsonEvent::doubleValue,
JsonEvent::booleanValue,
JsonEvent::objectValue,
JsonEvent::arrayValue);
try {
event.instantValue();
fail();
} catch (DateTimeParseException ignore) {
// Expected
}
});
}
@Test
public void testNullValue() {
testValue("null", event -> {
assertEquals(null, event.value());
assertFalse(event.isArray());
assertFalse(event.isObject());
assertFalse(event.isNumber());
assertTrue(event.isNull());
assertFalse(event.isBoolean());
assertFalse(event.isString());
assertNull(event.integerValue());
assertNull(event.longValue());
assertNull(event.floatValue());
assertNull(event.doubleValue());
assertNull(event.binaryValue());
assertNull(event.instantValue());
assertNull(event.objectValue());
assertNull(event.arrayValue());
assertNull(event.stringValue());
assertNull(event.binaryValue());
});
}
@Test
public void testLongValue() {
testValue("567", event -> {
assertEquals(567L, event.value());
assertFalse(event.isArray());
assertFalse(event.isObject());
assertTrue(event.isNumber());
assertFalse(event.isNull());
assertFalse(event.isBoolean());
assertFalse(event.isString());
assertEquals(567, (long)event.integerValue());
assertEquals(567L, (long)event.longValue());
assertEquals(567f, event.floatValue(), 0.01f);
assertEquals(567d, event.doubleValue(), 0.01d);
assertThrowCCE(event,
JsonEvent::stringValue,
JsonEvent::booleanValue,
JsonEvent::binaryValue,
JsonEvent::instantValue,
JsonEvent::objectValue,
JsonEvent::arrayValue);
});
}
@Test
public void testDoubleValue() {
testValue("567.45", event -> {
assertEquals(567.45d, event.value());
assertFalse(event.isArray());
assertFalse(event.isObject());
assertTrue(event.isNumber());
assertFalse(event.isNull());
assertFalse(event.isBoolean());
assertFalse(event.isString());
assertEquals(567, (long)event.integerValue());
assertEquals(567L, (long)event.longValue());
assertEquals(567.45f, (float)event.floatValue(), 0.01f);
assertEquals(567.45d, (double)event.doubleValue(), 0.01d);
assertThrowCCE(event,
JsonEvent::stringValue,
JsonEvent::booleanValue,
JsonEvent::binaryValue,
JsonEvent::instantValue,
JsonEvent::objectValue,
JsonEvent::arrayValue);
});
}
@Test
public void testBigInteger() {
String expected = "18446744073709551615";
testValue(expected, event -> {
BigInteger big = new BigInteger(expected);
assertEquals(big, event.value());
assertFalse(event.isArray());
assertFalse(event.isObject());
assertTrue(event.isNumber());
assertFalse(event.isNull());
assertFalse(event.isBoolean());
assertFalse(event.isString());
assertEquals(big.intValue(), (int)event.integerValue());
assertEquals(big.longValue(), (long)event.longValue());
assertEquals(big.floatValue(), event.floatValue(), 0.01f);
assertEquals(big.doubleValue(), event.doubleValue(), 0.01d);
assertThrowCCE(event,
JsonEvent::stringValue,
JsonEvent::booleanValue,
JsonEvent::binaryValue,
JsonEvent::instantValue,
JsonEvent::objectValue,
JsonEvent::arrayValue);
});
}
@Test
public void testBooleanValue() {
testValue("true", event -> {
assertEquals(true, event.value());
assertFalse(event.isArray());
assertFalse(event.isObject());
assertFalse(event.isNumber());
assertFalse(event.isNull());
assertTrue(event.isBoolean());
assertFalse(event.isString());
assertTrue(event.booleanValue());
assertThrowCCE(event,
JsonEvent::integerValue,
JsonEvent::longValue,
JsonEvent::floatValue,
JsonEvent::doubleValue,
JsonEvent::stringValue,
JsonEvent::binaryValue,
JsonEvent::instantValue,
JsonEvent::objectValue,
JsonEvent::arrayValue);
});
testValue("false", event -> {
assertEquals(false, event.value());
assertFalse(event.isArray());
assertFalse(event.isObject());
assertFalse(event.isNumber());
assertFalse(event.isNull());
assertTrue(event.isBoolean());
assertFalse(event.isString());
assertFalse(event.booleanValue());
assertThrowCCE(event,
JsonEvent::integerValue,
JsonEvent::longValue,
JsonEvent::floatValue,
JsonEvent::doubleValue,
JsonEvent::stringValue,
JsonEvent::binaryValue,
JsonEvent::instantValue,
JsonEvent::objectValue,
JsonEvent::arrayValue);
});
}
private void testValue(String jsonValue, Handler<JsonEvent> checker) {
JsonParser parser = JsonParser.newParser();
AtomicInteger status = new AtomicInteger();
parser.handler(event -> {
switch (status.getAndIncrement()) {
case 0:
assertEquals(JsonEventType.START_OBJECT, event.type());
assertNull(event.fieldName());
assertNull(event.value());
break;
case 1:
assertEquals(JsonEventType.VALUE, event.type());
checker.handle(event);
assertEquals("foo", event.fieldName());
break;
case 2:
assertEquals(JsonEventType.END_OBJECT, event.type());
assertNull(event.fieldName());
assertNull(event.value());
break;
}
});
parser.handle(Buffer.buffer("{\"foo\":" + jsonValue + "}"));
assertEquals(3, status.get());
}
@Test
public void testParseObjectValueMembers() {
JsonParser parser = JsonParser.newParser();
AtomicInteger status = new AtomicInteger();
parser.handler(event -> {
switch (status.getAndIncrement()) {
case 0:
assertEquals(JsonEventType.START_OBJECT, event.type());
parser.objectValueMode();
break;
case 1:
assertEquals(JsonEventType.VALUE, event.type());
assertTrue(event.isObject());
assertEquals(new JsonObject(), event.value());
assertEquals("foo", event.fieldName());
break;
case 2:
assertEquals("bar", event.fieldName());
assertTrue(event.isObject());
assertEquals(JsonEventType.VALUE, event.type());
assertEquals(new JsonObject(), event.value());
break;
case 3:
assertEquals(JsonEventType.END_OBJECT, event.type());
break;
default:
fail();
break;
}
});
parser.handle(Buffer.buffer("{\"foo\":{},\"bar\":{}}"));
assertEquals(4, status.get());
}
@Test
public void testParseObjectValueList() {
JsonParser parser = JsonParser.newParser();
AtomicInteger status = new AtomicInteger();
parser.objectValueMode();
parser.handler(event -> {
switch (status.getAndIncrement()) {
case 0:
assertEquals(JsonEventType.START_ARRAY, event.type());
break;
case 1:
assertEquals(JsonEventType.VALUE, event.type());
assertTrue(event.isObject());
assertEquals(new JsonObject().put("one", 1), event.value());
break;
case 2:
assertEquals(JsonEventType.VALUE, event.type());
assertTrue(event.isObject());
assertEquals(new JsonObject().put("two", 2), event.value());
break;
case 3:
assertEquals(JsonEventType.VALUE, event.type());
assertTrue(event.isObject());
assertEquals(new JsonObject().put("three", 3), event.value());
break;
case 4:
assertEquals(JsonEventType.END_ARRAY, event.type());
break;
}
});
parser.handle(Buffer.buffer("[" +
"{\"one\":1}," +
"{\"two\":2}," +
"{\"three\":3}" +
"]"));
assertEquals(5, status.get());
}
@Test
public void testObjectHandlerScope() {
JsonParser parser = JsonParser.newParser();
List<JsonObject> objects = new ArrayList<>();
AtomicInteger ends = new AtomicInteger();
AtomicBoolean obj = new AtomicBoolean();
parser.handler(event -> {
switch (event.type()) {
case START_OBJECT:
parser.objectValueMode();
break;
case VALUE:
if (obj.get()) {
objects.add((JsonObject) event.value());
}
break;
case END_OBJECT:
ends.incrementAndGet();
obj.set(true);
break;
}
});
parser.handle(Buffer.buffer("[" +
"{\"one\":1}," +
"{\"two\":2}," +
"{\"three\":3}" +
"]"));
assertEquals(1, ends.get());
assertEquals(Arrays.asList(new JsonObject().put("two", 2), new JsonObject().put("three", 3)), objects);
}
@Test
public void testParseTopValues() {
Map<String, Object> tests = new HashMap<>();
tests.put("\"a-string\"", "a-string");
tests.put("true", true);
tests.put("false", false);
tests.put("1234", 1234L);
tests.put("" + Long.MAX_VALUE, Long.MAX_VALUE);
tests.forEach((test, expected) -> {
JsonParser parser = JsonParser.newParser();
List<Object> values = new ArrayList<>();
parser.handler(event -> values.add(event.value()));
parser.handle(Buffer.buffer(test));
parser.end();
assertEquals(Collections.singletonList(expected), values);
});
}
@Test
public void testObjectMapping() {
JsonParser parser = JsonParser.newParser();
List<Object> values = new ArrayList<>();
parser.objectValueMode();
parser.pause();
parser.handler(event -> values.add(event.mapTo(TheObject.class)));
parser.handle(Buffer.buffer("{\"f\":\"the-value-1\"}{\"f\":\"the-value-2\"}"));
assertEquals(Collections.emptyList(), values);
parser.fetch(1);
assertEquals(Collections.singletonList(new TheObject("the-value-1")), values);
parser.fetch(1);
assertEquals(Arrays.asList(new TheObject("the-value-1"), new TheObject("the-value-2")), values);
}
@Test
public void testObjectMappingError() {
List<Object> values = new ArrayList<>();
List<Throwable> errors = new ArrayList<>();
JsonParser.newParser().objectValueMode().handler(event -> values.add(event.mapTo(TheObject.class))).exceptionHandler(errors::add).write(Buffer.buffer("{\"destination\":\"unknown\"}")).end();
assertEquals(Collections.emptyList(), values);
assertEquals(1, errors.size());
try {
JsonParser.newParser().objectValueMode().handler(event -> values.add(event.mapTo(TheObject.class))).write(Buffer.buffer("{\"destination\":\"unknown\"}")).end();
fail();
} catch (DecodeException expected) {
}
assertEquals(Collections.emptyList(), values);
assertEquals(1, errors.size());
}
@Test
public void testArrayMapping() {
JsonParser parser = JsonParser.newParser();
List<Object> values = new ArrayList<>();
parser.arrayValueMode();
parser.handler(event -> {
values.add(event.mapTo(LinkedList.class));
});
parser.handle(new JsonArray().add(0).add(1).add(2).toBuffer());
assertEquals(Collections.singletonList(Arrays.asList(0L, 1L, 2L)), values);
assertEquals(LinkedList.class, values.get(0).getClass());
}
@Test
public void testArrayMappingError() {
List<Object> values = new ArrayList<>();
List<Throwable> errors = new ArrayList<>();
JsonParser.newParser().arrayValueMode().handler(event -> values.add(event.mapTo(TheObject.class))).exceptionHandler(errors::add).write(Buffer.buffer("[]")).end();
assertEquals(Collections.emptyList(), values);
assertEquals(1, errors.size());
try {
JsonParser.newParser().arrayValueMode().handler(event -> {
values.add(event.mapTo(TheObject.class));
}).write(Buffer.buffer("[]")).end();
fail();
} catch (DecodeException expected) {
}
assertEquals(Collections.emptyList(), values);
assertEquals(1, errors.size());
}
public static | JsonParserTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/pool/PSCacheTest4.java | {
"start": 768,
"end": 5320
} | class ____ extends TestCase {
private DruidDataSource dataSource;
protected void setUp() throws Exception {
dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:x1");
dataSource.setPoolPreparedStatements(true);
dataSource.setMaxOpenPreparedStatements(50);
dataSource.setFilters("log4j");
}
protected void tearDown() throws Exception {
JdbcUtils.close(dataSource);
}
public void test_pscache() throws Exception {
DruidPooledConnection conn = dataSource.getConnection();
DruidConnectionHolder holder = conn.getConnectionHolder();
PreparedStatementPool stmtPool = holder.getStatementPool();
final String sql_0 = "select 0";
final String sql_1 = "select 1";
assertEquals(0, stmtPool.size());
PreparedStatementHolder stmtHoler_0;
PreparedStatementHolder stmtHoler_1_A;
PreparedStatementHolder stmtHoler_1_B;
PreparedStatementHolder stmtHoler_1_C;
DruidPooledPreparedStatement stmt_0;
DruidPooledPreparedStatement stmt_1_A;
DruidPooledPreparedStatement stmt_1_B;
DruidPooledPreparedStatement stmt_1_C;
DruidPooledPreparedStatement stmt_1_D;
DruidPooledPreparedStatement stmt_1_E;
DruidPooledPreparedStatement stmt_1_F;
DruidPooledPreparedStatement stmt_1_G;
DruidPooledPreparedStatement stmt_1_H;
DruidPooledResultSet rs_0;
DruidPooledResultSet rs_1_A;
DruidPooledResultSet rs_1_B;
DruidPooledResultSet rs_1_C;
DruidPooledResultSet rs_1_D;
DruidPooledResultSet rs_1_E;
DruidPooledResultSet rs_1_F;
DruidPooledResultSet rs_1_G;
DruidPooledResultSet rs_1_H;
stmt_0 = (DruidPooledPreparedStatement) conn.prepareStatement(sql_0);
rs_0 = (DruidPooledResultSet) stmt_0.executeQuery();
assertTrue(stmt_0.getPreparedStatementHolder().isInUse());
stmt_1_A = (DruidPooledPreparedStatement) conn.prepareStatement(sql_1);
rs_1_A = (DruidPooledResultSet) stmt_1_A.executeQuery();
assertTrue(stmt_0.getPreparedStatementHolder().isInUse());
assertTrue(stmt_1_A.getPreparedStatementHolder().isInUse());
stmt_1_B = (DruidPooledPreparedStatement) conn.prepareStatement(sql_1);
rs_1_B = (DruidPooledResultSet) stmt_1_B.executeQuery();
rs_1_B.close();
stmt_1_B.close();
assertTrue(stmt_0.getPreparedStatementHolder().isInUse());
assertTrue(stmt_1_A.getPreparedStatementHolder().isInUse());
assertFalse(stmt_1_B.getPreparedStatementHolder().isInUse());
stmt_1_C = (DruidPooledPreparedStatement) conn.prepareStatement(sql_1);
rs_1_C = (DruidPooledResultSet) stmt_1_C.executeQuery();
rs_1_C.close();
stmt_1_C.close();
assertTrue(stmt_0.getPreparedStatementHolder().isInUse());
assertTrue(stmt_1_A.getPreparedStatementHolder().isInUse());
assertFalse(stmt_1_B.getPreparedStatementHolder().isInUse());
assertFalse(stmt_1_C.getPreparedStatementHolder().isInUse());
stmt_1_D = (DruidPooledPreparedStatement) conn.prepareStatement(sql_1);
rs_1_D = (DruidPooledResultSet) stmt_1_D.executeQuery();
rs_1_D.close();
stmt_1_D.close();
assertTrue(stmt_0.getPreparedStatementHolder().isInUse());
assertTrue(stmt_1_A.getPreparedStatementHolder().isInUse());
assertFalse(stmt_1_B.getPreparedStatementHolder().isInUse());
assertFalse(stmt_1_C.getPreparedStatementHolder().isInUse());
assertFalse(stmt_1_D.getPreparedStatementHolder().isInUse());
stmt_1_E = (DruidPooledPreparedStatement) conn.prepareStatement(sql_1);
rs_1_E = (DruidPooledResultSet) stmt_1_E.executeQuery();
rs_1_E.close();
stmt_1_E.close();
rs_1_A.close();
stmt_1_A.close();
stmt_1_F = (DruidPooledPreparedStatement) conn.prepareStatement(sql_1);
rs_1_F = (DruidPooledResultSet) stmt_1_F.executeQuery();
rs_1_F.close();
stmt_1_F.close();
stmt_1_G = (DruidPooledPreparedStatement) conn.prepareStatement(sql_1);
rs_1_G = (DruidPooledResultSet) stmt_1_G.executeQuery();
stmt_1_H = (DruidPooledPreparedStatement) conn.prepareStatement(sql_1);
rs_1_H = (DruidPooledResultSet) stmt_1_H.executeQuery();
rs_1_H.close();
stmt_1_H.close();
rs_1_G.close();
stmt_1_G.close();
conn.close();
}
}
| PSCacheTest4 |
java | apache__camel | components/camel-quickfix/src/generated/java/org/apache/camel/component/quickfixj/QuickfixjEndpointUriFactory.java | {
"start": 519,
"end": 2344
} | class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":configurationName";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(7);
props.add("bridgeErrorHandler");
props.add("configurationName");
props.add("exceptionHandler");
props.add("exchangePattern");
props.add("lazyCreateEngine");
props.add("lazyStartProducer");
props.add("sessionID");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
SECRET_PROPERTY_NAMES = Collections.emptySet();
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "quickfix".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "configurationName", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
| QuickfixjEndpointUriFactory |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/propertyeditors/CustomEditorTests.java | {
"start": 60590,
"end": 61009
} | class ____ {
private byte[] byteArray;
private char[] charArray;
public byte[] getByteArray() {
return byteArray;
}
public void setByteArray(byte[] byteArray) {
this.byteArray = byteArray;
}
public char[] getCharArray() {
return charArray;
}
public void setCharArray(char[] charArray) {
this.charArray = charArray;
}
}
@SuppressWarnings("unused")
private static | PrimitiveArrayBean |
java | apache__camel | components/camel-cxf/camel-cxf-spring-soap/src/test/java/org/apache/camel/component/cxf/wsrm/WSRMTest.java | {
"start": 1590,
"end": 2966
} | class ____ {
protected static int port1 = CXFTestSupport.getPort2();
protected static int port2 = CXFTestSupport.getPort3();
@Autowired
protected CamelContext context;
protected String getClientAddress() {
return "http://localhost:" + port1 + "/wsrm/HelloWorld";
}
@Test
public void testWSAddressing() throws Exception {
JaxWsProxyFactoryBean proxyFactory = new JaxWsProxyFactoryBean();
ClientFactoryBean clientBean = proxyFactory.getClientFactoryBean();
clientBean.setAddress(getClientAddress());
clientBean.setServiceClass(HelloWorld.class);
clientBean.setWsdlURL(WSRMTest.class.getResource("/HelloWorld.wsdl").toString());
SpringBusFactory bf = new SpringBusFactory();
URL cxfConfig = null;
if (getCxfClientConfig() != null) {
cxfConfig = ClassLoaderUtils.getResource(getCxfClientConfig(), this.getClass());
}
proxyFactory.setBus(bf.createBus(cxfConfig));
proxyFactory.getOutInterceptors().add(new MessageLossSimulator());
HelloWorld client = (HelloWorld) proxyFactory.create();
String result = client.sayHi("world!");
assertEquals("Hello world!", result, "Get a wrong response");
}
/**
* @return
*/
protected String getCxfClientConfig() {
return "ws_rm.xml";
}
}
| WSRMTest |
java | google__guava | android/guava-tests/test/com/google/common/primitives/LongArrayAsListTest.java | {
"start": 3360,
"end": 3683
} | class ____ extends TestLongListGenerator {
@Override
protected List<Long> create(Long[] elements) {
Long[] suffix = {Long.MIN_VALUE, Long.MAX_VALUE};
Long[] all = concat(elements, suffix);
return asList(all).subList(0, elements.length);
}
}
public static final | LongsAsListHeadSubListGenerator |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.