language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
quarkusio__quarkus
|
integration-tests/gradle/src/main/resources/custom-config-sources/src/main/java/org/acme/MyConfigSourceFactory.java
|
{
"start": 202,
"end": 419
}
|
class ____ implements ConfigSourceFactory {
@Override
public Iterable<ConfigSource> getConfigSources(ConfigSourceContext context) {
return List.of(new InMemoryConfigSource());
}
}
|
MyConfigSourceFactory
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_374/Issue374Mapper.java
|
{
"start": 572,
"end": 1185
}
|
interface ____ {
Issue374Mapper INSTANCE = Mappers.getMapper( Issue374Mapper.class );
@Mapping(target = "constant", constant = "test")
Target map(Source source, @MappingTarget Target target);
@BeanMapping( nullValueMappingStrategy = NullValueMappingStrategy.RETURN_NULL )
@Mapping(target = "constant", constant = "test")
Target map2(Source source, @MappingTarget Target target);
List<String> mapIterable(List<String> source, @MappingTarget List<String> target);
Map<Integer, String> mapMap(Map<Integer, String> source, @MappingTarget Map<Integer, String> target);
}
|
Issue374Mapper
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/remote/RemoteServiceAckTimeoutException.java
|
{
"start": 876,
"end": 1114
}
|
class ____ extends RuntimeException {
private static final long serialVersionUID = 1820133675653636587L;
public RemoteServiceAckTimeoutException(String message) {
super(message);
}
}
|
RemoteServiceAckTimeoutException
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/main/java/org/springframework/data/jpa/mapping/JpaMetamodelMappingContext.java
|
{
"start": 3807,
"end": 6162
}
|
class ____ {
private final Set<Metamodel> metamodels;
private Metamodels(Set<Metamodel> metamodels) {
this.metamodels = metamodels;
}
/**
* Returns the {@link JpaMetamodel} for the given type.
*
* @param type must not be {@literal null}.
* @return
*/
public @Nullable JpaMetamodel getMetamodel(TypeInformation<?> type) {
Metamodel metamodel = getMetamodelFor(type.getType());
return metamodel == null ? null : JpaMetamodel.of(metamodel);
}
/**
* Returns the required {@link JpaMetamodel} for the given type or throw {@link IllegalArgumentException} if the
* {@code type} is not JPA-managed.
*
* @param type must not be {@literal null}.
* @return
* @throws IllegalArgumentException if {@code type} is not JPA-managed.
* @since 2.6.1
*/
public JpaMetamodel getRequiredMetamodel(TypeInformation<?> type) {
JpaMetamodel metamodel = getMetamodel(type);
Assert.notNull(metamodel, () -> String.format("Required JpaMetamodel not found for %s", type));
return metamodel;
}
/**
* Returns whether the given type is managed by one of the underlying {@link Metamodel} instances.
*
* @param type must not be {@literal null}.
* @return
*/
public boolean isMetamodelManagedType(TypeInformation<?> type) {
return isMetamodelManagedType(type.getType());
}
/**
* Returns whether the given type is managed by one of the underlying {@link Metamodel} instances.
*
* @param type must not be {@literal null}.
* @return
*/
public boolean isMetamodelManagedType(Class<?> type) {
return getMetamodelFor(type) != null;
}
/**
* Returns the {@link Metamodel} aware of the given type.
*
* @param type must not be {@literal null}.
* @return can be {@literal null}.
*/
private @Nullable Metamodel getMetamodelFor(Class<?> type) {
for (Metamodel model : metamodels) {
try {
model.managedType(type);
return model;
} catch (IllegalArgumentException o_O) {
// Fall back to inspect *all* managed types manually as Metamodel.managedType(…) only
// returns for entities, embeddables and managed superclasses.
for (ManagedType<?> managedType : model.getManagedTypes()) {
if (type.equals(managedType.getJavaType())) {
return model;
}
}
}
}
return null;
}
}
}
|
Metamodels
|
java
|
alibaba__nacos
|
persistence/src/test/java/com/alibaba/nacos/persistence/repository/extrnal/ExternalStoragePaginationHelperImplTest.java
|
{
"start": 1475,
"end": 16504
}
|
class ____ {
private static final String QUERY_SQL = "SELECT * FROM config_info LIMIT 1";
private static final String QUERY_COUNT_SQL = "SELECT count(*) FROM config_info";
@Mock
JdbcTemplate jdbcTemplate;
@Mock
RowMapper rowMapper;
ExternalStoragePaginationHelperImpl<Object> externalStoragePaginationHelper;
@BeforeEach
void setUp() {
externalStoragePaginationHelper = new ExternalStoragePaginationHelperImpl<>(jdbcTemplate);
}
@AfterEach
void tearDown() {
}
@Test
void testFetchPageWithIllegalPageInfo() {
assertThrows(IllegalArgumentException.class,
() -> externalStoragePaginationHelper.fetchPage("", "", new Object[] {}, 0, 0, null));
assertThrows(IllegalArgumentException.class,
() -> externalStoragePaginationHelper.fetchPage("", "", new Object[] {}, 1, 0, null));
}
@Test
void testFetchPageWithoutResult() {
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, new Object[] {}, Integer.class)).thenReturn(null);
assertThrows(IllegalArgumentException.class,
() -> externalStoragePaginationHelper.fetchPage(QUERY_COUNT_SQL, QUERY_SQL, new Object[] {}, 1, 1,
null));
}
@Test
void testFetchPageOnePage() {
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, new Object[] {}, Integer.class)).thenReturn(1);
when(jdbcTemplate.query(QUERY_SQL, new Object[] {}, rowMapper)).thenReturn(
Collections.singletonList(new Object()));
Page<Object> actual = externalStoragePaginationHelper.fetchPage(QUERY_COUNT_SQL, QUERY_SQL, new Object[] {}, 1,
1, rowMapper);
assertEquals(1, actual.getTotalCount());
assertEquals(1, actual.getPageNumber());
assertEquals(1, actual.getPagesAvailable());
assertEquals(1, actual.getPageItems().size());
}
@Test
void testFetchPageMorePageFull() {
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, new Object[] {}, Integer.class)).thenReturn(2);
when(jdbcTemplate.query(QUERY_SQL, new Object[] {}, rowMapper)).thenReturn(
Collections.singletonList(new Object()));
Page<Object> actual = externalStoragePaginationHelper.fetchPage(QUERY_COUNT_SQL, QUERY_SQL, new Object[] {}, 1,
1, rowMapper);
assertEquals(2, actual.getTotalCount());
assertEquals(1, actual.getPageNumber());
assertEquals(2, actual.getPagesAvailable());
assertEquals(1, actual.getPageItems().size());
}
@Test
void testFetchPageMorePageNotFull() {
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, new Object[] {}, Integer.class)).thenReturn(3);
List<Object> pageItems = new LinkedList<>();
pageItems.add(new Object());
pageItems.add(new Object());
when(jdbcTemplate.query(QUERY_SQL, new Object[] {}, rowMapper)).thenReturn(pageItems);
Page<Object> actual = externalStoragePaginationHelper.fetchPage(QUERY_COUNT_SQL, QUERY_SQL, new Object[] {}, 1,
2, rowMapper);
assertEquals(3, actual.getTotalCount());
assertEquals(1, actual.getPageNumber());
assertEquals(2, actual.getPagesAvailable());
assertEquals(2, actual.getPageItems().size());
}
@Test
void testFetchPageMorePageNextPage() {
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, new Object[] {}, Integer.class)).thenReturn(3);
when(jdbcTemplate.query(QUERY_SQL, new Object[] {}, rowMapper)).thenReturn(
Collections.singletonList(new Object()));
Page<Object> actual = externalStoragePaginationHelper.fetchPage(QUERY_COUNT_SQL, QUERY_SQL, new Object[] {}, 2,
2, rowMapper);
assertEquals(3, actual.getTotalCount());
assertEquals(2, actual.getPageNumber());
assertEquals(2, actual.getPagesAvailable());
assertEquals(1, actual.getPageItems().size());
}
@Test
void testFetchPageMoreThanItemCount() {
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, new Object[] {}, Integer.class)).thenReturn(3);
Page<Object> actual = externalStoragePaginationHelper.fetchPage(QUERY_COUNT_SQL, QUERY_SQL, new Object[] {}, 3,
2, rowMapper);
assertEquals(3, actual.getTotalCount());
assertEquals(3, actual.getPageNumber());
assertEquals(2, actual.getPagesAvailable());
assertEquals(0, actual.getPageItems().size());
}
@Test
void testFetchPageLimitWithIllegalPageInfo() {
assertThrows(IllegalArgumentException.class,
() -> externalStoragePaginationHelper.fetchPageLimit("", "", new Object[] {}, 0, 0, null));
assertThrows(IllegalArgumentException.class,
() -> externalStoragePaginationHelper.fetchPageLimit("", "", new Object[] {}, 1, 0, null));
}
@Test
void testFetchPageLimitWithoutResult() {
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, Integer.class)).thenReturn(null);
assertThrows(IllegalArgumentException.class,
() -> externalStoragePaginationHelper.fetchPageLimit(QUERY_COUNT_SQL, QUERY_SQL, new Object[] {}, 1, 1,
null));
}
@Test
void testFetchPageLimitOnePage() {
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, Integer.class)).thenReturn(1);
when(jdbcTemplate.query(QUERY_SQL, new Object[] {}, rowMapper)).thenReturn(
Collections.singletonList(new Object()));
Page<Object> actual = externalStoragePaginationHelper.fetchPageLimit(QUERY_COUNT_SQL, QUERY_SQL,
new Object[] {}, 1, 1, rowMapper);
assertEquals(1, actual.getTotalCount());
assertEquals(1, actual.getPageNumber());
assertEquals(1, actual.getPagesAvailable());
assertEquals(1, actual.getPageItems().size());
}
@Test
void testFetchPageLimitMorePageFull() {
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, Integer.class)).thenReturn(2);
when(jdbcTemplate.query(QUERY_SQL, new Object[] {}, rowMapper)).thenReturn(
Collections.singletonList(new Object()));
Page<Object> actual = externalStoragePaginationHelper.fetchPageLimit(QUERY_COUNT_SQL, QUERY_SQL,
new Object[] {}, 1, 1, rowMapper);
assertEquals(2, actual.getTotalCount());
assertEquals(1, actual.getPageNumber());
assertEquals(2, actual.getPagesAvailable());
assertEquals(1, actual.getPageItems().size());
}
@Test
void testFetchPageLimitMorePageNotFull() {
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, Integer.class)).thenReturn(3);
List<Object> pageItems = new LinkedList<>();
pageItems.add(new Object());
pageItems.add(new Object());
when(jdbcTemplate.query(QUERY_SQL, new Object[] {}, rowMapper)).thenReturn(pageItems);
Page<Object> actual = externalStoragePaginationHelper.fetchPageLimit(QUERY_COUNT_SQL, QUERY_SQL,
new Object[] {}, 1, 2, rowMapper);
assertEquals(3, actual.getTotalCount());
assertEquals(1, actual.getPageNumber());
assertEquals(2, actual.getPagesAvailable());
assertEquals(2, actual.getPageItems().size());
}
@Test
void testFetchPageLimitMorePageNextPage() {
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, Integer.class)).thenReturn(3);
when(jdbcTemplate.query(QUERY_SQL, new Object[] {}, rowMapper)).thenReturn(
Collections.singletonList(new Object()));
Page<Object> actual = externalStoragePaginationHelper.fetchPageLimit(QUERY_COUNT_SQL, QUERY_SQL,
new Object[] {}, 2, 2, rowMapper);
assertEquals(3, actual.getTotalCount());
assertEquals(2, actual.getPageNumber());
assertEquals(2, actual.getPagesAvailable());
assertEquals(1, actual.getPageItems().size());
}
@Test
void testFetchPageLimitMoreThanItemCount() {
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, Integer.class)).thenReturn(3);
Page<Object> actual = externalStoragePaginationHelper.fetchPageLimit(QUERY_COUNT_SQL, QUERY_SQL,
new Object[] {}, 3, 2, rowMapper);
assertEquals(3, actual.getTotalCount());
assertEquals(3, actual.getPageNumber());
assertEquals(2, actual.getPagesAvailable());
assertEquals(0, actual.getPageItems().size());
}
@Test
void testFetchPageLimitWithPluginWithIllegalPageInfo() {
MapperResult countMapper = new MapperResult(QUERY_COUNT_SQL, new ArrayList<>());
MapperResult queryMapper = new MapperResult(QUERY_SQL, new ArrayList<>());
assertThrows(IllegalArgumentException.class,
() -> externalStoragePaginationHelper.fetchPageLimit(countMapper, queryMapper, 0, 0, null));
assertThrows(IllegalArgumentException.class,
() -> externalStoragePaginationHelper.fetchPageLimit(countMapper, queryMapper, 1, 0, null));
}
@Test
void testFetchPageLimitWithPluginWithoutResult() {
MapperResult countMapper = new MapperResult(QUERY_COUNT_SQL, new ArrayList<>());
MapperResult queryMapper = new MapperResult(QUERY_SQL, new ArrayList<>());
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, new Object[] {}, Integer.class)).thenReturn(null);
assertThrows(IllegalArgumentException.class,
() -> externalStoragePaginationHelper.fetchPageLimit(countMapper, queryMapper, 1, 1, null));
}
@Test
void testFetchPageLimitWithPluginPageOnePage() {
MapperResult countMapper = new MapperResult(QUERY_COUNT_SQL, new ArrayList<>());
MapperResult queryMapper = new MapperResult(QUERY_SQL, new ArrayList<>());
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, new Object[] {}, Integer.class)).thenReturn(1);
when(jdbcTemplate.query(QUERY_SQL, new Object[] {}, rowMapper)).thenReturn(
Collections.singletonList(new Object()));
Page<Object> actual = externalStoragePaginationHelper.fetchPageLimit(countMapper, queryMapper, 1, 1, rowMapper);
assertEquals(1, actual.getTotalCount());
assertEquals(1, actual.getPageNumber());
assertEquals(1, actual.getPagesAvailable());
assertEquals(1, actual.getPageItems().size());
}
@Test
void testFetchPageLimitWithPluginMorePageFull() {
MapperResult countMapper = new MapperResult(QUERY_COUNT_SQL, new ArrayList<>());
MapperResult queryMapper = new MapperResult(QUERY_SQL, new ArrayList<>());
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, new Object[] {}, Integer.class)).thenReturn(2);
when(jdbcTemplate.query(QUERY_SQL, new Object[] {}, rowMapper)).thenReturn(
Collections.singletonList(new Object()));
Page<Object> actual = externalStoragePaginationHelper.fetchPageLimit(countMapper, queryMapper, 1, 1, rowMapper);
assertEquals(2, actual.getTotalCount());
assertEquals(1, actual.getPageNumber());
assertEquals(2, actual.getPagesAvailable());
assertEquals(1, actual.getPageItems().size());
}
@Test
void testFetchPageLimitWithPluginMorePageNotFull() {
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, new Object[] {}, Integer.class)).thenReturn(3);
List<Object> pageItems = new LinkedList<>();
pageItems.add(new Object());
pageItems.add(new Object());
MapperResult countMapper = new MapperResult(QUERY_COUNT_SQL, new ArrayList<>());
MapperResult queryMapper = new MapperResult(QUERY_SQL, new ArrayList<>());
when(jdbcTemplate.query(QUERY_SQL, new Object[] {}, rowMapper)).thenReturn(pageItems);
Page<Object> actual = externalStoragePaginationHelper.fetchPageLimit(countMapper, queryMapper, 1, 2, rowMapper);
assertEquals(3, actual.getTotalCount());
assertEquals(1, actual.getPageNumber());
assertEquals(2, actual.getPagesAvailable());
assertEquals(2, actual.getPageItems().size());
}
@Test
void testFetchPageLimitWithPluginMorePageNextPage() {
MapperResult countMapper = new MapperResult(QUERY_COUNT_SQL, new ArrayList<>());
MapperResult queryMapper = new MapperResult(QUERY_SQL, new ArrayList<>());
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, new Object[] {}, Integer.class)).thenReturn(3);
when(jdbcTemplate.query(QUERY_SQL, new Object[] {}, rowMapper)).thenReturn(
Collections.singletonList(new Object()));
Page<Object> actual = externalStoragePaginationHelper.fetchPageLimit(countMapper, queryMapper, 2, 2, rowMapper);
assertEquals(3, actual.getTotalCount());
assertEquals(2, actual.getPageNumber());
assertEquals(2, actual.getPagesAvailable());
assertEquals(1, actual.getPageItems().size());
}
@Test
void testFetchPageLimitWithPluginMoreThanItemCount() {
MapperResult countMapper = new MapperResult(QUERY_COUNT_SQL, new ArrayList<>());
MapperResult queryMapper = new MapperResult(QUERY_SQL, new ArrayList<>());
when(jdbcTemplate.queryForObject(QUERY_COUNT_SQL, new Object[] {}, Integer.class)).thenReturn(3);
Page<Object> actual = externalStoragePaginationHelper.fetchPageLimit(countMapper, queryMapper, 3, 2, rowMapper);
assertEquals(3, actual.getTotalCount());
assertEquals(3, actual.getPageNumber());
assertEquals(2, actual.getPagesAvailable());
assertEquals(0, actual.getPageItems().size());
}
@Test
void testFetchPageLimitSimpleWithIllegalPageInfo() {
assertThrows(IllegalArgumentException.class,
() -> externalStoragePaginationHelper.fetchPageLimit(QUERY_SQL, new Object[] {}, 0, 0, null));
assertThrows(IllegalArgumentException.class,
() -> externalStoragePaginationHelper.fetchPageLimit(QUERY_SQL, new Object[] {}, 1, 0, null));
}
@Test
void testFetchPageLimitSimpleWithData() {
List<Object> pageItems = new LinkedList<>();
pageItems.add(new Object());
pageItems.add(new Object());
pageItems.add(new Object());
when(jdbcTemplate.query(QUERY_SQL, new Object[] {}, rowMapper)).thenReturn(pageItems);
Page<Object> actual = externalStoragePaginationHelper.fetchPageLimit(QUERY_SQL, new Object[]{}, 3, 1, rowMapper);
assertEquals(0, actual.getTotalCount());
assertEquals(0, actual.getPageNumber());
assertEquals(0, actual.getPagesAvailable());
assertEquals(3, actual.getPageItems().size());
}
@Test
void updateLimit() {
Object[] args = new Object[] {};
externalStoragePaginationHelper.updateLimit(QUERY_SQL, args);
verify(jdbcTemplate).update(QUERY_SQL, args);
}
}
|
ExternalStoragePaginationHelperImplTest
|
java
|
resilience4j__resilience4j
|
resilience4j-circularbuffer/src/jcstress/java/io/github/resilience4j/circularbuffer/concurrent/ConcurrentEvictingQueueReadWriteTest.java
|
{
"start": 1036,
"end": 1617
}
|
class ____ {
ConcurrentEvictingQueue<Integer> queue;
private Object[] array;
public ConcurrentEvictingQueueReadWriteTest() {
queue = new ConcurrentEvictingQueue<>(2);
queue.offer(1);
queue.offer(2);
}
@Actor
public void firstActor() {
queue.poll();
}
@Actor
public void secondActor() {
array = queue.toArray();
}
@Arbiter
public void arbiter(StringResult2 result) {
result.r1 = Arrays.toString(array);
result.r2 = queue.toString();
}
}
|
ConcurrentEvictingQueueReadWriteTest
|
java
|
elastic__elasticsearch
|
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/EarlyAccessCatalogJdkToolchainResolver.java
|
{
"start": 1917,
"end": 2141
}
|
class ____ extends AbstractCustomJavaToolchainResolver {
public static final String RECENT_JDK_RELEASES_CATALOG_URL = "https://builds.es-jdk-archive.com/jdks/openjdk/recent.json";
|
EarlyAccessCatalogJdkToolchainResolver
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java
|
{
"start": 779,
"end": 2146
}
|
class ____ {
private SpanQueryBuilderUtil() {
// utility class
}
/**
* Checks boost value of a nested span clause is equal to {@link AbstractQueryBuilder#DEFAULT_BOOST}.
*
* @param queryName a query name
* @param fieldName a field name
* @param parser a parser
* @param clause a span query builder
* @throws ParsingException if query boost value isn't equal to {@link AbstractQueryBuilder#DEFAULT_BOOST}
*/
static void checkNoBoost(String queryName, String fieldName, XContentParser parser, SpanQueryBuilder clause) {
try {
if (clause.boost() != AbstractQueryBuilder.DEFAULT_BOOST) {
throw new ParsingException(
parser.getTokenLocation(),
queryName
+ " ["
+ fieldName
+ "] "
+ "as a nested span clause can't have non-default boost value ["
+ clause.boost()
+ "]"
);
}
} catch (UnsupportedOperationException ignored) {
// if boost is unsupported it can't have been set
}
}
}
}
|
SpanQueryBuilderUtil
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/spi/FactoryFinderResolver.java
|
{
"start": 2108,
"end": 2365
}
|
class ____ to use
* @param resourcePath the resource path as base to lookup files within
* @return a factory finder.
*/
FactoryFinder resolveBootstrapFactoryFinder(ClassResolver classResolver, String resourcePath);
}
|
resolver
|
java
|
micronaut-projects__micronaut-core
|
http-netty/src/main/java/io/micronaut/http/netty/body/NettyByteBufMessageBodyHandler.java
|
{
"start": 2110,
"end": 4564
}
|
class ____ implements TypedMessageBodyHandler<ByteBuf>, ChunkedMessageBodyReader<ByteBuf>, ResponseBodyWriter<ByteBuf> {
@Override
public Argument<ByteBuf> getType() {
return Argument.of(ByteBuf.class);
}
@Override
public Publisher<ByteBuf> readChunked(Argument<ByteBuf> type, MediaType mediaType, Headers httpHeaders, Publisher<ByteBuffer<?>> input) {
return Flux.from(input).map(bb -> (ByteBuf) bb.asNativeBuffer());
}
@Override
public ByteBuf read(Argument<ByteBuf> type, MediaType mediaType, Headers httpHeaders, ByteBuffer<?> byteBuffer) throws CodecException {
return (ByteBuf) byteBuffer.asNativeBuffer();
}
@Override
public ByteBuf read(Argument<ByteBuf> type, MediaType mediaType, Headers httpHeaders, InputStream inputStream) throws CodecException {
try {
return Unpooled.wrappedBuffer(inputStream.readAllBytes());
} catch (IOException e) {
throw new CodecException("Failed to read InputStream", e);
}
}
@Override
public void writeTo(Argument<ByteBuf> type, MediaType mediaType, ByteBuf object, MutableHeaders outgoingHeaders, OutputStream outputStream) throws CodecException {
try {
new ByteBufInputStream(object).transferTo(outputStream);
// ByteBufInputStream#close doesn't release properly
object.release();
} catch (IOException e) {
throw new CodecException("Failed to transfer byte buffer", e);
}
}
@Override
public ByteBuffer<?> writeTo(Argument<ByteBuf> type, MediaType mediaType, ByteBuf object, MutableHeaders outgoingHeaders, ByteBufferFactory<?, ?> bufferFactory) throws CodecException {
return NettyByteBufferFactory.DEFAULT.wrap(object);
}
@Override
public @NonNull CloseableByteBody writePiece(@NonNull ByteBodyFactory bodyFactory, @NonNull HttpRequest<?> request, @NonNull HttpResponse<?> response, @NonNull Argument<ByteBuf> type, @NonNull MediaType mediaType, ByteBuf object) throws CodecException {
NettyReadBufferFactory readBufferFactory;
if (bodyFactory.readBufferFactory() instanceof NettyReadBufferFactory nrbf) {
readBufferFactory = nrbf;
} else {
readBufferFactory = NettyReadBufferFactory.of(ByteBufAllocator.DEFAULT);
}
return bodyFactory.adapt(readBufferFactory.adapt(object));
}
}
|
NettyByteBufMessageBodyHandler
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/ImportAutoConfigurationImportSelectorTests.java
|
{
"start": 10312,
"end": 10373
}
|
class ____ {
}
@SelfAnnotating
static
|
ExclusionWithoutImport
|
java
|
apache__flink
|
flink-architecture-tests/flink-architecture-tests-base/src/main/java/org/apache/flink/architecture/common/JavaFieldPredicates.java
|
{
"start": 1522,
"end": 6515
}
|
class ____ {
/**
* Match the public modifier of the {@link JavaField}.
*
* @return A {@link DescribedPredicate} returning true, if and only if the tested {@link
* JavaField} has the public modifier.
*/
public static DescribedPredicate<JavaField> isPublic() {
return DescribedPredicate.describe(
"public", field -> field.getModifiers().contains(JavaModifier.PUBLIC));
}
/**
* Match the static modifier of the {@link JavaField}.
*
* @return A {@link DescribedPredicate} returning true, if and only if the tested {@link
* JavaField} has the static modifier.
*/
public static DescribedPredicate<JavaField> isStatic() {
return DescribedPredicate.describe(
"static", field -> field.getModifiers().contains(JavaModifier.STATIC));
}
/**
* Match none static modifier of the {@link JavaField}.
*
* @return A {@link DescribedPredicate} returning true, if and only if the tested {@link
* JavaField} has no static modifier.
*/
public static DescribedPredicate<JavaField> isNotStatic() {
return DescribedPredicate.describe(
"not static", field -> !field.getModifiers().contains(JavaModifier.STATIC));
}
/**
* Match the final modifier of the {@link JavaField}.
*
* @return A {@link DescribedPredicate} returning true, if and only if the tested {@link
* JavaField} has the final modifier.
*/
public static DescribedPredicate<JavaField> isFinal() {
return DescribedPredicate.describe(
"final", field -> field.getModifiers().contains(JavaModifier.FINAL));
}
/**
* Match the {@link Class} of the {@link JavaField}.
*
* @return A {@link DescribedPredicate} returning true, if the tested {@link JavaField} has the
* same type of the given {@code clazz}.
*/
public static DescribedPredicate<JavaField> ofType(Class<?> clazz) {
return DescribedPredicate.describe(
"of type " + clazz.getSimpleName(),
field -> field.getRawType().isEquivalentTo(clazz));
}
/**
* Match the {@link Class} of the {@link JavaField}.
*
* @return A {@link DescribedPredicate} returning true, if and only if the tested {@link
* JavaField} has the same type of the given {@code clazz}.
*/
public static DescribedPredicate<JavaField> ofType(String fqClassName) {
String className = getClassSimpleNameFromFqName(fqClassName);
return DescribedPredicate.describe(
"of type " + className, field -> field.getType().getName().equals(fqClassName));
}
/**
* Match the {@link Class} of the {@link JavaField}'s assignability.
*
* @param clazz the Class type to check for assignability
* @return a {@link DescribedPredicate} that returns {@code true}, if the respective {@link
* JavaField} is assignable to the supplied {@code clazz}.
*/
public static DescribedPredicate<JavaField> isAssignableTo(Class<?> clazz) {
return DescribedPredicate.describe(
"is assignable to " + clazz.getSimpleName(),
field -> field.getRawType().isAssignableTo(clazz));
}
/**
* Match the single Annotation of the {@link JavaField}.
*
* @return A {@link DescribedPredicate} returning true, if and only if the tested {@link
* JavaField} has exactly the given Annotation {@code annotationType}.
*/
public static DescribedPredicate<JavaField> annotatedWith(
Class<? extends Annotation> annotationType) {
return matchAnnotationType(
annotationType.getSimpleName(),
annotation -> annotation.getRawType().isEquivalentTo(annotationType));
}
/**
* Match the single Annotation of the {@link JavaField}.
*
* @return A {@link DescribedPredicate} returning true, if the tested {@link JavaField} is
* annotated with the annotation identified by the fully qualified name {@code
* fqAnnotationTypeName}.
*/
public static DescribedPredicate<JavaField> annotatedWith(String fqAnnotationTypeName) {
String className = getClassSimpleNameFromFqName(fqAnnotationTypeName);
return matchAnnotationType(
className,
annotation -> annotation.getRawType().getName().equals(fqAnnotationTypeName));
}
private static DescribedPredicate<JavaField> matchAnnotationType(
String annotationName, Predicate<JavaAnnotation<JavaField>> annotationTypeEquality) {
return DescribedPredicate.describe(
"annotated with @" + annotationName,
field ->
field.getAnnotations().stream()
.map(annotationTypeEquality::test)
.reduce(false, Boolean::logicalOr));
}
}
|
JavaFieldPredicates
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/manytomany/ManyToManySQLJoinTableRestrictionTest.java
|
{
"start": 7081,
"end": 7705
}
|
class ____ {
@Id
private String name;
@ManyToMany( mappedBy = "managers" )
@SQLJoinTableRestriction( "role_name = 'manager'" )
private Set<Project> managedProjects = new HashSet<>();
@ManyToMany( mappedBy = "members" )
@SQLJoinTableRestriction( "role_name = 'member'" )
private Set<Project> otherProjects = new HashSet<>();
public User() {
}
public User(String name) {
this.name = name;
}
public String getName() {
return name;
}
public Set<Project> getManagedProjects() {
return managedProjects;
}
public Set<Project> getOtherProjects() {
return otherProjects;
}
}
}
|
User
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/internal/SessionFactoryBuilderImpl.java
|
{
"start": 1544,
"end": 13618
}
|
class ____ implements SessionFactoryBuilderImplementor {
private final MetadataImplementor metadata;
private final SessionFactoryOptionsBuilder optionsBuilder;
private final BootstrapContext bootstrapContext;
public SessionFactoryBuilderImpl(MetadataImplementor metadata, BootstrapContext bootstrapContext) {
this(
metadata,
new SessionFactoryOptionsBuilder(
metadata.getMetadataBuildingOptions().getServiceRegistry(),
bootstrapContext
),
bootstrapContext
);
}
public SessionFactoryBuilderImpl(MetadataImplementor metadata, SessionFactoryOptionsBuilder optionsBuilder, BootstrapContext context) {
this.metadata = metadata;
this.optionsBuilder = optionsBuilder;
this.bootstrapContext = context;
if ( metadata.getSqlFunctionMap() != null ) {
for ( var sqlFunctionEntry : metadata.getSqlFunctionMap().entrySet() ) {
applySqlFunction( sqlFunctionEntry.getKey(), sqlFunctionEntry.getValue() );
}
}
final var bytecodeProvider =
metadata.getMetadataBuildingOptions().getServiceRegistry()
.getService( BytecodeProvider.class );
addSessionFactoryObservers( new SessionFactoryObserverForBytecodeEnhancer( bytecodeProvider ) );
addSessionFactoryObservers( new SessionFactoryObserverForNamedQueryValidation( metadata ) );
addSessionFactoryObservers( new SessionFactoryObserverForSchemaExport( metadata ) );
addSessionFactoryObservers( new SessionFactoryObserverForRegistration() );
}
@Override
public SessionFactoryBuilder applyBeanManager(Object beanManager) {
optionsBuilder.applyBeanManager( beanManager );
return this;
}
@Override
public SessionFactoryBuilder applyValidatorFactory(Object validatorFactory) {
optionsBuilder.applyValidatorFactory( validatorFactory );
return this;
}
@Override
public SessionFactoryBuilder applyName(String sessionFactoryName) {
optionsBuilder.applySessionFactoryName( sessionFactoryName );
return this;
}
@Override
public SessionFactoryBuilder applyNameAsJndiName(boolean isJndiName) {
optionsBuilder.enableSessionFactoryNameAsJndiName( isJndiName );
return this;
}
@Override
public SessionFactoryBuilder applyAutoClosing(boolean enabled) {
optionsBuilder.enableSessionAutoClosing( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyAutoFlushing(boolean enabled) {
optionsBuilder.enableSessionAutoFlushing( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyJtaTrackingByThread(boolean enabled) {
optionsBuilder.enableJtaTrackingByThread( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyPreferUserTransactions(boolean preferUserTransactions) {
optionsBuilder.enablePreferUserTransaction( preferUserTransactions );
return this;
}
@Override
public SessionFactoryBuilder applyStatisticsSupport(boolean enabled) {
optionsBuilder.enableStatisticsSupport( enabled );
return this;
}
@Override
public SessionFactoryBuilder addSessionFactoryObservers(SessionFactoryObserver... observers) {
optionsBuilder.addSessionFactoryObservers( observers );
return this;
}
@Override
public SessionFactoryBuilder applyInterceptor(Interceptor interceptor) {
optionsBuilder.applyInterceptor( interceptor );
return this;
}
@Override
public SessionFactoryBuilder applyStatelessInterceptor(Class<? extends Interceptor> statelessInterceptorClass) {
optionsBuilder.applyStatelessInterceptor( statelessInterceptorClass );
return this;
}
@Override
public SessionFactoryBuilder applyStatelessInterceptor(Supplier<? extends Interceptor> statelessInterceptorSupplier) {
optionsBuilder.applyStatelessInterceptorSupplier( statelessInterceptorSupplier );
return this;
}
@Override
public SessionFactoryBuilder applyStatementInspector(StatementInspector statementInspector) {
optionsBuilder.applyStatementInspector( statementInspector );
return this;
}
@Override
public SessionFactoryBuilder applyCustomEntityDirtinessStrategy(CustomEntityDirtinessStrategy strategy) {
optionsBuilder.applyCustomEntityDirtinessStrategy( strategy );
return this;
}
@Override
public SessionFactoryBuilder addEntityNameResolver(EntityNameResolver... entityNameResolvers) {
optionsBuilder.addEntityNameResolvers( entityNameResolvers );
return this;
}
@Override
public SessionFactoryBuilder applyEntityNotFoundDelegate(EntityNotFoundDelegate entityNotFoundDelegate) {
optionsBuilder.applyEntityNotFoundDelegate( entityNotFoundDelegate );
return this;
}
@Override
public SessionFactoryBuilder applyIdentifierRollbackSupport(boolean enabled) {
optionsBuilder.enableIdentifierRollbackSupport( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyNullabilityChecking(boolean enabled) {
optionsBuilder.enableNullabilityChecking( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyLazyInitializationOutsideTransaction(boolean enabled) {
optionsBuilder.allowLazyInitializationOutsideTransaction( enabled );
return this;
}
@Override @Deprecated
public SessionFactoryBuilder applyTempTableDdlTransactionHandling(TempTableDdlTransactionHandling handling) {
optionsBuilder.applyTempTableDdlTransactionHandling( handling );
return this;
}
@Override
public SessionFactoryBuilder applyDelayedEntityLoaderCreations(boolean delay) {
optionsBuilder.applyDelayedEntityLoaderCreations( delay );
return this;
}
@Override
public SessionFactoryBuilder applyDefaultBatchFetchSize(int size) {
optionsBuilder.applyDefaultBatchFetchSize( size );
return this;
}
@Override
public SessionFactoryBuilder applyMaximumFetchDepth(int depth) {
optionsBuilder.applyMaximumFetchDepth( depth );
return this;
}
@Override
public SessionFactoryBuilder applySubselectFetchEnabled(boolean enabled) {
optionsBuilder.applySubselectFetchEnabled( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyDefaultNullPrecedence(Nulls nullPrecedence) {
optionsBuilder.applyDefaultNullPrecedence( nullPrecedence );
return this;
}
@Override
public SessionFactoryBuilder applyOrderingOfInserts(boolean enabled) {
optionsBuilder.enableOrderingOfInserts( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyOrderingOfUpdates(boolean enabled) {
optionsBuilder.enableOrderingOfUpdates( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyMultiTenancy(boolean enabled) {
optionsBuilder.applyMultiTenancy(enabled);
return this;
}
@Override
public SessionFactoryBuilder applyCurrentTenantIdentifierResolver(CurrentTenantIdentifierResolver<?> resolver) {
optionsBuilder.applyCurrentTenantIdentifierResolver( resolver );
return this;
}
@Override
public SessionFactoryBuilder applyTenantSchemaMapper(TenantSchemaMapper<?> mapper) {
optionsBuilder.applyTenantSchemaMapper( mapper );
return this;
}
@Override
public SessionFactoryBuilder applyNamedQueryCheckingOnStartup(boolean enabled) {
optionsBuilder.enableNamedQueryCheckingOnStartup( enabled );
return this;
}
@Override
public SessionFactoryBuilder applySecondLevelCacheSupport(boolean enabled) {
optionsBuilder.enableSecondLevelCacheSupport( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyQueryCacheSupport(boolean enabled) {
optionsBuilder.enableQueryCacheSupport( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyQueryCacheLayout(CacheLayout cacheLayout) {
optionsBuilder.applyQueryCacheLayout( cacheLayout );
return this;
}
@Override
public SessionFactoryBuilder applyTimestampsCacheFactory(TimestampsCacheFactory factory) {
optionsBuilder.applyTimestampsCacheFactory( factory );
return this;
}
@Override
public SessionFactoryBuilder applyCacheRegionPrefix(String prefix) {
optionsBuilder.applyCacheRegionPrefix( prefix );
return this;
}
@Override
public SessionFactoryBuilder applyMinimalPutsForCaching(boolean enabled) {
optionsBuilder.enableMinimalPuts( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyStructuredCacheEntries(boolean enabled) {
optionsBuilder.enabledStructuredCacheEntries( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyDirectReferenceCaching(boolean enabled) {
optionsBuilder.allowDirectReferenceCacheEntries( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyAutomaticEvictionOfCollectionCaches(boolean enabled) {
optionsBuilder.enableAutoEvictCollectionCaches( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyJdbcBatchSize(int size) {
optionsBuilder.applyJdbcBatchSize( size );
return this;
}
@Override
public SessionFactoryBuilder applyScrollableResultsSupport(boolean enabled) {
optionsBuilder.enableScrollableResultSupport( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyGetGeneratedKeysSupport(boolean enabled) {
optionsBuilder.enableGeneratedKeysSupport( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyJdbcFetchSize(int size) {
optionsBuilder.applyJdbcFetchSize( size );
return this;
}
@Override
public SessionFactoryBuilder applyConnectionHandlingMode(PhysicalConnectionHandlingMode connectionHandlingMode) {
optionsBuilder.applyConnectionHandlingMode( connectionHandlingMode );
return this;
}
@Override
public SessionFactoryBuilder applyConnectionProviderDisablesAutoCommit(boolean providerDisablesAutoCommit) {
optionsBuilder.applyConnectionProviderDisablesAutoCommit( providerDisablesAutoCommit );
return this;
}
@Override
public SessionFactoryBuilder applySqlComments(boolean enabled) {
optionsBuilder.enableCommentsSupport( enabled );
return this;
}
@Override
public SessionFactoryBuilder applySqlFunction(String registrationName, SqmFunctionDescriptor functionDescriptor) {
optionsBuilder.applySqlFunction( registrationName, functionDescriptor );
return this;
}
@Override
public SessionFactoryBuilder applyCollectionsInDefaultFetchGroup(boolean enabled) {
optionsBuilder.enableCollectionInDefaultFetchGroup( enabled );
return this;
}
@Override
public SessionFactoryBuilder allowOutOfTransactionUpdateOperations(boolean allow) {
optionsBuilder.allowOutOfTransactionUpdateOperations( allow );
return this;
}
@Override @Deprecated
public SessionFactoryBuilder enableReleaseResourcesOnCloseEnabled(boolean enable) {
optionsBuilder.enableReleaseResourcesOnClose( enable );
return this;
}
@Override
public SessionFactoryBuilder enableJpaQueryCompliance(boolean enabled) {
optionsBuilder.enableJpaQueryCompliance( enabled );
return this;
}
@Override
public SessionFactoryBuilder enableJpaOrderByMappingCompliance(boolean enabled) {
optionsBuilder.enableJpaOrderByMappingCompliance( enabled );
return this;
}
@Override
public SessionFactoryBuilder enableJpaTransactionCompliance(boolean enabled) {
optionsBuilder.enableJpaTransactionCompliance( enabled );
return this;
}
@Override @Deprecated
public SessionFactoryBuilder enableJpaCascadeCompliance(boolean enabled) {
optionsBuilder.enableJpaCascadeCompliance( enabled );
return this;
}
@Override
public SessionFactoryBuilder enableJpaClosedCompliance(boolean enabled) {
optionsBuilder.enableJpaClosedCompliance( enabled );
return this;
}
@Override
public SessionFactoryBuilder applyJsonFormatMapper(FormatMapper jsonFormatMapper) {
optionsBuilder.applyJsonFormatMapper( jsonFormatMapper );
return this;
}
@Override
public SessionFactoryBuilder applyXmlFormatMapper(FormatMapper xmlFormatMapper) {
optionsBuilder.applyXmlFormatMapper( xmlFormatMapper );
return this;
}
@Override
public void disableJtaTransactionAccess() {
optionsBuilder.disableJtaTransactionAccess();
}
@Override
public SessionFactory build() {
return instantiateSessionFactory( metadata, buildSessionFactoryOptions(), bootstrapContext );
}
@Override
public SessionFactoryOptions buildSessionFactoryOptions() {
return optionsBuilder.buildOptions();
}
}
|
SessionFactoryBuilderImpl
|
java
|
spring-projects__spring-boot
|
core/spring-boot-test/src/main/java/org/springframework/boot/test/context/AnnotatedClassFinder.java
|
{
"start": 2347,
"end": 4334
}
|
class ____ use to initiate the search
* @return the first {@link Class} annotated with the target annotation within the
* hierarchy defined by the given {@code source} or {@code null} if none is found.
*/
public @Nullable Class<?> findFromClass(Class<?> source) {
Assert.notNull(source, "'source' must not be null");
return findFromPackage(ClassUtils.getPackageName(source));
}
/**
* Find the first {@link Class} that is annotated with the target annotation, starting
* from the package defined by the given {@code source} up to the root.
* @param source the source package to use to initiate the search
* @return the first {@link Class} annotated with the target annotation within the
* hierarchy defined by the given {@code source} or {@code null} if none is found.
*/
public @Nullable Class<?> findFromPackage(String source) {
Assert.notNull(source, "'source' must not be null");
Class<?> configuration = cache.get(source);
if (configuration == null) {
configuration = scanPackage(source);
cache.put(source, configuration);
}
return configuration;
}
private @Nullable Class<?> scanPackage(String source) {
while (!source.isEmpty()) {
Set<BeanDefinition> components = this.scanner.findCandidateComponents(source);
if (!components.isEmpty()) {
Assert.state(components.size() == 1, () -> "Found multiple @" + this.annotationType.getSimpleName()
+ " annotated classes " + components);
String beanClassName = components.iterator().next().getBeanClassName();
Assert.state(beanClassName != null, "'beanClassName' must not be null");
return ClassUtils.resolveClassName(beanClassName, null);
}
source = getParentPackage(source);
}
return null;
}
private String getParentPackage(String sourcePackage) {
int lastDot = sourcePackage.lastIndexOf('.');
return (lastDot != -1) ? sourcePackage.substring(0, lastDot) : "";
}
/**
* Cache implementation based on {@link LinkedHashMap}.
*/
private static
|
to
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/util/concurrent/FuturesGetCheckedInputs.java
|
{
"start": 5466,
"end": 5638
}
|
class ____ extends Exception {
public ExceptionWithoutThrowableConstructor(String s) {
super(s);
}
}
public static final
|
ExceptionWithoutThrowableConstructor
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/inheritance/discriminator/JoinedInheritanceTest.java
|
{
"start": 2489,
"end": 2579
}
|
class ____ {
@Id
private long id;
}
@Entity(name = "EntityA")
public static
|
BaseEntity
|
java
|
apache__flink
|
flink-core-api/src/main/java/org/apache/flink/api/java/tuple/Tuple3.java
|
{
"start": 1890,
"end": 2235
}
|
class ____ extends Tuple3", then don't use
* instances of Foo in a DataStream<Tuple3> / DataSet<Tuple3>, but declare it as
* DataStream<Foo> / DataSet<Foo>.)
* </ul>
*
* @see Tuple
* @param <T0> The type of field 0
* @param <T1> The type of field 1
* @param <T2> The type of field 2
*/
@Public
public
|
Foo
|
java
|
mapstruct__mapstruct
|
processor/src/main/java/org/mapstruct/ap/internal/model/common/Type.java
|
{
"start": 3009,
"end": 7216
}
|
class ____ extends ModelElement implements Comparable<Type> {
private static final Method SEALED_PERMITTED_SUBCLASSES_METHOD;
static {
Method permittedSubclassesMethod;
try {
permittedSubclassesMethod = TypeElement.class.getMethod( "getPermittedSubclasses" );
}
catch ( NoSuchMethodException e ) {
permittedSubclassesMethod = null;
}
SEALED_PERMITTED_SUBCLASSES_METHOD = permittedSubclassesMethod;
}
private final TypeUtils typeUtils;
private final ElementUtils elementUtils;
private final TypeFactory typeFactory;
private final AccessorNamingUtils accessorNaming;
private final TypeMirror typeMirror;
private final TypeElement typeElement;
private final List<Type> typeParameters;
private final ImplementationType implementationType;
private final Type componentType;
private final Type topLevelType;
private final String packageName;
private final String name;
private final String nameWithTopLevelTypeName;
private final String qualifiedName;
private final boolean isInterface;
private final boolean isEnumType;
private final boolean isIterableType;
private final boolean isCollectionType;
private final boolean isMapType;
private final boolean isVoid;
private final boolean isStream;
private final boolean isLiteral;
private final boolean loggingVerbose;
private final List<String> enumConstants;
private final Map<String, String> toBeImportedTypes;
private final Map<String, String> notToBeImportedTypes;
private Boolean isToBeImported;
private Map<String, ReadAccessor> readAccessors = null;
private Map<String, PresenceCheckAccessor> presenceCheckers = null;
private List<ExecutableElement> allMethods = null;
private List<VariableElement> allFields = null;
private List<Element> recordComponents = null;
private List<Accessor> setters = null;
private List<Accessor> adders = null;
private List<Accessor> alternativeTargetAccessors = null;
private Type boundingBase = null;
private List<Type> boundTypes = null;
private Type boxedEquivalent = null;
private Boolean hasAccessibleConstructor;
private final Filters filters;
//CHECKSTYLE:OFF
public Type(TypeUtils typeUtils, ElementUtils elementUtils, TypeFactory typeFactory,
AccessorNamingUtils accessorNaming,
TypeMirror typeMirror, TypeElement typeElement,
List<Type> typeParameters, ImplementationType implementationType, Type componentType,
String packageName, String name, String qualifiedName,
boolean isInterface, boolean isEnumType, boolean isIterableType,
boolean isCollectionType, boolean isMapType, boolean isStreamType,
Map<String, String> toBeImportedTypes,
Map<String, String> notToBeImportedTypes,
Boolean isToBeImported,
boolean isLiteral, boolean loggingVerbose) {
this.typeUtils = typeUtils;
this.elementUtils = elementUtils;
this.typeFactory = typeFactory;
this.accessorNaming = accessorNaming;
this.typeMirror = typeMirror;
this.typeElement = typeElement;
this.typeParameters = typeParameters;
this.componentType = componentType;
this.implementationType = implementationType;
this.packageName = packageName;
this.name = name;
this.qualifiedName = qualifiedName;
this.isInterface = isInterface;
this.isEnumType = isEnumType;
this.isIterableType = isIterableType;
this.isCollectionType = isCollectionType;
this.isMapType = isMapType;
this.isStream = isStreamType;
this.isVoid = typeMirror.getKind() == TypeKind.VOID;
this.isLiteral = isLiteral;
if ( isEnumType ) {
enumConstants = new ArrayList<>();
for ( Element element : typeElement.getEnclosedElements() ) {
// #162: The check for visibility shouldn't be required, but the Eclipse compiler implementation
// exposes non-
|
Type
|
java
|
quarkusio__quarkus
|
extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ImplicitSimpleKeyCacheTest.java
|
{
"start": 3454,
"end": 3884
}
|
class ____ {
static final String CACHE_NAME = "test-cache";
@CacheResult(cacheName = CACHE_NAME)
public String cachedMethod(Object key) {
return new String();
}
@CacheInvalidate(cacheName = CACHE_NAME)
public void invalidate(Object key) {
}
@CacheInvalidateAll(cacheName = CACHE_NAME)
public void invalidateAll() {
}
}
}
|
CachedService
|
java
|
spring-projects__spring-boot
|
module/spring-boot-activemq/src/test/java/org/springframework/boot/activemq/docker/compose/ActiveMQEnvironmentTests.java
|
{
"start": 913,
"end": 1737
}
|
class ____ {
@Test
void getUserWhenHasNoActiveMqUser() {
ActiveMQEnvironment environment = new ActiveMQEnvironment(Collections.emptyMap());
assertThat(environment.getUser()).isNull();
}
@Test
void getUserWhenHasActiveMqUser() {
ActiveMQEnvironment environment = new ActiveMQEnvironment(Map.of("ACTIVEMQ_USERNAME", "me"));
assertThat(environment.getUser()).isEqualTo("me");
}
@Test
void getPasswordWhenHasNoActiveMqPassword() {
ActiveMQEnvironment environment = new ActiveMQEnvironment(Collections.emptyMap());
assertThat(environment.getPassword()).isNull();
}
@Test
void getPasswordWhenHasActiveMqPassword() {
ActiveMQEnvironment environment = new ActiveMQEnvironment(Map.of("ACTIVEMQ_PASSWORD", "secret"));
assertThat(environment.getPassword()).isEqualTo("secret");
}
}
|
ActiveMQEnvironmentTests
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/ParameterizableViewController.java
|
{
"start": 1435,
"end": 5731
}
|
class ____ extends AbstractController {
private @Nullable Object view;
private @Nullable HttpStatusCode statusCode;
private boolean statusOnly;
public ParameterizableViewController() {
super(false);
setSupportedMethods(HttpMethod.GET.name(), HttpMethod.HEAD.name());
}
/**
* Set a view name for the ModelAndView to return, to be resolved by the
* DispatcherServlet via a ViewResolver. Will override any pre-existing
* view name or View.
*/
public void setViewName(@Nullable String viewName) {
this.view = viewName;
}
/**
* Return the name of the view to delegate to, or {@code null} if using a
* View instance.
*/
public @Nullable String getViewName() {
if (this.view instanceof String viewName) {
if (getStatusCode() != null && getStatusCode().is3xxRedirection()) {
return viewName.startsWith("redirect:") ? viewName : "redirect:" + viewName;
}
else {
return viewName;
}
}
return null;
}
/**
* Set a View object for the ModelAndView to return.
* Will override any pre-existing view name or View.
* @since 4.1
*/
public void setView(View view) {
this.view = view;
}
/**
* Return the View object, or {@code null} if we are using a view name
* to be resolved by the DispatcherServlet via a ViewResolver.
* @since 4.1
*/
public @Nullable View getView() {
return (this.view instanceof View v ? v : null);
}
/**
* Configure the HTTP status code that this controller should set on the
* response.
* <p>When a "redirect:" prefixed view name is configured, there is no need
* to set this property since RedirectView will do that. However, this property
* may still be used to override the 3xx status code of {@code RedirectView}.
* For full control over redirecting provide a {@code RedirectView} instance.
* <p>If the status code is 204 and no view is configured, the request is
* fully handled within the controller.
* @since 4.1
*/
public void setStatusCode(@Nullable HttpStatusCode statusCode) {
this.statusCode = statusCode;
}
/**
* Return the configured HTTP status code or {@code null}.
* @since 4.1
*/
public @Nullable HttpStatusCode getStatusCode() {
return this.statusCode;
}
/**
* The property can be used to indicate the request is considered fully
* handled within the controller and that no view should be used for rendering.
* Useful in combination with {@link #setStatusCode}.
* <p>By default this is set to {@code false}.
* @since 4.1
*/
public void setStatusOnly(boolean statusOnly) {
this.statusOnly = statusOnly;
}
/**
* Whether the request is fully handled within the controller.
*/
public boolean isStatusOnly() {
return this.statusOnly;
}
/**
* Return a ModelAndView object with the specified view name.
* <p>The content of the {@link RequestContextUtils#getInputFlashMap
* "input" FlashMap} is also added to the model.
* @see #getViewName()
*/
@Override
protected @Nullable ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response)
throws Exception {
String viewName = getViewName();
if (getStatusCode() != null) {
if (getStatusCode().is3xxRedirection()) {
request.setAttribute(View.RESPONSE_STATUS_ATTRIBUTE, getStatusCode());
}
else {
response.setStatus(getStatusCode().value());
if (getStatusCode().equals(HttpStatus.NO_CONTENT) && viewName == null) {
return null;
}
}
}
if (isStatusOnly()) {
return null;
}
ModelAndView modelAndView = new ModelAndView();
modelAndView.addAllObjects(RequestContextUtils.getInputFlashMap(request));
if (viewName != null) {
modelAndView.setViewName(viewName);
}
else {
modelAndView.setView(getView());
}
return modelAndView;
}
@Override
public String toString() {
return "ParameterizableViewController [" + formatStatusAndView() + "]";
}
private String formatStatusAndView() {
StringBuilder sb = new StringBuilder();
if (this.statusCode != null) {
sb.append("status=").append(this.statusCode);
}
if (this.view != null) {
sb.append(sb.length() != 0 ? ", " : "");
String viewName = getViewName();
sb.append("view=").append(viewName != null ? "\"" + viewName + "\"" : this.view);
}
return sb.toString();
}
}
|
ParameterizableViewController
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/fastjson/deserializer/issues3796/bean/ObjectP.java
|
{
"start": 143,
"end": 523
}
|
class ____ {
public static final String tsnst = "tsnst";
@JSONField(name = "a")
private long a;
private List<ObjectP_A> b;
public static String getTsnst() {
return tsnst;
}
public long getA() {
return a;
}
public void setA(long a) {
this.a = a;
}
public List<ObjectP_A> getB() {
return b;
}
public void setB(List<ObjectP_A> b) {
this.b = b;
}
}
|
ObjectP
|
java
|
FasterXML__jackson-core
|
src/main/java/tools/jackson/core/ObjectReadContext.java
|
{
"start": 3197,
"end": 5146
}
|
class ____ implements ObjectReadContext {
protected static Base EMPTY_CONTEXT = new Base();
// // // Config access methods
@Override
public FormatSchema getSchema() { return null; }
@Override
public StreamReadConstraints streamReadConstraints() {
return StreamReadConstraints.defaults();
}
@Override
public int getStreamReadFeatures(int defaults) {
return defaults;
}
@Override
public int getFormatReadFeatures(int defaults) {
return defaults;
}
@Override
public TokenStreamFactory tokenStreamFactory() {
return _reportUnsupportedOperation();
}
// // // Databind, trees
@Override
public ObjectTreeNode createObjectNode() {
return _reportUnsupportedOperation();
}
@Override
public ArrayTreeNode createArrayNode() {
return _reportUnsupportedOperation();
}
// // // Databind integration, trees
@Override
public <T extends TreeNode> T readTree(JsonParser p) {
return _reportUnsupportedOperation();
}
// // // Databind integration, other values
@Override
public <T> T readValue(JsonParser p, Class<T> valueType) {
return _reportUnsupportedOperation();
}
@Override
public <T> T readValue(JsonParser p, TypeReference<T> valueTypeRef) {
return _reportUnsupportedOperation();
}
@Override
public <T> T readValue(JsonParser p, ResolvedType type) {
return _reportUnsupportedOperation();
}
// // // Helper methods
protected <T> T _reportUnsupportedOperation() {
throw new UnsupportedOperationException("Operation not supported by `ObjectReadContext` of type "+getClass().getName());
}
}
}
|
Base
|
java
|
apache__camel
|
components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/csv/BindyDoNotRemoveQuotesCsvUnmarshallTest.java
|
{
"start": 3025,
"end": 4029
}
|
class ____ {
@DataField(pos = 1)
private String name;
@DataField(pos = 2)
private String description1;
@DataField(pos = 3)
private String description2;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription1() {
return description1;
}
public void setDescription1(String description1) {
this.description1 = description1;
}
public String getDescription2() {
return description2;
}
public void setDescription2(String description2) {
this.description2 = description2;
}
@Override
public String toString() {
return "Product{" + "name='" + name + '\'' + ", description1='" + description1 + '\'' + ", description2='"
+ description2 + '\'' + '}';
}
}
}
|
Product
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java
|
{
"start": 891,
"end": 2122
}
|
class ____ extends Expression {
// TODO: Functions supporting distinct should add a dedicated constructor Location, List<Expression>, boolean
protected Function(Source source, List<Expression> children) {
super(source, children);
}
public final List<Expression> arguments() {
return children();
}
public String functionName() {
return getClass().getSimpleName().toUpperCase(Locale.ROOT);
}
@Override
public Nullability nullable() {
return Expressions.nullable(children());
}
@Override
public int hashCode() {
return Objects.hash(getClass(), children());
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
Function other = (Function) obj;
return Objects.equals(children(), other.children());
}
@Override
public String nodeString() {
StringJoiner sj = new StringJoiner(",", functionName() + "(", ")");
for (Expression ex : arguments()) {
sj.add(ex.nodeString());
}
return sj.toString();
}
}
|
Function
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/HidingFieldTest.java
|
{
"start": 3925,
"end": 4032
}
|
class ____ {
public int varOne;
}
// subclass with member variables of different names
static
|
ClassA
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/test/java/org/springframework/web/reactive/resource/PathResourceResolverTests.java
|
{
"start": 1201,
"end": 6054
}
|
class ____ {
private static final Duration TIMEOUT = Duration.ofSeconds(5);
private final PathResourceResolver resolver = new PathResourceResolver();
@Test
void resolveFromClasspath() throws IOException {
Resource location = new ClassPathResource("test/", PathResourceResolver.class);
String path = "bar.css";
List<Resource> locations = Collections.singletonList(location);
Resource actual = this.resolver.resolveResource(null, path, locations, null).block(TIMEOUT);
assertThat(actual).isEqualTo(location.createRelative(path));
}
@Test
void resolveFromClasspathRoot() {
Resource location = new ClassPathResource("/");
String path = "org/springframework/web/reactive/resource/test/bar.css";
List<Resource> locations = Collections.singletonList(location);
Resource actual = this.resolver.resolveResource(null, path, locations, null).block(TIMEOUT);
assertThat(actual).isNotNull();
}
@Test // gh-22272
public void resolveWithEncodedPath() throws IOException {
Resource classpathLocation = new ClassPathResource("test/", PathResourceResolver.class);
testWithEncodedPath(classpathLocation);
testWithEncodedPath(new FileUrlResource(classpathLocation.getURL()));
}
private void testWithEncodedPath(Resource location) throws IOException {
String path = "foo%20foo.txt";
List<Resource> locations = Collections.singletonList(location);
Resource actual = this.resolver.resolveResource(null, path, locations, null).block(TIMEOUT);
assertThat(actual).isNotNull();
assertThat(actual.getFile()).hasName("foo foo.txt");
}
@Test
void checkResource() throws IOException {
Resource location = new ClassPathResource("test/", PathResourceResolver.class);
testCheckResource(location, "../testsecret/secret.txt");
testCheckResource(location, "test/../../testsecret/secret.txt");
location = new UrlResource(getClass().getResource("./test/"));
String secretPath = new UrlResource(getClass().getResource("testsecret/secret.txt")).getURL().getPath();
testCheckResource(location, "file:" + secretPath);
testCheckResource(location, "/file:" + secretPath);
testCheckResource(location, "/" + secretPath);
testCheckResource(location, "////../.." + secretPath);
testCheckResource(location, "/%2E%2E/testsecret/secret.txt");
testCheckResource(location, "/%2e%2e/testsecret/secret.txt");
testCheckResource(location, " " + secretPath);
testCheckResource(location, "/ " + secretPath);
testCheckResource(location, "url:" + secretPath);
}
private void testCheckResource(Resource location, String requestPath) {
List<Resource> locations = Collections.singletonList(location);
Resource actual = this.resolver.resolveResource(null, requestPath, locations, null).block(TIMEOUT);
assertThat(actual).isNull();
}
@Test // gh-23463
public void ignoreInvalidEscapeSequence() throws IOException {
UrlResource location = new UrlResource(getClass().getResource("./test/"));
Resource resource = new UrlResource(location.getURL() + "test%file.txt");
assertThat(this.resolver.checkResource(resource, location)).isTrue();
resource = location.createRelative("test%file.txt");
assertThat(this.resolver.checkResource(resource, location)).isTrue();
}
@Test
void checkResourceWithAllowedLocations() {
this.resolver.setAllowedLocations(
new ClassPathResource("test/", PathResourceResolver.class),
new ClassPathResource("testalternatepath/", PathResourceResolver.class)
);
Resource location = getResource("main.css");
String actual = this.resolver.resolveUrlPath("../testalternatepath/bar.css",
Collections.singletonList(location), null).block(TIMEOUT);
assertThat(actual).isEqualTo("../testalternatepath/bar.css");
}
@Test // SPR-12624
public void checkRelativeLocation() throws Exception {
String location= new UrlResource(getClass().getResource("./test/")).getURL().toExternalForm();
location = location.replace("/test/org/springframework","/test/org/../org/springframework");
Mono<Resource> resourceMono = this.resolver.resolveResource(
null, "main.css", Collections.singletonList(new UrlResource(location)), null);
assertThat(resourceMono.block(TIMEOUT)).isNotNull();
}
@Test // SPR-12747
public void checkFileLocation() throws Exception {
Resource resource = getResource("main.css");
assertThat(this.resolver.checkResource(resource, resource)).isTrue();
}
@Test // SPR-13241
public void resolvePathRootResource() {
Resource webjarsLocation = new ClassPathResource("/META-INF/resources/webjars/", PathResourceResolver.class);
String path = this.resolver.resolveUrlPathInternal(
"", Collections.singletonList(webjarsLocation), null).block(TIMEOUT);
assertThat(path).isNull();
}
private Resource getResource(String filePath) {
return new ClassPathResource("test/" + filePath, getClass());
}
}
|
PathResourceResolverTests
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/test/websocket/WebSocketIntegrationTests.java
|
{
"start": 4608,
"end": 11223
}
|
class ____ {
private static final Duration TIMEOUT = Duration.ofMillis(5000);
private static final Log logger = LogFactory.getLog(WebSocketIntegrationTests.class);
protected int serverPort;
private WebSocketClient client;
private HttpServer server;
private ConfigurableApplicationContext gatewayContext;
private int gatewayPort;
private static final Sinks.One<CloseStatus> serverCloseStatusSink = Sinks.one();
@BeforeEach
public void setup() throws Exception {
this.client = new ReactorNettyWebSocketClient();
this.server = new ReactorHttpServer();
this.server.setHandler(createHttpHandler());
this.server.afterPropertiesSet();
this.server.start();
// Set dynamically chosen port
this.serverPort = this.server.getPort();
if (this.client instanceof Lifecycle) {
((Lifecycle) this.client).start();
}
this.gatewayContext = new SpringApplicationBuilder(GatewayConfig.class)
.properties("ws.server.port:" + this.serverPort, "server.port=0", "spring.jmx.enabled=false")
.run();
ConfigurableEnvironment env = this.gatewayContext.getBean(ConfigurableEnvironment.class);
this.gatewayPort = Integer.valueOf(env.getProperty("local.server.port"));
}
@AfterEach
public void stop() throws Exception {
if (this.client instanceof Lifecycle) {
((Lifecycle) this.client).stop();
}
this.server.stop();
if (this.gatewayContext != null) {
this.gatewayContext.stop();
}
}
private HttpHandler createHttpHandler() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.register(WebSocketTestConfig.class);
context.refresh();
return WebHttpHandlerBuilder.applicationContext(context).build();
}
protected URI getUrl(String path) throws URISyntaxException {
// return new URI("ws://localhost:" + this.serverPort + path);
return new URI("ws://localhost:" + this.gatewayPort + path);
}
protected URI getHttpUrl(String path) throws URISyntaxException {
return new URI("http://localhost:" + this.gatewayPort + path);
}
@Test
public void echo() throws Exception {
int count = 100;
Flux<String> input = Flux.range(1, count).map(index -> "msg-" + index);
AtomicReference<List<String>> actualRef = new AtomicReference<>();
this.client
.execute(getUrl("/echo"),
session -> session.send(input.map(session::textMessage))
.thenMany(session.receive().take(count).map(WebSocketMessage::getPayloadAsText))
.collectList()
.doOnNext(actualRef::set)
.then())
.block(TIMEOUT);
assertThat(actualRef.get()).isNotNull();
assertThat(actualRef.get()).isEqualTo(input.collectList().block());
}
@Test
public void echoForHttp() throws Exception {
int count = 100;
Flux<String> input = Flux.range(1, count).map(index -> "msg-" + index);
AtomicReference<List<String>> actualRef = new AtomicReference<>();
this.client.execute(getHttpUrl("/echoForHttp"), session -> {
logger.debug("Starting to send messages");
return session.send(input.doOnNext(s -> logger.debug("outbound " + s)).map(session::textMessage))
.thenMany(session.receive().take(count).map(WebSocketMessage::getPayloadAsText))
.collectList()
.doOnNext(actualRef::set)
.then();
}).block(TIMEOUT);
assertThat(actualRef.get()).isNotNull();
assertThat(actualRef.get()).isEqualTo(input.collectList().block());
}
// Offers two sub-protocols during the handshake and verifies that the first
// one ("echo-v1") is negotiated on both the client handshake headers and the
// server side (which reports the protocol it saw as a text message).
@Test
public void subProtocol() throws Exception {
    String protocol = "echo-v1";
    String protocol2 = "echo-v2";
    AtomicReference<HandshakeInfo> infoRef = new AtomicReference<>();
    AtomicReference<Object> protocolRef = new AtomicReference<>();
    this.client.execute(getUrl("/sub-protocol"), new WebSocketHandler() {
        @Override
        public List<String> getSubProtocols() {
            return Arrays.asList(protocol, protocol2);
        }
        @Override
        public Mono<Void> handle(WebSocketSession session) {
            infoRef.set(session.getHandshakeInfo());
            // Capture either the reported protocol or the error for assertion below.
            return session.receive()
                    .map(WebSocketMessage::getPayloadAsText)
                    .doOnNext(protocolRef::set)
                    .doOnError(protocolRef::set)
                    .then();
        }
    }).block(TIMEOUT);
    HandshakeInfo info = infoRef.get();
    assertThat(info.getHeaders().getFirst("Upgrade")).isEqualToIgnoringCase("websocket");
    assertThat(info.getHeaders().getFirst("Sec-WebSocket-Protocol")).isEqualTo(protocol);
    assertThat(info.getSubProtocol()).as("Wrong protocol accepted").isEqualTo(protocol);
    assertThat(protocolRef.get()).as("Wrong protocol detected on the server side").isEqualTo(protocol);
}
// Sends a custom handshake header and expects the server to echo it back as a
// single "name:value" text message.
@Test
public void customHeader() throws Exception {
    HttpHeaders headers = new HttpHeaders();
    headers.add("my-header", "my-value");
    AtomicReference<Object> headerRef = new AtomicReference<>();
    this.client
            .execute(getUrl("/custom-header"), headers,
                    session -> session.receive()
                            .map(WebSocketMessage::getPayloadAsText)
                            .doOnNext(headerRef::set)
                            .doOnError(headerRef::set)
                            .then())
            .block(TIMEOUT);
    assertThat(headerRef.get()).isEqualTo("my-header:my-value");
}
// Connects to /server-close, where the server initiates the close; verifies
// the close status observed on the client is 4999 "server-close".
@Test
public void serverClosing() throws Exception {
    AtomicReference<Mono<CloseStatus>> closeStatus = new AtomicReference<>();
    this.client.execute(getUrl("/server-close"), session -> {
        logger.debug("Starting..");
        closeStatus.set(session.closeStatus());
        return session.receive().doOnNext(s -> logger.debug("inbound " + s)).then().doFinally(signalType -> {
            logger.debug("Completed with: " + signalType);
        });
    }).block(Duration.ofMillis(5000));
    assertThat(closeStatus.get().block(Duration.ofMillis(5000)))
            .isEqualTo(CloseStatus.create(4999, "server-close"));
}
// Client closes the session with 4999 "client-close"; asserts the server saw
// that close status (published through serverCloseStatusSink elsewhere in this test).
@Test
public void clientClosing() throws Exception {
    this.client.execute(getUrl("/client-close"), session -> session.close(CloseStatus.create(4999, "client-close")))
            .block(Duration.ofMillis(5000));
    assertThat(serverCloseStatusSink.asMono().block(Duration.ofMillis(5000)))
            .isEqualTo(CloseStatus.create(4999, "client-close"));
}
// Verifies cookie propagation: the server is expected to send
// "Set-Cookie: project=spring" in the handshake and echo "cookie" as a message.
// NOTE(review): currently @Disabled — reason not recorded here; confirm before re-enabling.
@Disabled
@Test
void cookie() throws Exception {
    AtomicReference<String> cookie = new AtomicReference<>();
    AtomicReference<Object> receivedCookieRef = new AtomicReference<>();
    this.client.execute(getUrl("/cookie"), session -> {
        cookie.set(session.getHandshakeInfo().getHeaders().getFirst("Set-Cookie"));
        return session.receive()
                .map(WebSocketMessage::getPayloadAsText)
                .doOnNext(receivedCookieRef::set)
                .doOnError(receivedCookieRef::set)
                .then();
    }).block(TIMEOUT);
    assertThat(receivedCookieRef.get()).isEqualTo("cookie");
    assertThat(cookie.get()).isEqualTo("project=spring");
}
@Configuration(proxyBeanMethods = false)
static
|
WebSocketIntegrationTests
|
java
|
elastic__elasticsearch
|
modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharMatcher.java
|
{
"start": 649,
"end": 1111
}
|
class ____ implements CharMatcher {
public static CharMatcher of(byte unicodeCategory) {
return new ByUnicodeCategory(unicodeCategory);
}
private final byte unicodeType;
ByUnicodeCategory(byte unicodeType) {
this.unicodeType = unicodeType;
}
@Override
public boolean isTokenChar(int c) {
return Character.getType(c) == unicodeType;
}
}
|
ByUnicodeCategory
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/authorization/OAuth2AuthorizationCodeGrantTests.java
|
{
"start": 62219,
"end": 64318
}
|
// Test configuration wiring a JDBC-backed OAuth2 authorization server:
// authorization, consent and registered-client storage all share the same
// JdbcOperations. The `db` and `jwkSource` references are fields declared in
// the enclosing test class.
class AuthorizationServerConfiguration {
    // JDBC-backed storage for issued authorizations.
    @Bean
    OAuth2AuthorizationService authorizationService(JdbcOperations jdbcOperations,
            RegisteredClientRepository registeredClientRepository) {
        return new JdbcOAuth2AuthorizationService(jdbcOperations, registeredClientRepository);
    }
    // JDBC-backed storage for user consent decisions.
    @Bean
    OAuth2AuthorizationConsentService authorizationConsentService(JdbcOperations jdbcOperations,
            RegisteredClientRepository registeredClientRepository) {
        return new JdbcOAuth2AuthorizationConsentService(jdbcOperations, registeredClientRepository);
    }
    // Registered-client repository with an explicitly configured parameters mapper.
    @Bean
    RegisteredClientRepository registeredClientRepository(JdbcOperations jdbcOperations) {
        JdbcRegisteredClientRepository jdbcRegisteredClientRepository = new JdbcRegisteredClientRepository(
                jdbcOperations);
        RegisteredClientParametersMapper registeredClientParametersMapper = new RegisteredClientParametersMapper();
        jdbcRegisteredClientRepository.setRegisteredClientParametersMapper(registeredClientParametersMapper);
        return jdbcRegisteredClientRepository;
    }
    @Bean
    JdbcOperations jdbcOperations() {
        return new JdbcTemplate(db);
    }
    @Bean
    JWKSource<SecurityContext> jwkSource() {
        return jwkSource;
    }
    @Bean
    JwtDecoder jwtDecoder(JWKSource<SecurityContext> jwkSource) {
        return OAuth2AuthorizationServerConfiguration.jwtDecoder(jwkSource);
    }
    // Adds the principal's granted authorities as a custom claim on access
    // tokens issued through the authorization_code grant.
    @Bean
    OAuth2TokenCustomizer<JwtEncodingContext> jwtCustomizer() {
        return (context) -> {
            if (AuthorizationGrantType.AUTHORIZATION_CODE.equals(context.getAuthorizationGrantType())
                    && OAuth2TokenType.ACCESS_TOKEN.equals(context.getTokenType())) {
                Authentication principal = context.getPrincipal();
                Set<String> authorities = new HashSet<>();
                for (GrantedAuthority authority : principal.getAuthorities()) {
                    authorities.add(authority.getAuthority());
                }
                context.getClaims().claim(AUTHORITIES_CLAIM, authorities);
            }
        };
    }
    // Plain-text passwords: acceptable only because this is test configuration.
    @Bean
    PasswordEncoder passwordEncoder() {
        return NoOpPasswordEncoder.getInstance();
    }
}
@EnableWebSecurity
@Import(OAuth2AuthorizationServerConfiguration.class)
static
|
AuthorizationServerConfiguration
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ShouldHaveSameTime.java
|
{
"start": 829,
"end": 1623
}
|
class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link ShouldHaveSameTime}</code>.
*
* @param actual the actual value in the failed assertion.
* @param expected the expected timestamp.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldHaveSameTime(Date actual, Date expected) {
return new ShouldHaveSameTime(actual, expected);
}
private ShouldHaveSameTime(Date actual, Date expected) {
super("%nExpecting%n" +
" %s%n" +
"to have the same time as:%n" +
" %s%n" +
"but actual time is%n" +
" %s%n" +
"and expected was:%n" +
" %s",
actual, expected, actual.getTime(), expected.getTime());
}
}
|
ShouldHaveSameTime
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/MappedSuperclassExtendsEntityTest.java
|
{
"start": 3580,
"end": 3686
}
|
// Concrete subtype of Parent with no members of its own; exists to exercise
// discriminator-based inheritance mapping in the test.
class Child1 extends Parent {
}
@Entity(name = "Child2")
@DiscriminatorValue("CHILD2")
public static
|
Child1
|
java
|
apache__flink
|
flink-formats/flink-avro/src/main/java/org/apache/flink/formats/avro/typeutils/AvroSchemaConverter.java
|
{
"start": 4025,
"end": 29985
}
|
class ____ not be null.");
// determine schema to retrieve deterministic field order
final Schema schema = SpecificData.get().getSchema(avroClass);
return (TypeInformation<Row>) convertToTypeInfo(schema, true);
}
/**
 * Converts an Avro schema string into a nested row structure with deterministic field order and
 * data types that are compatible with Flink's Table & SQL API.
 *
 * <p>Delegates to the two-argument overload with legacy timestamp mapping enabled.
 *
 * @param avroSchemaString Avro schema definition string
 * @return type information matching the schema
 */
@SuppressWarnings("unchecked")
public static <T> TypeInformation<T> convertToTypeInfo(String avroSchemaString) {
    return convertToTypeInfo(avroSchemaString, true);
}
/**
 * Converts an Avro schema string into a nested row structure with deterministic field order and
 * data types that are compatible with Flink's Table & SQL API.
 *
 * @param avroSchemaString Avro schema definition string
 * @param legacyTimestampMapping legacy mapping of timestamp types
 * @return type information matching the schema
 * @throws IllegalArgumentException if the schema string cannot be parsed
 */
@SuppressWarnings("unchecked")
public static <T> TypeInformation<T> convertToTypeInfo(
        String avroSchemaString, boolean legacyTimestampMapping) {
    Preconditions.checkNotNull(avroSchemaString, "Avro schema must not be null.");
    final Schema schema;
    try {
        schema = new Schema.Parser().parse(avroSchemaString);
    } catch (SchemaParseException e) {
        // Rewrap so callers deal with a standard exception type, keeping the cause.
        throw new IllegalArgumentException("Could not parse Avro schema string.", e);
    }
    return (TypeInformation<T>) convertToTypeInfo(schema, legacyTimestampMapping);
}
// Recursively maps an Avro schema to Flink TypeInformation. Logical types
// (decimal, date, time, timestamp) take precedence over their physical
// representation; unsupported shapes fall back to Kryo-serialized GENERIC.
private static TypeInformation<?> convertToTypeInfo(
        Schema schema, boolean legacyTimestampMapping) {
    switch (schema.getType()) {
        case RECORD:
            final List<Schema.Field> fields = schema.getFields();
            final TypeInformation<?>[] types = new TypeInformation<?>[fields.size()];
            final String[] names = new String[fields.size()];
            for (int i = 0; i < fields.size(); i++) {
                final Schema.Field field = fields.get(i);
                types[i] = convertToTypeInfo(field.schema(), legacyTimestampMapping);
                names[i] = field.name();
            }
            return Types.ROW_NAMED(names, types);
        case ENUM:
            // Avro enums are surfaced as plain strings.
            return Types.STRING;
        case ARRAY:
            // result type might either be ObjectArrayTypeInfo or BasicArrayTypeInfo for Strings
            return Types.OBJECT_ARRAY(
                    convertToTypeInfo(schema.getElementType(), legacyTimestampMapping));
        case MAP:
            return Types.MAP(
                    Types.STRING,
                    convertToTypeInfo(schema.getValueType(), legacyTimestampMapping));
        case UNION:
            // Nullable unions ([null, T] or [T, null]) and single-type unions
            // collapse to the inner type; anything else is opaque to Flink.
            final Schema actualSchema;
            if (schema.getTypes().size() == 2
                    && schema.getTypes().get(0).getType() == Schema.Type.NULL) {
                actualSchema = schema.getTypes().get(1);
            } else if (schema.getTypes().size() == 2
                    && schema.getTypes().get(1).getType() == Schema.Type.NULL) {
                actualSchema = schema.getTypes().get(0);
            } else if (schema.getTypes().size() == 1) {
                actualSchema = schema.getTypes().get(0);
            } else {
                // use Kryo for serialization
                return Types.GENERIC(Object.class);
            }
            return convertToTypeInfo(actualSchema, legacyTimestampMapping);
        case FIXED:
            // logical decimal type
            if (schema.getLogicalType() instanceof LogicalTypes.Decimal) {
                return Types.BIG_DEC;
            }
            // convert fixed size binary data to primitive byte arrays
            return Types.PRIMITIVE_ARRAY(Types.BYTE);
        case STRING:
            // convert Avro's Utf8/CharSequence to String
            return Types.STRING;
        case BYTES:
            // logical decimal type
            if (schema.getLogicalType() instanceof LogicalTypes.Decimal) {
                return Types.BIG_DEC;
            }
            return Types.PRIMITIVE_ARRAY(Types.BYTE);
        case INT:
            // logical date and time type
            final org.apache.avro.LogicalType logicalType = schema.getLogicalType();
            if (logicalType == LogicalTypes.date()) {
                return Types.SQL_DATE;
            } else if (logicalType == LogicalTypes.timeMillis()) {
                return Types.SQL_TIME;
            }
            return Types.INT;
        case LONG:
            if (legacyTimestampMapping) {
                // Legacy behavior: all timestamp flavors map to SQL_TIMESTAMP.
                if (schema.getLogicalType() == LogicalTypes.timestampMillis()
                        || schema.getLogicalType() == LogicalTypes.timestampMicros()) {
                    return Types.SQL_TIMESTAMP;
                } else if (schema.getLogicalType() == LogicalTypes.timeMicros()
                        || schema.getLogicalType() == LogicalTypes.timeMillis()) {
                    return Types.SQL_TIME;
                }
            } else {
                // Avro logical timestamp types to Flink DataStream timestamp types
                if (schema.getLogicalType() == LogicalTypes.timestampMillis()
                        || schema.getLogicalType() == LogicalTypes.timestampMicros()) {
                    return Types.INSTANT;
                } else if (schema.getLogicalType() == LogicalTypes.localTimestampMillis()
                        || schema.getLogicalType() == LogicalTypes.localTimestampMicros()) {
                    return Types.LOCAL_DATE_TIME;
                } else if (schema.getLogicalType() == LogicalTypes.timeMicros()
                        || schema.getLogicalType() == LogicalTypes.timeMillis()) {
                    return Types.SQL_TIME;
                }
            }
            return Types.LONG;
        case FLOAT:
            return Types.FLOAT;
        case DOUBLE:
            return Types.DOUBLE;
        case BOOLEAN:
            return Types.BOOLEAN;
        case NULL:
            return Types.VOID;
    }
    throw new IllegalArgumentException("Unsupported Avro type '" + schema.getType() + "'.");
}
/**
 * Converts an Avro schema string into a nested row structure with deterministic field order and
 * data types that are compatible with Flink's Table & SQL API.
 *
 * <p>Delegates to the two-argument overload with legacy timestamp mapping enabled.
 *
 * @param avroSchemaString Avro schema definition string
 * @return data type matching the schema
 */
public static DataType convertToDataType(String avroSchemaString) {
    return convertToDataType(avroSchemaString, true);
}
/**
 * Converts an Avro schema string into a nested row structure with deterministic field order and
 * data types that are compatible with Flink's Table & SQL API.
 *
 * @param avroSchemaString Avro schema definition string
 * @param legacyTimestampMapping legacy mapping of local timestamps
 * @return data type matching the schema
 * @throws IllegalArgumentException if the schema string cannot be parsed
 */
public static DataType convertToDataType(
        String avroSchemaString, boolean legacyTimestampMapping) {
    Preconditions.checkNotNull(avroSchemaString, "Avro schema must not be null.");
    final Schema schema;
    try {
        schema = new Schema.Parser().parse(avroSchemaString);
    } catch (SchemaParseException e) {
        // Rewrap so callers deal with a standard exception type, keeping the cause.
        throw new IllegalArgumentException("Could not parse Avro schema string.", e);
    }
    return convertToDataType(schema, legacyTimestampMapping);
}
// Recursively maps an Avro schema to a Flink SQL DataType. Avro types are
// non-nullable by default here; nullability is introduced only by [null, T]
// unions. Logical types take precedence over their physical representation.
private static DataType convertToDataType(Schema schema, boolean legacyMapping) {
    switch (schema.getType()) {
        case RECORD:
            final List<Schema.Field> schemaFields = schema.getFields();
            final DataTypes.Field[] fields = new DataTypes.Field[schemaFields.size()];
            for (int i = 0; i < schemaFields.size(); i++) {
                final Schema.Field field = schemaFields.get(i);
                fields[i] =
                        DataTypes.FIELD(
                                field.name(), convertToDataType(field.schema(), legacyMapping));
            }
            return DataTypes.ROW(fields).notNull();
        case ENUM:
            return DataTypes.STRING().notNull();
        case ARRAY:
            return DataTypes.ARRAY(convertToDataType(schema.getElementType(), legacyMapping))
                    .notNull();
        case MAP:
            return DataTypes.MAP(
                            DataTypes.STRING().notNull(),
                            convertToDataType(schema.getValueType(), legacyMapping))
                    .notNull();
        case UNION:
            // Nullable unions ([null, T] or [T, null]) and single-type unions
            // collapse to the inner type; anything else becomes a raw Kryo type.
            final Schema actualSchema;
            final boolean nullable;
            if (schema.getTypes().size() == 2
                    && schema.getTypes().get(0).getType() == Schema.Type.NULL) {
                actualSchema = schema.getTypes().get(1);
                nullable = true;
            } else if (schema.getTypes().size() == 2
                    && schema.getTypes().get(1).getType() == Schema.Type.NULL) {
                actualSchema = schema.getTypes().get(0);
                nullable = true;
            } else if (schema.getTypes().size() == 1) {
                actualSchema = schema.getTypes().get(0);
                nullable = false;
            } else {
                // use Kryo for serialization
                return new AtomicDataType(
                        new TypeInformationRawType<>(false, Types.GENERIC(Object.class)));
            }
            DataType converted = convertToDataType(actualSchema, legacyMapping);
            return nullable ? converted.nullable() : converted;
        case FIXED:
            // logical decimal type
            if (schema.getLogicalType() instanceof LogicalTypes.Decimal) {
                final LogicalTypes.Decimal decimalType =
                        (LogicalTypes.Decimal) schema.getLogicalType();
                return DataTypes.DECIMAL(decimalType.getPrecision(), decimalType.getScale())
                        .notNull();
            }
            // convert fixed size binary data to primitive byte arrays
            return DataTypes.VARBINARY(schema.getFixedSize()).notNull();
        case STRING:
            // convert Avro's Utf8/CharSequence to String
            return DataTypes.STRING().notNull();
        case BYTES:
            // logical decimal type
            if (schema.getLogicalType() instanceof LogicalTypes.Decimal) {
                final LogicalTypes.Decimal decimalType =
                        (LogicalTypes.Decimal) schema.getLogicalType();
                return DataTypes.DECIMAL(decimalType.getPrecision(), decimalType.getScale())
                        .notNull();
            }
            return DataTypes.BYTES().notNull();
        case INT:
            // logical date and time type
            final org.apache.avro.LogicalType logicalType = schema.getLogicalType();
            if (logicalType == LogicalTypes.date()) {
                return DataTypes.DATE().notNull();
            } else if (logicalType == LogicalTypes.timeMillis()) {
                return DataTypes.TIME(3).notNull();
            }
            return DataTypes.INT().notNull();
        case LONG:
            if (legacyMapping) {
                // Avro logical timestamp types to Flink SQL timestamp types
                if (schema.getLogicalType() == LogicalTypes.timestampMillis()) {
                    return DataTypes.TIMESTAMP(3).notNull();
                } else if (schema.getLogicalType() == LogicalTypes.timestampMicros()) {
                    return DataTypes.TIMESTAMP(6).notNull();
                } else if (schema.getLogicalType() == LogicalTypes.timeMillis()) {
                    return DataTypes.TIME(3).notNull();
                } else if (schema.getLogicalType() == LogicalTypes.timeMicros()) {
                    return DataTypes.TIME(6).notNull();
                }
            } else {
                // Avro logical timestamp types to Flink SQL timestamp types;
                // non-legacy mode distinguishes instant vs. local timestamps.
                if (schema.getLogicalType() == LogicalTypes.timestampMillis()) {
                    return DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3).notNull();
                } else if (schema.getLogicalType() == LogicalTypes.timestampMicros()) {
                    return DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(6).notNull();
                } else if (schema.getLogicalType() == LogicalTypes.timeMillis()) {
                    return DataTypes.TIME(3).notNull();
                } else if (schema.getLogicalType() == LogicalTypes.timeMicros()) {
                    return DataTypes.TIME(6).notNull();
                } else if (schema.getLogicalType() == LogicalTypes.localTimestampMillis()) {
                    return DataTypes.TIMESTAMP(3).notNull();
                } else if (schema.getLogicalType() == LogicalTypes.localTimestampMicros()) {
                    return DataTypes.TIMESTAMP(6).notNull();
                }
            }
            return DataTypes.BIGINT().notNull();
        case FLOAT:
            return DataTypes.FLOAT().notNull();
        case DOUBLE:
            return DataTypes.DOUBLE().notNull();
        case BOOLEAN:
            return DataTypes.BOOLEAN().notNull();
        case NULL:
            return DataTypes.NULL();
    }
    throw new IllegalArgumentException("Unsupported Avro type '" + schema.getType() + "'.");
}
/**
 * Converts Flink SQL {@link LogicalType} (can be nested) into an Avro schema.
 *
 * <p>Use "org.apache.flink.avro.generated.record" as the type name.
 *
 * <p>Delegates to the two-argument overload with legacy timestamp mapping enabled.
 *
 * @param schema the schema type, usually it should be the top level record type, e.g. not a
 *     nested type
 * @return Avro's {@link Schema} matching this logical type.
 */
public static Schema convertToSchema(LogicalType schema) {
    return convertToSchema(schema, true);
}
/**
 * Converts Flink SQL {@link LogicalType} (can be nested) into an Avro schema.
 *
 * <p>Use "org.apache.flink.avro.generated.record" as the type name.
 *
 * @param schema the schema type, usually it should be the top level record type, e.g. not a
 *     nested type
 * @param legacyTimestampMapping whether to use the legacy timestamp mapping
 * @return Avro's {@link Schema} matching this logical type.
 */
public static Schema convertToSchema(LogicalType schema, boolean legacyTimestampMapping) {
    return convertToSchema(
            schema, "org.apache.flink.avro.generated.record", legacyTimestampMapping);
}
/**
 * Converts Flink SQL {@link LogicalType} (can be nested) into an Avro schema.
 *
 * <p>The "{rowName}_" is used as the nested row type name prefix in order to generate the right
 * schema. Nested record type that only differs with type name is still compatible.
 *
 * <p>Delegates to the three-argument overload with legacy timestamp mapping enabled.
 *
 * @param logicalType logical type
 * @param rowName the record name
 * @return Avro's {@link Schema} matching this logical type.
 */
public static Schema convertToSchema(LogicalType logicalType, String rowName) {
    return convertToSchema(logicalType, rowName, true);
}
/**
 * Converts Flink SQL {@link LogicalType} (can be nested) into an Avro schema.
 *
 * <p>The "{rowName}_" is used as the nested row type name prefix in order to generate the right
 * schema. Nested record type that only differs with type name is still compatible.
 *
 * @param logicalType logical type
 * @param rowName the record name
 * @param legacyTimestampMapping whether to use the legacy timestamp mapping
 * @return Avro's {@link Schema} matching this logical type.
 * @throws IllegalArgumentException if a timestamp/time precision is not representable in Avro
 * @throws UnsupportedOperationException for types Avro cannot express (e.g. RAW)
 */
public static Schema convertToSchema(
        LogicalType logicalType, String rowName, boolean legacyTimestampMapping) {
    int precision;
    boolean nullable = logicalType.isNullable();
    switch (logicalType.getTypeRoot()) {
        case NULL:
            return SchemaBuilder.builder().nullType();
        case BOOLEAN:
            Schema bool = SchemaBuilder.builder().booleanType();
            return nullable ? nullableSchema(bool) : bool;
        case TINYINT:
        case SMALLINT:
        case INTEGER:
            // Avro has no narrower integer types; all map to int.
            Schema integer = SchemaBuilder.builder().intType();
            return nullable ? nullableSchema(integer) : integer;
        case BIGINT:
            Schema bigint = SchemaBuilder.builder().longType();
            return nullable ? nullableSchema(bigint) : bigint;
        case FLOAT:
            Schema f = SchemaBuilder.builder().floatType();
            return nullable ? nullableSchema(f) : f;
        case DOUBLE:
            Schema d = SchemaBuilder.builder().doubleType();
            return nullable ? nullableSchema(d) : d;
        case CHAR:
        case VARCHAR:
            Schema str = SchemaBuilder.builder().stringType();
            return nullable ? nullableSchema(str) : str;
        case BINARY:
        case VARBINARY:
            Schema binary = SchemaBuilder.builder().bytesType();
            return nullable ? nullableSchema(binary) : binary;
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            // use long to represents Timestamp
            final TimestampType timestampType = (TimestampType) logicalType;
            precision = timestampType.getPrecision();
            org.apache.avro.LogicalType avroLogicalType;
            if (legacyTimestampMapping) {
                if (precision <= 3) {
                    avroLogicalType = LogicalTypes.timestampMillis();
                } else {
                    throw new IllegalArgumentException(
                            "Avro does not support TIMESTAMP type "
                                    + "with precision: "
                                    + precision
                                    + ", it only supports precision less than 3.");
                }
            } else {
                if (precision <= 3) {
                    avroLogicalType = LogicalTypes.localTimestampMillis();
                } else if (precision <= 6) {
                    avroLogicalType = LogicalTypes.localTimestampMicros();
                } else {
                    throw new IllegalArgumentException(
                            "Avro does not support LOCAL TIMESTAMP type "
                                    + "with precision: "
                                    + precision
                                    + ", it only supports precision less than 6.");
                }
            }
            Schema timestamp = avroLogicalType.addToSchema(SchemaBuilder.builder().longType());
            return nullable ? nullableSchema(timestamp) : timestamp;
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            if (legacyTimestampMapping) {
                // Legacy mapping has no representation for instant timestamps.
                throw new UnsupportedOperationException(
                        "Unsupported to derive Schema for type: " + logicalType);
            } else {
                final LocalZonedTimestampType localZonedTimestampType =
                        (LocalZonedTimestampType) logicalType;
                precision = localZonedTimestampType.getPrecision();
                if (precision <= 3) {
                    avroLogicalType = LogicalTypes.timestampMillis();
                } else if (precision <= 6) {
                    avroLogicalType = LogicalTypes.timestampMicros();
                } else {
                    throw new IllegalArgumentException(
                            "Avro does not support TIMESTAMP type "
                                    + "with precision: "
                                    + precision
                                    + ", it only supports precision less than 6.");
                }
                timestamp = avroLogicalType.addToSchema(SchemaBuilder.builder().longType());
                return nullable ? nullableSchema(timestamp) : timestamp;
            }
        case DATE:
            // use int to represents Date
            Schema date = LogicalTypes.date().addToSchema(SchemaBuilder.builder().intType());
            return nullable ? nullableSchema(date) : date;
        case TIME_WITHOUT_TIME_ZONE:
            precision = ((TimeType) logicalType).getPrecision();
            if (precision > 3) {
                throw new IllegalArgumentException(
                        "Avro does not support TIME type with precision: "
                                + precision
                                + ", it only supports precision less than 3.");
            }
            // use int to represents Time, we only support millisecond when deserialization
            Schema time =
                    LogicalTypes.timeMillis().addToSchema(SchemaBuilder.builder().intType());
            return nullable ? nullableSchema(time) : time;
        case DECIMAL:
            DecimalType decimalType = (DecimalType) logicalType;
            // store BigDecimal as byte[]
            Schema decimal =
                    LogicalTypes.decimal(decimalType.getPrecision(), decimalType.getScale())
                            .addToSchema(SchemaBuilder.builder().bytesType());
            return nullable ? nullableSchema(decimal) : decimal;
        case ROW:
            RowType rowType = (RowType) logicalType;
            List<String> fieldNames = rowType.getFieldNames();
            // we have to make sure the record name is different in a Schema
            SchemaBuilder.FieldAssembler<Schema> builder =
                    SchemaBuilder.builder().record(rowName).fields();
            for (int i = 0; i < rowType.getFieldCount(); i++) {
                String fieldName = fieldNames.get(i);
                LogicalType fieldType = rowType.getTypeAt(i);
                SchemaBuilder.GenericDefault<Schema> fieldBuilder =
                        builder.name(fieldName)
                                .type(
                                        convertToSchema(
                                                fieldType,
                                                rowName + "_" + fieldName,
                                                legacyTimestampMapping));
                if (fieldType.isNullable()) {
                    builder = fieldBuilder.withDefault(null);
                } else {
                    builder = fieldBuilder.noDefault();
                }
            }
            Schema record = builder.endRecord();
            return nullable ? nullableSchema(record) : record;
        case MULTISET:
        case MAP:
            // FIX: propagate legacyTimestampMapping into the value type; the
            // previous code recursed through the two-arg overload, silently
            // forcing legacy mapping for nested timestamps even when the
            // caller requested the new mapping.
            Schema map =
                    SchemaBuilder.builder()
                            .map()
                            .values(
                                    convertToSchema(
                                            extractValueTypeToAvroMap(logicalType),
                                            rowName,
                                            legacyTimestampMapping));
            return nullable ? nullableSchema(map) : map;
        case ARRAY:
            ArrayType arrayType = (ArrayType) logicalType;
            // FIX: same propagation for array element types (see MAP above).
            Schema array =
                    SchemaBuilder.builder()
                            .array()
                            .items(
                                    convertToSchema(
                                            arrayType.getElementType(),
                                            rowName,
                                            legacyTimestampMapping));
            return nullable ? nullableSchema(array) : array;
        case RAW:
        default:
            throw new UnsupportedOperationException(
                    "Unsupported to derive Schema for type: " + logicalType);
    }
}
/**
 * Determines the Avro map value type for a Flink MAP or MULTISET logical type.
 * A multiset is represented as a map from element to an int count. The key
 * type must be a character string; anything else is rejected because Avro
 * maps only support string keys.
 */
public static LogicalType extractValueTypeToAvroMap(LogicalType type) {
    final LogicalType keyType;
    final LogicalType valueType;
    if (type instanceof MapType) {
        final MapType mapType = (MapType) type;
        keyType = mapType.getKeyType();
        valueType = mapType.getValueType();
    } else {
        // MULTISET: element becomes the key, the value is its int multiplicity.
        keyType = ((MultisetType) type).getElementType();
        valueType = new IntType();
    }
    if (!keyType.is(LogicalTypeFamily.CHARACTER_STRING)) {
        throw new UnsupportedOperationException(
                "Avro format doesn't support non-string as key type of map. "
                        + "The key type is: "
                        + keyType.asSummaryString());
    }
    return valueType;
}
/** Wraps the given schema in a union with null, unless it is already nullable. */
private static Schema nullableSchema(Schema schema) {
    if (schema.isNullable()) {
        return schema;
    }
    return Schema.createUnion(SchemaBuilder.builder().nullType(), schema);
}
}
|
must
|
java
|
apache__camel
|
components/camel-jq/src/test/java/org/apache/camel/language/jq/JqFilterGETest.java
|
{
"start": 1007,
"end": 2989
}
|
/**
 * Verifies that a JQ expression ({@code .amount >= 1000}) works as a Camel
 * filter predicate for both Map payloads and raw JSON string payloads.
 */
class JqFilterGETest extends JqTestSupport {
    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            @Override
            public void configure() {
                // Only exchanges whose body has amount >= 1000 reach mock:result.
                from("direct:start")
                        .filter().jq(".amount >= 1000")
                        .to("mock:result");
            }
        };
    }
    // b0 (1234) and b2 (4444) pass the filter; b1 (499) is dropped.
    @Test
    public void testFilterMapPayload() throws Exception {
        Map b0 = Map.of("branch", "BRANCH0", "amount", 1234);
        Map b1 = Map.of("branch", "BRANCH1", "amount", 499);
        Map b2 = Map.of("branch", "BRANCH2", "amount", 4444);
        getMockEndpoint("mock:result").expectedMessageCount(2);
        getMockEndpoint("mock:result").message(0).body().isEqualTo(b0);
        getMockEndpoint("mock:result").message(1).body().isEqualTo(b2);
        template.sendBody("direct:start", b0);
        template.sendBody("direct:start", b1);
        template.sendBody("direct:start", b2);
        MockEndpoint.assertIsSatisfied(context);
    }
    // Same scenario with JSON text bodies instead of Maps.
    @Test
    public void testFilterStringPayload() throws Exception {
        String b0 = "{\n"
                + "  \"branch\": \"BRANCH0\",\n"
                + "  \"amount\": 1234\n"
                + "}";
        String b1 = "{\n"
                + "  \"branch\": \"BRANCH1\",\n"
                + "  \"amount\": 499\n"
                + "}";
        String b2 = "{\n"
                + "  \"branch\": \"BRANCH2\",\n"
                + "  \"amount\": 4444\n"
                + "}";
        getMockEndpoint("mock:result").expectedMessageCount(2);
        getMockEndpoint("mock:result").message(0).body().isEqualTo(b0);
        getMockEndpoint("mock:result").message(1).body().isEqualTo(b2);
        template.sendBody("direct:start", b0);
        template.sendBody("direct:start", b1);
        template.sendBody("direct:start", b2);
        MockEndpoint.assertIsSatisfied(context);
    }
}
|
JqFilterGETest
|
java
|
quarkusio__quarkus
|
integration-tests/main/src/test/java/io/quarkus/it/main/RegisterForReflectionITCase.java
|
{
"start": 384,
"end": 3014
}
|
class ____ {
private static final String BASE_PKG = "io.quarkus.it.rest";
private static final String ENDPOINT = "/reflection/simpleClassName";
// ResourceA registers itself but not its nested types: nested classes are
// only reachable when complete reflection types are enabled (the helper
// isCompleteReflectionTypes() is defined elsewhere in this class).
@Test
public void testSelfWithoutNested() {
    final String resourceA = BASE_PKG + ".ResourceA";
    assertRegistration("ResourceA", resourceA);
    final boolean isCompleteReflectionTypes = isCompleteReflectionTypes();
    assertRegistration(isCompleteReflectionTypes ? "InnerClassOfA" : "FAILED", resourceA + "$InnerClassOfA");
    assertRegistration(isCompleteReflectionTypes ? "StaticClassOfA" : "FAILED", resourceA + "$StaticClassOfA");
    assertRegistration(isCompleteReflectionTypes ? "InterfaceOfA" : "FAILED", resourceA + "$InterfaceOfA");
}
// ResourceB registers itself with nested types: all of its inner classes,
// static classes and interfaces (including doubly-nested ones) must be reflectable.
@Test
public void testSelfWithNested() {
    final String resourceB = BASE_PKG + ".ResourceB";
    assertRegistration("ResourceB", resourceB);
    assertRegistration("InnerClassOfB", resourceB + "$InnerClassOfB");
    assertRegistration("StaticClassOfB", resourceB + "$StaticClassOfB");
    assertRegistration("InterfaceOfB", resourceB + "$InterfaceOfB");
    assertRegistration("InnerInnerOfB", resourceB + "$InnerClassOfB$InnerInnerOfB");
}
// ResourceC and its (otherwise inaccessible) nested classes are expected to be
// registered; since GraalVM 22.1 ResourceC itself is implicitly registered.
@Test
public void testTargetWithNestedPost22_1() {
    final String resourceC = BASE_PKG + ".ResourceC";
    // Starting with GraalVM 22.1 ResourceC implicitly gets registered by GraalVM
    // (see https://github.com/oracle/graal/pull/4414)
    assertRegistration("ResourceC", resourceC);
    assertRegistration("InaccessibleClassOfC", resourceC + "$InaccessibleClassOfC");
    assertRegistration("OtherInaccessibleClassOfC", resourceC + "$InaccessibleClassOfC$OtherInaccessibleClassOfC");
}
// ResourceD itself is not registered ("FAILED"); only its static nested class
// is, and the doubly-nested class only when complete reflection types are enabled.
@Test
public void testTargetWithoutNested() {
    final String resourceD = BASE_PKG + ".ResourceD";
    assertRegistration("FAILED", resourceD);
    assertRegistration("StaticClassOfD", resourceD + "$StaticClassOfD");
    final boolean isCompleteReflectionTypes = isCompleteReflectionTypes();
    assertRegistration(isCompleteReflectionTypes ? "OtherAccessibleClassOfD" : "FAILED",
            resourceD + "$StaticClassOfD$OtherAccessibleClassOfD");
}
// NOTE: This test is expected to fail with GraalVM >= 23.1.0 and < 23.1.3 yet we enable it for all 23.1 versions
// due to https://github.com/quarkusio/quarkus/issues/45873
@Test
@DisableIfBuiltWithGraalVMOlderThan(GraalVMVersion.GRAALVM_23_1_0)
public void testLambdaCapturingPost23_1_2() {
// Starting with GraalVM 23.1.3 lambda
|
RegisterForReflectionITCase
|
java
|
alibaba__nacos
|
naming/src/main/java/com/alibaba/nacos/naming/remote/rpc/handler/InstanceRequestHandler.java
|
{
"start": 2062,
"end": 4846
}
|
/**
 * gRPC request handler for ephemeral naming instance registration and
 * deregistration. Dispatches on the request type string and publishes a trace
 * event for each successful operation.
 */
class InstanceRequestHandler extends RequestHandler<InstanceRequest, InstanceResponse> {
    private final EphemeralClientOperationServiceImpl clientOperationService;
    public InstanceRequestHandler(EphemeralClientOperationServiceImpl clientOperationService) {
        this.clientOperationService = clientOperationService;
    }
    // Entry point: builds the ephemeral Service key from the request, fills in
    // a missing instance id, then dispatches register/deregister. Unknown
    // request types are rejected with INVALID_PARAM.
    @Override
    @NamespaceValidation
    @TpsControl(pointName = "RemoteNamingInstanceRegisterDeregister", name = "RemoteNamingInstanceRegisterDeregister")
    @Secured(action = ActionTypes.WRITE)
    @ExtractorManager.Extractor(rpcExtractor = InstanceRequestParamExtractor.class)
    public InstanceResponse handle(InstanceRequest request, RequestMeta meta) throws NacosException {
        Service service = Service.newService(request.getNamespace(), request.getGroupName(), request.getServiceName(),
                true);
        InstanceUtil.setInstanceIdIfEmpty(request.getInstance(), service.getGroupedServiceName());
        switch (request.getType()) {
            case NamingRemoteConstants.REGISTER_INSTANCE:
                return registerInstance(service, request, meta);
            case NamingRemoteConstants.DE_REGISTER_INSTANCE:
                return deregisterInstance(service, request, meta);
            default:
                throw new NacosException(NacosException.INVALID_PARAM,
                        String.format("Unsupported request type %s", request.getType()));
        }
    }
    // Registers the instance under the connection id, then publishes a
    // RegisterInstanceTraceEvent for auditing/tracing.
    private InstanceResponse registerInstance(Service service, InstanceRequest request, RequestMeta meta)
            throws NacosException {
        clientOperationService.registerInstance(service, request.getInstance(), meta.getConnectionId());
        NotifyCenter.publishEvent(new RegisterInstanceTraceEvent(System.currentTimeMillis(),
                NamingRequestUtil.getSourceIpForGrpcRequest(meta), true, service.getNamespace(), service.getGroup(),
                service.getName(), request.getInstance().getIp(), request.getInstance().getPort()));
        return new InstanceResponse(NamingRemoteConstants.REGISTER_INSTANCE);
    }
    // Deregisters the instance and publishes the corresponding trace event
    // with reason REQUEST.
    private InstanceResponse deregisterInstance(Service service, InstanceRequest request, RequestMeta meta) {
        clientOperationService.deregisterInstance(service, request.getInstance(), meta.getConnectionId());
        NotifyCenter.publishEvent(new DeregisterInstanceTraceEvent(System.currentTimeMillis(),
                NamingRequestUtil.getSourceIpForGrpcRequest(meta), true, DeregisterInstanceReason.REQUEST,
                service.getNamespace(), service.getGroup(), service.getName(), request.getInstance().getIp(),
                request.getInstance().getPort()));
        return new InstanceResponse(NamingRemoteConstants.DE_REGISTER_INSTANCE);
    }
}
|
InstanceRequestHandler
|
java
|
apache__camel
|
components/camel-rest-openapi/src/test/java/org/apache/camel/component/rest/openapi/OpenApiUtilsTest.java
|
{
"start": 5224,
"end": 6043
}
|
class ____ is provided in the schema title instead of schema name
String schemaTitle = "TagRequestDto";
String bindingPackagePath = OpenApiUtils.class.getPackage().getName();
Operation operation = new Operation();
Schema<Object> tagSchema = createTagSchema(schemaTitle);
RequestBody requestBody = createRequestBody(tagSchema);
operation.requestBody(requestBody);
Components components = new Components();
components.addSchemas(schemaName, tagSchema);
OpenApiUtils utils = new OpenApiUtils(new DefaultCamelContext(), bindingPackagePath, components);
assertEquals(TagRequestDto.class.getName(), utils.manageRequestBody(operation));
}
@Test
public void shouldReturnCorrectResponseClassNameForSchemaName() {
//When the
|
name
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java
|
{
"start": 1606,
"end": 2919
}
|
class ____ implements
ZKFCProtocol, Closeable, ProtocolTranslator {
private final static RpcController NULL_CONTROLLER = null;
private final ZKFCProtocolPB rpcProxy;
public ZKFCProtocolClientSideTranslatorPB(
InetSocketAddress addr, Configuration conf,
SocketFactory socketFactory, int timeout) throws IOException {
RPC.setProtocolEngine(conf, ZKFCProtocolPB.class,
ProtobufRpcEngine2.class);
rpcProxy = RPC.getProxy(ZKFCProtocolPB.class,
RPC.getProtocolVersion(ZKFCProtocolPB.class), addr,
UserGroupInformation.getCurrentUser(), conf, socketFactory, timeout);
}
@Override
public void cedeActive(int millisToCede) throws IOException,
AccessControlException {
CedeActiveRequestProto req = CedeActiveRequestProto.newBuilder()
.setMillisToCede(millisToCede)
.build();
ipc(() -> rpcProxy.cedeActive(NULL_CONTROLLER, req));
}
@Override
public void gracefulFailover() throws IOException, AccessControlException {
ipc(() -> rpcProxy.gracefulFailover(NULL_CONTROLLER,
GracefulFailoverRequestProto.getDefaultInstance()));
}
@Override
public void close() {
RPC.stopProxy(rpcProxy);
}
@Override
public Object getUnderlyingProxyObject() {
return rpcProxy;
}
}
|
ZKFCProtocolClientSideTranslatorPB
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/protocol/SlowPeerReports.java
|
{
"start": 1435,
"end": 3373
}
|
class ____ {
/**
* A map from the DataNode's DataNodeUUID to its aggregate latency
* as seen by the reporting node.
*
* The exact choice of the aggregate is opaque to the NameNode but it
* should be chosen consistently by all DataNodes in the cluster.
* Examples of aggregates are 90th percentile (good) and mean (not so
* good).
*
* The NameNode must not attempt to interpret the aggregate latencies
* beyond exposing them as a diagnostic. e.g. metrics. Also, comparing
* latencies across reports from different DataNodes may not be not
* meaningful and must be avoided.
*/
@Nonnull
private final Map<String, OutlierMetrics> slowPeers;
/**
* An object representing a SlowPeerReports with no entries. Should
* be used instead of null or creating new objects when there are
* no slow peers to report.
*/
public static final SlowPeerReports EMPTY_REPORT =
new SlowPeerReports(ImmutableMap.of());
private SlowPeerReports(Map<String, OutlierMetrics> slowPeers) {
this.slowPeers = slowPeers;
}
public static SlowPeerReports create(
@Nullable Map<String, OutlierMetrics> slowPeers) {
if (slowPeers == null || slowPeers.isEmpty()) {
return EMPTY_REPORT;
}
return new SlowPeerReports(slowPeers);
}
public Map<String, OutlierMetrics> getSlowPeers() {
return slowPeers;
}
public boolean haveSlowPeers() {
return slowPeers.size() > 0;
}
/**
* Return true if the two objects represent the same set slow peer
* entries. Primarily for unit testing convenience.
*/
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof SlowPeerReports)) {
return false;
}
SlowPeerReports that = (SlowPeerReports) o;
return slowPeers.equals(that.slowPeers);
}
@Override
public int hashCode() {
return slowPeers.hashCode();
}
}
|
SlowPeerReports
|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/parallel/Execution.java
|
{
"start": 2458,
"end": 3080
}
|
enum ____ defined in
* {@link ExecutionMode}, ignoring case.
*
* <p>If not specified, the default is "same_thread" which corresponds to
* {@code @Execution(ExecutionMode.SAME_THREAD)}.
*
* @since 5.4
*/
@API(status = MAINTAINED, since = "5.13.3")
String DEFAULT_EXECUTION_MODE_PROPERTY_NAME = "junit.jupiter.execution.parallel.mode.default";
/**
* Property name used to set the default test execution mode for top-level
* classes: {@value}
*
* <p>This setting is only effective if parallel execution is enabled.
*
* <h4>Supported Values</h4>
*
* <p>Supported values include names of
|
constants
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/data/util/DataFormatConverters.java
|
{
"start": 39888,
"end": 40703
}
|
class ____
extends DataFormatConverter<ArrayData, short[]> {
private static final long serialVersionUID = -1343184089311186834L;
public static final PrimitiveShortArrayConverter INSTANCE =
new PrimitiveShortArrayConverter();
private PrimitiveShortArrayConverter() {}
@Override
ArrayData toInternalImpl(short[] value) {
return new GenericArrayData(value);
}
@Override
short[] toExternalImpl(ArrayData value) {
return value.toShortArray();
}
@Override
short[] toExternalImpl(RowData row, int column) {
return toExternalImpl(row.getArray(column));
}
}
/** Converter for primitive long array. */
public static final
|
PrimitiveShortArrayConverter
|
java
|
apache__flink
|
flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/gateway/TypedResult.java
|
{
"start": 2582,
"end": 2652
}
|
enum ____ {
PAYLOAD,
EMPTY,
EOS
}
}
|
ResultType
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/scheduling/annotation/EnableAsyncTests.java
|
{
"start": 14934,
"end": 15197
}
|
class ____ {
private final AsyncBean asyncBean;
public AsyncBeanUser(AsyncBean asyncBean) {
this.asyncBean = asyncBean;
}
public AsyncBean getAsyncBean() {
return asyncBean;
}
}
@EnableAsync(annotation = CustomAsync.class)
static
|
AsyncBeanUser
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jackson/src/main/java/org/springframework/boot/jackson/autoconfigure/JacksonAutoConfiguration.java
|
{
"start": 3760,
"end": 4562
}
|
class ____ {
@Bean
JacksonComponentModule jsonComponentModule() {
return new JacksonComponentModule();
}
@Bean
@Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
@ConditionalOnMissingBean
JsonMapper.Builder jsonMapperBuilder(List<JsonMapperBuilderCustomizer> customizers) {
JsonMapper.Builder builder = JsonMapper.builder();
customize(builder, customizers);
return builder;
}
private void customize(JsonMapper.Builder builder, List<JsonMapperBuilderCustomizer> customizers) {
for (JsonMapperBuilderCustomizer customizer : customizers) {
customizer.customize(builder);
}
}
@Bean
@Primary
@ConditionalOnMissingBean
JsonMapper jacksonJsonMapper(JsonMapper.Builder builder) {
return builder.build();
}
@Configuration(proxyBeanMethods = false)
static
|
JacksonAutoConfiguration
|
java
|
junit-team__junit5
|
junit-platform-commons/src/main/java/org/junit/platform/commons/support/AnnotationSupport.java
|
{
"start": 8453,
"end": 8973
}
|
class ____ hierarchy).
*
* <p>If the annotation still has not been found, this method will optionally
* search recursively through the supplied enclosing instance types, starting
* at the innermost enclosing class (the last one in the supplied list of
* {@code enclosingInstanceTypes}).
*
* @implNote The classes supplied as {@code enclosingInstanceTypes} may
* differ from the classes returned from invocations of
* {@link Class#getEnclosingClass()} — for example, when a nested test
*
|
inheritance
|
java
|
quarkusio__quarkus
|
integration-tests/cache/src/test/java/io/quarkus/it/cache/CacheITCase.java
|
{
"start": 115,
"end": 159
}
|
class ____ extends CacheTestCase {
}
|
CacheITCase
|
java
|
apache__flink
|
flink-yarn/src/test/java/org/apache/flink/yarn/FlinkYarnSessionCliTest.java
|
{
"start": 2820,
"end": 27651
}
|
class ____ {
private static final ApplicationId TEST_YARN_APPLICATION_ID =
ApplicationId.newInstance(System.currentTimeMillis(), 42);
private static final ApplicationId TEST_YARN_APPLICATION_ID_2 =
ApplicationId.newInstance(System.currentTimeMillis(), 43);
private static final String TEST_YARN_JOB_MANAGER_ADDRESS = "22.33.44.55";
private static final int TEST_YARN_JOB_MANAGER_PORT = 6655;
private static final String validPropertiesFile = "applicationID=" + TEST_YARN_APPLICATION_ID;
private static final String invalidPropertiesFile =
"jasfobManager=" + TEST_YARN_JOB_MANAGER_ADDRESS + ":asf" + TEST_YARN_JOB_MANAGER_PORT;
@TempDir private Path tmp;
@Test
void testDynamicProperties() throws Exception {
FlinkYarnSessionCli cli =
new FlinkYarnSessionCli(
new Configuration(), tmp.toFile().getAbsolutePath(), "", "", false);
Options options = new Options();
cli.addGeneralOptions(options);
cli.addRunOptions(options);
CommandLineParser parser = new DefaultParser();
CommandLine cmd =
parser.parse(
options,
new String[] {
"run",
"-j",
"fake.jar",
"-D",
RpcOptions.ASK_TIMEOUT_DURATION.key() + "=5 min",
"-D",
CoreOptions.FLINK_JVM_OPTIONS.key() + "=-DappName=foobar",
"-D",
SecurityOptions.SSL_INTERNAL_KEY_PASSWORD.key() + "=changeit"
});
Configuration executorConfig = cli.toConfiguration(cmd);
assertThat(executorConfig.get(RpcOptions.ASK_TIMEOUT_DURATION)).hasMinutes(5);
assertThat(executorConfig.get(CoreOptions.FLINK_JVM_OPTIONS)).isEqualTo("-DappName=foobar");
assertThat(executorConfig.get(SecurityOptions.SSL_INTERNAL_KEY_PASSWORD))
.isEqualTo("changeit");
}
@Test
void testCorrectSettingOfMaxSlots() throws Exception {
String[] params = new String[] {"-ys", "3"};
final Configuration configuration = createConfigurationWithJmAndTmTotalMemory(2048);
final FlinkYarnSessionCli yarnCLI = createFlinkYarnSessionCli(configuration);
final CommandLine commandLine = yarnCLI.parseCommandLineOptions(params, true);
configuration.addAll(yarnCLI.toConfiguration(commandLine));
final ClusterClientFactory<ApplicationId> clientFactory =
getClusterClientFactory(configuration);
final ClusterSpecification clusterSpecification =
clientFactory.getClusterSpecification(configuration);
// each task manager has 3 slots but the parallelism is 7. Thus the slots should be
// increased.
assertThat(clusterSpecification.getSlotsPerTaskManager()).isEqualTo(3);
}
@Test
void testCorrectSettingOfDetachedMode() throws Exception {
final String[] params = new String[] {"-d"};
FlinkYarnSessionCli yarnCLI = createFlinkYarnSessionCli();
final CommandLine commandLine = yarnCLI.parseCommandLineOptions(params, true);
final Configuration executorConfig = yarnCLI.toConfiguration(commandLine);
assertThat(executorConfig.get(DeploymentOptions.ATTACHED)).isFalse();
}
@Test
void testZookeeperNamespaceProperty() throws Exception {
String zkNamespaceCliInput = "flink_test_namespace";
String[] params = new String[] {"-yz", zkNamespaceCliInput};
FlinkYarnSessionCli yarnCLI = createFlinkYarnSessionCli();
CommandLine commandLine = yarnCLI.parseCommandLineOptions(params, true);
Configuration executorConfig = yarnCLI.toConfiguration(commandLine);
assertThat(executorConfig.get(HighAvailabilityOptions.HA_CLUSTER_ID))
.isEqualTo(zkNamespaceCliInput);
}
@Test
void testNodeLabelProperty() throws Exception {
String nodeLabelCliInput = "flink_test_nodelabel";
String[] params = new String[] {"-ynl", nodeLabelCliInput};
FlinkYarnSessionCli yarnCLI = createFlinkYarnSessionCli();
CommandLine commandLine = yarnCLI.parseCommandLineOptions(params, true);
Configuration executorConfig = yarnCLI.toConfiguration(commandLine);
ClusterClientFactory<ApplicationId> clientFactory = getClusterClientFactory(executorConfig);
YarnClusterDescriptor descriptor =
(YarnClusterDescriptor) clientFactory.createClusterDescriptor(executorConfig);
assertThat(descriptor.getNodeLabel()).isEqualTo(nodeLabelCliInput);
}
private void validateExecutorCLIisPrioritised(
Configuration configuration, String[] argsUnderTest)
throws IOException, CliArgsException {
final List<CustomCommandLine> customCommandLines =
CliFrontend.loadCustomCommandLines(
configuration,
Files.createTempFile(tmp, UUID.randomUUID().toString(), "")
.toFile()
.getAbsolutePath());
final CliFrontend cli = new CliFrontend(configuration, customCommandLines);
final CommandLine commandLine =
cli.getCommandLine(CliFrontendParser.getRunCommandOptions(), argsUnderTest, true);
final CustomCommandLine customCommandLine =
cli.validateAndGetActiveCommandLine(commandLine);
assertThat(customCommandLine).isInstanceOf(GenericCLI.class);
}
private void validateYarnCLIisActive(Configuration configuration)
throws FlinkException, CliArgsException {
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli(configuration);
final CommandLine testCLIArgs =
flinkYarnSessionCli.parseCommandLineOptions(new String[] {}, true);
assertThat(flinkYarnSessionCli.isActive(testCLIArgs)).isTrue();
}
/**
* Test that the CliFrontend is able to pick up the .yarn-properties file from a specified
* location.
*/
@Test
void testResumeFromYarnPropertiesFile() throws Exception {
File directoryPath = writeYarnPropertiesFile(validPropertiesFile);
final Configuration configuration = new Configuration();
configuration.set(
YarnConfigOptions.PROPERTIES_FILE_LOCATION, directoryPath.getAbsolutePath());
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli(configuration);
final CommandLine commandLine =
flinkYarnSessionCli.parseCommandLineOptions(new String[] {}, true);
final Configuration executorConfig = flinkYarnSessionCli.toConfiguration(commandLine);
final ClusterClientFactory<ApplicationId> clientFactory =
getClusterClientFactory(executorConfig);
final ApplicationId clusterId = clientFactory.getClusterId(executorConfig);
assertThat(clusterId).isEqualTo(TEST_YARN_APPLICATION_ID);
}
/**
* Tests that we fail when reading an invalid yarn properties file when retrieving the cluster
* id.
*/
@Test
void testInvalidYarnPropertiesFile() throws Exception {
File directoryPath = writeYarnPropertiesFile(invalidPropertiesFile);
final Configuration configuration = new Configuration();
configuration.set(
YarnConfigOptions.PROPERTIES_FILE_LOCATION, directoryPath.getAbsolutePath());
assertThatThrownBy(() -> createFlinkYarnSessionCli(configuration))
.isInstanceOf(FlinkException.class);
}
@Test
void testResumeFromYarnID() throws Exception {
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli();
final CommandLine commandLine =
flinkYarnSessionCli.parseCommandLineOptions(
new String[] {"-yid", TEST_YARN_APPLICATION_ID.toString()}, true);
final Configuration executorConfig = flinkYarnSessionCli.toConfiguration(commandLine);
final ClusterClientFactory<ApplicationId> clientFactory =
getClusterClientFactory(executorConfig);
final ApplicationId clusterId = clientFactory.getClusterId(executorConfig);
assertThat(clusterId).isEqualTo(TEST_YARN_APPLICATION_ID);
}
@Test
void testResumeFromYarnIDZookeeperNamespace() throws Exception {
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli();
final CommandLine commandLine =
flinkYarnSessionCli.parseCommandLineOptions(
new String[] {"-yid", TEST_YARN_APPLICATION_ID.toString()}, true);
final Configuration executorConfig = flinkYarnSessionCli.toConfiguration(commandLine);
final ClusterClientFactory<ApplicationId> clientFactory =
getClusterClientFactory(executorConfig);
final YarnClusterDescriptor clusterDescriptor =
(YarnClusterDescriptor) clientFactory.createClusterDescriptor(executorConfig);
final Configuration clusterDescriptorConfiguration =
clusterDescriptor.getFlinkConfiguration();
String zkNs =
clusterDescriptorConfiguration.getValue(HighAvailabilityOptions.HA_CLUSTER_ID);
assertThat(zkNs).matches("application_\\d+_0042");
}
@Test
void testResumeFromYarnIDZookeeperNamespaceOverride() throws Exception {
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli();
final String overrideZkNamespace = "my_cluster";
final CommandLine commandLine =
flinkYarnSessionCli.parseCommandLineOptions(
new String[] {
"-yid", TEST_YARN_APPLICATION_ID.toString(), "-yz", overrideZkNamespace
},
true);
final Configuration executorConfig = flinkYarnSessionCli.toConfiguration(commandLine);
final ClusterClientFactory<ApplicationId> clientFactory =
getClusterClientFactory(executorConfig);
final YarnClusterDescriptor clusterDescriptor =
(YarnClusterDescriptor) clientFactory.createClusterDescriptor(executorConfig);
final Configuration clusterDescriptorConfiguration =
clusterDescriptor.getFlinkConfiguration();
final String clusterId =
clusterDescriptorConfiguration.getValue(HighAvailabilityOptions.HA_CLUSTER_ID);
assertThat(clusterId).isEqualTo(overrideZkNamespace);
}
@Test
void testYarnIDOverridesPropertiesFile() throws Exception {
File directoryPath = writeYarnPropertiesFile(validPropertiesFile);
final Configuration configuration = new Configuration();
configuration.set(
YarnConfigOptions.PROPERTIES_FILE_LOCATION, directoryPath.getAbsolutePath());
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli(configuration);
final CommandLine commandLine =
flinkYarnSessionCli.parseCommandLineOptions(
new String[] {"-yid", TEST_YARN_APPLICATION_ID_2.toString()}, true);
final Configuration executorConfig = flinkYarnSessionCli.toConfiguration(commandLine);
final ClusterClientFactory<ApplicationId> clientFactory =
getClusterClientFactory(executorConfig);
final ApplicationId clusterId = clientFactory.getClusterId(executorConfig);
assertThat(clusterId).isEqualTo(TEST_YARN_APPLICATION_ID_2);
}
/**
* Tests that the command line arguments override the configuration settings when the {@link
* ClusterSpecification} is created.
*/
@Test
void testCommandLineClusterSpecification() throws Exception {
final Configuration configuration = new Configuration();
final int jobManagerMemory = 1337;
final int taskManagerMemory = 7331;
final int slotsPerTaskManager = 30;
configuration.set(
JobManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.ofMebiBytes(jobManagerMemory));
configuration.set(
TaskManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.ofMebiBytes(taskManagerMemory));
configuration.set(TaskManagerOptions.NUM_TASK_SLOTS, slotsPerTaskManager);
final String[] args = {
"-yjm",
jobManagerMemory + "m",
"-ytm",
taskManagerMemory + "m",
"-ys",
String.valueOf(slotsPerTaskManager)
};
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli(configuration);
CommandLine commandLine = flinkYarnSessionCli.parseCommandLineOptions(args, false);
Configuration executorConfig = flinkYarnSessionCli.toConfiguration(commandLine);
ClusterClientFactory<ApplicationId> clientFactory = getClusterClientFactory(executorConfig);
ClusterSpecification clusterSpecification =
clientFactory.getClusterSpecification(executorConfig);
assertThat(clusterSpecification.getMasterMemoryMB()).isEqualTo(jobManagerMemory);
assertThat(clusterSpecification.getTaskManagerMemoryMB()).isEqualTo(taskManagerMemory);
assertThat(clusterSpecification.getSlotsPerTaskManager()).isEqualTo(slotsPerTaskManager);
}
/**
* Tests that the configuration settings are used to create the {@link ClusterSpecification}.
*/
@Test
void testConfigurationClusterSpecification() throws Exception {
final Configuration configuration = new Configuration();
final int jobManagerMemory = 1337;
configuration.set(
JobManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.ofMebiBytes(jobManagerMemory));
final int taskManagerMemory = 7331;
configuration.set(
TaskManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.ofMebiBytes(taskManagerMemory));
final int slotsPerTaskManager = 42;
configuration.set(TaskManagerOptions.NUM_TASK_SLOTS, slotsPerTaskManager);
final String[] args = {};
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli(configuration);
CommandLine commandLine = flinkYarnSessionCli.parseCommandLineOptions(args, false);
configuration.addAll(flinkYarnSessionCli.toConfiguration(commandLine));
ClusterClientFactory<ApplicationId> clientFactory = getClusterClientFactory(configuration);
ClusterSpecification clusterSpecification =
clientFactory.getClusterSpecification(configuration);
assertThat(clusterSpecification.getMasterMemoryMB()).isEqualTo(jobManagerMemory);
assertThat(clusterSpecification.getTaskManagerMemoryMB()).isEqualTo(taskManagerMemory);
assertThat(clusterSpecification.getSlotsPerTaskManager()).isEqualTo(slotsPerTaskManager);
}
/** Tests the specifying total process memory without unit for job manager and task manager. */
@Test
void testMemoryPropertyWithoutUnit() throws Exception {
final String[] args = new String[] {"-yjm", "1024", "-ytm", "2048"};
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli();
final CommandLine commandLine = flinkYarnSessionCli.parseCommandLineOptions(args, false);
final Configuration executorConfig = flinkYarnSessionCli.toConfiguration(commandLine);
final ClusterClientFactory<ApplicationId> clientFactory =
getClusterClientFactory(executorConfig);
final ClusterSpecification clusterSpecification =
clientFactory.getClusterSpecification(executorConfig);
assertThat(clusterSpecification.getMasterMemoryMB()).isEqualTo(1024);
assertThat(clusterSpecification.getTaskManagerMemoryMB()).isEqualTo(2048);
}
/**
* Tests the specifying total process memory with unit (MB) for job manager and task manager.
*/
@Test
void testMemoryPropertyWithUnitMB() throws Exception {
final String[] args = new String[] {"-yjm", "1024m", "-ytm", "2048m"};
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli();
final CommandLine commandLine = flinkYarnSessionCli.parseCommandLineOptions(args, false);
final Configuration executorConfig = flinkYarnSessionCli.toConfiguration(commandLine);
final ClusterClientFactory<ApplicationId> clientFactory =
getClusterClientFactory(executorConfig);
final ClusterSpecification clusterSpecification =
clientFactory.getClusterSpecification(executorConfig);
assertThat(clusterSpecification.getMasterMemoryMB()).isEqualTo(1024);
assertThat(clusterSpecification.getTaskManagerMemoryMB()).isEqualTo(2048);
}
/**
* Tests the specifying total process memory with arbitrary unit for job manager and task
* manager.
*/
@Test
void testMemoryPropertyWithArbitraryUnit() throws Exception {
final String[] args = new String[] {"-yjm", "1g", "-ytm", "2g"};
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli();
final CommandLine commandLine = flinkYarnSessionCli.parseCommandLineOptions(args, false);
final Configuration executorConfig = flinkYarnSessionCli.toConfiguration(commandLine);
final ClusterClientFactory<ApplicationId> clientFactory =
getClusterClientFactory(executorConfig);
final ClusterSpecification clusterSpecification =
clientFactory.getClusterSpecification(executorConfig);
assertThat(clusterSpecification.getMasterMemoryMB()).isEqualTo(1024);
assertThat(clusterSpecification.getTaskManagerMemoryMB()).isEqualTo(2048);
}
/** Tests the specifying heap memory with old config key for job manager and task manager. */
@Test
void testHeapMemoryPropertyWithOldConfigKey() throws Exception {
Configuration configuration = new Configuration();
configuration.set(JobManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.parse("2048m"));
configuration.set(TaskManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.parse("4096m"));
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli(configuration);
final CommandLine commandLine =
flinkYarnSessionCli.parseCommandLineOptions(new String[0], false);
configuration.addAll(flinkYarnSessionCli.toConfiguration(commandLine));
final ClusterClientFactory<ApplicationId> clientFactory =
getClusterClientFactory(configuration);
final ClusterSpecification clusterSpecification =
clientFactory.getClusterSpecification(configuration);
assertThat(clusterSpecification.getMasterMemoryMB()).isEqualTo(2048);
assertThat(clusterSpecification.getTaskManagerMemoryMB()).isEqualTo(4096);
}
/**
* Tests the specifying job manager total process memory with config default value for job
* manager and task manager.
*/
@Test
void testJobManagerMemoryPropertyWithConfigDefaultValue() throws Exception {
int procMemory = 2048;
final Configuration configuration = createConfigurationWithJmAndTmTotalMemory(procMemory);
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli(configuration);
final CommandLine commandLine =
flinkYarnSessionCli.parseCommandLineOptions(new String[0], false);
configuration.addAll(flinkYarnSessionCli.toConfiguration(commandLine));
final ClusterClientFactory<ApplicationId> clientFactory =
getClusterClientFactory(configuration);
final ClusterSpecification clusterSpecification =
clientFactory.getClusterSpecification(configuration);
assertThat(clusterSpecification.getMasterMemoryMB()).isEqualTo(procMemory);
assertThat(clusterSpecification.getTaskManagerMemoryMB()).isEqualTo(procMemory);
}
@Test
void testMultipleYarnShipOptions() throws Exception {
final String[] args =
new String[] {
"run",
"--yarnship",
Files.createTempDirectory(tmp, UUID.randomUUID().toString())
.toFile()
.getAbsolutePath(),
"--yarnship",
Files.createTempDirectory(tmp, UUID.randomUUID().toString())
.toFile()
.getAbsolutePath()
};
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli();
final CommandLine commandLine = flinkYarnSessionCli.parseCommandLineOptions(args, false);
final Configuration executorConfig = flinkYarnSessionCli.toConfiguration(commandLine);
final ClusterClientFactory<ApplicationId> clientFactory =
getClusterClientFactory(executorConfig);
YarnClusterDescriptor flinkYarnDescriptor =
(YarnClusterDescriptor) clientFactory.createClusterDescriptor(executorConfig);
assertThat(flinkYarnDescriptor.getShipFiles()).hasSize(2);
}
@Test
void testShipFiles() throws Exception {
File tmpFile = Files.createTempFile(tmp, UUID.randomUUID().toString(), "").toFile();
final String[] args = new String[] {"run", "--yarnship", tmpFile.toString()};
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli();
final CommandLine commandLine = flinkYarnSessionCli.parseCommandLineOptions(args, false);
final Configuration executorConfig = flinkYarnSessionCli.toConfiguration(commandLine);
final ClusterClientFactory<ApplicationId> clientFactory =
getClusterClientFactory(executorConfig);
YarnClusterDescriptor flinkYarnDescriptor =
(YarnClusterDescriptor) clientFactory.createClusterDescriptor(executorConfig);
assertThat(flinkYarnDescriptor.getShipFiles())
.containsExactly(getPathFromLocalFile(tmpFile));
}
@Test
void testMissingShipFiles() throws Exception {
File tmpFile = Files.createTempFile(tmp, UUID.randomUUID().toString(), "").toFile();
final String[] args =
new String[] {
"run", "--yarnship", tmpFile.toString(), "--yarnship", "missing.file"
};
final FlinkYarnSessionCli flinkYarnSessionCli = createFlinkYarnSessionCli();
final CommandLine commandLine = flinkYarnSessionCli.parseCommandLineOptions(args, false);
assertThatThrownBy(() -> flinkYarnSessionCli.toConfiguration(commandLine))
.isInstanceOf(ConfigurationException.class)
.hasMessage("Ship file missing.file does not exist");
}
///////////
// Utils //
///////////
private ClusterClientFactory<ApplicationId> getClusterClientFactory(
final Configuration executorConfig) {
final ClusterClientServiceLoader clusterClientServiceLoader =
new DefaultClusterClientServiceLoader();
return clusterClientServiceLoader.getClusterClientFactory(executorConfig);
}
private File writeYarnPropertiesFile(String contents) throws IOException {
File tmpFolder = Files.createTempDirectory(tmp, UUID.randomUUID().toString()).toFile();
String currentUser = System.getProperty("user.name");
// copy .yarn-properties-<username>
File testPropertiesFile = new File(tmpFolder, ".yarn-properties-" + currentUser);
Files.write(testPropertiesFile.toPath(), contents.getBytes(), StandardOpenOption.CREATE);
return tmpFolder.getAbsoluteFile();
}
private FlinkYarnSessionCli createFlinkYarnSessionCli() throws FlinkException {
return createFlinkYarnSessionCli(new Configuration());
}
private Configuration createConfigurationWithJmAndTmTotalMemory(int totalMemory) {
Configuration configuration = new Configuration();
configuration.set(
JobManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.ofMebiBytes(totalMemory));
configuration.set(
TaskManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.ofMebiBytes(totalMemory));
return configuration;
}
private FlinkYarnSessionCli createFlinkYarnSessionCli(Configuration configuration)
throws FlinkException {
return new FlinkYarnSessionCli(configuration, tmp.toFile().getAbsolutePath(), "y", "yarn");
}
}
|
FlinkYarnSessionCliTest
|
java
|
google__gson
|
extras/src/test/java/com/google/gson/typeadapters/RuntimeTypeAdapterFactoryTest.java
|
{
"start": 1887,
"end": 8429
}
|
class ____ has recognizeSubtypes(). So it should recognize CreditCard, and
// when we call gson.toJson(original) below, without an explicit type, it should be invoked.
RuntimeTypeAdapterFactory<BillingInstrument> rta =
RuntimeTypeAdapterFactory.of(BillingInstrument.class)
.recognizeSubtypes()
.registerSubtype(CreditCard.class);
Gson gson = new GsonBuilder().registerTypeAdapterFactory(rta).create();
CreditCard original = new CreditCard("Jesse", 234);
assertThat(gson.toJson(original))
.isEqualTo("{\"type\":\"CreditCard\",\"cvv\":234,\"ownerName\":\"Jesse\"}");
BillingInstrument deserialized =
gson.fromJson("{type:'CreditCard',cvv:234,ownerName:'Jesse'}", BillingInstrument.class);
assertThat(deserialized.ownerName).isEqualTo("Jesse");
assertThat(deserialized).isInstanceOf(CreditCard.class);
}
@Test
public void testRuntimeTypeIsBaseType() {
TypeAdapterFactory rta =
RuntimeTypeAdapterFactory.of(BillingInstrument.class)
.registerSubtype(BillingInstrument.class);
Gson gson = new GsonBuilder().registerTypeAdapterFactory(rta).create();
BillingInstrument original = new BillingInstrument("Jesse");
assertThat(gson.toJson(original, BillingInstrument.class))
.isEqualTo("{\"type\":\"BillingInstrument\",\"ownerName\":\"Jesse\"}");
BillingInstrument deserialized =
gson.fromJson("{type:'BillingInstrument',ownerName:'Jesse'}", BillingInstrument.class);
assertThat(deserialized.ownerName).isEqualTo("Jesse");
}
@Test
public void testNullBaseType() {
assertThrows(NullPointerException.class, () -> RuntimeTypeAdapterFactory.of(null));
}
@Test
public void testNullTypeFieldName() {
assertThrows(
NullPointerException.class,
() -> RuntimeTypeAdapterFactory.of(BillingInstrument.class, null));
}
@Test
public void testNullSubtype() {
RuntimeTypeAdapterFactory<BillingInstrument> rta =
RuntimeTypeAdapterFactory.of(BillingInstrument.class);
assertThrows(NullPointerException.class, () -> rta.registerSubtype(null));
}
@Test
public void testNullLabel() {
RuntimeTypeAdapterFactory<BillingInstrument> rta =
RuntimeTypeAdapterFactory.of(BillingInstrument.class);
assertThrows(NullPointerException.class, () -> rta.registerSubtype(CreditCard.class, null));
}
@Test
public void testDuplicateSubtype() {
RuntimeTypeAdapterFactory<BillingInstrument> rta =
RuntimeTypeAdapterFactory.of(BillingInstrument.class);
rta.registerSubtype(CreditCard.class, "CC");
var e =
assertThrows(
IllegalArgumentException.class, () -> rta.registerSubtype(CreditCard.class, "Visa"));
assertThat(e).hasMessageThat().isEqualTo("types and labels must be unique");
}
@Test
public void testDuplicateLabel() {
RuntimeTypeAdapterFactory<BillingInstrument> rta =
RuntimeTypeAdapterFactory.of(BillingInstrument.class);
rta.registerSubtype(CreditCard.class, "CC");
var e =
assertThrows(
IllegalArgumentException.class, () -> rta.registerSubtype(BankTransfer.class, "CC"));
assertThat(e).hasMessageThat().isEqualTo("types and labels must be unique");
}
@Test
public void testDeserializeMissingTypeField() {
TypeAdapterFactory billingAdapter =
RuntimeTypeAdapterFactory.of(BillingInstrument.class).registerSubtype(CreditCard.class);
Gson gson = new GsonBuilder().registerTypeAdapterFactory(billingAdapter).create();
var e =
assertThrows(
JsonParseException.class,
() -> gson.fromJson("{ownerName:'Jesse'}", BillingInstrument.class));
assertThat(e)
.hasMessageThat()
.isEqualTo(
"cannot deserialize "
+ BillingInstrument.class
+ " because it does not define a field named type");
}
@Test
public void testDeserializeMissingSubtype() {
TypeAdapterFactory billingAdapter =
RuntimeTypeAdapterFactory.of(BillingInstrument.class).registerSubtype(BankTransfer.class);
Gson gson = new GsonBuilder().registerTypeAdapterFactory(billingAdapter).create();
var e =
assertThrows(
JsonParseException.class,
() -> gson.fromJson("{type:'CreditCard',ownerName:'Jesse'}", BillingInstrument.class));
assertThat(e)
.hasMessageThat()
.isEqualTo(
"cannot deserialize "
+ BillingInstrument.class
+ " subtype named CreditCard; did you forget to register a subtype?");
}
@Test
public void testSerializeMissingSubtype() {
TypeAdapterFactory billingAdapter =
RuntimeTypeAdapterFactory.of(BillingInstrument.class).registerSubtype(BankTransfer.class);
Gson gson = new GsonBuilder().registerTypeAdapterFactory(billingAdapter).create();
var e =
assertThrows(
JsonParseException.class,
() -> gson.toJson(new CreditCard("Jesse", 456), BillingInstrument.class));
assertThat(e)
.hasMessageThat()
.isEqualTo(
"cannot serialize "
+ CreditCard.class.getName()
+ "; did you forget to register a subtype?");
}
@Test
public void testSerializeCollidingTypeFieldName() {
TypeAdapterFactory billingAdapter =
RuntimeTypeAdapterFactory.of(BillingInstrument.class, "cvv")
.registerSubtype(CreditCard.class);
Gson gson = new GsonBuilder().registerTypeAdapterFactory(billingAdapter).create();
var e =
assertThrows(
JsonParseException.class,
() -> gson.toJson(new CreditCard("Jesse", 456), BillingInstrument.class));
assertThat(e)
.hasMessageThat()
.isEqualTo(
"cannot serialize "
+ CreditCard.class.getName()
+ " because it already defines a field named cvv");
}
@Test
public void testSerializeWrappedNullValue() {
TypeAdapterFactory billingAdapter =
RuntimeTypeAdapterFactory.of(BillingInstrument.class)
.registerSubtype(CreditCard.class)
.registerSubtype(BankTransfer.class);
Gson gson = new GsonBuilder().registerTypeAdapterFactory(billingAdapter).create();
String serialized =
gson.toJson(new BillingInstrumentWrapper(null), BillingInstrumentWrapper.class);
BillingInstrumentWrapper deserialized =
gson.fromJson(serialized, BillingInstrumentWrapper.class);
assertThat(deserialized.instrument).isNull();
}
static
|
that
|
java
|
quarkusio__quarkus
|
independent-projects/tools/analytics-common/src/main/java/io/quarkus/analytics/ConfigService.java
|
{
"start": 1039,
"end": 11112
}
|
class ____ {
public static final String QUARKUS_ANALYTICS_DISABLED_LOCAL_PROP = "quarkus.analytics.disabled";
public static final String QUARKUS_ANALYTICS_PROMPT_TIMEOUT = "quarkus.analytics.prompt.timeout";
private static final String NEW_LINE = System.lineSeparator();
public static final String ACCEPTANCE_PROMPT = NEW_LINE
+ "----------------------------" + NEW_LINE
+ "--- Help improve Quarkus ---" + NEW_LINE
+ "----------------------------" + NEW_LINE
+ "* Learn more: https://quarkus.io/usage/" + NEW_LINE
+ "* Do you agree to contribute anonymous build time data to the Quarkus community? (y/n and enter) " + NEW_LINE;
private static final int DEFAULT_REFRESH_HOURS = 12;
private AnalyticsRemoteConfig config;
private Instant lastRefreshTime;
final private ConfigClient client;
final private AnonymousUserId userId;
final private Path remoteConfigFile;
final private Path localConfigFile;
final private MessageWriter log;
private static Instant initLastRefreshTime(final Path configFile) {
if (Files.exists(configFile)) {
try {
return Files.getLastModifiedTime(configFile).toInstant();
} catch (IOException e) {
return null;
}
} else {
return null;
}
}
public ConfigService(final ConfigClient client, final AnonymousUserId userId, final FileLocations fileLocations,
final MessageWriter log) {
this.client = client;
this.userId = userId;
this.log = log;
this.lastRefreshTime = initLastRefreshTime(fileLocations.getRemoteConfigFile());
this.remoteConfigFile = fileLocations.getRemoteConfigFile();
this.localConfigFile = fileLocations.getLocalConfigFile();
}
public void userAcceptance(Function<String, String> analyticsEnabledSupplier) {
final int timeout = getProperty(QUARKUS_ANALYTICS_PROMPT_TIMEOUT, 10);
if (Files.exists(localConfigFile) || getProperty(QUARKUS_ANALYTICS_DISABLED_LOCAL_PROP, false)) {
return; // ask nothing
} else {
try {
CompletableFuture<String> userInputFuture = CompletableFuture
.supplyAsync(() -> analyticsEnabledSupplier.apply(ACCEPTANCE_PROMPT));
final String userInput = userInputFuture.get(timeout, TimeUnit.SECONDS).toLowerCase().trim();
if (!validInput(userInput)) {
log.info("[Quarkus build analytics] Didn't receive a valid user's answer: `y` or `n`. " +
"The question will be asked again next time." + NEW_LINE);
return;
}
final boolean isActive = userInput.equals("y") || userInput.equals("yes") || userInput.startsWith("yy");
FileUtils.createFileAndParent(localConfigFile);
final boolean isDisabled = !isActive;// just to make it explicit
FileUtils.write(new LocalConfig(isDisabled), localConfigFile);
log.info("[Quarkus build analytics] Quarkus Build Analytics " + (isActive ? "enabled" : "disabled")
+ " by the user." + NEW_LINE);
} catch (TimeoutException e) {
log.info("[Quarkus build analytics] Didn't receive the user's answer after " + timeout + " seconds. " +
"The question will be asked again next time." + NEW_LINE);
} catch (Exception e) {
log.info("[Quarkus build analytics] Analytics config file was not written successfully. " +
e.getClass().getName() + ": " + (e.getMessage() == null ? "(no message)" : e.getMessage()));
}
}
}
/**
* True if build time analytics can be gathered.
* <p>
* <p>
* Disabled by default.
* <p>
* If running on CI, false.
* <p>
* If Not explicitly approved by user in dev mode, false
* <p>
* If analytics disabled by local property, false
* <p>
* If remote config not accessible, false
* <p>
* If disabled by remote config, false
*
* @return true if active
*/
public boolean isActive() {
if (isCi()) {
if (log.isDebugEnabled()) {
log.debug("[Quarkus build analytics] Running on CI. Skipping analytics.");
}
return false;
}
if (!isLocalConfigActive()) {
if (log.isDebugEnabled()) {
log.debug("[Quarkus build analytics] Local config is not active. Skipping analytics.");
}
return false;
}
AnalyticsRemoteConfig analyticsRemoteConfig = getRemoteConfig();
if (!analyticsRemoteConfig.isActive()) {
if (log.isDebugEnabled()) {
log.debug("[Quarkus build analytics] Remote config is not active. Skipping analytics.");
}
return false;
}
if (!isUserEnabled(analyticsRemoteConfig, userId.getUuid())) {
if (log.isDebugEnabled()) {
log.debug("[Quarkus build analytics] Remote config is not active for anonymous user. " +
"Skipping analytics.");
}
return false;
}
return true;
}
private boolean isCi() {
return "true".equalsIgnoreCase(System.getenv("CI"));
}
boolean isLocalConfigActive() {
if (getProperty(QUARKUS_ANALYTICS_DISABLED_LOCAL_PROP, false)) {
return false; // disabled by local property
}
if (!Files.exists(localConfigFile)) {
return false; // disabled because user has not decided yet
} else if (!loadConfig(LocalConfig.class, localConfigFile)
.map(localConfig -> !localConfig.isDisabled())
.orElse(true)) {
return false; // disabled by the user and recorded on the local config
}
return true;
}
/**
* If groupId has been disabled by local static config, false
* If Quarkus version has been disabled by remote config, false
*
* @param groupId
* @param quarkusVersion
* @return true if active
*/
public boolean isArtifactActive(final String groupId, final String quarkusVersion) {
return GroupIdFilter.isAuthorizedGroupId(groupId, log) &&
this.getRemoteConfig().getDenyQuarkusVersions().stream()
.noneMatch(version -> version.equals(quarkusVersion));
}
boolean isUserEnabled(final AnalyticsRemoteConfig analyticsRemoteConfig, final String user) {
return analyticsRemoteConfig.getDenyAnonymousIds().stream()
.noneMatch(uId -> uId.equals(user));
}
AnalyticsRemoteConfig getRemoteConfig() {
try {
if (!isLocalConfigActive()) {
return checkAgainConfig(); // disabled. Will check again in a few hours.
}
if (this.config == null || shouldRefreshRemoteConfig(this.config)) {
this.config = loadConfig(RemoteConfig.class, remoteConfigFile)
.filter(remoteConfig -> !shouldRefreshRemoteConfig(remoteConfig))
.orElseGet(() -> (RemoteConfig) loadConfigFromInternet());
}
return this.config;
} catch (Exception e) {
if (log.isDebugEnabled()) {
log.debug("[Quarkus build analytics] Failed to load remote config. Will check again latter. " +
"Exception: " + e.getMessage());
}
this.config = checkAgainConfig();
return this.config;
}
}
private boolean validInput(String input) {
String[] allowedValues = { "n", "nn", "no", "y", "yy", "yes" };
for (String allowedValue : allowedValues) {
if (input.equalsIgnoreCase(allowedValue)) {
return true;
}
}
return false;
}
private boolean shouldRefreshRemoteConfig(final AnalyticsRemoteConfig remoteConfig) {
return lastRefreshTime == null ||
Duration.between(
lastRefreshTime,
Instant.now()).compareTo(
remoteConfig.getRefreshInterval()) > 0;
}
private <T> Optional<T> loadConfig(Class<T> clazz, Path file) {
try {
if (Files.exists(file)) {
return FileUtils.read(clazz, file, log);
}
return Optional.empty();
} catch (IOException e) {
log.warn("[Quarkus build analytics] Failed to read " + file.getFileName() + ". Exception: " + e.getMessage());
return Optional.empty();
}
}
private AnalyticsRemoteConfig loadConfigFromInternet() {
AnalyticsRemoteConfig analyticsRemoteConfig = this.client.getConfig().orElse(checkAgainConfig());
this.lastRefreshTime = Instant.now();
return storeRemoteConfigOnDisk(analyticsRemoteConfig);
}
private AnalyticsRemoteConfig storeRemoteConfigOnDisk(AnalyticsRemoteConfig config) {
try {
if (!Files.exists(remoteConfigFile)) {
FileUtils.createFileAndParent(remoteConfigFile);
}
FileUtils.write(config, remoteConfigFile);
return config;
} catch (IOException e) {
log.warn("[Quarkus build analytics] Failed to save remote config file. Analytics will be skipped. Exception: "
+ e.getMessage());
return NoopRemoteConfig.INSTANCE;// disable
}
}
private AnalyticsRemoteConfig checkAgainConfig() {
return RemoteConfig.builder()
.active(false)
.denyQuarkusVersions(Collections.emptyList())
.denyUserIds(Collections.emptyList())
.refreshInterval(Duration.ofHours(DEFAULT_REFRESH_HOURS)).build();
}
}
|
ConfigService
|
java
|
quarkusio__quarkus
|
extensions/reactive-routes/deployment/src/test/java/io/quarkus/vertx/web/failure/FailureHandlerPathTest.java
|
{
"start": 446,
"end": 785
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(Routes.class));
@Test
public void test() {
get("/fail").then().statusCode(500).body(is("no-path"));
}
public static
|
FailureHandlerPathTest
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/dataview/MapView.java
|
{
"start": 3881,
"end": 8999
}
|
class ____<K, V> implements DataView {
private Map<K, V> map = new HashMap<>();
/**
* Creates a map view.
*
* <p>The {@link DataType} of keys and values is reflectively extracted.
*/
public MapView() {
// default constructor
}
/** Returns the entire view's content as an instance of {@link Map}. */
public Map<K, V> getMap() {
return map;
}
/** Replaces the entire view's content with the content of the given {@link Map}. */
public void setMap(Map<K, V> map) {
this.map = map;
}
/**
* Return the value for the specified key or {@code null} if the key is not in the map view.
*
* @param key The key whose associated value is to be returned
* @return The value to which the specified key is mapped, or {@code null} if this map contains
* no mapping for the key
* @throws Exception Thrown if the system cannot get data.
*/
public V get(K key) throws Exception {
return map.get(key);
}
/**
* Inserts a value for the given key into the map view. If the map view already contains a value
* for the key, the existing value is overwritten.
*
* @param key The key for which the value is inserted.
* @param value The value that is inserted for the key.
* @throws Exception Thrown if the system cannot put data.
*/
public void put(K key, V value) throws Exception {
map.put(key, value);
}
/**
* Inserts all mappings from the specified map to this map view.
*
* @param map The map whose entries are inserted into this map view.
* @throws Exception Thrown if the system cannot access the map.
*/
public void putAll(Map<K, V> map) throws Exception {
this.map.putAll(map);
}
/**
* Deletes the value for the given key.
*
* @param key The key for which the value is deleted.
* @throws Exception Thrown if the system cannot access the map.
*/
public void remove(K key) throws Exception {
map.remove(key);
}
/**
* Checks if the map view contains a value for a given key.
*
* @param key The key to check.
* @return True if there exists a value for the given key, false otherwise.
* @throws Exception Thrown if the system cannot access the map.
*/
public boolean contains(K key) throws Exception {
return map.containsKey(key);
}
/**
* Returns all entries of the map view.
*
* @return An iterable of all the key-value pairs in the map view.
* @throws Exception Thrown if the system cannot access the map.
*/
public Iterable<Map.Entry<K, V>> entries() throws Exception {
return map.entrySet();
}
/**
* Returns all the keys in the map view.
*
* @return An iterable of all the keys in the map.
* @throws Exception Thrown if the system cannot access the map.
*/
public Iterable<K> keys() throws Exception {
return map.keySet();
}
/**
* Returns all the values in the map view.
*
* @return An iterable of all the values in the map.
* @throws Exception Thrown if the system cannot access the map.
*/
public Iterable<V> values() throws Exception {
return map.values();
}
/**
* Returns an iterator over all entries of the map view.
*
* @return An iterator over all the mappings in the map.
* @throws Exception Thrown if the system cannot access the map.
*/
public Iterator<Map.Entry<K, V>> iterator() throws Exception {
return map.entrySet().iterator();
}
/**
* Returns true if the map view contains no key-value mappings, otherwise false.
*
* @return True if the map view contains no key-value mappings, otherwise false.
* @throws Exception Thrown if the system cannot access the state.
*/
public boolean isEmpty() throws Exception {
return map.isEmpty();
}
/** Removes all entries of this map. */
@Override
public void clear() {
map.clear();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof MapView)) {
return false;
}
final MapView<?, ?> mapView = (MapView<?, ?>) o;
return getMap().equals(mapView.getMap());
}
@Override
public int hashCode() {
return Objects.hash(getMap());
}
// --------------------------------------------------------------------------------------------
// Utilities
// --------------------------------------------------------------------------------------------
/** Utility method for creating a {@link DataType} of {@link MapView} explicitly. */
public static DataType newMapViewDataType(DataType keyDataType, DataType valueDataType) {
return DataTypes.STRUCTURED(
MapView.class,
DataTypes.FIELD(
"map", DataTypes.MAP(keyDataType, valueDataType).bridgedTo(Map.class)));
}
}
|
MapView
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/annotations/BuildSteps.java
|
{
"start": 597,
"end": 804
}
|
class ____ be enabled/disabled
* based on a single condition, for example based on configuration properties.
*
* @see BuildStep
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @
|
should
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java
|
{
"start": 33036,
"end": 33128
}
|
class ____ implements TransportInterceptor {
public static
|
InterceptingTransportService
|
java
|
alibaba__nacos
|
core/src/main/java/com/alibaba/nacos/core/distributed/distro/component/DistroComponentHolder.java
|
{
"start": 878,
"end": 2510
}
|
class ____ {
private final Map<String, DistroTransportAgent> transportAgentMap = new HashMap<>();
private final Map<String, DistroDataStorage> dataStorageMap = new HashMap<>();
private final Map<String, DistroFailedTaskHandler> failedTaskHandlerMap = new HashMap<>();
private final Map<String, DistroDataProcessor> dataProcessorMap = new HashMap<>();
public DistroTransportAgent findTransportAgent(String type) {
return transportAgentMap.get(type);
}
public void registerTransportAgent(String type, DistroTransportAgent transportAgent) {
transportAgentMap.put(type, transportAgent);
}
public DistroDataStorage findDataStorage(String type) {
return dataStorageMap.get(type);
}
public void registerDataStorage(String type, DistroDataStorage dataStorage) {
dataStorageMap.put(type, dataStorage);
}
public Set<String> getDataStorageTypes() {
return dataStorageMap.keySet();
}
public DistroFailedTaskHandler findFailedTaskHandler(String type) {
return failedTaskHandlerMap.get(type);
}
public void registerFailedTaskHandler(String type, DistroFailedTaskHandler failedTaskHandler) {
failedTaskHandlerMap.put(type, failedTaskHandler);
}
public void registerDataProcessor(DistroDataProcessor dataProcessor) {
dataProcessorMap.putIfAbsent(dataProcessor.processType(), dataProcessor);
}
public DistroDataProcessor findDataProcessor(String processType) {
return dataProcessorMap.get(processType);
}
}
|
DistroComponentHolder
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/core/publisher/ParallelFilterTest.java
|
{
"start": 844,
"end": 2064
}
|
class ____ {
@Test
public void parallelism() {
ParallelFlux<Integer> source = Flux.range(1, 4).parallel(3);
ParallelFilter<Integer> test = new ParallelFilter<>(source, i -> i % 2 == 0);
assertThat(test.parallelism())
.isEqualTo(3)
.isEqualTo(source.parallelism());
}
@Test
public void scanOperator() throws Exception {
ParallelFlux<Integer> source = Flux.range(1, 4).parallel(3);
ParallelFilter<Integer> test = new ParallelFilter<>(source, i -> i % 2 == 0);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(source);
assertThat(test.scan(Scannable.Attr.PREFETCH))
.isEqualTo(-1)
.isNotEqualTo(source.getPrefetch());
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
@Test
public void conditional() {
Flux<Integer> source = Flux.range(1, 1_000);
for (int i = 1; i < 33; i++) {
Flux<Integer> result = ParallelFlux.from(source, i)
.filter(t -> true)
.filter(t -> true)
.sequential();
StepVerifier.create(result)
.expectNextCount(1_000)
.verifyComplete();
}
}
}
|
ParallelFilterTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/Rounding.java
|
{
"start": 28640,
"end": 30618
}
|
class ____ extends TimeUnitPreparedRounding implements LocalTimeOffset.Strategy {
private final LocalTimeOffset.Lookup lookup;
ToMidnightRounding(LocalTimeOffset.Lookup lookup) {
this.lookup = lookup;
}
@Override
public long round(long utcMillis) {
LocalTimeOffset offset = lookup.lookup(utcMillis);
return offset.localToUtc(unit.roundFloor(offset.utcToLocalTime(utcMillis), multiplier), this);
}
@Override
public long nextRoundingValue(long utcMillis) {
// TODO this is actually used date range's collect so we should optimize it
return new JavaTimeToMidnightRounding().nextRoundingValue(utcMillis);
}
@Override
public long inGap(long localMillis, Gap gap) {
return gap.startUtcMillis();
}
@Override
public long beforeGap(long localMillis, Gap gap) {
return gap.previous().localToUtc(localMillis, this);
}
@Override
public long inOverlap(long localMillis, Overlap overlap) {
return overlap.previous().localToUtc(localMillis, this);
}
@Override
public long beforeOverlap(long localMillis, Overlap overlap) {
return overlap.previous().localToUtc(localMillis, this);
}
@Override
protected Prepared maybeUseArray(long minUtcMillis, long maxUtcMillis, int max) {
if (lookup.anyMoveBackToPreviousDay()) {
return this;
}
return super.maybeUseArray(minUtcMillis, maxUtcMillis, max);
}
@Override
public String toString() {
return TimeUnitRounding.this + "[across DST to midnight]";
}
}
private
|
ToMidnightRounding
|
java
|
elastic__elasticsearch
|
libs/core/src/main/java/org/elasticsearch/core/Strings.java
|
{
"start": 580,
"end": 1284
}
|
class ____ {
/**
* Returns a formatted string using the specified format string and
* arguments.
* <p>
* This method calls {@link String#format(Locale, String, Object...)}
* with Locale.ROOT
* If format is incorrect the function will return format without populating
* its variable placeholders.
*/
public static String format(String format, Object... args) {
try {
return String.format(Locale.ROOT, format, args);
} catch (Exception e) {
assert false : "Exception thrown when formatting [" + format + "]. " + e.getClass().getCanonicalName() + ". " + e.getMessage();
return format;
}
}
}
|
Strings
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/fuse/RrfScoreEvalOperatorTests.java
|
{
"start": 580,
"end": 2106
}
|
class ____ extends FuseOperatorTestCase {
protected RrfConfig config;
@Before
public void setup() {
config = randomConfig();
}
@Override
protected void assertSimpleOutput(List<Page> input, List<Page> results) {
Map<String, Integer> counts = new HashMap<>();
assertOutput(input, results, (discriminator, actualScore, initialScore) -> {
var rank = counts.getOrDefault(discriminator, 1);
var weight = config.weights().getOrDefault(discriminator, 1.0);
assertEquals(actualScore, 1.0 / (config.rankConstant() + rank) * weight, 0.0d);
counts.put(discriminator, rank + 1);
});
}
@Override
protected Operator.OperatorFactory simple(SimpleOptions options) {
return new RrfScoreEvalOperator.Factory(discriminatorPosition, scorePosition, config, null, 0, 0);
}
@Override
protected Matcher<String> expectedDescriptionOfSimple() {
return equalTo(
"RrfScoreEvalOperator[discriminatorPosition="
+ discriminatorPosition
+ ", scorePosition="
+ scorePosition
+ ", rrfConfig="
+ config
+ "]"
);
}
@Override
protected Matcher<String> expectedToStringOfSimple() {
return equalTo("RrfScoreEvalOperator");
}
private RrfConfig randomConfig() {
return new RrfConfig((double) randomIntBetween(1, 100), randomWeights());
}
}
|
RrfScoreEvalOperatorTests
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/core/dynamic/support/ParametrizedTypeInformationUnitTests.java
|
{
"start": 3911,
"end": 4004
}
|
interface ____ extends List<Iterable<Number>> {
}
private static
|
ListOfIterableOfNumber
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble8Evaluator.java
|
{
"start": 4679,
"end": 5928
}
|
class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory field;
private final double p0;
private final double p1;
private final double p2;
private final double p3;
private final double p4;
private final double p5;
private final double p6;
private final double p7;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, double p0,
double p1, double p2, double p3, double p4, double p5, double p6, double p7) {
this.source = source;
this.field = field;
this.p0 = p0;
this.p1 = p1;
this.p2 = p2;
this.p3 = p3;
this.p4 = p4;
this.p5 = p5;
this.p6 = p6;
this.p7 = p7;
}
@Override
public RoundToDouble8Evaluator get(DriverContext context) {
return new RoundToDouble8Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, p7, context);
}
@Override
public String toString() {
return "RoundToDouble8Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + ", p7=" + p7 + "]";
}
}
}
|
Factory
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/filter/factory/TokenRelayGatewayFilterFactory.java
|
{
"start": 1515,
"end": 4094
}
|
class ____
extends AbstractGatewayFilterFactory<AbstractGatewayFilterFactory.NameConfig> {
private final ObjectProvider<ReactiveOAuth2AuthorizedClientManager> clientManagerProvider;
public TokenRelayGatewayFilterFactory(ObjectProvider<ReactiveOAuth2AuthorizedClientManager> clientManagerProvider) {
super(NameConfig.class);
this.clientManagerProvider = clientManagerProvider;
}
@Override
public List<String> shortcutFieldOrder() {
return Collections.singletonList(NAME_KEY);
}
public GatewayFilter apply() {
return apply((NameConfig) null);
}
@Override
public GatewayFilter apply(@Nullable NameConfig config) {
String defaultClientRegistrationId = (config == null) ? null : config.getName();
return (exchange, chain) -> exchange.getPrincipal()
// .log("token-relay-filter")
.filter(principal -> principal instanceof Authentication)
.cast(Authentication.class)
.flatMap(principal -> authorizationRequest(defaultClientRegistrationId, principal))
.flatMap(this::authorizedClient)
.map(OAuth2AuthorizedClient::getAccessToken)
.map(token -> withBearerAuth(exchange, token))
// TODO: adjustable behavior if empty
.defaultIfEmpty(exchange)
.flatMap(chain::filter);
}
private Mono<OAuth2AuthorizeRequest> authorizationRequest(@Nullable String defaultClientRegistrationId,
Authentication principal) {
String clientRegistrationId = defaultClientRegistrationId;
if (clientRegistrationId == null && principal instanceof OAuth2AuthenticationToken) {
clientRegistrationId = ((OAuth2AuthenticationToken) principal).getAuthorizedClientRegistrationId();
}
return Mono.justOrEmpty(clientRegistrationId)
.map(OAuth2AuthorizeRequest::withClientRegistrationId)
.map(builder -> builder.principal(principal).build());
}
private Mono<OAuth2AuthorizedClient> authorizedClient(OAuth2AuthorizeRequest request) {
ReactiveOAuth2AuthorizedClientManager clientManager = clientManagerProvider.getIfAvailable();
if (clientManager == null) {
return Mono.error(new IllegalStateException(
"No ReactiveOAuth2AuthorizedClientManager bean was found. Did you include the "
+ "org.springframework.boot:spring-boot-starter-oauth2-client dependency?"));
}
// TODO: use Mono.defer() for request above?
return clientManager.authorize(request);
}
private ServerWebExchange withBearerAuth(ServerWebExchange exchange, OAuth2AccessToken accessToken) {
return exchange.mutate()
.request(r -> r.headers(headers -> headers.setBearerAuth(accessToken.getTokenValue())))
.build();
}
}
|
TokenRelayGatewayFilterFactory
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/nestedbeans/mixed/FishTankMapperExpression.java
|
{
"start": 510,
"end": 1098
}
|
interface ____ {
FishTankMapperExpression INSTANCE = Mappers.getMapper( FishTankMapperExpression.class );
@Mappings({
@Mapping(target = "fish.kind", source = "fish.type"),
@Mapping(target = "fish.name", expression = "java(\"Jaws\")"),
@Mapping(target = "plant", ignore = true ),
@Mapping(target = "ornament", ignore = true ),
@Mapping(target = "material", ignore = true),
@Mapping(target = "quality.report.organisation.name", expression = "java(\"Dunno\")" )
})
FishTankDto map( FishTank source );
}
|
FishTankMapperExpression
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/MixedMutabilityReturnTypeTest.java
|
{
"start": 5655,
"end": 6233
}
|
class ____ {
List<Integer> foo() {
if (hashCode() > 0) {
return Collections.emptyList();
}
List<Integer> ints = new ArrayList<>();
ints.add(1);
return ints;
}
}
""")
.addOutputLines(
"Test.java",
"""
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
final
|
Test
|
java
|
apache__flink
|
flink-python/src/test/java/org/apache/flink/table/utils/TestCollectionTableFactory.java
|
{
"start": 8082,
"end": 9459
}
|
class ____ implements DynamicTableSink {
private final DataType outputType;
public CollectionTableSink(DataType outputType) {
this.outputType = outputType;
}
@Override
public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
return requestedMode;
}
@Override
public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {
TypeInformation<Row> typeInformation = context.createTypeInformation(outputType);
DataStructureConverter converter = context.createDataStructureConverter(outputType);
return new DataStreamSinkProvider() {
@Override
public DataStreamSink<?> consumeDataStream(
ProviderContext providerContext, DataStream<RowData> dataStream) {
return dataStream
.addSink(new UnsafeMemorySinkFunction(typeInformation, converter))
.setParallelism(1);
}
};
}
@Override
public DynamicTableSink copy() {
return new CollectionTableSink(outputType);
}
@Override
public String asSummaryString() {
return String.format("CollectionTableSink(%s)", outputType);
}
}
static
|
CollectionTableSink
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/util/UrlCodecUtils.java
|
{
"start": 446,
"end": 3911
}
|
class ____ {
private UrlCodecUtils() {}
private static final char[] HEX_DIGITS = "0123456789ABCDEF".toCharArray();
public static BytesRef urlEncode(final BytesRef val, BreakingBytesRefBuilder scratch, final boolean plusForSpace) {
int size = computeSizeAfterEncoding(val, plusForSpace);
if (size == -1) {
// the input doesn't change after encoding so encoding can be skipped
return val;
}
scratch.grow(size);
scratch.clear();
int lo = val.offset;
int hi = val.offset + val.length;
for (int i = lo; i < hi; ++i) {
byte b = val.bytes[i];
char c = (char) (b & 0xFF);
if (plusForSpace && c == ' ') {
scratch.append((byte) '+');
continue;
}
if (isRfc3986Safe(c)) {
scratch.append(b);
continue;
}
// every encoded byte is represented by 3 chars: %XY
scratch.append((byte) '%');
// the X in %XY is the hex value for the high nibble
scratch.append((byte) HEX_DIGITS[(c >> 4) & 0x0F]);
// the Y in %XY is the hex value for the low nibble
scratch.append((byte) HEX_DIGITS[c & 0x0F]);
}
return scratch.bytesRefView();
}
/**
* Determines whether a character is considered unreserved (or safe) according to RFC3986. Alphanumerics along with ".-_~" are safe,
* and therefore not percent-encoded.
*
* @param c A character
* @return Boolean
*/
public static boolean isRfc3986Safe(char c) {
return ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || ('0' <= c && c <= '9') || c == '-' || c == '.' || c == '_' || c == '~';
}
/**
* <p>Computes the size of the input if it were encoded, and tells whether any encoding is needed at all. For example, if the input only
* contained alphanumerics and safe characters, then -1 is returned, to mean that no encoding is needed. If the input additionally
* contained spaces which can be encoded as '+', then the new size after encoding is returned.</p>
*
* <p>Examples</p>
* <ul>
* <li>"abc" -> -1 (no encoding needed)</li>
* <li>"a b" -> 3 if encoding spaces as "+". The positive value indicates encoding is needed.</li>
* <li>"a b" -> 5 if encoding spaces as "%20". The positive value indicates encoding is needed.</li>
* <li>"" -> -1 (no encoding needed)</li>
* </ul>
*
* @param val
* @param plusForSpace Whether spaces are encoded as + or %20.
* @return The new size after encoding, or -1 if no encoding is needed.
*/
private static int computeSizeAfterEncoding(final BytesRef val, final boolean plusForSpace) {
int size = 0;
boolean noEncodingNeeded = true;
int lo = val.offset;
int hi = val.offset + val.length;
for (int i = lo; i < hi; ++i) {
char c = (char) (val.bytes[i] & 0xFF);
if (plusForSpace && c == ' ') {
++size;
noEncodingNeeded = false;
} else if (isRfc3986Safe(c)) {
++size;
} else {
size += 3;
noEncodingNeeded = false;
}
}
if (noEncodingNeeded) {
return -1;
}
return size;
}
}
|
UrlCodecUtils
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/IteratorsBaseTest.java
|
{
"start": 1068,
"end": 1373
}
|
class ____ {
protected static final AssertionInfo INFO = someInfo();
protected Failures failures;
protected Iterators iterators;
@BeforeEach
public void setUp() {
failures = spy(Failures.instance());
iterators = new Iterators();
iterators.failures = failures;
}
}
|
IteratorsBaseTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/identifier/uuid/time/Book.java
|
{
"start": 543,
"end": 1067
}
|
class ____ {
@Id
@GeneratedValue
@UuidGenerator(style = TIME)
private UUID id;
@Basic
private String name;
//end::example-identifiers-generators-uuid-implicit[]
protected Book() {
// for Hibernate use
}
public Book(String name) {
this.name = name;
}
public UUID getId() {
return id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
//tag::example-identifiers-generators-uuid-implicit[]
}
//end::example-identifiers-generators-uuid-implicit[]
|
Book
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/impl/SpringRoutePolicyFactoryTest.java
|
{
"start": 1033,
"end": 1304
}
|
class ____ extends RoutePolicyFactoryTest {
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this, "org/apache/camel/spring/impl/SpringRoutePolicyFactoryTest.xml");
}
}
|
SpringRoutePolicyFactoryTest
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/AbstractTypeResolver.java
|
{
"start": 241,
"end": 2210
}
|
class ____
{
/**
* Try to locate a subtype for given abstract type, to either resolve
* to a concrete type, or at least to a more-specific (and hopefully supported)
* abstract type, one which may have registered deserializers.
* Method is called before trying to locate registered deserializers
* (as well as standard abstract type defaulting that core Jackson does),
* so it is typically implemented to add custom mappings of common abstract
* types (like specify which concrete implementation to use for binding
* {@link java.util.List}s).
*<p>
* Note that this method does not necessarily have to do full resolution
* of bindings; that is, it is legal to return type that could be further
* resolved: caller is expected to keep calling this method on registered
* resolvers, until a concrete type is located.
*
* @param config Configuration in use
* @param type Type to find mapping for
*
* @return Type to map given input type (if mapping found) or {@code null} (if not).
*/
public JavaType findTypeMapping(DeserializationConfig config, JavaType type) {
return null;
}
/**
* Method called to try to resolve an abstract type into
* concrete type (usually for purposes of deserializing),
* when no concrete implementation was found.
* It will be called after checking all other possibilities,
* including defaulting.
*
* @param config Configuration in use
* @param typeDescRef Description of the POJO type to resolve
*
* @return Resolved concrete type (which should retain generic
* type parameters of input type, if any), if resolution succeeds;
* null if resolver does not know how to resolve given type
*/
public JavaType resolveAbstractType(DeserializationConfig config,
BeanDescription.Supplier typeDescRef) {
return null;
}
}
|
AbstractTypeResolver
|
java
|
apache__flink
|
flink-table/flink-table-common/src/test/java/org/apache/flink/table/catalog/ObjectIdentifierTest.java
|
{
"start": 1103,
"end": 1950
}
|
class ____ {
@Test
void testAnonymousIdentifier() {
String objectName = "my_anonymous_table";
ObjectIdentifier objectIdentifier = ObjectIdentifier.ofAnonymous(objectName);
assertThat(objectIdentifier.getCatalogName()).isEqualTo(ObjectIdentifier.UNKNOWN);
assertThat(objectIdentifier.getDatabaseName()).isEqualTo(ObjectIdentifier.UNKNOWN);
assertThat(objectIdentifier.toList()).containsExactly(objectName);
assertThat(objectIdentifier.asSummaryString())
.isEqualTo(objectIdentifier.toString())
.isEqualTo(objectName);
assertThatThrownBy(objectIdentifier::asSerializableString)
.isInstanceOf(TableException.class);
assertThatThrownBy(objectIdentifier::toObjectPath).isInstanceOf(TableException.class);
}
}
|
ObjectIdentifierTest
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/datagram/impl/PacketWriteStreamImpl.java
|
{
"start": 763,
"end": 2111
}
|
class ____ implements WriteStream<Buffer>, Handler<AsyncResult<Void>> {
private DatagramSocketImpl datagramSocket;
private Handler<Throwable> exceptionHandler;
private final int port;
private final String host;
PacketWriteStreamImpl(DatagramSocketImpl datagramSocket, int port, String host) {
this.datagramSocket = datagramSocket;
this.port = port;
this.host = host;
}
@Override
public void handle(AsyncResult<Void> event) {
if (event.failed() && exceptionHandler != null) {
exceptionHandler.handle(event.cause());
}
}
@Override
public PacketWriteStreamImpl exceptionHandler(Handler<Throwable> handler) {
exceptionHandler = handler;
return this;
}
@Override
public Future<Void> write(Buffer data) {
Future<Void> fut = datagramSocket.send(data, port, host);
fut.onComplete(PacketWriteStreamImpl.this);
return fut;
}
@Override
public PacketWriteStreamImpl setWriteQueueMaxSize(int maxSize) {
return this;
}
@Override
public boolean writeQueueFull() {
return false;
}
@Override
public PacketWriteStreamImpl drainHandler(Handler<Void> handler) {
return this;
}
@Override
public Future<Void> end() {
Promise<Void> promise = Promise.promise();
datagramSocket.close(promise);
return promise.future();
}
}
|
PacketWriteStreamImpl
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/catalog/TableChange.java
|
{
"start": 35198,
"end": 35941
}
|
class ____ implements CatalogTableChange, MaterializedTableChange {
static final DropDistribution INSTANCE = new DropDistribution();
@Override
public String toString() {
return "DropDistribution";
}
}
// --------------------------------------------------------------------------------------------
// Property change
// --------------------------------------------------------------------------------------------
/**
* A table change to set the table option.
*
* <p>It is equal to the following statement:
*
* <pre>
* ALTER TABLE <table_name> SET '<key>' = '<value>';
* </pre>
*/
@PublicEvolving
|
DropDistribution
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/fieldlocation/FieldLocation_isTopLevelField_Test.java
|
{
"start": 1078,
"end": 2091
}
|
class ____ {
@ParameterizedTest
@MethodSource
void should_return_false_for_root_location_or_nested_field_location(FieldLocation fieldLocation) {
assertThat(fieldLocation.isTopLevelField()).isFalse();
}
private static Stream<FieldLocation> should_return_false_for_root_location_or_nested_field_location() {
return Stream.of(rootFieldLocation(),
new FieldLocation(list("[0]")),
new FieldLocation(list("[1]")),
new FieldLocation(list("friend", "name")));
}
@ParameterizedTest
@MethodSource
void should_return_true_for_top_level_field(FieldLocation fieldLocation) {
assertThat(fieldLocation.isTopLevelField()).isTrue();
}
private static Stream<FieldLocation> should_return_true_for_top_level_field() {
return Stream.of(new FieldLocation(list("name")),
new FieldLocation(list("[0]", "name")),
new FieldLocation(list("[1]", "name")));
}
}
|
FieldLocation_isTopLevelField_Test
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleScope.java
|
{
"start": 4545,
"end": 5231
}
|
class ____ {
private Map<String, FilterRef> scope = new HashMap<>();
public Builder() {}
public Builder(RuleScope otherScope) {
scope = new HashMap<>(otherScope.scope);
}
public Builder exclude(String field, String filterId) {
scope.put(field, new FilterRef(filterId, FilterRef.FilterType.EXCLUDE));
return this;
}
public Builder include(String field, String filterId) {
scope.put(field, new FilterRef(filterId, FilterRef.FilterType.INCLUDE));
return this;
}
public RuleScope build() {
return new RuleScope(scope);
}
}
}
|
Builder
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/lock/prevention/TestClient.java
|
{
"start": 382,
"end": 501
}
|
interface ____ {
@GET
String blockingCall();
@GET
CompletionStage<String> nonBlockingCall();
}
|
TestClient
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalNumericMetricsAggregation.java
|
{
"start": 1175,
"end": 2949
}
|
class ____ extends InternalNumericMetricsAggregation implements NumericMetricsAggregation.SingleValue {
protected SingleValue(String name, DocValueFormat format, Map<String, Object> metadata) {
super(name, format, metadata);
}
/**
* Read from a stream.
*/
protected SingleValue(StreamInput in) throws IOException {
super(in);
}
/**
* Read from a stream.
*
* @param readFormat whether to read the "format" field
*/
protected SingleValue(StreamInput in, boolean readFormat) throws IOException {
super(in, readFormat);
}
@Override
public String getValueAsString() {
return format.format(value()).toString();
}
@Override
public Object getProperty(List<String> path) {
if (path.isEmpty()) {
return this;
} else if (path.size() == 1 && "value".equals(path.get(0))) {
return value();
} else {
throw new IllegalArgumentException("path not supported for [" + getName() + "]: " + path);
}
}
@Override
public final SortValue sortValue(String key) {
if (key != null && false == key.equals("value")) {
throw new IllegalArgumentException(
"Unknown value key ["
+ key
+ "] for single-value metric aggregation ["
+ getName()
+ "]. Either use [value] as key or drop the key all together"
);
}
return SortValue.from(value());
}
}
public abstract static
|
SingleValue
|
java
|
bumptech__glide
|
samples/giphy/src/main/java/com/bumptech/glide/samples/giphy/Api.java
|
{
"start": 1283,
"end": 3782
}
|
interface ____ {
/**
* Called when a search completes.
*
* @param result The results returned from Giphy's search api.
*/
void onSearchComplete(SearchResult result);
}
static Api get() {
if (api == null) {
synchronized (Api.class) {
if (api == null) {
api = new Api();
}
}
}
return api;
}
private Api() {
HandlerThread bgThread = new HandlerThread("api_thread");
bgThread.start();
bgHandler = new Handler(bgThread.getLooper());
mainHandler = new Handler(Looper.getMainLooper());
// Do nothing.
}
void addMonitor(Monitor monitor) {
monitors.add(monitor);
}
void removeMonitor(Monitor monitor) {
monitors.remove(monitor);
}
void getTrending() {
String trendingUrl = getTrendingUrl();
query(trendingUrl);
}
private void query(final String apiUrl) {
bgHandler.post(
new Runnable() {
@Override
public void run() {
URL url;
try {
url = new URL(apiUrl);
} catch (MalformedURLException e) {
throw new RuntimeException(e);
}
HttpURLConnection urlConnection = null;
InputStream is = null;
SearchResult result = new SearchResult();
try {
urlConnection = (HttpURLConnection) url.openConnection();
is = urlConnection.getInputStream();
InputStreamReader reader = new InputStreamReader(is);
result = new Gson().fromJson(reader, SearchResult.class);
} catch (IOException e) {
e.printStackTrace();
} finally {
if (is != null) {
try {
is.close();
} catch (IOException e) {
// Do nothing.
}
}
if (urlConnection != null) {
urlConnection.disconnect();
}
}
final SearchResult finalResult = result;
mainHandler.post(
new Runnable() {
@Override
public void run() {
for (Monitor monitor : monitors) {
monitor.onSearchComplete(finalResult);
}
}
});
}
});
}
/** A POJO mirroring the top level result JSON object returned from Giphy's api. */
public static final
|
Monitor
|
java
|
google__auto
|
value/src/it/functional/src/test/java/com/google/auto/value/AutoBuilderKotlinTest.java
|
{
"start": 8989,
"end": 11378
}
|
interface ____ {
static KotlinDataSomeDefaultsBigBuilder builder() {
return new AutoBuilder_AutoBuilderKotlinTest_KotlinDataSomeDefaultsBigBuilder();
}
KotlinDataSomeDefaultsBigBuilder requiredInt(int x);
KotlinDataSomeDefaultsBigBuilder requiredString(String x);
KotlinDataSomeDefaultsBigBuilder a1(int x);
KotlinDataSomeDefaultsBigBuilder a2(int x);
KotlinDataSomeDefaultsBigBuilder a3(int x);
KotlinDataSomeDefaultsBigBuilder a4(int x);
KotlinDataSomeDefaultsBigBuilder a5(int x);
KotlinDataSomeDefaultsBigBuilder a6(int x);
KotlinDataSomeDefaultsBigBuilder a7(int x);
KotlinDataSomeDefaultsBigBuilder a8(int x);
KotlinDataSomeDefaultsBigBuilder a9(int x);
KotlinDataSomeDefaultsBigBuilder a10(int x);
KotlinDataSomeDefaultsBigBuilder a11(int x);
KotlinDataSomeDefaultsBigBuilder a12(int x);
KotlinDataSomeDefaultsBigBuilder a13(int x);
KotlinDataSomeDefaultsBigBuilder a14(int x);
KotlinDataSomeDefaultsBigBuilder a15(int x);
KotlinDataSomeDefaultsBigBuilder a16(int x);
KotlinDataSomeDefaultsBigBuilder a17(int x);
KotlinDataSomeDefaultsBigBuilder a18(int x);
KotlinDataSomeDefaultsBigBuilder a19(int x);
KotlinDataSomeDefaultsBigBuilder a20(int x);
KotlinDataSomeDefaultsBigBuilder a21(int x);
KotlinDataSomeDefaultsBigBuilder a22(int x);
KotlinDataSomeDefaultsBigBuilder a23(int x);
KotlinDataSomeDefaultsBigBuilder a24(int x);
KotlinDataSomeDefaultsBigBuilder a25(int x);
KotlinDataSomeDefaultsBigBuilder a26(int x);
KotlinDataSomeDefaultsBigBuilder a27(int x);
KotlinDataSomeDefaultsBigBuilder a28(int x);
KotlinDataSomeDefaultsBigBuilder a29(int x);
KotlinDataSomeDefaultsBigBuilder a30(int x);
KotlinDataSomeDefaultsBigBuilder a31(int x);
KotlinDataSomeDefaultsBig build();
}
@Test
public void kotlinSomeDefaultsBig() {
KotlinDataSomeDefaultsBig allDefaulted =
KotlinDataSomeDefaultsBigBuilder.builder().requiredInt(23).requiredString("skidoo").build();
assertThat(allDefaulted.getRequiredInt()).isEqualTo(23);
assertThat(allDefaulted.getRequiredString()).isEqualTo("skidoo");
assertThat(allDefaulted.getA1()).isEqualTo(1);
assertThat(allDefaulted.getA31()).isEqualTo(31);
}
@AutoBuilder(ofClass = KotlinDataWithList.class)
|
KotlinDataSomeDefaultsBigBuilder
|
java
|
elastic__elasticsearch
|
libs/x-content/src/main/java/org/elasticsearch/xcontent/cbor/CborXContent.java
|
{
"start": 736,
"end": 1101
}
|
class ____ {
private static final XContentProvider.FormatProvider provider = XContentProvider.provider().getCborXContent();
private CborXContent() {}
public static XContentBuilder contentBuilder() throws IOException {
return provider.getContentBuilder();
}
public static final XContent cborXContent = provider.XContent();
}
|
CborXContent
|
java
|
netty__netty
|
buffer/src/test/java/io/netty/buffer/BigEndianUnsafeDirectByteBufTest.java
|
{
"start": 803,
"end": 1256
}
|
class ____ extends BigEndianDirectByteBufTest {
@BeforeEach
@Override
public void init() {
Assumptions.assumeTrue(PlatformDependent.hasUnsafe(), "sun.misc.Unsafe not found, skip tests");
super.init();
}
@Override
protected ByteBuf newBuffer(int length, int maxCapacity) {
return new UnpooledUnsafeDirectByteBuf(UnpooledByteBufAllocator.DEFAULT, length, maxCapacity);
}
}
|
BigEndianUnsafeDirectByteBufTest
|
java
|
quarkusio__quarkus
|
independent-projects/tools/codestarts/src/main/java/io/quarkus/devtools/codestarts/core/strategy/SmartConfigMergeCodestartFileStrategyHandler.java
|
{
"start": 726,
"end": 4698
}
|
class ____ implements CodestartFileStrategyHandler {
private static final ObjectMapper YAML_MAPPER = new ObjectMapper(
new YAMLFactory().configure(YAMLGenerator.Feature.WRITE_DOC_START_MARKER, false));
private static final String APP_CONFIG = "app-config";
@Override
public String name() {
return "smart-config-merge";
}
@Override
public void process(Path targetDirectory, String relativePath, List<TargetFile> codestartFiles,
Map<String, Object> data)
throws IOException {
checkNotEmptyCodestartFiles(codestartFiles);
final String configType = getConfigType(data);
final Map<String, Object> config = initConfigMap(data);
for (TargetFile codestartFile : codestartFiles) {
final String content = codestartFile.getContent();
if (!content.trim().isEmpty()) {
final Map<String, Object> o = YAML_MAPPER.readerFor(Map.class).readValue(content);
config.putAll(NestedMaps.deepMerge(config, o));
}
}
final Path targetPath = targetDirectory.resolve(relativePath);
createDirectories(targetPath);
if (Objects.equals(configType, "config-properties")) {
writePropertiesConfig(targetPath, config);
return;
}
if (Objects.equals(configType, "config-yaml")) {
writeYamlConfig(targetPath, config);
return;
}
throw new CodestartException("Unsupported config type: " + configType);
}
private void writeYamlConfig(Path targetPath, Map<String, Object> config) throws IOException {
checkTargetDoesNotExist(targetPath);
YAML_MAPPER.writerFor(Map.class).writeValue(targetPath.toFile(), config);
}
private void writePropertiesConfig(Path targetPath, Map<String, Object> config) throws IOException {
final StringBuilder builder = new StringBuilder();
// Enforce properties are in consistent order.
final TreeMap<String, String> flat = new TreeMap<>();
flatten("", flat, config);
for (Map.Entry<String, String> entry : flat.entrySet()) {
final String key = entry.getKey().replaceAll("\\.~$", "");
builder.append(key).append("=").append(entry.getValue()).append("\n");
}
final Path propertiesTargetPath = targetPath.getParent()
.resolve(targetPath.getFileName().toString().replace(".yml", ".properties"));
checkTargetDoesNotExist(propertiesTargetPath);
writeFile(propertiesTargetPath, builder.toString());
}
@SuppressWarnings({ "rawtypes", "unchecked" })
static void flatten(String prefix, Map<String, String> target, Map<String, ?> map) {
for (Map.Entry entry : map.entrySet()) {
if (entry.getValue() instanceof Map) {
flatten(prefix + quote(entry.getKey().toString()) + ".", target, (Map) entry.getValue());
} else {
// TODO: handle different types of values
target.put(prefix + quote(entry.getKey().toString()), entry.getValue().toString());
}
}
}
private static String quote(String key) {
if (!key.contains(".")) {
return key;
}
return "\"" + key.replaceAll("\"", "\\\"") + "\"";
}
private static String getConfigType(Map<String, Object> data) {
final Optional<String> config = CodestartData.getInputCodestartForType(data, CodestartType.CONFIG);
return config.orElseThrow(() -> new CodestartException("Config type is required"));
}
@SuppressWarnings("unchecked")
private Map<String, Object> initConfigMap(final Map<String, Object> data) {
if (data.get(APP_CONFIG) instanceof Map) {
return NestedMaps.unflatten((Map<String, Object>) data.get(APP_CONFIG));
}
return new HashMap<>();
}
}
|
SmartConfigMergeCodestartFileStrategyHandler
|
java
|
quarkusio__quarkus
|
extensions/devui/deployment-spi/src/main/java/io/quarkus/devui/spi/page/Page.java
|
{
"start": 495,
"end": 9812
}
|
class ____ {
private final String icon; // Any font awesome icon
private final String color; // The color of the link and icon
private final String tooltip; // Add a tooltip to the link
private final String title; // This is the display name and link title for the page
private final String staticLabel; // This is optional extra info that might be displayed next to the link
private final String dynamicLabel; // This is optional extra info that might be displayed next to the link. This will override above static label. This expects a jsonRPC method name
private final String streamingLabel; // This is optional extra info that might be displayed next to the link. This will override above dynamic label. This expects a jsonRPC Multi method name
private final String streamingLabelParams; // This is optional parameters (comma separated) that will be fetched from local storage and sent along with the streaming label jsonrpc request
private final String componentName; // This is name of the component
private final String componentLink; // This is a link to the component, excluding namespace
private final Map<String, String> metadata; // Key value Metadata
private final boolean embed; // if the component is embedded in the page. true in all cases except maybe external pages
private final boolean includeInMenu; // if this link should be added to the submenu. true in all cases except maybe external pages
private final boolean internalComponent; // True if this component is provided by dev-ui (usually provided by the extension)
private String namespace = null; // The namespace can be the extension path or, if internal, qwc
private String namespaceLabel = null; // When more than one page belongs to the same namespace, we use the namespace as a title sometimes
private String extensionId = null; // If this originates from an extension, then id. For internal this will be null;
private String menuActionComponent = null; // Internal pages can set this
protected Page(String icon,
String color,
String tooltip,
String title,
String staticLabel,
String dynamicLabel,
String streamingLabel,
String[] streamingLabelParams,
String componentName,
String componentLink,
Map<String, String> metadata,
boolean embed,
boolean includeInMenu,
boolean internalComponent,
String namespace,
String namespaceLabel,
String extensionId) {
if (icon != null) {
this.icon = icon;
} else {
this.icon = "font-awesome-solid:arrow-right"; // default
}
if (color != null) {
this.color = color;
} else {
this.color = "var(--lumo-contrast-80pct)"; // default
}
this.tooltip = tooltip;
this.title = title;
this.staticLabel = staticLabel;
this.dynamicLabel = dynamicLabel;
this.streamingLabel = streamingLabel;
if (streamingLabelParams != null && streamingLabelParams.length > 0) {
this.streamingLabelParams = String.join(",", streamingLabelParams);
} else {
this.streamingLabelParams = null;
}
this.componentName = componentName;
this.componentLink = componentLink;
this.metadata = metadata;
this.embed = embed;
this.includeInMenu = includeInMenu;
this.internalComponent = internalComponent;
this.namespace = namespace;
this.namespaceLabel = namespaceLabel;
this.extensionId = extensionId;
}
public String getId() {
String id = this.title.toLowerCase().replaceAll(SPACE, DASH);
try {
id = URLEncoder.encode(id, StandardCharsets.UTF_8.toString());
} catch (UnsupportedEncodingException ex) {
throw new RuntimeException(ex);
}
if (!this.isInternal() && this.namespace != null) {
// This is extension pages in Dev UI
id = this.namespace.toLowerCase() + SLASH + id;
} else if (this.isInternal() && this.namespace != null) {
// This is internal pages in Dev UI
String d = "devui-" + id;
if (d.equals(this.namespace)) {
return id;
} else {
int i = this.namespace.indexOf(DASH) + 1;
String stripDevui = this.namespace.substring(i);
return stripDevui + DASH + id;
}
}
return id;
}
public String getComponentRef() {
if (internalComponent) {
return DOT + SLASH + DOT + DOT + SLASH + "qwc" + SLASH + this.componentLink;
} else if (this.namespace != null) {
return DOT + SLASH + DOT + DOT + SLASH + this.namespace + SLASH + this.componentLink;
}
// TODO: Create a not found component to display here ?
throw new RuntimeException("Could not find component reference");
}
public String getNamespace() {
return this.namespace;
}
public String getNamespaceLabel() {
return this.namespaceLabel;
}
public String getIcon() {
return icon;
}
public String getColor() {
return color;
}
public String getTooltip() {
return tooltip;
}
public boolean isAssistantPage() {
return this.metadata != null && this.metadata.containsKey("isAssistantPage")
&& this.metadata.get("isAssistantPage").equalsIgnoreCase("true");
}
public String getTitle() {
return title;
}
public String getStaticLabel() {
return staticLabel;
}
public String getDynamicLabel() {
return dynamicLabel;
}
public String getStreamingLabel() {
return streamingLabel;
}
public String getStreamingLabelParams() {
return streamingLabelParams;
}
public String getComponentName() {
return componentName;
}
public String getComponentLink() {
return componentLink;
}
public boolean isEmbed() {
return embed;
}
public boolean isIncludeInMenu() {
return includeInMenu;
}
public boolean isInternal() {
return this.internalComponent && this.extensionId == null;
}
public String getExtensionId() {
return extensionId;
}
public Map<String, String> getMetadata() {
return metadata;
}
public void setMenuActionComponent(String menuActionComponent) {
this.menuActionComponent = menuActionComponent;
}
public String getMenuActionComponent() {
return this.menuActionComponent;
}
@Override
public String toString() {
return "Page {\n\tid=" + getId()
+ ", \n\ticon=" + icon
+ ", \n\tcolor=" + color
+ ", \n\ttooltip=" + tooltip
+ ", \n\ttitle=" + title
+ ", \n\tstaticLabel=" + staticLabel
+ ", \n\tdynamicLabel=" + dynamicLabel
+ ", \n\tstreamingLabel=" + streamingLabel
+ ", \n\tstreamingLabelParams=" + streamingLabelParams
+ ", \n\tnamespace=" + namespace
+ ", \n\tnamespaceLabel=" + namespaceLabel
+ ", \n\tcomponentName=" + componentName
+ ", \n\tcomponentLink=" + componentLink
+ ", \n\tembed=" + embed
+ ", \n\tincludeInMenu=" + includeInMenu + "\n}";
}
/**
* Here you provide the Web Component that should be rendered. You have full control over the page.
* You can use build time data if you made it available
*/
public static WebComponentPageBuilder webComponentPageBuilder() {
return new WebComponentPageBuilder();
}
/**
* Here you provide the Web Component that should be rendered. You have full control over the page.
* You can use build time data if you made it available
*/
public static WebComponentPageBuilder assistantPageBuilder() {
return new AssistantPageBuilder();
}
/**
* Here you provide a url to an external resource. When code/markup, if can be displayed in a code view, when HTML it can
* render the HTML
*/
public static ExternalPageBuilder externalPageBuilder(String name) {
return new ExternalPageBuilder(name);
}
/**
* Here you provide the data that should be rendered in raw json format
*/
public static RawDataPageBuilder rawDataPageBuilder(String name) {
return new RawDataPageBuilder(name);
}
/**
* Here you can render the data with a qute template
*/
public static QuteDataPageBuilder quteDataPageBuilder(String name) {
return new QuteDataPageBuilder(name);
}
/**
* Here you provide the data that should be rendered in a table
*/
public static TableDataPageBuilder tableDataPageBuilder(String name) {
return new TableDataPageBuilder(name);
}
private static final String SPACE = " ";
private static final String DASH = "-";
private static final String SLASH = "/";
private static final String DOT = ".";
}
|
Page
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/TestExternal4.java
|
{
"start": 1533,
"end": 2065
}
|
class ____ extends ClassLoader {
public ExtClassLoader() throws IOException{
super(Thread.currentThread().getContextClassLoader());
{
byte[] bytes;
InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream("external/VO2.clazz");
bytes = IOUtils.toByteArray(is);
is.close();
super.defineClass("external.VO2", bytes, 0, bytes.length);
}
}
}
}
|
ExtClassLoader
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-datajoin/src/main/java/org/apache/hadoop/contrib/utils/join/TaggedMapOutput.java
|
{
"start": 1446,
"end": 1835
}
|
class ____ implements Writable {
protected Text tag;
public TaggedMapOutput() {
this.tag = new Text("");
}
public Text getTag() {
return tag;
}
public void setTag(Text tag) {
this.tag = tag;
}
public abstract Writable getData();
public TaggedMapOutput clone(JobConf job) {
return (TaggedMapOutput) WritableUtils.clone(this, job);
}
}
|
TaggedMapOutput
|
java
|
quarkusio__quarkus
|
extensions/redis-cache/runtime/src/main/java/io/quarkus/cache/redis/runtime/RedisCacheImpl.java
|
{
"start": 21739,
"end": 22007
}
|
class ____<V> implements Supplier<V> {
private final V cached;
public StaticSupplier(V cached) {
this.cached = cached;
}
@Override
public V get() {
return cached;
}
}
private
|
StaticSupplier
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/ProtoShim.java
|
{
"start": 650,
"end": 1759
}
|
class ____ {
private ProtoShim() {}
//
// Core classes
//
static org.elasticsearch.core.TimeValue fromProto(TimeValue fromProto) {
if (fromProto == null) {
return null;
}
return new org.elasticsearch.core.TimeValue(fromProto.duration(), fromProto.timeUnit());
}
static TimeValue toProto(org.elasticsearch.core.TimeValue toProto) {
if (toProto == null) {
return null;
}
return new TimeValue(toProto.duration(), toProto.timeUnit());
}
//
// XContent classes
//
static org.elasticsearch.xcontent.XContentLocation fromProto(ContentLocation fromProto) {
if (fromProto == null) {
return null;
}
return new org.elasticsearch.xcontent.XContentLocation(fromProto.lineNumber, fromProto.columnNumber);
}
static ContentLocation toProto(org.elasticsearch.xcontent.XContentLocation toProto) {
if (toProto == null) {
return null;
}
return new ContentLocation(toProto.lineNumber(), toProto.columnNumber());
}
}
|
ProtoShim
|
java
|
redisson__redisson
|
redisson-spring-data/redisson-spring-data-24/src/main/java/org/redisson/spring/data/connection/RedissonReactiveZSetCommands.java
|
{
"start": 2532,
"end": 27588
}
|
class ____ extends RedissonBaseReactive implements ReactiveZSetCommands {
RedissonReactiveZSetCommands(CommandReactiveExecutor executorService) {
super(executorService);
}
private static final RedisCommand<Double> ZADD_FLOAT = new RedisCommand<>("ZADD", new DoubleNullSafeReplayConvertor());
@Override
public Flux<NumericResponse<ZAddCommand, Number>> zAdd(Publisher<ZAddCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notEmpty(command.getTuples(), "Tuples must not be empty or null!");
byte[] keyBuf = toByteArray(command.getKey());
List<Object> params = new ArrayList<Object>(command.getTuples().size()*2+1);
params.add(keyBuf);
if (command.isIncr() || command.isUpsert() || command.isReturnTotalChanged()) {
if (command.isUpsert()) {
params.add("NX");
} else {
params.add("XX");
}
if (command.isReturnTotalChanged()) {
params.add("CH");
}
if (command.isIncr()) {
params.add("INCR");
}
}
for (Tuple entry : command.getTuples()) {
params.add(BigDecimal.valueOf(entry.getScore()).toPlainString());
params.add(entry.getValue());
}
Mono<Number> m;
if (command.isIncr()) {
m = write(keyBuf, DoubleCodec.INSTANCE, ZADD_FLOAT, params.toArray());
} else {
m = write(keyBuf, StringCodec.INSTANCE, RedisCommands.ZADD, params.toArray());
}
return m.map(v -> new NumericResponse<>(command, v));
});
}
@Override
public Flux<NumericResponse<ZRemCommand, Long>> zRem(Publisher<ZRemCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getValues(), "Values must not be null!");
List<Object> args = new ArrayList<Object>(command.getValues().size() + 1);
args.add(toByteArray(command.getKey()));
args.addAll(command.getValues().stream().map(v -> toByteArray(v)).collect(Collectors.toList()));
Mono<Long> m = write((byte[])args.get(0), StringCodec.INSTANCE, RedisCommands.ZREM_LONG, args.toArray());
return m.map(v -> new NumericResponse<>(command, v));
});
}
@Override
public Flux<NumericResponse<ZIncrByCommand, Double>> zIncrBy(Publisher<ZIncrByCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getValue(), "Member must not be null!");
Assert.notNull(command.getIncrement(), "Increment value must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
byte[] valueBuf = toByteArray(command.getValue());
Mono<Double> m = write(keyBuf, DoubleCodec.INSTANCE, RedisCommands.ZINCRBY, keyBuf, new BigDecimal(command.getIncrement().doubleValue()).toPlainString(), valueBuf);
return m.map(v -> new NumericResponse<>(command, v));
});
}
@Override
public Flux<NumericResponse<ZRankCommand, Long>> zRank(Publisher<ZRankCommand> commands) {
    // Resolves a member's rank; DESC direction maps to ZREVRANK, otherwise ZRANK is used.
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getValue(), "Member must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        byte[] valueBuf = toByteArray(command.getValue());
        RedisCommand<Long> cmd = command.getDirection() == Direction.DESC
                ? RedisCommands.ZREVRANK
                : RedisCommands.ZRANK;
        return read(keyBuf, DoubleCodec.INSTANCE, cmd, keyBuf, valueBuf)
                .map(rank -> new NumericResponse<>(command, rank));
    });
}
// Decoders for index-based range queries; the *_V2 variants parse the RESP3 reply layout.
private static final RedisCommand<Set<Tuple>> ZRANGE_ENTRY = new RedisCommand<>("ZRANGE", new ScoredSortedSetReplayDecoder());
private static final RedisCommand<Set<Tuple>> ZRANGE_ENTRY_V2 = new RedisCommand<Set<Tuple>>("ZRANGE",
        new ListMultiDecoder2(new ObjectSetReplayDecoder(), new ScoredSortedSetReplayDecoderV2()));
private static final RedisCommand<Set<Object>> ZRANGE = new RedisCommand<>("ZRANGE", new ObjectSetReplayDecoder<Object>());
private static final RedisCommand<Set<Tuple>> ZREVRANGE_ENTRY = new RedisCommand<>("ZREVRANGE", new ScoredSortedSetReplayDecoder());
// Was a raw 'new RedisCommand(...)' — typed explicitly to match ZRANGE_ENTRY_V2 and
// avoid an unchecked assignment to RedisCommand<Set<Tuple>>.
private static final RedisCommand<Set<Tuple>> ZREVRANGE_ENTRY_V2 = new RedisCommand<Set<Tuple>>("ZREVRANGE",
        new ListMultiDecoder2(new ObjectSetReplayDecoder(), new ScoredSortedSetReplayDecoderV2()));
private static final RedisCommand<Set<Object>> ZREVRANGE = new RedisCommand<>("ZREVRANGE", new ObjectSetReplayDecoder<Object>());
@Override
public Flux<CommandResponse<ZRangeCommand, Flux<Tuple>>> zRange(Publisher<ZRangeCommand> commands) {
    // Executes ZRANGE (ASC) or ZREVRANGE (DESC) by index, optionally with scores.
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getRange(), "Range must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        long start = command.getRange().getLowerBound().getValue().orElse(0L);
        // -1 is Redis' index of the last element; the previous Optional.get() threw
        // NoSuchElementException whenever the upper bound was unbounded (Range.unbounded()).
        long end = command.getRange().getUpperBound().getValue().orElse(-1L);
        Flux<Tuple> flux;
        if (command.getDirection() == Direction.ASC) {
            if (command.isWithScores()) {
                // RESP3 replies use a different layout and need the V2 decoder.
                RedisCommand<Set<Tuple>> cmd = ZRANGE_ENTRY;
                if (executorService.getServiceManager().isResp3()) {
                    cmd = ZRANGE_ENTRY_V2;
                }
                Mono<Set<Tuple>> m = read(keyBuf, ByteArrayCodec.INSTANCE, cmd,
                        keyBuf, start, end, "WITHSCORES");
                flux = m.flatMapMany(e -> Flux.fromIterable(e));
            } else {
                // Without WITHSCORES each member is emitted with a NaN placeholder score.
                Mono<Set<byte[]>> m = read(keyBuf, ByteArrayCodec.INSTANCE, ZRANGE, keyBuf, start, end);
                flux = m.flatMapMany(e -> Flux.fromIterable(e).map(b -> new DefaultTuple(b, Double.NaN)));
            }
        } else {
            if (command.isWithScores()) {
                RedisCommand<Set<Tuple>> cmd = ZREVRANGE_ENTRY;
                if (executorService.getServiceManager().isResp3()) {
                    cmd = ZREVRANGE_ENTRY_V2;
                }
                Mono<Set<Tuple>> m = read(keyBuf, ByteArrayCodec.INSTANCE, cmd,
                        keyBuf, start, end, "WITHSCORES");
                flux = m.flatMapMany(e -> Flux.fromIterable(e));
            } else {
                Mono<Set<byte[]>> m = read(keyBuf, ByteArrayCodec.INSTANCE, ZREVRANGE, keyBuf, start, end);
                flux = m.flatMapMany(e -> Flux.fromIterable(e).map(b -> new DefaultTuple(b, Double.NaN)));
            }
        }
        return Mono.just(new CommandResponse<>(command, flux));
    });
}
// Decoders for score-based range queries; the *_V2 variants parse the RESP3 reply layout.
private static final RedisCommand<Set<Tuple>> ZRANGEBYSCORE = new RedisCommand<Set<Tuple>>("ZRANGEBYSCORE", new ScoredSortedSetReplayDecoder());
private static final RedisCommand<Set<Tuple>> ZRANGEBYSCORE_V2 = new RedisCommand<Set<Tuple>>("ZRANGEBYSCORE",
        new ListMultiDecoder2(new ObjectSetReplayDecoder(), new ScoredSortedSetReplayDecoderV2()));
private static final RedisCommand<Set<Tuple>> ZREVRANGEBYSCORE = new RedisCommand<Set<Tuple>>("ZREVRANGEBYSCORE", new ScoredSortedSetReplayDecoder());
private static final RedisCommand<Set<Tuple>> ZREVRANGEBYSCORE_V2 = new RedisCommand<Set<Tuple>>("ZREVRANGEBYSCORE",
        new ListMultiDecoder2(new ObjectSetReplayDecoder(), new ScoredSortedSetReplayDecoderV2()));
@Override
public Flux<CommandResponse<ZRangeByScoreCommand, Flux<Tuple>>> zRangeByScore(
        Publisher<ZRangeByScoreCommand> commands) {
    // Executes ZRANGEBYSCORE (ASC) or ZREVRANGEBYSCORE (DESC) with optional WITHSCORES/LIMIT.
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getRange(), "Range must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        String start = toLowerBound(command.getRange());
        String end = toUpperBound(command.getRange());
        List<Object> args = new ArrayList<Object>();
        args.add(keyBuf);
        // ZRANGEBYSCORE expects "min max" while ZREVRANGEBYSCORE expects "max min",
        // hence the direction-dependent argument order below.
        if (command.getDirection() == Direction.ASC) {
            args.add(start);
        } else {
            args.add(end);
        }
        if (command.getDirection() == Direction.ASC) {
            args.add(end);
        } else {
            args.add(start);
        }
        if (command.isWithScores()) {
            args.add("WITHSCORES");
        }
        if (command.getLimit().isPresent() && !command.getLimit().get().isUnlimited()) {
            args.add("LIMIT");
            args.add(command.getLimit().get().getOffset());
            args.add(command.getLimit().get().getCount());
        }
        Flux<Tuple> flux;
        if (command.getDirection() == Direction.ASC) {
            if (command.isWithScores()) {
                // RESP3 replies need the V2 decoder.
                RedisCommand<Set<Tuple>> cmd = ZRANGEBYSCORE;
                if (executorService.getServiceManager().isResp3()) {
                    cmd = ZRANGEBYSCORE_V2;
                }
                Mono<Set<Tuple>> m = read(keyBuf, ByteArrayCodec.INSTANCE, cmd, args.toArray());
                flux = m.flatMapMany(e -> Flux.fromIterable(e));
            } else {
                // Without WITHSCORES each member is emitted with a NaN placeholder score.
                Mono<Set<byte[]>> m = read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.ZRANGEBYSCORE, args.toArray());
                flux = m.flatMapMany(e -> Flux.fromIterable(e).map(b -> new DefaultTuple(b, Double.NaN)));
            }
        } else {
            if (command.isWithScores()) {
                RedisCommand<Set<Tuple>> cmd = ZREVRANGEBYSCORE;
                if (executorService.getServiceManager().isResp3()) {
                    cmd = ZREVRANGEBYSCORE_V2;
                }
                Mono<Set<Tuple>> m = read(keyBuf, ByteArrayCodec.INSTANCE, cmd, args.toArray());
                flux = m.flatMapMany(e -> Flux.fromIterable(e));
            } else {
                Mono<Set<byte[]>> m = read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.ZREVRANGEBYSCORE, args.toArray());
                flux = m.flatMapMany(e -> Flux.fromIterable(e).map(b -> new DefaultTuple(b, Double.NaN)));
            }
        }
        return Mono.just(new CommandResponse<>(command, flux));
    });
}
private static final RedisCommand<ListScanResult<Tuple>> ZSCAN = new RedisCommand<>("ZSCAN", new ListMultiDecoder2(new ScoredSortedSetScanDecoder<Object>(), new ScoredSortedSetScanReplayDecoder()));

@Override
public Flux<CommandResponse<KeyCommand, Flux<Tuple>>> zScan(Publisher<KeyScanCommand> commands) {
    // Cursor-based iteration over a sorted set (ZSCAN), surfaced as a Flux of tuples.
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getOptions(), "ScanOptions must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        Flux<Tuple> flux = Flux.create(new SetReactiveIterator<Tuple>() {
            @Override
            protected RFuture<ScanResult<Object>> scanIterator(RedisClient client, String nextIterPos) {
                // MATCH is only sent when a pattern was supplied; COUNT falls back to 10,
                // the same default Redis itself applies.
                if (command.getOptions().getPattern() == null) {
                    return executorService.readAsync(client, keyBuf, ByteArrayCodec.INSTANCE, ZSCAN,
                            keyBuf, nextIterPos, "COUNT", Optional.ofNullable(command.getOptions().getCount()).orElse(10L));
                }
                return executorService.readAsync(client, keyBuf, ByteArrayCodec.INSTANCE, ZSCAN,
                        keyBuf, nextIterPos, "MATCH", command.getOptions().getPattern(),
                        "COUNT", Optional.ofNullable(command.getOptions().getCount()).orElse(10L));
            }
        });
        return Mono.just(new CommandResponse<>(command, flux));
    });
}
private static final RedisStrictCommand<Long> ZCOUNT = new RedisStrictCommand<Long>("ZCOUNT");
String toLowerBound(Range range) {
    // Renders the range's lower bound in Redis score syntax:
    // exclusive bounds get a "(" prefix, an absent/empty bound becomes "-inf".
    String prefix = range.getLowerBound().isInclusive() ? "" : "(";
    Optional<?> value = range.getLowerBound().getValue();
    if (!value.isPresent() || value.get().toString().isEmpty()) {
        return prefix + "-inf";
    }
    return prefix + value.get();
}
String toUpperBound(Range range) {
    // Renders the range's upper bound in Redis score syntax:
    // exclusive bounds get a "(" prefix, an absent/empty bound becomes "+inf".
    String prefix = range.getUpperBound().isInclusive() ? "" : "(";
    Optional<?> value = range.getUpperBound().getValue();
    if (!value.isPresent() || value.get().toString().isEmpty()) {
        return prefix + "+inf";
    }
    return prefix + value.get();
}
String toLexLowerBound(Range range, Object defaultValue) {
    // Renders the lower bound in Redis lex syntax: "[" = inclusive, "(" = exclusive.
    // The special infinite bounds "-" and "+" must be sent bare: the previous code
    // prefixed the default too, producing "[-"/"(-", which Redis interprets as a bound
    // at the literal member "-" rather than as negative infinity.
    if (!range.getLowerBound().getValue().isPresent() || range.getLowerBound().getValue().get().toString().isEmpty()) {
        return String.valueOf(defaultValue);
    }
    StringBuilder s = new StringBuilder();
    if (range.getLowerBound().isInclusive()) {
        s.append("[");
    } else {
        s.append("(");
    }
    s.append(range.getLowerBound().getValue().get());
    return s.toString();
}
String toLexUpperBound(Range range, Object defaultValue) {
    // Renders the upper bound in Redis lex syntax: "[" = inclusive, "(" = exclusive.
    // The special infinite bounds "-" and "+" must be sent bare: the previous code
    // prefixed the default too, producing "[+"/"(+", which Redis interprets as a bound
    // at the literal member "+" rather than as positive infinity.
    if (!range.getUpperBound().getValue().isPresent() || range.getUpperBound().getValue().get().toString().isEmpty()) {
        return String.valueOf(defaultValue);
    }
    StringBuilder s = new StringBuilder();
    if (range.getUpperBound().isInclusive()) {
        s.append("[");
    } else {
        s.append("(");
    }
    s.append(range.getUpperBound().getValue().get());
    return s.toString();
}
@Override
public Flux<NumericResponse<ZCountCommand, Long>> zCount(Publisher<ZCountCommand> commands) {
    // Counts members whose scores fall within the given range (ZCOUNT).
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getRange(), "Range must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        String min = toLowerBound(command.getRange());
        String max = toUpperBound(command.getRange());
        return read(keyBuf, StringCodec.INSTANCE, ZCOUNT, keyBuf, min, max)
                .map(count -> new NumericResponse<>(command, count));
    });
}
@Override
public Flux<NumericResponse<KeyCommand, Long>> zCard(Publisher<KeyCommand> commands) {
    // Emits the cardinality of the sorted set (ZCARD).
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        return read(keyBuf, StringCodec.INSTANCE, RedisCommands.ZCARD, keyBuf)
                .map(size -> new NumericResponse<>(command, size));
    });
}
@Override
public Flux<NumericResponse<ZScoreCommand, Double>> zScore(Publisher<ZScoreCommand> commands) {
    // Fetches the score of a single member (ZSCORE).
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getValue(), "Value must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        byte[] valueBuf = toByteArray(command.getValue());
        return read(keyBuf, StringCodec.INSTANCE, RedisCommands.ZSCORE, keyBuf, valueBuf)
                .map(score -> new NumericResponse<>(command, score));
    });
}
private static final RedisStrictCommand<Long> ZREMRANGEBYRANK = new RedisStrictCommand<Long>("ZREMRANGEBYRANK");

@Override
public Flux<NumericResponse<ZRemRangeByRankCommand, Long>> zRemRangeByRank(
        Publisher<ZRemRangeByRankCommand> commands) {
    // Removes members by index range (ZREMRANGEBYRANK), emitting the number removed.
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getRange(), "Range must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        // -1 addresses the last element; the previous Optional.get() threw
        // NoSuchElementException for ranges with an unbounded upper bound.
        Mono<Long> m = write(keyBuf, StringCodec.INSTANCE, ZREMRANGEBYRANK,
                keyBuf, command.getRange().getLowerBound().getValue().orElse(0L),
                command.getRange().getUpperBound().getValue().orElse(-1L));
        return m.map(v -> new NumericResponse<>(command, v));
    });
}
private static final RedisStrictCommand<Long> ZREMRANGEBYSCORE = new RedisStrictCommand<Long>("ZREMRANGEBYSCORE");

@Override
public Flux<NumericResponse<ZRemRangeByScoreCommand, Long>> zRemRangeByScore(
        Publisher<ZRemRangeByScoreCommand> commands) {
    // Removes members whose scores fall inside the given range (ZREMRANGEBYSCORE).
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getRange(), "Range must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        String min = toLowerBound(command.getRange());
        String max = toUpperBound(command.getRange());
        return write(keyBuf, StringCodec.INSTANCE, ZREMRANGEBYSCORE, keyBuf, min, max)
                .map(removed -> new NumericResponse<>(command, removed));
    });
}
private static final RedisStrictCommand<Long> ZUNIONSTORE = new RedisStrictCommand<Long>("ZUNIONSTORE");

@Override
public Flux<NumericResponse<ZUnionStoreCommand, Long>> zUnionStore(Publisher<ZUnionStoreCommand> commands) {
    // ZUNIONSTORE destKey numKeys key [key ...] [WEIGHTS w ...] [AGGREGATE SUM|MIN|MAX]
    // Emits the cardinality of the destination set.
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Destination key must not be null!");
        Assert.notEmpty(command.getSourceKeys(), "Source keys must not be null or empty!");
        byte[] keyBuf = toByteArray(command.getKey());
        List<Object> args = new ArrayList<Object>(command.getSourceKeys().size() * 2 + 5);
        args.add(keyBuf);
        args.add(command.getSourceKeys().size());
        args.addAll(command.getSourceKeys().stream().map(e -> toByteArray(e)).collect(Collectors.toList()));
        if (!command.getWeights().isEmpty()) {
            args.add("WEIGHTS");
            for (Double weight : command.getWeights()) {
                // toPlainString() keeps weights in plain decimal form (no scientific notation).
                args.add(BigDecimal.valueOf(weight).toPlainString());
            }
        }
        if (command.getAggregateFunction().isPresent()) {
            args.add("AGGREGATE");
            args.add(command.getAggregateFunction().get().name());
        }
        Mono<Long> m = write(keyBuf, LongCodec.INSTANCE, ZUNIONSTORE, args.toArray());
        return m.map(v -> new NumericResponse<>(command, v));
    });
}
private static final RedisStrictCommand<Long> ZINTERSTORE = new RedisStrictCommand<Long>("ZINTERSTORE");

@Override
public Flux<NumericResponse<ZInterStoreCommand, Long>> zInterStore(Publisher<ZInterStoreCommand> commands) {
    // ZINTERSTORE destKey numKeys key [key ...] [WEIGHTS w ...] [AGGREGATE SUM|MIN|MAX]
    // Emits the cardinality of the destination set.
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Destination key must not be null!");
        Assert.notEmpty(command.getSourceKeys(), "Source keys must not be null or empty!");
        byte[] keyBuf = toByteArray(command.getKey());
        List<Object> args = new ArrayList<Object>(command.getSourceKeys().size() * 2 + 5);
        args.add(keyBuf);
        args.add(command.getSourceKeys().size());
        args.addAll(command.getSourceKeys().stream().map(e -> toByteArray(e)).collect(Collectors.toList()));
        if (!command.getWeights().isEmpty()) {
            args.add("WEIGHTS");
            for (Double weight : command.getWeights()) {
                // toPlainString() keeps weights in plain decimal form (no scientific notation).
                args.add(BigDecimal.valueOf(weight).toPlainString());
            }
        }
        if (command.getAggregateFunction().isPresent()) {
            args.add("AGGREGATE");
            args.add(command.getAggregateFunction().get().name());
        }
        Mono<Long> m = write(keyBuf, LongCodec.INSTANCE, ZINTERSTORE, args.toArray());
        return m.map(v -> new NumericResponse<>(command, v));
    });
}
private static final RedisCommand<Set<Object>> ZRANGEBYLEX = new RedisCommand<Set<Object>>("ZRANGEBYLEX", new ObjectSetReplayDecoder<Object>());
private static final RedisCommand<Set<Object>> ZREVRANGEBYLEX = new RedisCommand<Set<Object>>("ZREVRANGEBYLEX", new ObjectSetReplayDecoder<Object>());

@Override
public Flux<CommandResponse<ZRangeByLexCommand, Flux<ByteBuffer>>> zRangeByLex(
        Publisher<ZRangeByLexCommand> commands) {
    // Lexicographic range query: ZRANGEBYLEX takes "min max", ZREVRANGEBYLEX takes "max min".
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getRange(), "Range must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        String start = null;
        String end = null;
        if (command.getDirection() == Direction.ASC) {
            start = toLexLowerBound(command.getRange(), "-");
            end = toLexUpperBound(command.getRange(), "+");
        } else {
            // ZREVRANGEBYLEX expects the max bound first: an absent upper bound must default
            // to "+" and an absent lower bound to "-". The previous defaults were swapped,
            // yielding inverted (typically empty) results for unbounded reverse ranges.
            start = toLexUpperBound(command.getRange(), "+");
            end = toLexLowerBound(command.getRange(), "-");
        }
        Mono<Set<byte[]>> m;
        if (!command.getLimit().isUnlimited()) {
            if (command.getDirection() == Direction.ASC) {
                m = read(keyBuf, ByteArrayCodec.INSTANCE, ZRANGEBYLEX,
                        keyBuf, start, end, "LIMIT", command.getLimit().getOffset(), command.getLimit().getCount());
            } else {
                m = read(keyBuf, ByteArrayCodec.INSTANCE, ZREVRANGEBYLEX,
                        keyBuf, start, end, "LIMIT", command.getLimit().getOffset(), command.getLimit().getCount());
            }
        } else {
            if (command.getDirection() == Direction.ASC) {
                m = read(keyBuf, ByteArrayCodec.INSTANCE, ZRANGEBYLEX,
                        keyBuf, start, end);
            } else {
                m = read(keyBuf, ByteArrayCodec.INSTANCE, ZREVRANGEBYLEX,
                        keyBuf, start, end);
            }
        }
        Flux<ByteBuffer> flux = m.flatMapMany(e -> Flux.fromIterable(e).map(v -> ByteBuffer.wrap(v)));
        return Mono.just(new CommandResponse<>(command, flux));
    });
}
public Flux<NumericResponse<ReactiveListCommands.LPosCommand, Long>> lPos(Publisher<ReactiveListCommands.LPosCommand> commands) {
    // Finds the index of an element in a list (LPOS), honouring optional RANK and COUNT.
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getElement(), "Element must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        List<Object> args = new ArrayList<Object>(6);
        args.add(keyBuf);
        args.add(toByteArray(command.getElement()));
        if (command.getRank() != null) {
            args.add("RANK");
            args.add(command.getRank());
        }
        if (command.getCount() != null) {
            args.add("COUNT");
            args.add(command.getCount());
        }
        return read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.LPOS, args.toArray())
                .map(index -> new NumericResponse<>(command, index));
    });
}
@Override
public Flux<ReactiveRedisConnection.NumericResponse<ZLexCountCommand, Long>> zLexCount(Publisher<ZLexCountCommand> commands) {
    // Counts members within a lexicographic range (ZLEXCOUNT).
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getRange(), "Range must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        String min = toLexLowerBound(command.getRange(), "-");
        String max = toLexUpperBound(command.getRange(), "+");
        return read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.ZLEXCOUNT, keyBuf, min, max)
                .map(count -> new NumericResponse<>(command, count));
    });
}
}
|
RedissonReactiveZSetCommands
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java
|
{
"start": 3104,
"end": 8324
}
|
// Read-only List view exposing each bucket's sub-aggregations without copying.
class ____<B extends Bucket> extends AbstractList<InternalAggregations> {

    // Backing buckets; index lookups delegate straight to this list.
    private final List<B> buckets;

    public BucketAggregationList(List<B> buckets) {
        this.buckets = buckets;
    }

    @Override
    public InternalAggregations get(int index) {
        return buckets.get(index).getAggregations();
    }

    @Override
    public int size() {
        return buckets.size();
    }
}
// The concrete multi-bucket aggregation supplies its bucket list.
@Override
public abstract List<B> getBuckets();
@Override
public Object getProperty(List<String> path) {
    // An empty path addresses this aggregation itself.
    if (path.isEmpty()) {
        return this;
    }
    return resolvePropertyFromPath(path, getBuckets(), getName());
}
static Object resolvePropertyFromPath(List<String> path, List<? extends InternalBucket> buckets, String name) {
    String aggName = path.get(0);
    // "_bucket_count" is a synthetic property resolving to the number of buckets.
    if (aggName.equals("_bucket_count")) {
        return buckets.size();
    }
    // A single-quoted path element ('key') selects the bucket whose key matches.
    if (aggName.startsWith("'") && aggName.endsWith("'")) {
        for (InternalBucket bucket : buckets) {
            if (bucket.getKeyAsString().equals(aggName.substring(1, aggName.length() - 1))) {
                return bucket.getProperty(name, path.subList(1, path.size()));
            }
        }
        // No key match, time to give up ("an key" -> "a key" grammar fix in the message)
        throw new InvalidAggregationPathException("Cannot find a key [" + aggName + "] in [" + name + "]");
    }
    // Otherwise resolve the path against every bucket and return the results positionally.
    Object[] propertyArray = new Object[buckets.size()];
    for (int i = 0; i < buckets.size(); i++) {
        propertyArray[i] = buckets.get(i).getProperty(name, path);
    }
    return propertyArray;
}
/**
 * Counts the number of inner buckets inside the provided {@link InternalBucket}
 */
public static int countInnerBucket(InternalBucket bucket) {
    int count = 0;
    for (Aggregation agg : bucket.getAggregations()) {
        // Recurse through each sub-aggregation; see countInnerBucket(Aggregation).
        count += countInnerBucket(agg);
    }
    return count;
}
/**
 * Counts the number of inner buckets inside the provided {@link Aggregation}
 */
public static int countInnerBucket(Aggregation agg) {
    int size = 0;
    if (agg instanceof MultiBucketsAggregation multi) {
        // Each bucket counts itself plus everything nested beneath it.
        for (MultiBucketsAggregation.Bucket bucket : multi.getBuckets()) {
            ++size;
            for (Aggregation bucketAgg : bucket.getAggregations()) {
                size += countInnerBucket(bucketAgg);
            }
        }
    } else if (agg instanceof SingleBucketAggregation single) {
        // A single-bucket aggregation contributes only its children, not itself.
        for (Aggregation bucketAgg : single.getAggregations()) {
            size += countInnerBucket(bucketAgg);
        }
    }
    return size;
}
/**
 * A multi-bucket agg needs to first reduce the buckets and *their* pipelines
 * before allowing sibling pipelines to materialize.
 */
@Override
public final InternalAggregation reducePipelines(
    InternalAggregation reducedAggs,
    AggregationReduceContext reduceContext,
    PipelineTree pipelineTree
) {
    // Pipelines only run during the final (coordinating-node) reduce.
    assert reduceContext.isFinalReduce();
    InternalAggregation reduced = this;
    if (pipelineTree.hasSubTrees()) {
        // Materialize per-bucket pipeline results before the sibling pipelines run.
        List<B> materializedBuckets = reducePipelineBuckets(reduceContext, pipelineTree);
        reduced = create(materializedBuckets);
    }
    return super.reducePipelines(reduced, reduceContext, pipelineTree);
}
@Override
public InternalAggregation copyWithRewritenBuckets(Function<InternalAggregations, InternalAggregations> rewriter) {
    // Rebuilds this aggregation with each bucket's sub-aggregations run through the
    // rewriter; returns `this` unchanged when the rewriter changed nothing.
    boolean modified = false;
    List<B> newBuckets = new ArrayList<>();
    for (B bucket : getBuckets()) {
        InternalAggregations rewritten = rewriter.apply(bucket.getAggregations());
        if (rewritten == null) {
            // A null result signals "no change" for this bucket.
            newBuckets.add(bucket);
            continue;
        }
        modified = true;
        B newBucket = createBucket(rewritten, bucket);
        newBuckets.add(newBucket);
    }
    return modified ? create(newBuckets) : this;
}
@Override
protected boolean mustReduceOnSingleInternalAgg() {
    // Multi-bucket aggregations always go through reduction, even for a single instance.
    return true;
}
@Override
public void forEachBucket(Consumer<InternalAggregations> consumer) {
    // Visits each bucket's sub-aggregations in bucket order.
    for (B bucket : getBuckets()) {
        consumer.accept(bucket.getAggregations());
    }
}
private List<B> reducePipelineBuckets(AggregationReduceContext reduceContext, PipelineTree pipelineTree) {
    // Runs the pipeline subtree matching each sub-aggregation's name, then rebuilds the buckets.
    List<B> reducedBuckets = new ArrayList<>();
    for (B bucket : getBuckets()) {
        List<InternalAggregation> aggs = new ArrayList<>();
        for (InternalAggregation agg : bucket.getAggregations()) {
            PipelineTree subTree = pipelineTree.subTree(agg.getName());
            aggs.add(agg.reducePipelines(agg, reduceContext, subTree));
        }
        reducedBuckets.add(createBucket(InternalAggregations.from(aggs), bucket));
    }
    return reducedBuckets;
}
public abstract static
|
BucketAggregationList
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/ValueAnnotationsDeserTest.java
|
{
"start": 5478,
"end": 5741
}
|
// Holder verifying that @JsonDeserialize(contentAs=...) applies to setter-injected list elements.
class ____
{
    List<?> _list;

    @JsonDeserialize(contentAs=StringWrapper.class)
    public void setList(List<?> l) {
        _list = l;
    }
}
// for [databind#2553]
@SuppressWarnings("rawtypes")
static
|
ListContentHolder
|
java
|
spring-projects__spring-boot
|
module/spring-boot-liquibase/src/main/java/org/springframework/boot/liquibase/autoconfigure/LiquibaseProperties.java
|
{
"start": 8642,
"end": 9011
}
|
// Controls how much of the Liquibase update summary is shown.
enum ____ {

    /**
     * Do not show a summary.
     */
    OFF,

    /**
     * Show a summary.
     */
    SUMMARY,

    /**
     * Show a verbose summary.
     */
    VERBOSE

}
/**
* Enumeration of destinations to which the summary should be output. Values are the
* same as those on {@link UpdateSummaryOutputEnum}. To maximize backwards
* compatibility, the Liquibase
|
ShowSummary
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/join/AttributeJoinWithNaturalJoinedInheritanceTest.java
|
{
"start": 5309,
"end": 5639
}
|
// Joined-inheritance child whose unique "uk" column is seeded from the id.
class ____ extends BaseClass {

    // Unique column; the convenience constructor keeps it equal to the id.
    @Column(unique = true)
    private Integer uk;

    public ChildEntityA() {
    }

    public ChildEntityA(Integer id) {
        super( id );
        this.uk = id;
    }

    public Integer getUk() {
        return uk;
    }
}
@Entity( name = "SubChildEntityA1" )
@DiscriminatorValue( "child_a_1" )
public static
|
ChildEntityA
|
java
|
micronaut-projects__micronaut-core
|
http/src/main/java/io/micronaut/http/ssl/PemParser.java
|
{
"start": 11522,
"end": 15207
}
|
// Minimal cursor-style reader for DER-encoded data (used here for PKCS#8 parsing).
// NOTE(review): length decoding is deliberately lax for strict DER (see readLength),
// so this must not be used where parsing differentials between implementations matter.
class ____ {

    final byte[] data;
    // Exclusive end of this reader's window within data.
    final int limit;
    // Current read position.
    int i;

    DerInput(byte[] data) {
        this(data, 0, data.length);
    }

    private DerInput(byte[] data, int start, int limit) {
        this.data = data;
        this.i = start;
        this.limit = limit;
    }

    /**
     * Read a single byte.
     *
     * @return The byte value
     */
    byte read() {
        if (i >= limit) {
            throw invalidDer();
        }
        return data[i++];
    }

    /**
     * Read a single byte, throwing an exception if it does not match the given value.
     *
     * @param value The expected byte
     */
    void expect(int value) {
        if ((read() & 0xff) != value) {
            throw invalidDer();
        }
    }

    /**
     * Read a DER tag length.
     *
     * @return The length
     */
    private int readLength() {
        byte b = read();
        // Short form: a non-negative first byte is the length itself.
        if (b >= 0) {
            return b;
        }
        // Long form: low 7 bits give the number of subsequent length bytes.
        b &= 0x7f;
        // this is not as strict as it should be for DER, so don't use this for data where
        // parsing differentials could be a problem
        int length = 0;
        while (b-- > 0) {
            length <<= 8;
            length |= read() & 0xff;
            // Reject negative (overflowed) lengths and lengths past the window.
            if (length < 0 || length > limit - i) {
                throw invalidDer();
            }
        }
        return length;
    }

    /**
     * Get the type of the next tag.
     *
     * @return The next tag
     */
    int peekTag() {
        // Read then rewind, leaving the position unchanged.
        int tag = read() & 0xff;
        i--;
        return tag;
    }

    /**
     * Read a DER value. This {@link DerInput} will continue after the value, while the
     * returned {@link DerInput} will read the value contents.
     *
     * @param tag The expected tag
     * @return The reader for the value contents
     */
    DerInput readValue(int tag) {
        expect(tag);
        int n = readLength();
        int end = i + n;
        // The child reader shares the byte array but is windowed to [i, end).
        DerInput sequence = new DerInput(data, i, end);
        i = end;
        return sequence;
    }

    /**
     * Read a DER sequence.
     *
     * @return The reader for the sequence content
     */
    DerInput readSequence() {
        // 0x30 = constructed SEQUENCE tag.
        return readValue(0x30);
    }

    String readOid() {
        // 0x06 = OBJECT IDENTIFIER tag.
        DerInput helper = readValue(0x06);
        StringBuilder builder = new StringBuilder();
        while (helper.i < helper.limit) {
            // Each component is base-128 encoded, high bit set on all but the final byte.
            long value = 0;
            while (true) {
                byte b = helper.read();
                value <<= 7;
                value |= b & 0x7f;
                if (b >= 0) {
                    break;
                }
            }
            if (builder.isEmpty()) {
                // first value
                // The first encoded component packs the first two OID arcs as X*40+Y.
                if (value >= 80) {
                    builder.append("2.").append(value - 80);
                } else {
                    builder.append(value / 40).append('.').append(value % 40);
                }
            } else {
                builder.append('.').append(value);
            }
        }
        return builder.toString();
    }

    private static RuntimeException invalidDer() {
        return new IllegalArgumentException("Invalid PKCS#8");
    }
}
/**
* Writer for DER documents.
*/
private static final
|
DerInput
|
java
|
apache__rocketmq
|
broker/src/main/java/org/apache/rocketmq/broker/config/v2/ConfigStorage.java
|
{
"start": 2178,
"end": 7639
}
|
class ____ extends AbstractRocksDBStorage {
public static final String DATA_VERSION_KEY = "data_version";
public static final byte[] DATA_VERSION_KEY_BYTES = DATA_VERSION_KEY.getBytes(StandardCharsets.UTF_8);
private final ScheduledExecutorService scheduledExecutorService;
/**
* Number of write ops since previous flush.
*/
private final AtomicInteger writeOpsCounter;
private final AtomicLong estimateWalFileSize = new AtomicLong(0L);
private final MessageStoreConfig messageStoreConfig;
private final FlushSyncService flushSyncService;
public ConfigStorage(MessageStoreConfig messageStoreConfig) {
    // RocksDB files live under {storePathRootDir}/config/rdb
    super(messageStoreConfig.getStorePathRootDir() + File.separator + "config" + File.separator + "rdb");
    this.messageStoreConfig = messageStoreConfig;
    ThreadFactory threadFactory = new ThreadFactoryBuilder()
        .setDaemon(true)
        .setNameFormat("config-storage-%d")
        .build();
    scheduledExecutorService = new ScheduledThreadPoolExecutor(1, threadFactory);
    writeOpsCounter = new AtomicInteger(0);
    // Daemon flush thread; started in start() and stopped in preShutdown().
    this.flushSyncService = new FlushSyncService();
    this.flushSyncService.setDaemon(true);
}
private void statNettyMemory() {
    // Logs pooled Netty allocator usage; scheduled periodically from start().
    PooledByteBufAllocatorMetric metric = AbstractRocksDBStorage.POOLED_ALLOCATOR.metric();
    LOGGER.info("Netty Memory Usage: {}", metric);
}
@Override
public synchronized boolean start() {
    boolean started = super.start();
    if (started) {
        // Kick off periodic stats reporting plus the WAL flush/sync loop.
        scheduledExecutorService.scheduleWithFixedDelay(() -> statRocksdb(LOGGER), 1, 10, TimeUnit.SECONDS);
        scheduledExecutorService.scheduleWithFixedDelay(this::statNettyMemory, 10, 10, TimeUnit.SECONDS);
        this.flushSyncService.start();
    } else {
        LOGGER.error("Failed to start config storage");
    }
    return started;
}
@Override
protected boolean postLoad() {
    // POOLED_ALLOCATOR requires Unsafe; refuse to start without it rather than fail later.
    if (!PlatformDependent.hasUnsafe()) {
        LOGGER.error("Unsafe not available and POOLED_ALLOCATOR cannot work correctly");
        return false;
    }
    try {
        UtilAll.ensureDirOK(this.dbPath);
        initOptions();
        // Only the default column family is used by the config store.
        List<ColumnFamilyDescriptor> cfDescriptors = new ArrayList<>();
        ColumnFamilyOptions defaultOptions = ConfigHelper.createConfigColumnFamilyOptions();
        this.cfOptions.add(defaultOptions);
        cfDescriptors.add(new ColumnFamilyDescriptor(RocksDB.DEFAULT_COLUMN_FAMILY, defaultOptions));
        // Start RocksDB instance
        open(cfDescriptors);
        this.defaultCFHandle = cfHandles.get(0);
    } catch (Exception e) {
        AbstractRocksDBStorage.LOGGER.error("postLoad Failed. {}", this.dbPath, e);
        return false;
    }
    return true;
}
@Override
protected void preShutdown() {
    // Stop the stats tasks and the flush thread before RocksDB handles are closed.
    scheduledExecutorService.shutdown();
    flushSyncService.shutdown();
}
protected void initOptions() {
    // Config-store-specific DBOptions first, then the base class defaults.
    this.options = ConfigHelper.createConfigDBOptions();
    super.initOptions();
}
@Override
protected void initAbleWalWriteOptions() {
    // Write options applied to all WAL-enabled config writes (see write()).
    this.ableWalWriteOptions = new WriteOptions();
    // Given that fdatasync is kind of expensive, sync-WAL for every write cannot be afforded.
    this.ableWalWriteOptions.setSync(false);
    // We need WAL for config changes
    this.ableWalWriteOptions.setDisableWAL(false);
    // No fast failure on block, wait synchronously even if there is wait for the write request
    this.ableWalWriteOptions.setNoSlowdown(false);
}
public byte[] get(ByteBuffer key) throws RocksDBException {
    // Copy the key out of the buffer; RocksDB's byte[] API needs an exact-size array.
    byte[] keyBytes = new byte[key.remaining()];
    key.get(keyBytes);
    return super.get(getDefaultCFHandle(), totalOrderReadOptions, keyBytes);
}
public void write(WriteBatch writeBatch) throws RocksDBException {
    db.write(ableWalWriteOptions, writeBatch);
    // Record op count and estimated WAL growth for the flush/sync bookkeeping.
    accountWriteOps(writeBatch.getDataSize());
}
private void accountWriteOps(long dataSize) {
    // dataSize is the serialized size of the batch, used to estimate WAL file growth.
    writeOpsCounter.incrementAndGet();
    estimateWalFileSize.addAndGet(dataSize);
}
public RocksIterator iterate(ByteBuffer beginKey, ByteBuffer endKey) {
    // Returns an iterator positioned at beginKey with endKey as exclusive upper bound.
    // NOTE(review): the ReadOptions (and the upper-bound Slice) are closed by
    // try-with-resources before the returned iterator is consumed; RocksJava generally
    // requires them to outlive iterators created from them — verify this is safe here.
    try (ReadOptions readOptions = new ReadOptions()) {
        readOptions.setTotalOrderSeek(true);
        readOptions.setTailing(false);
        readOptions.setAutoPrefixMode(true);
        // Use DirectSlice till the follow issue is fixed:
        // https://github.com/facebook/rocksdb/issues/13098
        //
        // readOptions.setIterateUpperBound(new DirectSlice(endKey));
        byte[] buf = new byte[endKey.remaining()];
        endKey.slice().get(buf);
        readOptions.setIterateUpperBound(new Slice(buf));
        RocksIterator iterator = db.newIterator(defaultCFHandle, readOptions);
        iterator.seek(beginKey.slice());
        return iterator;
    }
}
/**
* RocksDB writes contain 3 stages: application memory buffer --> OS Page Cache --> Disk.
* Given that we are having DBOptions::manual_wal_flush, we need to manually call DB::FlushWAL and DB::SyncWAL
* Note: DB::FlushWAL(true) will internally call DB::SyncWAL.
* <p>
* See <a href="https://rocksdb.org/blog/2017/08/25/flushwal.html">Flush And Sync WAL</a>
*/
|
ConfigStorage
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/AsyncCorrelateRestoreTest.java
|
{
"start": 1129,
"end": 1709
}
|
// Restore tests for async correlate plans, keyed on the StreamExecCorrelate exec node.
class ____ extends RestoreTestBase {

    public AsyncCorrelateRestoreTest() {
        super(StreamExecCorrelate.class);
    }

    @Override
    public List<TableTestProgram> programs() {
        // Each program replays a previously persisted plan scenario for this node.
        return Arrays.asList(
                AsyncCorrelateTestPrograms.CORRELATE_CATALOG_FUNC,
                AsyncCorrelateTestPrograms.CORRELATE_SYSTEM_FUNC,
                AsyncCorrelateTestPrograms.CORRELATE_JOIN_FILTER,
                AsyncCorrelateTestPrograms.CORRELATE_LEFT_JOIN,
                AsyncCorrelateTestPrograms.CORRELATE_UDF_EXCEPTION);
    }
}
|
AsyncCorrelateRestoreTest
|
java
|
spring-projects__spring-boot
|
integration-test/spring-boot-test-integration-tests/src/test/java/org/springframework/boot/web/server/test/SpringBootTestReactiveWebEnvironmentDefinedPortTests.java
|
{
"start": 1411,
"end": 1624
}
|
class ____
extends AbstractSpringBootTestEmbeddedReactiveWebEnvironmentTests {
@Configuration(proxyBeanMethods = false)
@EnableWebFlux
@RestController
static
|
SpringBootTestReactiveWebEnvironmentDefinedPortTests
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/Assertions_assertThat_with_AssertProvider_Test.java
|
{
"start": 1057,
"end": 1455
}
|
class ____ {
private TestedObject object;
@BeforeEach
void setup() {
    // Fresh fixture per test so no state leaks between test methods.
    object = new TestedObject("Test");
}
@Test
void should_allow_assert_provider_within_assertThat() {
    // assertThat(AssertProvider) should unwrap to the provider's custom assertion type.
    assertThat(object).containsText("es");
}
@Test
void should_use_assert_provider_directly() {
    // The provider's own assertThat() entry point yields the same custom assertions.
    object.assertThat().containsText("es");
}
private static
|
Assertions_assertThat_with_AssertProvider_Test
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java
|
{
"start": 4726,
"end": 28691
}
|
class ____ {
public static final Setting<Integer> CACHE_MAX_APPLICATIONS_SETTING = Setting.intSetting(
"xpack.security.authz.store.privileges.cache.max_size",
10_000,
Setting.Property.NodeScope
);
public static final Setting<TimeValue> CACHE_TTL_SETTING = Setting.timeSetting(
"xpack.security.authz.store.privileges.cache.ttl",
TimeValue.timeValueHours(24L),
Setting.Property.NodeScope
);
/**
* Determines how long get privileges calls will wait for an available security index.
* The default value of 0 bypasses all waiting-related logic entirely.
*/
private static final TimeValue SECURITY_INDEX_WAIT_TIMEOUT = TimeValue.parseTimeValue(
System.getProperty("es.security.security_index.wait_timeout", null),
TimeValue.ZERO,
"system property <es.security.security_index.wait_timeout>"
);
private static final Collector<Tuple<String, String>, ?, Map<String, List<String>>> TUPLES_TO_MAP = Collectors.toMap(
Tuple::v1,
t -> CollectionUtils.newSingletonArrayList(t.v2()),
(a, b) -> {
a.addAll(b);
return a;
}
);
private static final Logger logger = LogManager.getLogger(NativePrivilegeStore.class);
private final Settings settings;
private final Client client;
private final SecurityIndexManager securityIndexManager;
private volatile boolean allowExpensiveQueries;
private final DescriptorsAndApplicationNamesCache descriptorsAndApplicationNamesCache;
public NativePrivilegeStore(
Settings settings,
Client client,
SecurityIndexManager securityIndexManager,
CacheInvalidatorRegistry cacheInvalidatorRegistry,
ClusterService clusterService
) {
this.settings = settings;
this.client = client;
this.securityIndexManager = securityIndexManager;
this.allowExpensiveQueries = ALLOW_EXPENSIVE_QUERIES.get(settings);
clusterService.getClusterSettings().addSettingsUpdateConsumer(ALLOW_EXPENSIVE_QUERIES, this::setAllowExpensiveQueries);
final TimeValue ttl = CACHE_TTL_SETTING.get(settings);
if (ttl.getNanos() > 0) {
descriptorsAndApplicationNamesCache = new DescriptorsAndApplicationNamesCache(
ttl,
CACHE_MAX_APPLICATIONS_SETTING.get(settings)
);
cacheInvalidatorRegistry.registerCacheInvalidator("application_privileges", descriptorsAndApplicationNamesCache);
} else {
descriptorsAndApplicationNamesCache = null;
}
}
public void getPrivileges(
Collection<String> applications,
Collection<String> names,
ActionListener<Collection<ApplicationPrivilegeDescriptor>> listener
) {
// timeout of 0 means skip wait attempt entirely
final boolean waitForAvailableSecurityIndex = false == SECURITY_INDEX_WAIT_TIMEOUT.equals(TimeValue.ZERO);
getPrivileges(applications, names, waitForAvailableSecurityIndex, listener);
}
public void getPrivileges(
Collection<String> applications,
Collection<String> names,
boolean waitForAvailableSecurityIndex,
ActionListener<Collection<ApplicationPrivilegeDescriptor>> listener
) {
if (false == isEmpty(names) && names.stream().noneMatch(ApplicationPrivilege::isValidPrivilegeName)) {
logger.debug("no concrete privilege, only action patterns [{}], returning no application privilege descriptors", names);
listener.onResponse(Collections.emptySet());
return;
}
final Set<String> applicationNamesCacheKey = (isEmpty(applications) || applications.contains("*"))
? Set.of("*")
: Set.copyOf(applications);
// Always fetch for the concrete application names even when the passed-in application names has no wildcard.
// This serves as a negative lookup, i.e. when a passed-in non-wildcard application does not exist.
Set<String> concreteApplicationNames = descriptorsAndApplicationNamesCache == null
? null
: descriptorsAndApplicationNamesCache.getConcreteApplicationNames(applicationNamesCacheKey);
if (concreteApplicationNames != null && concreteApplicationNames.isEmpty()) {
logger.debug(
"returning empty application privileges for [{}] as application names result in empty list",
applicationNamesCacheKey
);
listener.onResponse(Collections.emptySet());
} else {
final Set<ApplicationPrivilegeDescriptor> cachedDescriptors = cachedDescriptorsForApplicationNames(
concreteApplicationNames != null ? concreteApplicationNames : applicationNamesCacheKey
);
if (cachedDescriptors != null) {
logger.debug("All application privileges for [{}] found in cache", applicationNamesCacheKey);
listener.onResponse(filterDescriptorsForPrivilegeNames(cachedDescriptors, names));
} else {
// Always fetch all privileges of an application for caching purpose
logger.debug("Fetching application privilege documents for: {}", applicationNamesCacheKey);
final long invalidationCount = descriptorsAndApplicationNamesCache == null
? -1
: descriptorsAndApplicationNamesCache.getInvalidationCount();
innerGetPrivileges(applicationNamesCacheKey, waitForAvailableSecurityIndex, ActionListener.wrap(fetchedDescriptors -> {
final Map<String, Set<ApplicationPrivilegeDescriptor>> mapOfFetchedDescriptors = fetchedDescriptors.stream()
.collect(Collectors.groupingBy(ApplicationPrivilegeDescriptor::getApplication, Collectors.toUnmodifiableSet()));
if (invalidationCount != -1) {
cacheFetchedDescriptors(applicationNamesCacheKey, mapOfFetchedDescriptors, invalidationCount);
}
listener.onResponse(filterDescriptorsForPrivilegeNames(fetchedDescriptors, names));
}, listener::onFailure));
}
}
}
private void innerGetPrivileges(
Collection<String> applications,
boolean waitForAvailableSecurityIndex,
ActionListener<Collection<ApplicationPrivilegeDescriptor>> listener
) {
assert applications != null && applications.size() > 0 : "Application names are required (found " + applications + ")";
final IndexState projectSecurityIndex = securityIndexManager.forCurrentProject();
if (projectSecurityIndex.indexExists() == false) {
listener.onResponse(Collections.emptyList());
} else if (projectSecurityIndex.isAvailable(SEARCH_SHARDS) == false) {
final ElasticsearchException unavailableReason = projectSecurityIndex.getUnavailableReason(SEARCH_SHARDS);
if (false == waitForAvailableSecurityIndex || false == unavailableReason instanceof UnavailableShardsException) {
listener.onFailure(unavailableReason);
return;
}
projectSecurityIndex.onIndexAvailableForSearch(new ActionListener<>() {
@Override
public void onResponse(Void unused) {
innerGetPrivileges(applications, false, listener);
}
@Override
public void onFailure(Exception e) {
logger.warn("Failure while waiting for security index [" + projectSecurityIndex.getConcreteIndexName() + "]", e);
// Call get privileges once more to get most up-to-date failure (or result, in case of an unlucky time-out)
innerGetPrivileges(applications, false, listener);
}
}, SECURITY_INDEX_WAIT_TIMEOUT);
} else {
projectSecurityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> {
final TermQueryBuilder typeQuery = QueryBuilders.termQuery(
ApplicationPrivilegeDescriptor.Fields.TYPE.getPreferredName(),
DOC_TYPE_VALUE
);
final Tuple<QueryBuilder, Predicate<String>> applicationNameQueryAndPredicate = getApplicationNameQueryAndPredicate(
applications
);
final QueryBuilder query;
if (applicationNameQueryAndPredicate.v1() != null) {
query = QueryBuilders.boolQuery().filter(typeQuery).filter(applicationNameQueryAndPredicate.v1());
} else {
query = QueryBuilders.boolQuery().filter(typeQuery);
}
final Supplier<ThreadContext.StoredContext> supplier = client.threadPool().getThreadContext().newRestorableContext(false);
try (ThreadContext.StoredContext ignore = client.threadPool().getThreadContext().stashWithOrigin(SECURITY_ORIGIN)) {
SearchRequest request = client.prepareSearch(SECURITY_MAIN_ALIAS)
.setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings))
.setQuery(query)
.setSize(1000)
.setFetchSource(true)
.request();
logger.trace(() -> format("Searching for [%s] privileges with query [%s]", applications, Strings.toString(query)));
ScrollHelper.fetchAllByEntity(
client,
request,
new ContextPreservingActionListener<>(supplier, listener),
hit -> buildPrivilege(hit.getId(), hit.getSourceRef(), applicationNameQueryAndPredicate.v2())
);
}
});
}
}
public SecurityIndexManager getSecurityIndexManager() {
return securityIndexManager;
}
private Tuple<QueryBuilder, Predicate<String>> getApplicationNameQueryAndPredicate(Collection<String> applications) {
if (applications.contains("*")) {
return new Tuple<>(QueryBuilders.existsQuery(APPLICATION.getPreferredName()), null);
}
final List<String> rawNames = new ArrayList<>(applications.size());
final List<String> wildcardNames = new ArrayList<>(applications.size());
for (String name : applications) {
if (name.endsWith("*")) {
wildcardNames.add(name);
} else {
rawNames.add(name);
}
}
assert rawNames.isEmpty() == false || wildcardNames.isEmpty() == false;
TermsQueryBuilder termsQuery = rawNames.isEmpty() ? null : QueryBuilders.termsQuery(APPLICATION.getPreferredName(), rawNames);
if (wildcardNames.isEmpty()) {
return new Tuple<>(termsQuery, null);
}
if (allowExpensiveQueries) {
final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
if (termsQuery != null) {
boolQuery.should(termsQuery);
}
for (String wildcard : wildcardNames) {
final String prefix = wildcard.substring(0, wildcard.length() - 1);
boolQuery.should(QueryBuilders.prefixQuery(APPLICATION.getPreferredName(), prefix));
}
boolQuery.minimumShouldMatch(1);
return new Tuple<>(boolQuery, null);
} else {
logger.trace("expensive queries are not allowed, switching to filtering application names in memory");
return new Tuple<>(null, StringMatcher.of(applications));
}
}
private void setAllowExpensiveQueries(boolean allowExpensiveQueries) {
this.allowExpensiveQueries = allowExpensiveQueries;
}
private static ApplicationPrivilegeDescriptor buildPrivilege(
String docId,
BytesReference source,
@Nullable Predicate<String> applicationNamePredicate
) {
logger.trace("Building privilege from [{}] [{}]", docId, source == null ? "<<null>>" : source.utf8ToString());
if (source == null) {
return null;
}
final Tuple<String, String> name = nameFromDocId(docId);
if (applicationNamePredicate != null && false == applicationNamePredicate.test(name.v1())) {
return null;
}
try {
// EMPTY is safe here because we never use namedObject
try (
XContentParser parser = XContentHelper.createParserNotCompressed(
LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG,
source,
XContentType.JSON
)
) {
final ApplicationPrivilegeDescriptor privilege = ApplicationPrivilegeDescriptor.parse(parser, null, null, true);
assert privilege.getApplication().equals(name.v1())
: "Incorrect application name for privilege. Expected [" + name.v1() + "] but was " + privilege.getApplication();
assert privilege.getName().equals(name.v2())
: "Incorrect name for application privilege. Expected [" + name.v2() + "] but was " + privilege.getName();
return privilege;
}
} catch (IOException | XContentParseException e) {
logger.error(() -> "cannot parse application privilege [" + name + "]", e);
return null;
}
}
/**
* Try resolve all privileges for given application names from the cache.
* It returns non-null result only when privileges of ALL applications are
* found in the cache, i.e. it returns null if any of application name is
* NOT found in the cache. Since the cached is keyed by concrete application
* name, this means any wildcard will result in null.
*/
private Set<ApplicationPrivilegeDescriptor> cachedDescriptorsForApplicationNames(Set<String> applicationNames) {
if (descriptorsAndApplicationNamesCache == null) {
return null;
}
final Set<ApplicationPrivilegeDescriptor> cachedDescriptors = new HashSet<>();
for (String applicationName : applicationNames) {
if (applicationName.endsWith("*")) {
return null;
} else {
final Set<ApplicationPrivilegeDescriptor> descriptors = descriptorsAndApplicationNamesCache.getApplicationDescriptors(
applicationName
);
if (descriptors == null) {
return null;
} else {
cachedDescriptors.addAll(descriptors);
}
}
}
return Collections.unmodifiableSet(cachedDescriptors);
}
/**
* Filter to get all privilege descriptors that have any of the given privilege names.
*/
private static Collection<ApplicationPrivilegeDescriptor> filterDescriptorsForPrivilegeNames(
Collection<ApplicationPrivilegeDescriptor> descriptors,
Collection<String> privilegeNames
) {
// empty set of names equals to retrieve everything
if (isEmpty(privilegeNames)) {
return descriptors;
}
return descriptors.stream().filter(d -> privilegeNames.contains(d.getName())).collect(Collectors.toUnmodifiableSet());
}
// protected for tests
protected void cacheFetchedDescriptors(
Set<String> applicationNamesCacheKey,
Map<String, Set<ApplicationPrivilegeDescriptor>> mapOfFetchedDescriptors,
long invalidationCount
) {
descriptorsAndApplicationNamesCache.putIfNoInvalidationSince(applicationNamesCacheKey, mapOfFetchedDescriptors, invalidationCount);
}
public void putPrivileges(
Collection<ApplicationPrivilegeDescriptor> privileges,
WriteRequest.RefreshPolicy refreshPolicy,
ActionListener<Map<String, Map<String, DocWriteResponse.Result>>> listener
) {
if (privileges.isEmpty()) {
listener.onResponse(Map.of());
return;
}
final BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
bulkRequestBuilder.setRefreshPolicy(refreshPolicy);
try {
for (ApplicationPrivilegeDescriptor privilege : privileges) {
bulkRequestBuilder.add(preparePutPrivilege(privilege));
}
} catch (IOException e) {
listener.onFailure(e);
}
securityIndexManager.forCurrentProject().prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
ClientHelper.executeAsyncWithOrigin(
client.threadPool().getThreadContext(),
SECURITY_ORIGIN,
bulkRequestBuilder.request(),
ActionListener.<BulkResponse>wrap(bulkResponse -> handleBulkResponse(bulkResponse, listener), ex -> {
logger.warn(Strings.format("Failed to write application privileges to %s", securityIndexManager.aliasName()), ex);
listener.onFailure(ex);
}),
client::bulk
);
});
}
private IndexRequest preparePutPrivilege(ApplicationPrivilegeDescriptor privilege) throws IOException {
try {
final String name = privilege.getName();
final XContentBuilder xContentBuilder = privilege.toXContent(jsonBuilder(), true);
return client.prepareIndex(SECURITY_MAIN_ALIAS)
.setId(toDocId(privilege.getApplication(), name))
.setSource(xContentBuilder)
.request();
} catch (IOException e) {
logger.warn("Failed to build application privilege {} - {}", Strings.toString(privilege), e.toString());
throw e;
}
}
private void handleBulkResponse(BulkResponse bulkResponse, ActionListener<Map<String, Map<String, DocWriteResponse.Result>>> listener) {
ElasticsearchException failure = null;
final Map<String, Map<String, DocWriteResponse.Result>> privilegeResultByAppName = new HashMap<>();
for (var item : bulkResponse.getItems()) {
if (item.isFailed()) {
if (failure == null) {
failure = new ElasticsearchException("Failed to put application privileges", item.getFailure().getCause());
} else {
failure.addSuppressed(item.getFailure().getCause());
}
} else {
final Tuple<String, String> name = nameFromDocId(item.getId());
final String appName = name.v1();
final String privilegeName = name.v2();
var privileges = privilegeResultByAppName.get(appName);
if (privileges == null) {
privileges = new HashMap<>();
privilegeResultByAppName.put(appName, privileges);
}
privileges.put(privilegeName, item.getResponse().getResult());
}
}
if (failure != null) {
listener.onFailure(failure);
} else {
clearCaches(listener, privilegeResultByAppName.keySet(), privilegeResultByAppName);
}
}
public void deletePrivileges(
String application,
Collection<String> names,
WriteRequest.RefreshPolicy refreshPolicy,
ActionListener<Map<String, List<String>>> listener
) {
final IndexState projectSecurityIndex = securityIndexManager.forCurrentProject();
if (projectSecurityIndex.indexExists() == false) {
listener.onResponse(Collections.emptyMap());
} else if (projectSecurityIndex.isAvailable(PRIMARY_SHARDS) == false) {
listener.onFailure(projectSecurityIndex.getUnavailableReason(PRIMARY_SHARDS));
} else {
projectSecurityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> {
ActionListener<DeleteResponse> groupListener = new GroupedActionListener<>(names.size(), ActionListener.wrap(responses -> {
final Map<String, List<String>> deletedNames = responses.stream()
.filter(r -> r.getResult() == DocWriteResponse.Result.DELETED)
.map(r -> r.getId())
.map(NativePrivilegeStore::nameFromDocId)
.collect(TUPLES_TO_MAP);
clearCaches(listener, Collections.singleton(application), deletedNames);
}, listener::onFailure));
for (String name : names) {
ClientHelper.executeAsyncWithOrigin(
client.threadPool().getThreadContext(),
SECURITY_ORIGIN,
client.prepareDelete(SECURITY_MAIN_ALIAS, toDocId(application, name)).setRefreshPolicy(refreshPolicy).request(),
groupListener,
client::delete
);
}
});
}
}
private <T> void clearCaches(ActionListener<T> listener, Set<String> applicationNames, T value) {
// This currently clears _all_ roles, but could be improved to clear only those roles that reference the affected application
final ClearPrivilegesCacheRequest request = new ClearPrivilegesCacheRequest().applicationNames(
applicationNames.toArray(String[]::new)
).clearRolesCache(true);
executeAsyncWithOrigin(client, SECURITY_ORIGIN, ClearPrivilegesCacheAction.INSTANCE, request, new ActionListener<>() {
@Override
public void onResponse(ClearPrivilegesCacheResponse nodes) {
listener.onResponse(value);
}
@Override
public void onFailure(Exception e) {
logger.error("unable to clear application privileges and role cache", e);
listener.onFailure(
new ElasticsearchException(
"clearing the application privileges and role cache failed, please clear the caches manually",
e
)
);
}
});
}
/**
* @return A Tuple of (application-name, privilege-name)
*/
private static Tuple<String, String> nameFromDocId(String docId) {
final String name = docId.substring(DOC_TYPE_VALUE.length() + 1);
assert name != null && name.length() > 0 : "Invalid name '" + name + "'";
final int colon = name.indexOf(':');
assert colon > 0 : "Invalid name '" + name + "' (missing colon)";
return new Tuple<>(name.substring(0, colon), name.substring(colon + 1));
}
private static String toDocId(String application, String name) {
return DOC_TYPE_VALUE + "_" + application + ":" + name;
}
private static boolean isEmpty(Collection<String> collection) {
return collection == null || collection.isEmpty();
}
// Package private for tests
DescriptorsAndApplicationNamesCache getDescriptorsAndApplicationNamesCache() {
return descriptorsAndApplicationNamesCache;
}
// Package private for tests
Cache<Set<String>, Set<String>> getApplicationNamesCache() {
return descriptorsAndApplicationNamesCache == null ? null : descriptorsAndApplicationNamesCache.applicationNamesCache;
}
// Package private for tests
Cache<String, Set<ApplicationPrivilegeDescriptor>> getDescriptorsCache() {
return descriptorsAndApplicationNamesCache == null ? null : descriptorsAndApplicationNamesCache.descriptorsCache;
}
// Package private for tests
long getNumInvalidation() {
return descriptorsAndApplicationNamesCache.getInvalidationCount();
}
static final
|
NativePrivilegeStore
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
|
{
"start": 35911,
"end": 36336
}
|
class ____ extends org.apache.hadoop.ipc.Server {
boolean verbose;
private static final Pattern COMPLEX_SERVER_NAME_PATTERN =
Pattern.compile("(?:[^\\$]*\\$)*([A-Za-z][^\\$]+)(?:\\$\\d+)?");
/**
* Get a meaningful and short name for a server based on a java class.
*
* The rules are defined to support the current naming schema of the
* generated protobuf classes where the final
|
Server
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/time/DateTimePrinter.java
|
{
"start": 669,
"end": 984
}
|
interface ____ {
ZoneId getZone();
Locale getLocale();
DateTimePrinter withZone(ZoneId zone);
DateTimePrinter withLocale(Locale locale);
/**
* Returns the string representation of the specified {@link TemporalAccessor}
*/
String format(TemporalAccessor accessor);
}
|
DateTimePrinter
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.