language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
google__guice
|
core/test/com/google/inject/BindingTest.java
|
{
"start": 6055,
"end": 6127
}
|
class ____ {
protected ProtectedNoArg() { }
}
static
|
ProtectedNoArg
|
java
|
grpc__grpc-java
|
xds/src/main/java/io/grpc/xds/internal/security/trust/CertificateUtils.java
|
{
"start": 1015,
"end": 2237
}
|
class ____ {
public static boolean isXdsSniEnabled = GrpcUtil.getFlag("GRPC_EXPERIMENTAL_XDS_SNI", true);
public static boolean useChannelAuthorityIfNoSniApplicable
= GrpcUtil.getFlag("GRPC_USE_CHANNEL_AUTHORITY_IF_NO_SNI_APPLICABLE", false);
/**
* Generates X509Certificate array from a file on disk.
*
* @param file a {@link File} containing the cert data
*/
static X509Certificate[] toX509Certificates(File file) throws CertificateException, IOException {
try (FileInputStream fis = new FileInputStream(file);
BufferedInputStream bis = new BufferedInputStream(fis)) {
return toX509Certificates(bis);
}
}
/** Generates X509Certificate array from the {@link InputStream}. */
public static X509Certificate[] toX509Certificates(InputStream inputStream)
throws CertificateException, IOException {
return io.grpc.util.CertificateUtils.getX509Certificates(inputStream);
}
/** Generates a {@link PrivateKey} from the {@link InputStream}. */
public static PrivateKey getPrivateKey(InputStream inputStream)
throws Exception {
return io.grpc.util.CertificateUtils.getPrivateKey(inputStream);
}
private CertificateUtils() {}
}
|
CertificateUtils
|
java
|
apache__flink
|
flink-formats/flink-avro/src/test/java/org/apache/flink/formats/avro/EncoderDecoderTest.java
|
{
"start": 19436,
"end": 20167
}
|
class ____ {
private long bookId;
private String title;
private long authorId;
public Book() {}
public Book(long bookId, String title, long authorId) {
this.bookId = bookId;
this.title = title;
this.authorId = authorId;
}
@Override
public boolean equals(Object obj) {
if (obj.getClass() == Book.class) {
Book other = (Book) obj;
return other.bookId == this.bookId
&& other.authorId == this.authorId
&& this.title.equals(other.title);
} else {
return false;
}
}
}
private static
|
Book
|
java
|
apache__camel
|
core/camel-support/src/main/java/org/apache/camel/support/ChildServiceSupport.java
|
{
"start": 1151,
"end": 1252
}
|
class ____ control lifecycle for a set of child {@link org.apache.camel.Service}s.
*/
public abstract
|
to
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/prefetch/BufferData.java
|
{
"start": 1509,
"end": 1618
}
|
class ____ {
private static final Logger LOG = LoggerFactory.getLogger(BufferData.class);
public
|
BufferData
|
java
|
micronaut-projects__micronaut-core
|
core/src/main/java/io/micronaut/core/annotation/Order.java
|
{
"start": 1705,
"end": 1869
}
|
interface ____ {
/**
* The order value.
*
* Defaults to zero (no order).
*
* @return the order
*/
int value() default 0;
}
|
Order
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/statement/TidbSplitTableStatement.java
|
{
"start": 1073,
"end": 3783
}
|
class ____ extends MySqlStatementImpl {
//region for
private boolean splitSyntaxOptionRegionFor;
//partition
private boolean splitSyntaxOptionPartition;
private SQLExprTableSource tableName;
private List<SQLExpr> partitionNameListOptions = new ArrayList<>();
private SQLName indexName;
private List<List<SQLExpr>> splitOptionBys = new ArrayList<>();
private List<SQLExpr> splitOptionBetween;
private List<SQLExpr> splitOptionAnd;
private long splitOptionRegions;
public boolean isSplitSyntaxOptionRegionFor() {
return splitSyntaxOptionRegionFor;
}
public void setSplitSyntaxOptionRegionFor(boolean splitSyntaxOptionRegionFor) {
this.splitSyntaxOptionRegionFor = splitSyntaxOptionRegionFor;
}
public boolean isSplitSyntaxOptionPartition() {
return splitSyntaxOptionPartition;
}
public void setSplitSyntaxOptionPartition(boolean splitSyntaxOptionPartition) {
this.splitSyntaxOptionPartition = splitSyntaxOptionPartition;
}
public SQLExprTableSource getTableName() {
return tableName;
}
public void setTableName(SQLExprTableSource tableName) {
this.tableName = tableName;
}
public List<SQLExpr> getPartitionNameListOptions() {
return partitionNameListOptions;
}
public void setPartitionNameListOptions(List<SQLExpr> partitionNameListOptions) {
this.partitionNameListOptions = partitionNameListOptions;
}
public SQLName getIndexName() {
return indexName;
}
public void setIndexName(SQLName indexName) {
this.indexName = indexName;
}
public List<List<SQLExpr>> getSplitOptionBys() {
return splitOptionBys;
}
public void setSplitOptionBys(List<List<SQLExpr>> splitOptionBys) {
this.splitOptionBys = splitOptionBys;
}
public List<SQLExpr> getSplitOptionBetween() {
return splitOptionBetween;
}
public void setSplitOptionBetween(List<SQLExpr> splitOptionBetween) {
this.splitOptionBetween = splitOptionBetween;
}
public List<SQLExpr> getSplitOptionAnd() {
return splitOptionAnd;
}
public void setSplitOptionAnd(List<SQLExpr> splitOptionAnd) {
this.splitOptionAnd = splitOptionAnd;
}
public long getSplitOptionRegions() {
return splitOptionRegions;
}
public void setSplitOptionRegions(long splitOptionRegions) {
this.splitOptionRegions = splitOptionRegions;
}
public void accept0(MySqlASTVisitor visitor) {
if (visitor.visit(this)) {
this.getTableName().accept(visitor);
}
visitor.endVisit(this);
}
}
|
TidbSplitTableStatement
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jackson/src/test/java/org/springframework/boot/jackson/JacksonComponentModuleTests.java
|
{
"start": 9354,
"end": 9523
}
|
class ____ extends NameAndAgeJacksonKeyComponent.Serializer {
}
@JacksonComponent(scope = JacksonComponent.Scope.KEYS, type = NameAndAge.class)
static
|
OnlyKeySerializer
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/builder/ToStringStyleConcurrencyTest.java
|
{
"start": 1456,
"end": 1919
}
|
class ____ a registry to avoid infinite loops for objects with circular references. We
* want to make sure that we do not get concurrency exceptions accessing this registry.
* </p>
* <p>
* This test passes but only tests one aspect of the issue.
* </p>
*
* @see <a href="https://issues.apache.org/jira/browse/LANG-762">[LANG-762] Handle or document ReflectionToStringBuilder
* and ToStringBuilder for collections that are not thread safe</a>
*/
|
includes
|
java
|
google__error-prone
|
check_api/src/main/java/com/google/errorprone/matchers/Returns.java
|
{
"start": 979,
"end": 1583
}
|
class ____ implements Matcher<StatementTree> {
private final Matcher<? super ExpressionTree> returnedMatcher;
/**
* New matcher for a {@code return} statement where the returned expression is matched by the
* passed {@code returnedMatcher}.
*/
public Returns(Matcher<? super ExpressionTree> returnedMatcher) {
this.returnedMatcher = returnedMatcher;
}
@Override
public boolean matches(StatementTree expressionTree, VisitorState state) {
return expressionTree instanceof ReturnTree returnTree
&& returnedMatcher.matches(returnTree.getExpression(), state);
}
}
|
Returns
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/date/AbstractDateAssertWithDateArg_Test.java
|
{
"start": 1104,
"end": 1884
}
|
class ____ factorize DateAssert tests with a date arg (either Date or String based).
* <p/>
* For the most part, date assertion tests are (whatever the concrete date assertion method invoked is) :
* <ul>
* <li>successful assertion test with a date</li>
* <li>successful assertion test with a date as string following default date format</li>
* <li>successful assertion test with a date as string following custom date format</li>
* <li>failed assertion test when date as string does not follow the expected date format</li>
* <li>checking that DateAssert instance used for assertions is returned to allow fluent assertions chaining</li>
* </ul>
* <p/>
* Subclasses are expected to define the invoked assertion method.
*
* @author Joel Costigliola
*/
public abstract
|
that
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestValuesTableFactory.java
|
{
"start": 126428,
"end": 127933
}
|
class ____ implements SourceFunction<RowData> {
private final String dataId;
private final int sleepAfterElements;
private final long sleepTimeMillis;
private final AtomicInteger elementCtr = new AtomicInteger(0);
private volatile boolean isRunning = true;
public FromRowDataSourceFunction(
String dataId, int sleepAfterElements, long sleepTimeMillis) {
this.dataId = dataId;
this.sleepAfterElements = sleepAfterElements;
this.sleepTimeMillis = sleepTimeMillis;
}
@Override
public void run(SourceContext<RowData> ctx) throws Exception {
Collection<RowData> values =
registeredRowData.getOrDefault(dataId, Collections.emptyList());
Iterator<RowData> valueIter = values.iterator();
while (isRunning && valueIter.hasNext()) {
ctx.collect(valueIter.next());
if (elementCtr.incrementAndGet() >= sleepAfterElements && sleepTimeMillis > 0) {
try {
Thread.sleep(sleepTimeMillis);
elementCtr.set(0);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
break;
}
}
}
}
@Override
public void cancel() {
isRunning = false;
}
}
}
|
FromRowDataSourceFunction
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-cache/src/main/java/smoketest/cache/Country.java
|
{
"start": 760,
"end": 1254
}
|
class ____ implements Serializable {
private final String code;
public Country(String code) {
this.code = code;
}
public String getCode() {
return this.code;
}
@Override
public boolean equals(@Nullable Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Country country = (Country) o;
return this.code.equals(country.code);
}
@Override
public int hashCode() {
return this.code.hashCode();
}
}
|
Country
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/aot/generate/ValueCodeGeneratorDelegates.java
|
{
"start": 7295,
"end": 7619
}
|
class ____ implements Delegate {
@Override
public @Nullable CodeBlock generateCode(ValueCodeGenerator codeGenerator, Object value) {
if (value instanceof String) {
return CodeBlock.of("$S", value);
}
return null;
}
}
/**
* {@link Delegate} for {@link Charset} types.
*/
private static
|
StringDelegate
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/ExtensionTest.java
|
{
"start": 48365,
"end": 48505
}
|
class ____ {",
" abstract String string();",
"",
" @AutoValue.Builder",
" abstract static
|
Baz
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/annotation/AnnotationTypeMappingsTests.java
|
{
"start": 24379,
"end": 24596
}
|
interface ____ {
@AliasFor(annotation = AliasForToOtherNonExistingAttributeTarget.class, attribute = "missing")
String test() default "";
}
@Retention(RetentionPolicy.RUNTIME)
@
|
AliasForToOtherNonExistingAttribute
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/internal/util/ReflectHelper.java
|
{
"start": 24790,
"end": 29518
}
|
interface
____ = setterOrNull( anInterface.getInterfaces(), propertyName, propertyType, likelyMethodName );
}
}
}
return setter;
}
private static Method setterOrNull(Class<?> theClass, String propertyName, Class<?> propertyType, String likelyMethodName) {
try {
return theClass.getDeclaredMethod( likelyMethodName, propertyType );
}
catch ( NoSuchMethodException e ) {
//Ignore, so we try the old method for best compatibility (even though it's less efficient) next:
}
Method potentialSetter = null;
for ( Method method : theClass.getDeclaredMethods() ) {
final String methodName = method.getName();
if ( method.getParameterCount() == 1 && methodName.startsWith( "set" ) ) {
final String testOldMethod = methodName.substring( 3 );
final String testStdMethod = decapitalize( testOldMethod );
if ( testStdMethod.equals( propertyName ) || testOldMethod.equals( propertyName ) ) {
potentialSetter = method;
if ( propertyType == null || method.getParameterTypes()[0].equals( propertyType ) ) {
break;
}
}
}
}
return potentialSetter;
}
private static String likelySetterMethodNameForProperty(final String propertyName) {
final char firstCharacter = propertyName.charAt( 0 );
return isLowerCase( firstCharacter )
? "set" + Character.toUpperCase( firstCharacter ) + propertyName.substring( 1 )
: "set" + propertyName;
}
/**
* Similar to {@link #getterMethodOrNull}, except that here we are just looking for the
* corresponding getter for a field (defined as field access) if one exists.
* <p>
* We do not look at supers, although conceivably the super could declare the method
* as an abstract - but again, that is such an edge case...
*/
public static Method findGetterMethodForFieldAccess(Field field, String propertyName) {
final var declaringClass = field.getDeclaringClass();
for ( Method method : declaringClass.getDeclaredMethods() ) {
if ( method.getParameterCount() == 0 // if the method has parameters, skip it
&& !Modifier.isStatic( method.getModifiers() )
&& method.getReturnType().isAssignableFrom( field.getType() ) ) {
final String methodName = method.getName();
// try "get"
if ( methodName.startsWith( "get" ) ) {
final String stemName = methodName.substring( 3 );
if ( stemName.equals( propertyName )
|| decapitalize( stemName ).equals( propertyName ) ) {
return method;
}
}
// if not "get", then try "is"
if ( methodName.startsWith( "is" ) ) {
final String stemName = methodName.substring( 2 );
if ( stemName.equals( propertyName )
|| decapitalize( stemName ).equals( propertyName ) ) {
return method;
}
}
}
}
if ( declaringClass.isRecord() ) {
try {
return declaringClass.getMethod( field.getName(), NO_PARAM_SIGNATURE );
}
catch (NoSuchMethodException e) {
// Ignore
}
}
return null;
}
@Deprecated(forRemoval = true)
public static boolean isRecord(Class<?> declaringClass) {
return declaringClass.isRecord();
}
public static Class<?>[] getRecordComponentTypes(Class<?> javaType) {
try {
final var recordComponents = javaType.getRecordComponents();
final var componentTypes = new Class[recordComponents.length];
for (int i = 0; i < recordComponents.length; i++ ) {
componentTypes[i] = recordComponents[i].getType();
}
return componentTypes;
}
catch (Exception e) {
throw new IllegalArgumentException(
"Could not determine the record components for: " + javaType.getName(),
e
);
}
}
public static String[] getRecordComponentNames(Class<?> javaType) {
try {
final var recordComponents = javaType.getRecordComponents();
final var componentNames = new String[recordComponents.length];
for (int i = 0; i < recordComponents.length; i++ ) {
componentNames[i] = recordComponents[i].getName();
}
return componentNames;
}
catch (Exception e) {
throw new IllegalArgumentException(
"Could not determine the record components for: " + javaType.getName(),
e
);
}
}
public static <T> Class<T> getClass(java.lang.reflect.Type type) {
if ( type == null ) {
return null;
}
else if ( type instanceof Class<?> ) {
return (Class<T>) type;
}
else if ( type instanceof ParameterizedType parameterizedType ) {
return (Class<T>) parameterizedType.getRawType();
}
else if ( type instanceof TypeVariable<?> typeVariable ) {
return getClass( typeVariable.getBounds()[0] );
}
else if ( type instanceof WildcardType wildcardType ) {
return getClass( wildcardType.getUpperBounds()[0] );
}
throw new UnsupportedOperationException( "Can't get java type
|
setter
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/core/publisher/scenarios/FluxWindowConsistencyTest.java
|
{
"start": 1233,
"end": 19520
}
|
class ____ {
Sinks.Many<Integer> sourceProcessor = Sinks.unsafe().many().multicast().directBestEffort();
Flux<Integer> source;
AssertSubscriber<Flux<Integer>> mainSubscriber;
private AtomicInteger sourceCount = new AtomicInteger();
private AtomicInteger innerCreated = new AtomicInteger();
private AtomicInteger innerCancelled = new AtomicInteger();
private AtomicInteger innerCompleted = new AtomicInteger();
private AtomicInteger innerTerminated = new AtomicInteger();
private AtomicInteger mainCancelled = new AtomicInteger();
private AtomicInteger mainCompleted = new AtomicInteger();
private AtomicInteger mainTerminated = new AtomicInteger();
@BeforeEach
public void setUp() {
source = sourceProcessor.asFlux().doOnNext(i -> sourceCount.incrementAndGet());
}
private void generate(int start, int count) {
for (int i = 0; i < count; i++) {
sourceProcessor.emitNext(i + start, FAIL_FAST);
}
}
private void generateWithCancel(int start, int countBeforeCancel, int countAfterCancel) {
generate(start, countBeforeCancel);
mainSubscriber.cancel();
generate(start + countBeforeCancel, countAfterCancel);
}
private void generateAndComplete(int start, int count) {
generate(start, count);
sourceProcessor.emitComplete(FAIL_FAST);
generate(start + count, 10);
}
private void expectWindow(int index, Predicate<? super Integer> innerCancelPredicate, List<Integer> values) {
AssertSubscriber<Integer> s = AssertSubscriber.create();
mainSubscriber.values().get(index)
.doOnCancel(() -> innerCancelled.incrementAndGet())
.doOnComplete(() -> {
innerCompleted.incrementAndGet();})
.doOnTerminate(() -> innerTerminated.incrementAndGet())
.takeWhile(innerCancelPredicate).subscribe(s);
s.assertValueSequence(values).assertNoError();
innerCreated.incrementAndGet();
}
private void subscribe(Flux<Flux<Integer>> windows) {
mainSubscriber = AssertSubscriber.create();
windows.doOnCancel(() -> mainCancelled.incrementAndGet())
.doOnComplete(() -> mainCompleted.incrementAndGet())
.doOnTerminate(() -> mainTerminated.incrementAndGet()).subscribe(mainSubscriber);
}
private void subscribeGroups(Flux<GroupedFlux<Integer, Integer>> groups) {
subscribe(groups.map(m -> m));
}
@SafeVarargs
final void verifyMainComplete(List<Integer>... lists) {
for (int i = 0; i < lists.length; i++) {
expectWindow(i, t -> true, lists[i]);
}
assertThat(innerCancelled).as("Inner cancel doesn't match").hasValue(0);
assertThat(innerCompleted).as("Inner complete doesn't match").hasValue(lists.length);
assertThat(innerTerminated).as("Inner terminate doesn't match").hasValue(lists.length);
assertThat(mainCancelled).as("Main cancel doesn't match").hasValue(0);
assertThat(mainCompleted).as("Main complete doesn't match").hasValue(1);
assertThat(mainTerminated).as("Main terminate doesn't match").hasValue(1);
assertThat(innerCompleted).as("Inner not completed").hasValue(innerCreated.get());
}
@SafeVarargs
final void verifyMainCancel(boolean isGroupBy, List<Integer>... lists) {
for (int i = 0; i < lists.length; i++) {
expectWindow(i, t -> true, lists[i]);
}
// All tests except groupBy provide sufficient data/duration for all inner windows to complete
int expectedInnerComplete = isGroupBy ? lists.length : 0;
assertThat(innerCancelled).as("Inner cancel doesn't match").hasValue(0);
assertThat(innerCompleted).as("Inner complete doesn't match").hasValue(expectedInnerComplete);
assertThat(innerTerminated).as("Inner terminate doesn't match").hasValue(expectedInnerComplete);
assertThat(mainCancelled).as("Main cancel doesn't match").hasValue(1);
assertThat(mainCompleted).as("Main complete doesn't match").hasValue(0);
assertThat(mainTerminated).as("Main terminate doesn't match").hasValue(0);
}
@SafeVarargs
final void verifyMainCancelNoNewWindow(int completedWindows, List<Integer>... lists) {
for (int i = 0; i < lists.length; i++) {
expectWindow(i, t -> true, lists[i]);
}
assertThat(innerCancelled).as("Inner cancel doesn't match").hasValue(0);
assertThat(innerCompleted).as("Inner complete doesn't match").hasValue(completedWindows);
assertThat(innerTerminated).as("Inner terminate doesn't match").hasValue(completedWindows);
assertThat(mainCancelled).as("Main cancel doesn't match").hasValue(1);
assertThat(mainCompleted).as("Main complete doesn't match").hasValue(0);
assertThat(mainTerminated).as("Main terminate doesn't match").hasValue(0);
}
@SafeVarargs
final void verifyInnerCancel(int completedWindows, Predicate<? super Integer> predicate, List<Integer>... lists) {
for (int i = 0; i < lists.length; i++) {
expectWindow(i, predicate, lists[i]);
}
assertThat(innerCancelled).as("Inner cancel doesn't match").hasValue(lists.length - completedWindows);
assertThat(innerCompleted).as("Inner complete doesn't match").hasValue(completedWindows);
assertThat(innerTerminated).as("Inner terminate doesn't match").hasValue(completedWindows);
assertThat(mainCancelled).as("Main cancel doesn't match").hasValue(1);
assertThat(mainCompleted).as("Main complete doesn't match").hasValue(0);
assertThat(mainTerminated).as("Main terminate doesn't match").hasValue(0);
}
@Test
public void windowExactComplete() throws Exception {
Flux<Flux<Integer>> windows = source.window(3, 3);
subscribe(windows);
generateAndComplete(0, 6);
verifyMainComplete(Arrays.asList(0, 1, 2), Arrays.asList(3, 4, 5));
}
@Test
public void windowSkipComplete() throws Exception {
Flux<Flux<Integer>> windows = source.window(3, 5);
subscribe(windows);
generateAndComplete(0, 10);
verifyMainComplete(Arrays.asList(0, 1, 2), Arrays.asList(5, 6, 7));
}
@Test
public void windowOverlapComplete() throws Exception {
Flux<Flux<Integer>> windows = source.window(5, 3);
subscribe(windows);
generateAndComplete(0, 5);
verifyMainComplete(Arrays.asList(0, 1, 2, 3, 4), Arrays.asList(3, 4));
}
@Test
public void windowDurationComplete() throws Exception {
Flux<Flux<Integer>> windows = source.window(Duration.ofMillis(200));
subscribe(windows);
generate(0, 3);
Thread.sleep(300);
generateAndComplete(3, 3);
verifyMainComplete(Arrays.asList(0, 1, 2), Arrays.asList(3, 4, 5));
}
@Test
public void windowTimeoutComplete() throws Exception {
Flux<Flux<Integer>> windows = source.windowTimeout(5, Duration.ofMillis(200));
subscribe(windows);
generate(0, 3);
Thread.sleep(300);
generateAndComplete(3, 3);
verifyMainComplete(Arrays.asList(0, 1, 2), Arrays.asList(3, 4, 5));
}
@Test
public void windowBoundaryComplete() throws Exception {
Sinks.Many<Integer> boundary = Sinks.unsafe().many().multicast().directBestEffort();
Flux<Flux<Integer>> windows = source.window(boundary.asFlux());
subscribe(windows);
generate(0, 3);
boundary.emitNext(1, FAIL_FAST);
generateAndComplete(3, 3);
verifyMainComplete(Arrays.asList(0, 1, 2), Arrays.asList(3, 4, 5));
}
@Test
public void windowStartEndComplete() throws Exception {
Sinks.Many<Integer> start = Sinks.unsafe().many().multicast().directBestEffort();
Sinks.Many<Integer> end1 = Sinks.unsafe().many().multicast().directBestEffort();
Sinks.Many<Integer> end2 = Sinks.unsafe().many().multicast().directBestEffort();
Flux<Flux<Integer>> windows = source.windowWhen(start.asFlux(), v -> v == 1 ? end1.asFlux() : end2.asFlux());
subscribe(windows);
start.emitNext(1, FAIL_FAST);
generate(0, 3);
end1.emitNext(1, FAIL_FAST);
start.emitNext(2, FAIL_FAST);
generateAndComplete(3, 3);
verifyMainComplete(Arrays.asList(0, 1, 2), Arrays.asList(3, 4, 5));
}
@Test
public void windowUntilComplete() throws Exception {
Flux<Flux<Integer>> windows = source.windowUntil(i -> i % 3 == 0);
subscribe(windows);
generateAndComplete(1, 5);
verifyMainComplete(Arrays.asList(1, 2, 3), Arrays.asList(4, 5));
}
@Test
public void windowWhileComplete() throws Exception {
Flux<Flux<Integer>> windows = source.windowWhile(i -> i % 3 != 0);
subscribe(windows);
generateAndComplete(1, 5);
verifyMainComplete(Arrays.asList(1, 2), Arrays.asList(4, 5));
}
@Test
public void groupByComplete() throws Exception {
Flux<GroupedFlux<Integer, Integer>> windows = source.groupBy(i -> i % 2);
subscribeGroups(windows);
generateAndComplete(0, 6);
verifyMainComplete(Arrays.asList(0, 2, 4), Arrays.asList(1, 3, 5));
}
@Test
public void windowExactMainCancel() throws Exception {
Flux<Flux<Integer>> windows = source.window(5, 5);
subscribe(windows);
generateWithCancel(0, 7, 10);
verifyMainCancel(true, Arrays.asList(0, 1, 2, 3, 4), Arrays.asList(5, 6, 7, 8, 9));
}
@Test
public void windowSkipMainCancel() throws Exception {
Flux<Flux<Integer>> windows = source.window(3, 5);
subscribe(windows);
generateWithCancel(0, 6, 10);
verifyMainCancel(true, Arrays.asList(0, 1, 2), Arrays.asList(5, 6, 7));
}
@Test
public void windowOverlapMainCancel() throws Exception {
Flux<Flux<Integer>> windows = source.window(5, 3);
subscribe(windows);
generateWithCancel(0, 4, 10);
verifyMainCancel(true, Arrays.asList(0, 1, 2, 3, 4), Arrays.asList(3, 4, 5, 6, 7));
}
@Test
public void windowDurationMainCancel() throws Exception {
Flux<Flux<Integer>> windows = source.window(Duration.ofMillis(100));
subscribe(windows);
generate(0, 2);
mainSubscriber.cancel();
generate(2, 3);
Thread.sleep(200);
generate(5, 10);
verifyMainCancel(true, Arrays.asList(0, 1, 2, 3, 4));
}
@Test
public void windowTimeoutMainCancel() throws Exception {
Flux<Flux<Integer>> windows = source.windowTimeout(10, Duration.ofMillis(100));
subscribe(windows);
generate(0, 2);
mainSubscriber.cancel();
generate(2, 3);
Thread.sleep(200);
generate(5, 10);
verifyMainCancel(true);
}
@Test
public void windowBoundaryMainCancel() throws Exception {
Sinks.Many<Integer> boundary = Sinks.unsafe().many().multicast().directBestEffort();
Flux<Flux<Integer>> windows = source.window(boundary.asFlux());
subscribe(windows);
generate(0, 3);
boundary.emitNext(1, FAIL_FAST);
generate(3, 1);
mainSubscriber.cancel();
generate(4, 2);
boundary.emitNext(1, FAIL_FAST);
generate(6, 10);
verifyMainCancel(true, Arrays.asList(0, 1, 2), Arrays.asList(3, 4, 5));
}
@Test
public void windowStartEndMainCancel() throws Exception {
Sinks.Many<Integer> start = Sinks.unsafe().many().multicast().directBestEffort();
Sinks.Many<Integer> end1 = Sinks.unsafe().many().multicast().directBestEffort();
Sinks.Many<Integer> end2 = Sinks.unsafe().many().multicast().directBestEffort();
Flux<Flux<Integer>> windows = source.windowWhen(start.asFlux(), v -> v == 1 ? end1.asFlux() : end2.asFlux());
subscribe(windows);
start.emitNext(1, FAIL_FAST);
generate(0, 3);
end1.emitNext(1, FAIL_FAST);
start.emitNext(2, FAIL_FAST);
generate(3, 1);
mainSubscriber.cancel();
generate(4, 2);
end2.emitNext(1, FAIL_FAST);
start.emitNext(3, FAIL_FAST);
generate(7, 10);
verifyMainCancel(true, Arrays.asList(0, 1, 2), Arrays.asList(3, 4, 5));
}
@Test
public void windowUntilMainCancel() throws Exception {
Flux<Flux<Integer>> windows = source.windowUntil(i -> i % 3 == 0);
subscribe(windows);
generateWithCancel(1, 4, 10);
verifyMainCancel(true, Arrays.asList(1, 2, 3), Arrays.asList(4, 5, 6));
}
@Test
public void windowWhileMainCancel() throws Exception {
Flux<Flux<Integer>> windows = source.windowWhile(i -> i % 3 != 0);
subscribe(windows);
generateWithCancel(1, 4, 10);
verifyMainCancel(true, Arrays.asList(1, 2), Arrays.asList(4, 5));
}
@Test
public void groupByMainCancel() throws Exception {
Flux<GroupedFlux<Integer, Integer>> windows = source.groupBy(i -> i % 2);
subscribeGroups(windows);
generateWithCancel(0, 5, 1);
verifyMainCancel(false, Arrays.asList(0, 2, 4), Arrays.asList(1, 3, 5));
}
@Test
public void windowExactMainCancelNoNewWindow() throws Exception {
Flux<Flux<Integer>> windows = source.window(5, 5);
subscribe(windows);
generateWithCancel(0, 10, 1);
verifyMainCancelNoNewWindow(2, Arrays.asList(0, 1, 2, 3, 4), Arrays.asList(5, 6, 7, 8, 9));
}
@Test
public void windowSkipMainCancelNoNewWindow() throws Exception {
Flux<Flux<Integer>> windows = source.window(2, 5);
subscribe(windows);
generateWithCancel(0, 5, 1);
verifyMainCancelNoNewWindow(1, Arrays.asList(0, 1));
}
@Test
public void windowOverlapMainCancelNoNewWindow() throws Exception {
Flux<Flux<Integer>> windows = source.window(5, 1);
subscribe(windows);
generateWithCancel(0, 2, 1);
verifyMainCancelNoNewWindow(0, Arrays.asList(0, 1, 2), Arrays.asList(1, 2));
}
@Test
public void windowDurationMainCancelNoNewWindow() throws Exception {
Flux<Flux<Integer>> windows = source.window(Duration.ofMillis(100));
subscribe(windows);
generate(0, 2);
mainSubscriber.cancel();
Thread.sleep(200);
generate(2, 1);
verifyMainCancelNoNewWindow(1, Arrays.asList(0, 1));
}
@Test
public void windowTimeoutMainCancelNoNewWindow() throws Exception {
Flux<Flux<Integer>> windows = source.windowTimeout(5, Duration.ofMillis(200));
subscribe(windows);
generate(0, 1);
Thread.sleep(300);
generate(1, 1);
mainSubscriber.cancel();
Thread.sleep(300);
generate(2, 1);
verifyMainCancelNoNewWindow(1, Arrays.asList(0), Arrays.asList(1));
}
@Test
public void windowBoundaryMainCancelNoNewWindow() throws Exception {
Sinks.Many<Integer> boundary = Sinks.unsafe().many().multicast().directBestEffort();
Flux<Flux<Integer>> windows = source.window(boundary.asFlux());
subscribe(windows);
generate(0, 3);
boundary.emitNext(1, FAIL_FAST);
mainSubscriber.cancel();
generate(3, 1);
verifyMainCancelNoNewWindow(1, Arrays.asList(0, 1, 2));
}
@Test
public void windowStartEndMainCancelNoNewWindow() throws Exception {
Sinks.Many<Integer> start = Sinks.unsafe().many().multicast().directBestEffort();
Sinks.Many<Integer> end1 = Sinks.unsafe().many().multicast().directBestEffort();
Sinks.Many<Integer> end2 = Sinks.unsafe().many().multicast().directBestEffort();
Flux<Flux<Integer>> windows = source.windowWhen(start.asFlux(), v -> v == 1 ? end1.asFlux() : end2.asFlux());
subscribe(windows);
start.emitNext(1, FAIL_FAST);
generate(0, 4);
end1.emitNext(1, FAIL_FAST);
start.emitNext(2, FAIL_FAST);
mainSubscriber.cancel();
generate(5, 1);
verifyMainCancelNoNewWindow(1, Arrays.asList(0, 1, 2, 3));
}
@Test
public void windowUntilMainCancelNoNewWindow() throws Exception {
Flux<Flux<Integer>> windows = source.windowUntil(i -> i % 3 == 0);
subscribe(windows);
generateWithCancel(0, 4, 1);
verifyMainCancelNoNewWindow(2, Arrays.asList(0), Arrays.asList(1, 2, 3));
}
@Test
public void windowWhileMainCancelNoNewWindow() throws Exception {
Flux<Flux<Integer>> windows = source.windowWhile(i -> i % 3 != 1);
subscribe(windows);
generateWithCancel(0, 4, 1);
verifyMainCancelNoNewWindow(2, Arrays.asList(0), Arrays.asList(2, 3));
}
@Test
public void groupByMainCancelNoNewWindow() throws Exception {
Flux<GroupedFlux<Integer, Integer>> windows = source.groupBy(i -> i % 2);
subscribeGroups(windows);
generateWithCancel(0, 1, 1);
verifyMainCancelNoNewWindow(0, Arrays.asList(0));
}
@Test
public void windowExactInnerCancel() throws Exception {
Flux<Flux<Integer>> windows = source.window(5, 5);
subscribe(windows);
generateWithCancel(0, 49, 1);
verifyInnerCancel(1, i -> i != 7, Arrays.asList(0, 1, 2, 3, 4), Arrays.asList(5, 6));
}
@Test
public void windowSkipInnerCancel() throws Exception {
Flux<Flux<Integer>> windows = source.window(2, 5);
subscribe(windows);
generateWithCancel(0, 6, 1);
verifyInnerCancel(1, i -> i != 6, Arrays.asList(0, 1), Arrays.asList(5));
}
@Test
public void windowOverlapInnerCancel() throws Exception {
Flux<Flux<Integer>> windows = source.window(5, 1);
subscribe(windows);
generateWithCancel(0, 6, 1);
verifyInnerCancel(0, i -> i != 2, Arrays.asList(0, 1), Arrays.asList(1));
}
@Test
public void windowDurationInnerCancel() throws Exception {
Flux<Flux<Integer>> windows = source.window(Duration.ofMillis(5000));
subscribe(windows);
generateWithCancel(0, 6, 1);
verifyInnerCancel(0, i -> i != 2, Arrays.asList(0, 1));
}
@Test
public void windowTimeoutInnerCancel() throws Exception {
Flux<Flux<Integer>> windows = source.windowTimeout(5, Duration.ofMillis(5000));
subscribe(windows);
generateWithCancel(0, 6, 1);
verifyInnerCancel(0, i -> i != 2, Arrays.asList(0, 1));
}
@Test
public void windowBoundaryInnerCancel() throws Exception {
Sinks.Many<Integer> boundaryProcessor = Sinks.unsafe().many().multicast().directBestEffort();
Flux<Flux<Integer>> windows = source.window(boundaryProcessor.asFlux());
subscribe(windows);
generateWithCancel(0, 6, 1);
verifyInnerCancel(0, i -> i != 2, Arrays.asList(0, 1));
}
@Test
public void windowStartEndInnerCancel() throws Exception {
Sinks.Many<Integer> start = Sinks.unsafe().many().multicast().directBestEffort();
Sinks.Many<Integer> end1 = Sinks.unsafe().many().multicast().directBestEffort();
Sinks.Many<Integer> end2 = Sinks.unsafe().many().multicast().directBestEffort();
Flux<Flux<Integer>> windows = source.windowWhen(start.asFlux(), v -> v == 1 ? end1.asFlux() : end2.asFlux());
subscribe(windows);
start.emitNext(1, FAIL_FAST);
generateWithCancel(0, 6, 1);
verifyInnerCancel(0, i -> i != 2, Arrays.asList(0, 1));
}
@Test
public void windowUntilInnerCancel() throws Exception {
Flux<Flux<Integer>> windows = source.windowUntil(i -> i % 3 == 0);
subscribe(windows);
generateWithCancel(0, 6, 1);
verifyInnerCancel(1, i -> i != 3, Arrays.asList(0), Arrays.asList(1, 2));
}
@Test
public void windowWhileInnerCancel() throws Exception {
Flux<Flux<Integer>> windows = source.windowWhile(i -> i % 3 != 1);
subscribe(windows);
generateWithCancel(0, 6, 1);
verifyInnerCancel(1, i -> i != 3, Arrays.asList(0), Arrays.asList(2));
}
@Test
public void groupByInnerCancel() throws Exception {
Flux<GroupedFlux<Integer, Integer>> windows = source.groupBy(i -> i % 2);
subscribeGroups(windows);
generateWithCancel(0, 9, 1);
verifyInnerCancel(0, i -> i < 6, Arrays.asList(0, 2, 4), Arrays.asList(1, 3, 5));
}
}
|
FluxWindowConsistencyTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/ActiveRepositoryVerifyIntegrityTasks.java
|
{
"start": 646,
"end": 2480
}
|
class ____ {
private final Map<Long, RepositoryVerifyIntegrityResponseStream> responseStreamsByCoordinatingTaskId = ConcurrentCollections
.newConcurrentMap();
Releasable registerResponseBuilder(long coordinatingTaskId, RepositoryVerifyIntegrityResponseStream responseStream) {
assert responseStream.hasReferences(); // ref held until the REST-layer listener is completed
final var previous = responseStreamsByCoordinatingTaskId.putIfAbsent(coordinatingTaskId, responseStream);
if (previous != null) {
final var exception = new IllegalStateException("already executing verify task [" + coordinatingTaskId + "]");
assert false : exception;
throw exception;
}
return Releasables.assertOnce(() -> {
final var removed = responseStreamsByCoordinatingTaskId.remove(coordinatingTaskId, responseStream);
if (removed == false) {
final var exception = new IllegalStateException("already completed verify task [" + coordinatingTaskId + "]");
assert false : exception;
throw exception;
}
});
}
/**
* Obtain the response stream for the given coordinating-node task ID, and increment its refcount.
* @throws ResourceNotFoundException if the task is not running or its refcount already reached zero (likely because it completed)
*/
RepositoryVerifyIntegrityResponseStream acquireResponseStream(long taskId) {
final var outerRequest = responseStreamsByCoordinatingTaskId.get(taskId);
if (outerRequest == null || outerRequest.tryIncRef() == false) {
throw new ResourceNotFoundException("verify task [" + taskId + "] not found");
}
return outerRequest;
}
}
|
ActiveRepositoryVerifyIntegrityTasks
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
|
{
"start": 1273,
"end": 4663
}
|
class ____ extends AbstractMapWritable
implements Map<Writable, Writable> {
private Map<Writable, Writable> instance;
/** Default constructor. */
public MapWritable() {
super();
this.instance = new HashMap<Writable, Writable>();
}
/**
* Copy constructor.
*
* @param other the map to copy from
*/
public MapWritable(MapWritable other) {
this();
copy(other);
}
@Override
public void clear() {
instance.clear();
}
@Override
public boolean containsKey(Object key) {
return instance.containsKey(key);
}
@Override
public boolean containsValue(Object value) {
return instance.containsValue(value);
}
@Override
public Set<Map.Entry<Writable, Writable>> entrySet() {
return instance.entrySet();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj instanceof MapWritable) {
MapWritable map = (MapWritable) obj;
if (size() != map.size()) {
return false;
}
return entrySet().equals(map.entrySet());
}
return false;
}
@Override
public Writable get(Object key) {
return instance.get(key);
}
@Override
public int hashCode() {
return 1 + this.instance.hashCode();
}
@Override
public boolean isEmpty() {
return instance.isEmpty();
}
@Override
public Set<Writable> keySet() {
return instance.keySet();
}
@Override
public Writable put(Writable key, Writable value) {
addToMap(key.getClass());
addToMap(value.getClass());
return instance.put(key, value);
}
@Override
public void putAll(Map<? extends Writable, ? extends Writable> t) {
for (Map.Entry<? extends Writable, ? extends Writable> e: t.entrySet()) {
put(e.getKey(), e.getValue());
}
}
@Override
public Writable remove(Object key) {
return instance.remove(key);
}
@Override
public int size() {
return instance.size();
}
@Override
public Collection<Writable> values() {
return instance.values();
}
// Writable
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
// Write out the number of entries in the map
out.writeInt(instance.size());
// Then write out each key/value pair
for (Map.Entry<Writable, Writable> e: instance.entrySet()) {
out.writeByte(getId(e.getKey().getClass()));
e.getKey().write(out);
out.writeByte(getId(e.getValue().getClass()));
e.getValue().write(out);
}
}
@Override
public void readFields(DataInput in) throws IOException {
super.readFields(in);
// First clear the map. Otherwise we will just accumulate
// entries every time this method is called.
this.instance.clear();
// Read the number of entries in the map
int entries = in.readInt();
// Then read each key/value pair
for (int i = 0; i < entries; i++) {
Writable key = (Writable) ReflectionUtils.newInstance(getClass(
in.readByte()), getConf());
key.readFields(in);
Writable value = (Writable) ReflectionUtils.newInstance(getClass(
in.readByte()), getConf());
value.readFields(in);
instance.put(key, value);
}
}
@Override
public String toString() {
return instance.toString();
}
}
|
MapWritable
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/internals/KafkaFutureImpl.java
|
{
"start": 1323,
"end": 11505
}
|
class ____<T> extends KafkaFuture<T> {
private final KafkaCompletableFuture<T> completableFuture;
private final boolean isDependant;
public KafkaFutureImpl() {
this(false, new KafkaCompletableFuture<>());
}
private KafkaFutureImpl(boolean isDependant, KafkaCompletableFuture<T> completableFuture) {
this.isDependant = isDependant;
this.completableFuture = completableFuture;
}
@Override
public CompletionStage<T> toCompletionStage() {
return completableFuture;
}
/**
* Returns a new KafkaFuture that, when this future completes normally, is executed with this
* futures's result as the argument to the supplied function.
*/
@Override
public <R> KafkaFuture<R> thenApply(BaseFunction<T, R> function) {
CompletableFuture<R> appliedFuture = completableFuture.thenApply(value -> {
try {
return function.apply(value);
} catch (Throwable t) {
if (t instanceof CompletionException) {
// KafkaFuture#thenApply, when the function threw CompletionException should return
// an ExecutionException wrapping a CompletionException wrapping the exception thrown by the
// function. CompletableFuture#thenApply will just return ExecutionException wrapping the
// exception thrown by the function, so we add an extra CompletionException here to
// maintain the KafkaFuture behaviour.
throw new CompletionException(t);
} else {
throw t;
}
}
});
return new KafkaFutureImpl<>(true, toKafkaCompletableFuture(appliedFuture));
}
private static <U> KafkaCompletableFuture<U> toKafkaCompletableFuture(CompletableFuture<U> completableFuture) {
if (completableFuture instanceof KafkaCompletableFuture) {
return (KafkaCompletableFuture<U>) completableFuture;
} else {
final KafkaCompletableFuture<U> result = new KafkaCompletableFuture<>();
completableFuture.whenComplete((x, y) -> {
if (y != null) {
result.kafkaCompleteExceptionally(y);
} else {
result.kafkaComplete(x);
}
});
return result;
}
}
@Override
public KafkaFuture<T> whenComplete(final BiConsumer<? super T, ? super Throwable> biConsumer) {
CompletableFuture<T> tCompletableFuture = completableFuture.whenComplete((java.util.function.BiConsumer<? super T, ? super Throwable>) (a, b) -> {
try {
biConsumer.accept(a, b);
} catch (Throwable t) {
if (t instanceof CompletionException) {
throw new CompletionException(t);
} else {
throw t;
}
}
});
return new KafkaFutureImpl<>(true, toKafkaCompletableFuture(tCompletableFuture));
}
@Override
public boolean complete(T newValue) {
return completableFuture.kafkaComplete(newValue);
}
@Override
public boolean completeExceptionally(Throwable newException) {
// CompletableFuture#get() always wraps the _cause_ of a CompletionException in ExecutionException
// (which KafkaFuture does not) so wrap CompletionException in an extra one to avoid losing the
// first CompletionException in the exception chain.
return completableFuture.kafkaCompleteExceptionally(
newException instanceof CompletionException ? new CompletionException(newException) : newException);
}
/**
* If not already completed, completes this future with a CancellationException. Dependent
* futures that have not already completed will also complete exceptionally, with a
* CompletionException caused by this CancellationException.
*/
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
return completableFuture.cancel(mayInterruptIfRunning);
}
/**
* We need to deal with differences between KafkaFuture's historic API and the API of CompletableFuture:
* CompletableFuture#get() does not wrap CancellationException in ExecutionException (nor does KafkaFuture).
* CompletableFuture#get() always wraps the _cause_ of a CompletionException in ExecutionException
* (which KafkaFuture does not).
*
* The semantics for KafkaFuture are that all exceptional completions of the future (via #completeExceptionally()
* or exceptions from dependants) manifest as ExecutionException, as observed via both get() and getNow().
*/
private void maybeThrowCancellationException(Throwable cause) {
if (cause instanceof CancellationException) {
throw (CancellationException) cause;
}
}
/**
* Waits if necessary for this future to complete, and then returns its result.
*/
@Override
public T get() throws InterruptedException, ExecutionException {
try {
return completableFuture.get();
// In Java 23, When a CompletableFuture is cancelled, get() will throw a CancellationException wrapping a
// CancellationException, thus we need to unwrap it to maintain the KafkaFuture behaviour.
// see https://bugs.openjdk.org/browse/JDK-8331987
} catch (ExecutionException | CancellationException e) {
maybeThrowCancellationException(e.getCause());
throw e;
}
}
/**
* Waits if necessary for at most the given time for this future to complete, and then returns
* its result, if available.
*/
@Override
public T get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException,
TimeoutException {
try {
return completableFuture.get(timeout, unit);
// In Java 23, When a CompletableFuture is cancelled, get() will throw a CancellationException wrapping a
// CancellationException, thus we need to unwrap it to maintain the KafkaFuture behaviour.
// see https://bugs.openjdk.org/browse/JDK-8331987
} catch (ExecutionException | CancellationException e) {
maybeThrowCancellationException(e.getCause());
throw e;
}
}
/**
* Returns the result value (or throws any encountered exception) if completed, else returns
* the given valueIfAbsent.
*/
@Override
public T getNow(T valueIfAbsent) throws ExecutionException {
try {
return completableFuture.getNow(valueIfAbsent);
} catch (CancellationException e) {
// In Java 23, When a CompletableFuture is cancelled, getNow() will throw a CancellationException wrapping a
// CancellationException. whereas in Java < 23, it throws a CompletionException directly.
// see https://bugs.openjdk.org/browse/JDK-8331987
if (e.getCause() instanceof CancellationException) {
throw (CancellationException) e.getCause();
} else {
throw e;
}
} catch (CompletionException e) {
maybeThrowCancellationException(e.getCause());
// Note, unlike CompletableFuture#get() which throws ExecutionException, CompletableFuture#getNow()
// throws CompletionException, thus needs rewrapping to conform to KafkaFuture API,
// where KafkaFuture#getNow() throws ExecutionException.
throw new ExecutionException(e.getCause());
}
}
/**
* Returns true if this CompletableFuture was cancelled before it completed normally.
*/
@Override
public boolean isCancelled() {
if (isDependant) {
// Having isCancelled() for a dependent future just return
// CompletableFuture.isCancelled() would break the historical KafkaFuture behaviour because
// CompletableFuture#isCancelled() just checks for the exception being CancellationException
// whereas it will be a CompletionException wrapping a CancellationException
// due needing to compensate for CompletableFuture's CompletionException unwrapping
// shenanigans in other methods.
try {
completableFuture.getNow(null);
return false;
} catch (Exception e) {
return e instanceof CompletionException
&& e.getCause() instanceof CancellationException;
}
} else {
return completableFuture.isCancelled();
}
}
/**
* Returns true if this CompletableFuture completed exceptionally, in any way.
*/
@Override
public boolean isCompletedExceptionally() {
return completableFuture.isCompletedExceptionally();
}
/**
* Returns true if completed in any fashion: normally, exceptionally, or via cancellation.
*/
@Override
public boolean isDone() {
return completableFuture.isDone();
}
@Override
public String toString() {
T value = null;
Throwable exception = null;
try {
value = completableFuture.getNow(null);
} catch (CancellationException e) {
// In Java 23, When a CompletableFuture is cancelled, getNow() will throw a CancellationException wrapping a
// CancellationException. whereas in Java < 23, it throws a CompletionException directly.
// see https://bugs.openjdk.org/browse/JDK-8331987
if (e.getCause() instanceof CancellationException) {
exception = e.getCause();
} else {
exception = e;
}
} catch (CompletionException e) {
exception = e.getCause();
} catch (Exception e) {
exception = e;
}
return String.format("KafkaFuture{value=%s,exception=%s,done=%b}", value, exception, exception != null || value != null);
}
}
|
KafkaFutureImpl
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/distinct/SelectDistinctHqlTest.java
|
{
"start": 3738,
"end": 4048
}
|
class ____ {
@Id
private Long id;
@OneToMany(mappedBy = "person", cascade = CascadeType.ALL, orphanRemoval = true)
private List<Phone> phones = new ArrayList<>();
public void addPhone(Phone phone) {
phones.add( phone );
phone.person = this;
}
}
@Entity(name = "Phone")
public static
|
Person
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/providers/serialisers/FileBodyHandler.java
|
{
"start": 688,
"end": 2732
}
|
class ____ implements MessageBodyReader<File>, MessageBodyWriter<File> {
public static final String PREFIX = "pfx";
public static final String SUFFIX = "sfx";
@Override
public boolean isReadable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
return File.class == type;
}
@Override
public File readFrom(Class<File> type, Type genericType,
Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, String> httpHeaders, InputStream entityStream) throws IOException {
return doRead(httpHeaders, entityStream, Files.createTempFile(PREFIX, SUFFIX).toFile());
}
public static File doRead(MultivaluedMap<String, String> httpHeaders, InputStream entityStream,
File file) throws IOException {
if (HeaderUtil.isContentLengthZero(httpHeaders)) {
return file;
}
try (OutputStream output = new BufferedOutputStream(new FileOutputStream(file))) {
entityStream.transferTo(output);
}
return file;
}
public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
return File.class.isAssignableFrom(type);
}
public void writeTo(File uploadFile, Class<?> type, Type genericType,
Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, Object> httpHeaders,
OutputStream entityStream) throws IOException {
httpHeaders.add(HttpHeaders.CONTENT_LENGTH, String.valueOf(uploadFile.length()));
doWrite(uploadFile, entityStream);
}
protected void doWrite(File uploadFile, OutputStream out) throws IOException {
try (InputStream inputStream = new BufferedInputStream(new FileInputStream(uploadFile))) {
int read;
final byte[] buf = new byte[2048];
while ((read = inputStream.read(buf)) != -1) {
out.write(buf, 0, read);
}
}
}
}
|
FileBodyHandler
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/state/filesystem/FsCheckpointStateOutputStreamTest.java
|
{
"start": 3314,
"end": 21585
}
|
class ____ {
@Parameters(name = "relativePaths = {0}")
public static List<Boolean> parameters() {
return Arrays.asList(true, false);
}
@Parameter public boolean relativePaths;
@TempDir private java.nio.file.Path tempDir;
@TestTemplate
void testWrongParameters() throws Exception {
// this should fail
assertThatThrownBy(
() ->
new FsCheckpointStreamFactory.FsCheckpointStateOutputStream(
Path.fromLocalFile(TempDirUtils.newFolder(tempDir)),
FileSystem.getLocalFileSystem(),
4000,
5000,
relativePaths))
.isInstanceOf(IllegalArgumentException.class);
}
@TestTemplate
void testEmptyState() throws Exception {
CheckpointStateOutputStream stream =
new FsCheckpointStreamFactory.FsCheckpointStateOutputStream(
Path.fromLocalFile(TempDirUtils.newFolder(tempDir)),
FileSystem.getLocalFileSystem(),
1024,
512,
relativePaths);
StreamStateHandle handle = stream.closeAndGetHandle();
assertThat(handle).isNull();
}
@TestTemplate
void testStateBelowMemThreshold() throws Exception {
runTest(999, 1024, 1000, false);
}
@TestTemplate
void testStateOneBufferAboveThreshold() throws Exception {
runTest(896, 1024, 15, true);
}
@TestTemplate
void testStateAboveMemThreshold() throws Exception {
runTest(576446, 259, 17, true);
}
@TestTemplate
void testZeroThreshold() throws Exception {
runTest(16678, 4096, 0, true);
}
@TestTemplate
void testGetPos() throws Exception {
CheckpointStateOutputStream stream =
new FsCheckpointStreamFactory.FsCheckpointStateOutputStream(
Path.fromLocalFile(TempDirUtils.newFolder(tempDir)),
FileSystem.getLocalFileSystem(),
31,
17,
relativePaths);
for (int i = 0; i < 64; ++i) {
assertThat(stream.getPos()).isEqualTo(i);
stream.write(0x42);
}
stream.closeAndGetHandle();
// ----------------------------------------------------
stream =
new FsCheckpointStreamFactory.FsCheckpointStateOutputStream(
Path.fromLocalFile(TempDirUtils.newFolder(tempDir)),
FileSystem.getLocalFileSystem(),
31,
17,
relativePaths);
byte[] data = "testme!".getBytes(ConfigConstants.DEFAULT_CHARSET);
for (int i = 0; i < 7; ++i) {
assertThat(stream.getPos()).isEqualTo(i * (1L + data.length));
stream.write(0x42);
stream.write(data);
}
stream.closeAndGetHandle();
}
/** Tests that the underlying stream file is deleted upon calling close. */
@TestTemplate
void testCleanupWhenClosingStream() throws IOException {
final FileSystem fs = mock(FileSystem.class);
final FSDataOutputStream outputStream = mock(FSDataOutputStream.class);
final ArgumentCaptor<Path> pathCaptor = ArgumentCaptor.forClass(Path.class);
when(fs.create(pathCaptor.capture(), any(FileSystem.WriteMode.class)))
.thenReturn(outputStream);
CheckpointStateOutputStream stream =
new FsCheckpointStreamFactory.FsCheckpointStateOutputStream(
Path.fromLocalFile(TempDirUtils.newFolder(tempDir)),
fs,
4,
0,
relativePaths);
// this should create the underlying file stream
stream.write(new byte[] {1, 2, 3, 4, 5});
verify(fs).create(any(Path.class), any(FileSystem.WriteMode.class));
stream.close();
verify(fs).delete(eq(pathCaptor.getValue()), anyBoolean());
}
/** Tests that the underlying stream file is deleted if the closeAndGetHandle method fails. */
@TestTemplate
void testCleanupWhenFailingCloseAndGetHandle() throws IOException {
final FileSystem fs = mock(FileSystem.class);
final FSDataOutputStream outputStream = mock(FSDataOutputStream.class);
final ArgumentCaptor<Path> pathCaptor = ArgumentCaptor.forClass(Path.class);
when(fs.create(pathCaptor.capture(), any(FileSystem.WriteMode.class)))
.thenReturn(outputStream);
doThrow(new IOException("Test IOException.")).when(outputStream).close();
CheckpointStateOutputStream stream =
new FsCheckpointStreamFactory.FsCheckpointStateOutputStream(
Path.fromLocalFile(TempDirUtils.newFolder(tempDir)),
fs,
4,
0,
relativePaths);
// this should create the underlying file stream
stream.write(new byte[] {1, 2, 3, 4, 5});
verify(fs).create(any(Path.class), any(FileSystem.WriteMode.class));
assertThatThrownBy(stream::closeAndGetHandle).isInstanceOf(IOException.class);
verify(fs).delete(eq(pathCaptor.getValue()), anyBoolean());
}
private void runTest(int numBytes, int bufferSize, int threshold, boolean expectFile)
throws Exception {
CheckpointStateOutputStream stream =
new FsCheckpointStreamFactory.FsCheckpointStateOutputStream(
Path.fromLocalFile(TempDirUtils.newFolder(tempDir)),
FileSystem.getLocalFileSystem(),
bufferSize,
threshold,
relativePaths);
Random rnd = new Random();
byte[] original = new byte[numBytes];
byte[] bytes = new byte[original.length];
rnd.nextBytes(original);
System.arraycopy(original, 0, bytes, 0, original.length);
// the test writes a mixture of writing individual bytes and byte arrays
int pos = 0;
while (pos < bytes.length) {
boolean single = rnd.nextBoolean();
if (single) {
stream.write(bytes[pos++]);
} else {
int num =
rnd.nextBoolean() ? (bytes.length - pos) : rnd.nextInt(bytes.length - pos);
stream.write(bytes, pos, num);
pos += num;
}
}
StreamStateHandle handle = stream.closeAndGetHandle();
if (expectFile) {
assertThat(handle).isInstanceOf(FileStateHandle.class);
} else {
assertThat(handle).isInstanceOf(ByteStreamStateHandle.class);
}
// make sure the writing process did not alter the original byte array
assertThat(bytes).isEqualTo(original);
try (InputStream inStream = handle.openInputStream()) {
byte[] validation = new byte[bytes.length];
DataInputStream dataInputStream = new DataInputStream(inStream);
dataInputStream.readFully(validation);
assertThat(validation).isEqualTo(bytes);
}
handle.discardState();
}
@TestTemplate
void testWriteFailsFastWhenClosed() throws Exception {
FsCheckpointStateOutputStream stream =
new FsCheckpointStateOutputStream(
Path.fromLocalFile(TempDirUtils.newFolder(tempDir)),
FileSystem.getLocalFileSystem(),
1024,
512,
relativePaths);
assertThat(stream.isClosed()).isFalse();
stream.close();
assertThat(stream.isClosed()).isTrue();
assertThatThrownBy(() -> stream.write(1)).isInstanceOf(IOException.class);
assertThatThrownBy(() -> stream.write(new byte[4], 1, 2)).isInstanceOf(IOException.class);
}
@TestTemplate
void testMixedBelowAndAboveThreshold() throws Exception {
final byte[] state1 = new byte[1274673];
final byte[] state2 = new byte[1];
final byte[] state3 = new byte[0];
final byte[] state4 = new byte[177];
final Random rnd = new Random();
rnd.nextBytes(state1);
rnd.nextBytes(state2);
rnd.nextBytes(state3);
rnd.nextBytes(state4);
final File directory = TempDirUtils.newFolder(tempDir);
final Path basePath = Path.fromLocalFile(directory);
final Supplier<CheckpointStateOutputStream> factory =
() ->
new FsCheckpointStateOutputStream(
basePath, FileSystem.getLocalFileSystem(), 1024, 15, relativePaths);
CheckpointStateOutputStream stream1 = factory.get();
CheckpointStateOutputStream stream2 = factory.get();
CheckpointStateOutputStream stream3 = factory.get();
stream1.write(state1);
stream2.write(state2);
stream3.write(state3);
FileStateHandle handle1 = (FileStateHandle) stream1.closeAndGetHandle();
ByteStreamStateHandle handle2 = (ByteStreamStateHandle) stream2.closeAndGetHandle();
ByteStreamStateHandle handle3 = (ByteStreamStateHandle) stream3.closeAndGetHandle();
// use with try-with-resources
StreamStateHandle handle4;
try (CheckpointStateOutputStream stream4 = factory.get()) {
stream4.write(state4);
handle4 = stream4.closeAndGetHandle();
}
// close before accessing handle
CheckpointStateOutputStream stream5 = factory.get();
stream5.write(state4);
stream5.close();
assertThatThrownBy(stream5::closeAndGetHandle).isInstanceOf(IOException.class);
validateBytesInStream(handle1.openInputStream(), state1);
handle1.discardState();
assertThat(isDirectoryEmpty(directory)).isFalse();
ensureLocalFileDeleted(handle1.getFilePath());
validateBytesInStream(handle2.openInputStream(), state2);
handle2.discardState();
assertThat(isDirectoryEmpty(directory)).isFalse();
// nothing was written to the stream, so it will return nothing
assertThat(handle3).isNull();
assertThat(isDirectoryEmpty(directory)).isFalse();
validateBytesInStream(handle4.openInputStream(), state4);
handle4.discardState();
assertThat(isDirectoryEmpty(directory)).isTrue();
}
// ------------------------------------------------------------------------
// Not deleting parent directories
// ------------------------------------------------------------------------
/**
* This test checks that the stream does not check and clean the parent directory when
* encountering a write error.
*/
@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@TestTemplate
void testStreamDoesNotTryToCleanUpParentOnError() throws Exception {
final File directory = TempDirUtils.newFolder(tempDir);
// prevent creation of files in that directory
// this operation does not work reliably on Windows, so we use an "assume" to skip the test
// is this prerequisite operation is not supported.
assumeThat(directory.setWritable(false, true)).isTrue();
checkDirectoryNotWritable(directory);
FileSystem fs = spy(FileSystem.getLocalFileSystem());
FsCheckpointStateOutputStream stream1 =
new FsCheckpointStateOutputStream(
Path.fromLocalFile(directory), fs, 1024, 1, relativePaths);
FsCheckpointStateOutputStream stream2 =
new FsCheckpointStateOutputStream(
Path.fromLocalFile(directory), fs, 1024, 1, relativePaths);
stream1.write(new byte[61]);
stream2.write(new byte[61]);
assertThatThrownBy(stream1::closeAndGetHandle).isInstanceOf(IOException.class);
stream2.close();
// no delete call must have happened
verify(fs, times(0)).delete(any(Path.class), anyBoolean());
// the directory must still exist as a proper directory
assertThat(directory).exists();
assertThat(directory).isDirectory();
}
/**
* FLINK-28984. This test checks that the inner stream should be closed when
* FsCheckpointStateOutputStream#close() and FsCheckpointStateOutputStream#flushToFile() run
* concurrently.
*/
@TestTemplate
public void testCleanupWhenCloseableRegistryClosedBeforeCreatingStream() throws Exception {
OneShotLatch streamCreationLatch = new OneShotLatch();
OneShotLatch startCloseLatch = new OneShotLatch();
OneShotLatch endCloseLatch = new OneShotLatch();
FileSystem fs = mock(FileSystem.class);
FSDataOutputStream fsDataOutputStream = mock(FSDataOutputStream.class);
// mock the FileSystem#create method to simulate concurrency situation with
// FsCheckpointStateOutputStream#close thread
doAnswer(
invocation -> {
// make sure stream creation thread goes first
streamCreationLatch.trigger();
// wait for CloseableRegistry#close (and
// FsCheckpointStateOutputStream#close) getting to be triggered
startCloseLatch.await();
// make sure the CloseableRegistry#close cannot be completed due to
// failing to acquire lock
assertThrows(
TimeoutException.class,
() -> endCloseLatch.await(1, TimeUnit.SECONDS));
return fsDataOutputStream;
})
.when(fs)
.create(any(Path.class), any(FileSystem.WriteMode.class));
FsCheckpointStateOutputStream outputStream =
new FsCheckpointStateOutputStream(
Path.fromLocalFile(TempDirUtils.newFolder(tempDir)),
fs,
1024,
1,
relativePaths);
CompletableFuture<Void> flushFuture;
CloseableRegistry closeableRegistry = new CloseableRegistry();
closeableRegistry.registerCloseable(outputStream);
flushFuture =
CompletableFuture.runAsync(
() -> {
try {
// try to create a stream
outputStream.flushToFile();
} catch (IOException e) {
// ignore this exception because we don't want to fail the test due
// to IO issue
}
},
Executors.newSingleThreadExecutor());
// make sure stream creation thread goes first
streamCreationLatch.await();
// verify the outputStream and inner fsDataOutputStream is not closed
assertFalse(outputStream.isClosed());
verify(fsDataOutputStream, never()).close();
// start to close the outputStream (inside closeableRegistry)
startCloseLatch.trigger();
closeableRegistry.close();
// This endCloseLatch should not be triggered in time because the
// FsCheckpointStateOutputStream#close will be blocked due to failing to acquire lock
endCloseLatch.trigger();
// wait for flush completed
flushFuture.get();
// verify the outputStream and inner fsDataOutputStream is correctly closed
assertTrue(outputStream.isClosed());
verify(fsDataOutputStream).close();
}
// ------------------------------------------------------------------------
// Utilities
// ------------------------------------------------------------------------
private static void ensureLocalFileDeleted(Path path) {
URI uri = path.toUri();
if ("file".equals(uri.getScheme())) {
File file = new File(uri.getPath());
assertThat(file).withFailMessage("file not properly deleted").doesNotExist();
} else {
throw new IllegalArgumentException("not a local path");
}
}
private static boolean isDirectoryEmpty(File directory) {
if (!directory.exists()) {
return true;
}
String[] nested = directory.list();
return nested == null || nested.length == 0;
}
private static void validateBytesInStream(InputStream is, byte[] data) throws IOException {
try {
byte[] holder = new byte[data.length];
int pos = 0;
int read;
while (pos < holder.length
&& (read = is.read(holder, pos, holder.length - pos)) != -1) {
pos += read;
}
assertThat(pos).withFailMessage("not enough data").isEqualTo(holder.length);
assertThat(is.read()).withFailMessage("too much data").isEqualTo(-1);
assertThat(holder).withFailMessage("wrong data").isEqualTo(data);
} finally {
is.close();
}
}
private static void checkDirectoryNotWritable(File directory) {
assertThatThrownBy(
() -> {
try (FileOutputStream fos =
new FileOutputStream(new File(directory, "temp"))) {
fos.write(42);
fos.flush();
}
})
.withFailMessage("this should fail when writing is properly prevented")
.isInstanceOf(IOException.class);
}
}
|
FsCheckpointStateOutputStreamTest
|
java
|
quarkusio__quarkus
|
extensions/hibernate-search-standalone-elasticsearch/deployment/src/main/java/io/quarkus/hibernate/search/standalone/elasticsearch/deployment/HibernateSearchStandaloneEnabledBuildItem.java
|
{
"start": 280,
"end": 918
}
|
class ____ extends SimpleBuildItem {
final HibernateSearchStandaloneElasticsearchMapperContext mapperContext;
private final Set<String> rootAnnotationMappedClassNames;
public HibernateSearchStandaloneEnabledBuildItem(HibernateSearchStandaloneElasticsearchMapperContext mapperContext,
Set<String> rootAnnotationMappedClassNames) {
this.mapperContext = mapperContext;
this.rootAnnotationMappedClassNames = rootAnnotationMappedClassNames;
}
public Set<String> getRootAnnotationMappedClassNames() {
return rootAnnotationMappedClassNames;
}
}
|
HibernateSearchStandaloneEnabledBuildItem
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/filter/CustomNullSerializationTest.java
|
{
"start": 1108,
"end": 1175
}
|
class ____ {
public String name = null;
}
static
|
Bean1
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/taskmanager/TaskTest.java
|
{
"start": 67981,
"end": 68719
}
|
class ____ extends AwaitLatchInvokable {
final OneShotLatch triggerLatch = new OneShotLatch();
public TriggerLatchInvokable(Environment environment) {
super(environment);
}
void trigger() {
triggerLatch.trigger();
}
void awaitTriggerLatch() {
awaitLatch.trigger();
// make sure that the interrupt call does not
// grab us out of the lock early
while (true) {
try {
triggerLatch.await();
break;
} catch (InterruptedException e) {
// fall through the loop
}
}
}
}
}
|
TriggerLatchInvokable
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/JobSnapshotUpgraderResultProcessor.java
|
{
"start": 2345,
"end": 11655
}
|
class ____ {
private static final Logger LOGGER = LogManager.getLogger(JobSnapshotUpgraderResultProcessor.class);
final CountDownLatch completionLatch = new CountDownLatch(1);
private final String jobId;
private final String snapshotId;
private final JobResultsPersister persister;
private final AutodetectProcess process;
private final JobResultsPersister.Builder bulkResultsPersister;
private final FlushListener flushListener;
private volatile boolean processKilled;
private volatile boolean failed;
public JobSnapshotUpgraderResultProcessor(
String jobId,
String snapshotId,
JobResultsPersister persister,
AutodetectProcess autodetectProcess
) {
this.jobId = Objects.requireNonNull(jobId);
this.snapshotId = Objects.requireNonNull(snapshotId);
this.persister = Objects.requireNonNull(persister);
this.process = Objects.requireNonNull(autodetectProcess);
this.bulkResultsPersister = persister.bulkPersisterBuilder(jobId, this::isAlive);
this.flushListener = new FlushListener();
}
public void process() {
// If a function call in this throws for some reason we don't want it
// to kill the results reader thread as autodetect will be blocked
// trying to write its output.
try {
readResults();
try {
if (processKilled == false) {
bulkResultsPersister.executeRequest();
}
} catch (Exception e) {
LOGGER.warn(() -> format("[%s] [%s] Error persisting model snapshot upgrade results", jobId, snapshotId), e);
}
} catch (Exception e) {
failed = true;
if (processKilled) {
// Don't log the stack trace in this case. Log just enough to hint
// that it would have been better to close jobs before shutting down,
// but we now fully expect jobs to move between nodes without doing
// all their graceful close activities.
LOGGER.warn(
"[{}] [{}] some model snapshot upgrade results not processed due to the process being killed",
jobId,
snapshotId
);
} else if (process.isProcessAliveAfterWaiting() == false) {
// Don't log the stack trace to not shadow the root cause.
LOGGER.warn(
"[{}] [{}] some model snapshot upgrade results not processed due to the termination of autodetect",
jobId,
snapshotId
);
} else {
// We should only get here if the iterator throws in which
// case parsing the autodetect output has failed.
LOGGER.error(() -> format("[%s] [%s] error parsing model snapshot upgrade output", jobId, snapshotId), e);
}
} finally {
completionLatch.countDown();
}
}
private void readResults() {
try {
Iterator<AutodetectResult> iterator = process.readAutodetectResults();
while (iterator.hasNext()) {
try {
AutodetectResult result = iterator.next();
processResult(result);
} catch (Exception e) {
if (isAlive() == false) {
throw e;
}
LOGGER.warn(() -> format("[%s] [%s] Error processing model snapshot upgrade result", jobId, snapshotId), e);
}
}
} finally {
process.consumeAndCloseOutputStream();
}
}
public void setProcessKilled() {
processKilled = true;
}
public boolean isProcessKilled() {
return processKilled;
}
private void logUnexpectedResult(String resultType) {
String msg = "[" + jobId + "] [" + snapshotId + "] unexpected result read [" + resultType + "]";
// This should never happen, but we definitely want to fail if -ea is provided (e.g. during tests)
assert true : msg;
LOGGER.info(msg);
}
void processResult(AutodetectResult result) {
if (processKilled) {
return;
}
Bucket bucket = result.getBucket();
if (bucket != null) {
logUnexpectedResult(Bucket.RESULT_TYPE_VALUE);
}
List<AnomalyRecord> records = result.getRecords();
if (records != null && records.isEmpty() == false) {
logUnexpectedResult(AnomalyRecord.RESULT_TYPE_VALUE);
}
List<Influencer> influencers = result.getInfluencers();
if (influencers != null && influencers.isEmpty() == false) {
logUnexpectedResult(Influencer.RESULT_TYPE_VALUE);
}
CategoryDefinition categoryDefinition = result.getCategoryDefinition();
if (categoryDefinition != null) {
logUnexpectedResult(CategoryDefinition.TYPE.getPreferredName());
}
CategorizerStats categorizerStats = result.getCategorizerStats();
if (categorizerStats != null) {
logUnexpectedResult(CategorizerStats.RESULT_TYPE_VALUE);
}
ModelPlot modelPlot = result.getModelPlot();
if (modelPlot != null) {
logUnexpectedResult(ModelSnapshot.TYPE.getPreferredName());
}
Annotation annotation = result.getAnnotation();
if (annotation != null) {
logUnexpectedResult(Annotation.TYPE.getPreferredName());
}
Forecast forecast = result.getForecast();
if (forecast != null) {
logUnexpectedResult(Forecast.RESULT_TYPE_VALUE);
}
ForecastRequestStats forecastRequestStats = result.getForecastRequestStats();
if (forecastRequestStats != null) {
logUnexpectedResult(ForecastRequestStats.RESULT_TYPE_VALUE);
}
ModelSizeStats modelSizeStats = result.getModelSizeStats();
if (modelSizeStats != null) {
logUnexpectedResult(ModelSizeStats.RESULT_TYPE_VALUE);
}
ModelSnapshot modelSnapshot = result.getModelSnapshot();
if (modelSnapshot != null) {
BulkResponse bulkResponse = persister.persistModelSnapshot(modelSnapshot, WriteRequest.RefreshPolicy.IMMEDIATE, this::isAlive);
assert bulkResponse.getItems().length == 1;
}
Quantiles quantiles = result.getQuantiles();
if (quantiles != null) {
logUnexpectedResult(Quantiles.TYPE.getPreferredName());
}
FlushAcknowledgement flushAcknowledgement = result.getFlushAcknowledgement();
if (flushAcknowledgement != null) {
LOGGER.debug(
() -> format(
"[%s] [%s] Flush acknowledgement parsed from output for ID %s",
jobId,
snapshotId,
flushAcknowledgement.getId()
)
);
flushListener.acknowledgeFlush(flushAcknowledgement, null);
}
}
/**
* Blocks until a flush is acknowledged or the timeout expires, whichever happens first.
*
* @param flushId the id of the flush request to wait for
* @param timeout the timeout
* @return The {@link FlushAcknowledgement} if the flush has completed or the parsing finished; {@code null} if the timeout expired
*/
@Nullable
public FlushAcknowledgement waitForFlushAcknowledgement(String flushId, Duration timeout) throws Exception {
return failed ? null : flushListener.waitForFlush(flushId, timeout);
}
public void clearAwaitingFlush(String flushId) {
flushListener.clear(flushId);
}
public void awaitCompletion() throws TimeoutException {
try {
// Although the results won't take 30 minutes to finish, the pipe won't be closed
// until the state is persisted, and that can take a while
if (completionLatch.await(MachineLearningField.STATE_PERSIST_RESTORE_TIMEOUT.getMinutes(), TimeUnit.MINUTES) == false) {
throw new TimeoutException(
"Timed out waiting for model snapshot upgrader results processor to complete for job "
+ jobId
+ " and snapshot "
+ snapshotId
);
}
// These lines ensure that the "completion" we're awaiting includes making the results searchable
persister.commitWrites(jobId, JobResultsPersister.CommitType.STATE);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
LOGGER.info("[{}] [{}] Interrupted waiting for model snapshot upgrade results processor to complete", jobId, snapshotId);
}
}
/**
* If failed then there was an error parsing the results that cannot be recovered from
*
* @return true if failed
*/
public boolean isFailed() {
return failed;
}
private boolean isAlive() {
if (processKilled) {
return false;
}
return process.isProcessAliveAfterWaiting();
}
}
|
JobSnapshotUpgraderResultProcessor
|
java
|
resilience4j__resilience4j
|
resilience4j-micrometer/src/main/java/io/github/resilience4j/micrometer/Observations.java
|
{
"start": 502,
"end": 606
}
|
class ____ using {@link Observation}s.
*
* @author Marcin Grzejszczak
* @since 2.0.0
*/
public final
|
for
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-textract/src/generated/java/org/apache/camel/component/aws2/textract/Textract2EndpointUriFactory.java
|
{
"start": 523,
"end": 3064
}
|
class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":label";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(22);
props.add("accessKey");
props.add("label");
props.add("lazyStartProducer");
props.add("operation");
props.add("overrideEndpoint");
props.add("pojoRequest");
props.add("profileCredentialsName");
props.add("proxyHost");
props.add("proxyPort");
props.add("proxyProtocol");
props.add("region");
props.add("s3Bucket");
props.add("s3Object");
props.add("s3ObjectVersion");
props.add("secretKey");
props.add("sessionToken");
props.add("textractClient");
props.add("trustAllCertificates");
props.add("uriEndpointOverride");
props.add("useDefaultCredentialsProvider");
props.add("useProfileCredentialsProvider");
props.add("useSessionCredentials");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
Set<String> secretProps = new HashSet<>(3);
secretProps.add("accessKey");
secretProps.add("secretKey");
secretProps.add("sessionToken");
SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps);
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "aws2-textract".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "label", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
|
Textract2EndpointUriFactory
|
java
|
grpc__grpc-java
|
examples/src/main/java/io/grpc/examples/preserialized/ByteArrayMarshaller.java
|
{
"start": 943,
"end": 1308
}
|
class ____ implements MethodDescriptor.Marshaller<byte[]> {
@Override
public byte[] parse(InputStream stream) {
try {
return ByteStreams.toByteArray(stream);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
@Override
public InputStream stream(byte[] b) {
return new ByteArrayInputStream(b);
}
}
|
ByteArrayMarshaller
|
java
|
netty__netty
|
buffer/src/main/java/io/netty/buffer/ByteBuf.java
|
{
"start": 20156,
"end": 107007
}
|
class ____ for more detailed explanation.
*/
public abstract ByteBuf discardReadBytes();
/**
* Similar to {@link ByteBuf#discardReadBytes()} except that this method might discard
* some, all, or none of read bytes depending on its internal implementation to reduce
* overall memory bandwidth consumption at the cost of potentially additional memory
* consumption.
*/
public abstract ByteBuf discardSomeReadBytes();
/**
* Expands the buffer {@link #capacity()} to make sure the number of
* {@linkplain #writableBytes() writable bytes} is equal to or greater than the
* specified value. If there are enough writable bytes in this buffer, this method
* returns with no side effect.
*
* @param minWritableBytes
* the expected minimum number of writable bytes
* @throws IndexOutOfBoundsException
* if {@link #writerIndex()} + {@code minWritableBytes} > {@link #maxCapacity()}.
* @see #capacity(int)
*/
public abstract ByteBuf ensureWritable(int minWritableBytes);
/**
* Expands the buffer {@link #capacity()} to make sure the number of
* {@linkplain #writableBytes() writable bytes} is equal to or greater than the
* specified value. Unlike {@link #ensureWritable(int)}, this method returns a status code.
*
* @param minWritableBytes
* the expected minimum number of writable bytes
* @param force
* When {@link #writerIndex()} + {@code minWritableBytes} > {@link #maxCapacity()}:
* <ul>
* <li>{@code true} - the capacity of the buffer is expanded to {@link #maxCapacity()}</li>
* <li>{@code false} - the capacity of the buffer is unchanged</li>
* </ul>
* @return {@code 0} if the buffer has enough writable bytes, and its capacity is unchanged.
* {@code 1} if the buffer does not have enough bytes, and its capacity is unchanged.
* {@code 2} if the buffer has enough writable bytes, and its capacity has been increased.
* {@code 3} if the buffer does not have enough bytes, but its capacity has been
* increased to its maximum.
*/
public abstract int ensureWritable(int minWritableBytes, boolean force);
/**
* Gets a boolean at the specified absolute (@code index) in this buffer.
* This method does not modify the {@code readerIndex} or {@code writerIndex}
* of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 1} is greater than {@code this.capacity}
*/
public abstract boolean getBoolean(int index);
/**
* Gets a byte at the specified absolute {@code index} in this buffer.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 1} is greater than {@code this.capacity}
*/
public abstract byte getByte(int index);
/**
* Gets an unsigned byte at the specified absolute {@code index} in this
* buffer. This method does not modify {@code readerIndex} or
* {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 1} is greater than {@code this.capacity}
*/
public abstract short getUnsignedByte(int index);
/**
* Gets a 16-bit short integer at the specified absolute {@code index} in
* this buffer. This method does not modify {@code readerIndex} or
* {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 2} is greater than {@code this.capacity}
*/
public abstract short getShort(int index);
/**
* Gets a 16-bit short integer at the specified absolute {@code index} in
* this buffer in Little Endian Byte Order. This method does not modify
* {@code readerIndex} or {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 2} is greater than {@code this.capacity}
*/
public abstract short getShortLE(int index);
/**
* Gets an unsigned 16-bit short integer at the specified absolute
* {@code index} in this buffer. This method does not modify
* {@code readerIndex} or {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 2} is greater than {@code this.capacity}
*/
public abstract int getUnsignedShort(int index);
/**
* Gets an unsigned 16-bit short integer at the specified absolute
* {@code index} in this buffer in Little Endian Byte Order.
* This method does not modify {@code readerIndex} or
* {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 2} is greater than {@code this.capacity}
*/
public abstract int getUnsignedShortLE(int index);
/**
* Gets a 24-bit medium integer at the specified absolute {@code index} in
* this buffer. This method does not modify {@code readerIndex} or
* {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 3} is greater than {@code this.capacity}
*/
public abstract int getMedium(int index);
/**
* Gets a 24-bit medium integer at the specified absolute {@code index} in
* this buffer in the Little Endian Byte Order. This method does not
* modify {@code readerIndex} or {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 3} is greater than {@code this.capacity}
*/
public abstract int getMediumLE(int index);
/**
* Gets an unsigned 24-bit medium integer at the specified absolute
* {@code index} in this buffer. This method does not modify
* {@code readerIndex} or {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 3} is greater than {@code this.capacity}
*/
public abstract int getUnsignedMedium(int index);
/**
* Gets an unsigned 24-bit medium integer at the specified absolute
* {@code index} in this buffer in Little Endian Byte Order.
* This method does not modify {@code readerIndex} or
* {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 3} is greater than {@code this.capacity}
*/
public abstract int getUnsignedMediumLE(int index);
/**
* Gets a 32-bit integer at the specified absolute {@code index} in
* this buffer. This method does not modify {@code readerIndex} or
* {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 4} is greater than {@code this.capacity}
*/
public abstract int getInt(int index);
/**
* Gets a 32-bit integer at the specified absolute {@code index} in
* this buffer with Little Endian Byte Order. This method does not
* modify {@code readerIndex} or {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 4} is greater than {@code this.capacity}
*/
public abstract int getIntLE(int index);
/**
* Gets an unsigned 32-bit integer at the specified absolute {@code index}
* in this buffer. This method does not modify {@code readerIndex} or
* {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 4} is greater than {@code this.capacity}
*/
public abstract long getUnsignedInt(int index);
/**
* Gets an unsigned 32-bit integer at the specified absolute {@code index}
* in this buffer in Little Endian Byte Order. This method does not
* modify {@code readerIndex} or {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 4} is greater than {@code this.capacity}
*/
public abstract long getUnsignedIntLE(int index);
/**
* Gets a 64-bit long integer at the specified absolute {@code index} in
* this buffer. This method does not modify {@code readerIndex} or
* {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 8} is greater than {@code this.capacity}
*/
public abstract long getLong(int index);
/**
* Gets a 64-bit long integer at the specified absolute {@code index} in
* this buffer in Little Endian Byte Order. This method does not
* modify {@code readerIndex} or {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 8} is greater than {@code this.capacity}
*/
public abstract long getLongLE(int index);
/**
* Gets a 2-byte UTF-16 character at the specified absolute
* {@code index} in this buffer. This method does not modify
* {@code readerIndex} or {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 2} is greater than {@code this.capacity}
*/
public abstract char getChar(int index);
/**
* Gets a 32-bit floating point number at the specified absolute
* {@code index} in this buffer. This method does not modify
* {@code readerIndex} or {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 4} is greater than {@code this.capacity}
*/
public abstract float getFloat(int index);
/**
* Gets a 32-bit floating point number at the specified absolute
* {@code index} in this buffer in Little Endian Byte Order.
* This method does not modify {@code readerIndex} or
* {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 4} is greater than {@code this.capacity}
*/
public float getFloatLE(int index) {
return Float.intBitsToFloat(getIntLE(index));
}
/**
* Gets a 64-bit floating point number at the specified absolute
* {@code index} in this buffer. This method does not modify
* {@code readerIndex} or {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 8} is greater than {@code this.capacity}
*/
public abstract double getDouble(int index);
/**
* Gets a 64-bit floating point number at the specified absolute
* {@code index} in this buffer in Little Endian Byte Order.
* This method does not modify {@code readerIndex} or
* {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 8} is greater than {@code this.capacity}
*/
public double getDoubleLE(int index) {
return Double.longBitsToDouble(getLongLE(index));
}
/**
* Transfers this buffer's data to the specified destination starting at
* the specified absolute {@code index} until the destination becomes
* non-writable. This method is basically same with
* {@link #getBytes(int, ByteBuf, int, int)}, except that this
* method increases the {@code writerIndex} of the destination by the
* number of the transferred bytes while
* {@link #getBytes(int, ByteBuf, int, int)} does not.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* the source buffer (i.e. {@code this}).
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + dst.writableBytes} is greater than
* {@code this.capacity}
*/
public abstract ByteBuf getBytes(int index, ByteBuf dst);
/**
* Transfers this buffer's data to the specified destination starting at
* the specified absolute {@code index}. This method is basically same
* with {@link #getBytes(int, ByteBuf, int, int)}, except that this
* method increases the {@code writerIndex} of the destination by the
* number of the transferred bytes while
* {@link #getBytes(int, ByteBuf, int, int)} does not.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* the source buffer (i.e. {@code this}).
*
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0},
* if {@code index + length} is greater than
* {@code this.capacity}, or
* if {@code length} is greater than {@code dst.writableBytes}
*/
public abstract ByteBuf getBytes(int index, ByteBuf dst, int length);
/**
* Transfers this buffer's data to the specified destination starting at
* the specified absolute {@code index}.
* This method does not modify {@code readerIndex} or {@code writerIndex}
* of both the source (i.e. {@code this}) and the destination.
*
* @param dstIndex the first index of the destination
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0},
* if the specified {@code dstIndex} is less than {@code 0},
* if {@code index + length} is greater than
* {@code this.capacity}, or
* if {@code dstIndex + length} is greater than
* {@code dst.capacity}
*/
public abstract ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length);
/**
* Transfers this buffer's data to the specified destination starting at
* the specified absolute {@code index}.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + dst.length} is greater than
* {@code this.capacity}
*/
public abstract ByteBuf getBytes(int index, byte[] dst);
/**
* Transfers this buffer's data to the specified destination starting at
* the specified absolute {@code index}.
* This method does not modify {@code readerIndex} or {@code writerIndex}
* of this buffer.
*
* @param dstIndex the first index of the destination
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0},
* if the specified {@code dstIndex} is less than {@code 0},
* if {@code index + length} is greater than
* {@code this.capacity}, or
* if {@code dstIndex + length} is greater than
* {@code dst.length}
*/
public abstract ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length);
/**
* Transfers this buffer's data to the specified destination starting at
* the specified absolute {@code index} until the destination's position
* reaches its limit.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer while the destination's {@code position} will be increased.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + dst.remaining()} is greater than
* {@code this.capacity}
*/
public abstract ByteBuf getBytes(int index, ByteBuffer dst);
/**
* Transfers this buffer's data to the specified stream starting at the
* specified absolute {@code index}.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + length} is greater than
* {@code this.capacity}
* @throws IOException
* if the specified stream threw an exception during I/O
*/
public abstract ByteBuf getBytes(int index, OutputStream out, int length) throws IOException;
/**
* Transfers this buffer's data to the specified channel starting at the
* specified absolute {@code index}.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @param length the maximum number of bytes to transfer
*
* @return the actual number of bytes written out to the specified channel
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + length} is greater than
* {@code this.capacity}
* @throws IOException
* if the specified channel threw an exception during I/O
*/
public abstract int getBytes(int index, GatheringByteChannel out, int length) throws IOException;
/**
* Transfers this buffer's data starting at the specified absolute {@code index}
* to the specified channel starting at the given file position.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer. This method does not modify the channel's position.
*
* @param position the file position at which the transfer is to begin
* @param length the maximum number of bytes to transfer
*
* @return the actual number of bytes written out to the specified channel
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + length} is greater than
* {@code this.capacity}
* @throws IOException
* if the specified channel threw an exception during I/O
*/
public abstract int getBytes(int index, FileChannel out, long position, int length) throws IOException;
/**
* Gets a {@link CharSequence} with the given length at the given index.
*
* @param length the length to read
* @param charset that should be used
* @return the sequence
* @throws IndexOutOfBoundsException
* if {@code length} is greater than {@code this.readableBytes}
*/
public abstract CharSequence getCharSequence(int index, int length, Charset charset);
/**
* Sets the specified boolean at the specified absolute {@code index} in this
* buffer.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 1} is greater than {@code this.capacity}
*/
public abstract ByteBuf setBoolean(int index, boolean value);
/**
* Sets the specified byte at the specified absolute {@code index} in this
* buffer. The 24 high-order bits of the specified value are ignored.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 1} is greater than {@code this.capacity}
*/
public abstract ByteBuf setByte(int index, int value);
/**
* Sets the specified 16-bit short integer at the specified absolute
* {@code index} in this buffer. The 16 high-order bits of the specified
* value are ignored.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 2} is greater than {@code this.capacity}
*/
public abstract ByteBuf setShort(int index, int value);
/**
* Sets the specified 16-bit short integer at the specified absolute
* {@code index} in this buffer with the Little Endian Byte Order.
* The 16 high-order bits of the specified value are ignored.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 2} is greater than {@code this.capacity}
*/
public abstract ByteBuf setShortLE(int index, int value);
/**
* Sets the specified 24-bit medium integer at the specified absolute
* {@code index} in this buffer. Please note that the most significant
* byte is ignored in the specified value.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 3} is greater than {@code this.capacity}
*/
public abstract ByteBuf setMedium(int index, int value);
/**
* Sets the specified 24-bit medium integer at the specified absolute
* {@code index} in this buffer in the Little Endian Byte Order.
* Please note that the most significant byte is ignored in the
* specified value.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 3} is greater than {@code this.capacity}
*/
public abstract ByteBuf setMediumLE(int index, int value);
/**
* Sets the specified 32-bit integer at the specified absolute
* {@code index} in this buffer.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 4} is greater than {@code this.capacity}
*/
public abstract ByteBuf setInt(int index, int value);
/**
* Sets the specified 32-bit integer at the specified absolute
* {@code index} in this buffer with Little Endian byte order
* .
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 4} is greater than {@code this.capacity}
*/
public abstract ByteBuf setIntLE(int index, int value);
/**
* Sets the specified 64-bit long integer at the specified absolute
* {@code index} in this buffer.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 8} is greater than {@code this.capacity}
*/
public abstract ByteBuf setLong(int index, long value);
/**
* Sets the specified 64-bit long integer at the specified absolute
* {@code index} in this buffer in Little Endian Byte Order.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 8} is greater than {@code this.capacity}
*/
public abstract ByteBuf setLongLE(int index, long value);
/**
* Sets the specified 2-byte UTF-16 character at the specified absolute
* {@code index} in this buffer.
* The 16 high-order bits of the specified value are ignored.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 2} is greater than {@code this.capacity}
*/
public abstract ByteBuf setChar(int index, int value);
/**
* Sets the specified 32-bit floating-point number at the specified
* absolute {@code index} in this buffer.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 4} is greater than {@code this.capacity}
*/
public abstract ByteBuf setFloat(int index, float value);
/**
* Sets the specified 32-bit floating-point number at the specified
* absolute {@code index} in this buffer in Little Endian Byte Order.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 4} is greater than {@code this.capacity}
*/
public ByteBuf setFloatLE(int index, float value) {
return setIntLE(index, Float.floatToRawIntBits(value));
}
/**
* Sets the specified 64-bit floating-point number at the specified
* absolute {@code index} in this buffer.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 8} is greater than {@code this.capacity}
*/
public abstract ByteBuf setDouble(int index, double value);
/**
* Sets the specified 64-bit floating-point number at the specified
* absolute {@code index} in this buffer in Little Endian Byte Order.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 8} is greater than {@code this.capacity}
*/
public ByteBuf setDoubleLE(int index, double value) {
return setLongLE(index, Double.doubleToRawLongBits(value));
}
/**
* Transfers the specified source buffer's data to this buffer starting at
* the specified absolute {@code index} until the source buffer becomes
* unreadable. This method is basically same with
* {@link #setBytes(int, ByteBuf, int, int)}, except that this
* method increases the {@code readerIndex} of the source buffer by
* the number of the transferred bytes while
* {@link #setBytes(int, ByteBuf, int, int)} does not.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer (i.e. {@code this}).
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + src.readableBytes} is greater than
* {@code this.capacity}
*/
public abstract ByteBuf setBytes(int index, ByteBuf src);
/**
* Transfers the specified source buffer's data to this buffer starting at
* the specified absolute {@code index}. This method is basically same
* with {@link #setBytes(int, ByteBuf, int, int)}, except that this
* method increases the {@code readerIndex} of the source buffer by
* the number of the transferred bytes while
* {@link #setBytes(int, ByteBuf, int, int)} does not.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer (i.e. {@code this}).
*
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0},
* if {@code index + length} is greater than
* {@code this.capacity}, or
* if {@code length} is greater than {@code src.readableBytes}
*/
public abstract ByteBuf setBytes(int index, ByteBuf src, int length);
/**
* Transfers the specified source buffer's data to this buffer starting at
* the specified absolute {@code index}.
* This method does not modify {@code readerIndex} or {@code writerIndex}
* of both the source (i.e. {@code this}) and the destination.
*
* @param srcIndex the first index of the source
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0},
* if the specified {@code srcIndex} is less than {@code 0},
* if {@code index + length} is greater than
* {@code this.capacity}, or
* if {@code srcIndex + length} is greater than
* {@code src.capacity}
*/
public abstract ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length);
/**
* Transfers the specified source array's data to this buffer starting at
* the specified absolute {@code index}.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + src.length} is greater than
* {@code this.capacity}
*/
public abstract ByteBuf setBytes(int index, byte[] src);
/**
* Transfers the specified source array's data to this buffer starting at
* the specified absolute {@code index}.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0},
* if the specified {@code srcIndex} is less than {@code 0},
* if {@code index + length} is greater than
* {@code this.capacity}, or
* if {@code srcIndex + length} is greater than {@code src.length}
*/
public abstract ByteBuf setBytes(int index, byte[] src, int srcIndex, int length);
/**
* Transfers the specified source buffer's data to this buffer starting at
* the specified absolute {@code index} until the source buffer's position
* reaches its limit.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + src.remaining()} is greater than
* {@code this.capacity}
*/
public abstract ByteBuf setBytes(int index, ByteBuffer src);
/**
* Transfers the content of the specified source stream to this buffer
* starting at the specified absolute {@code index}.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @param length the number of bytes to transfer
*
* @return the actual number of bytes read in from the specified channel.
* {@code -1} if the specified {@link InputStream} reached EOF.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + length} is greater than {@code this.capacity}
* @throws IOException
* if the specified stream threw an exception during I/O
*/
public abstract int setBytes(int index, InputStream in, int length) throws IOException;
/**
* Transfers the content of the specified source channel to this buffer
* starting at the specified absolute {@code index}.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @param length the maximum number of bytes to transfer
*
* @return the actual number of bytes read in from the specified channel.
* {@code -1} if the specified channel is closed or reached EOF.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + length} is greater than {@code this.capacity}
* @throws IOException
* if the specified channel threw an exception during I/O
*/
public abstract int setBytes(int index, ScatteringByteChannel in, int length) throws IOException;
/**
* Transfers the content of the specified source channel starting at the given file position
* to this buffer starting at the specified absolute {@code index}.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer. This method does not modify the channel's position.
*
* @param position the file position at which the transfer is to begin
* @param length the maximum number of bytes to transfer
*
* @return the actual number of bytes read in from the specified channel.
* {@code -1} if the specified channel is closed or reached EOF.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + length} is greater than {@code this.capacity}
* @throws IOException
* if the specified channel threw an exception during I/O
*/
public abstract int setBytes(int index, FileChannel in, long position, int length) throws IOException;
/**
* Fills this buffer with <tt>NUL (0x00)</tt> starting at the specified
* absolute {@code index}.
* This method does not modify {@code readerIndex} or {@code writerIndex} of
* this buffer.
*
* @param length the number of <tt>NUL</tt>s to write to the buffer
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + length} is greater than {@code this.capacity}
*/
public abstract ByteBuf setZero(int index, int length);
/**
* Writes the specified {@link CharSequence} at the given {@code index}.
* The {@code writerIndex} is not modified by this method.
*
* @param index on which the sequence should be written
* @param sequence to write
* @param charset that should be used.
* @return the written number of bytes.
* @throws IndexOutOfBoundsException
* if the sequence at the given index would be out of bounds of the buffer capacity
*/
public abstract int setCharSequence(int index, CharSequence sequence, Charset charset);
/**
* Gets a boolean at the current {@code readerIndex} and increases
* the {@code readerIndex} by {@code 1} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 1}
*/
public abstract boolean readBoolean();
/**
* Gets a byte at the current {@code readerIndex} and increases
* the {@code readerIndex} by {@code 1} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 1}
*/
public abstract byte readByte();
/**
* Gets an unsigned byte at the current {@code readerIndex} and increases
* the {@code readerIndex} by {@code 1} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 1}
*/
public abstract short readUnsignedByte();
/**
* Gets a 16-bit short integer at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 2} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 2}
*/
public abstract short readShort();
/**
* Gets a 16-bit short integer at the current {@code readerIndex}
* in the Little Endian Byte Order and increases the {@code readerIndex}
* by {@code 2} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 2}
*/
public abstract short readShortLE();
/**
* Gets an unsigned 16-bit short integer at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 2} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 2}
*/
public abstract int readUnsignedShort();
/**
* Gets an unsigned 16-bit short integer at the current {@code readerIndex}
* in the Little Endian Byte Order and increases the {@code readerIndex}
* by {@code 2} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 2}
*/
public abstract int readUnsignedShortLE();
/**
* Gets a 24-bit medium integer at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 3} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 3}
*/
public abstract int readMedium();
/**
* Gets a 24-bit medium integer at the current {@code readerIndex}
* in the Little Endian Byte Order and increases the
* {@code readerIndex} by {@code 3} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 3}
*/
public abstract int readMediumLE();
/**
* Gets an unsigned 24-bit medium integer at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 3} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 3}
*/
public abstract int readUnsignedMedium();
/**
* Gets an unsigned 24-bit medium integer at the current {@code readerIndex}
* in the Little Endian Byte Order and increases the {@code readerIndex}
* by {@code 3} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 3}
*/
public abstract int readUnsignedMediumLE();
/**
* Gets a 32-bit integer at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 4} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 4}
*/
public abstract int readInt();
/**
* Gets a 32-bit integer at the current {@code readerIndex}
* in the Little Endian Byte Order and increases the {@code readerIndex}
* by {@code 4} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 4}
*/
public abstract int readIntLE();
/**
* Gets an unsigned 32-bit integer at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 4} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 4}
*/
public abstract long readUnsignedInt();
/**
* Gets an unsigned 32-bit integer at the current {@code readerIndex}
* in the Little Endian Byte Order and increases the {@code readerIndex}
* by {@code 4} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 4}
*/
public abstract long readUnsignedIntLE();
/**
* Gets a 64-bit integer at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 8} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 8}
*/
public abstract long readLong();
/**
* Gets a 64-bit integer at the current {@code readerIndex}
* in the Little Endian Byte Order and increases the {@code readerIndex}
* by {@code 8} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 8}
*/
public abstract long readLongLE();
/**
* Gets a 2-byte UTF-16 character at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 2} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 2}
*/
public abstract char readChar();
/**
* Gets a 32-bit floating point number at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 4} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 4}
*/
public abstract float readFloat();
/**
* Gets a 32-bit floating point number at the current {@code readerIndex}
* in Little Endian Byte Order and increases the {@code readerIndex}
* by {@code 4} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 4}
*/
public float readFloatLE() {
return Float.intBitsToFloat(readIntLE());
}
/**
* Gets a 64-bit floating point number at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 8} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 8}
*/
public abstract double readDouble();
/**
* Gets a 64-bit floating point number at the current {@code readerIndex}
* in Little Endian Byte Order and increases the {@code readerIndex}
* by {@code 8} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readableBytes} is less than {@code 8}
*/
public double readDoubleLE() {
return Double.longBitsToDouble(readLongLE());
}
/**
* Transfers this buffer's data to a newly created buffer starting at
* the current {@code readerIndex} and increases the {@code readerIndex}
* by the number of the transferred bytes (= {@code length}).
* The returned buffer's {@code readerIndex} and {@code writerIndex} are
* {@code 0} and {@code length} respectively.
*
* @param length the number of bytes to transfer
*
* @return the newly created buffer which contains the transferred bytes
*
* @throws IndexOutOfBoundsException
* if {@code length} is greater than {@code this.readableBytes}
*/
public abstract ByteBuf readBytes(int length);
/**
* Returns a new slice of this buffer's sub-region starting at the current
* {@code readerIndex} and increases the {@code readerIndex} by the size
* of the new slice (= {@code length}).
* <p>
* Also be aware that this method will NOT call {@link #retain()} and so the
* reference count will NOT be increased.
*
* @param length the size of the new slice
*
* @return the newly created slice
*
* @throws IndexOutOfBoundsException
* if {@code length} is greater than {@code this.readableBytes}
*/
public abstract ByteBuf readSlice(int length);
/**
* Returns a new retained slice of this buffer's sub-region starting at the current
* {@code readerIndex} and increases the {@code readerIndex} by the size
* of the new slice (= {@code length}).
* <p>
* Note that this method returns a {@linkplain #retain() retained} buffer unlike {@link #readSlice(int)}.
* This method behaves similarly to {@code readSlice(...).retain()} except that this method may return
* a buffer implementation that produces less garbage.
*
* @param length the size of the new slice
*
* @return the newly created slice
*
* @throws IndexOutOfBoundsException
* if {@code length} is greater than {@code this.readableBytes}
*/
public abstract ByteBuf readRetainedSlice(int length);
/**
* Transfers this buffer's data to the specified destination starting at
* the current {@code readerIndex} until the destination becomes
* non-writable, and increases the {@code readerIndex} by the number of the
* transferred bytes. This method is basically same with
* {@link #readBytes(ByteBuf, int, int)}, except that this method
* increases the {@code writerIndex} of the destination by the number of
* the transferred bytes while {@link #readBytes(ByteBuf, int, int)}
* does not.
*
* @throws IndexOutOfBoundsException
* if {@code dst.writableBytes} is greater than
* {@code this.readableBytes}
*/
public abstract ByteBuf readBytes(ByteBuf dst);
/**
* Transfers this buffer's data to the specified destination starting at
* the current {@code readerIndex} and increases the {@code readerIndex}
* by the number of the transferred bytes (= {@code length}). This method
* is basically same with {@link #readBytes(ByteBuf, int, int)},
* except that this method increases the {@code writerIndex} of the
* destination by the number of the transferred bytes (= {@code length})
* while {@link #readBytes(ByteBuf, int, int)} does not.
*
* @throws IndexOutOfBoundsException
* if {@code length} is greater than {@code this.readableBytes} or
* if {@code length} is greater than {@code dst.writableBytes}
*/
public abstract ByteBuf readBytes(ByteBuf dst, int length);
/**
* Transfers this buffer's data to the specified destination starting at
* the current {@code readerIndex} and increases the {@code readerIndex}
* by the number of the transferred bytes (= {@code length}).
*
* @param dstIndex the first index of the destination
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code dstIndex} is less than {@code 0},
* if {@code length} is greater than {@code this.readableBytes}, or
* if {@code dstIndex + length} is greater than
* {@code dst.capacity}
*/
public abstract ByteBuf readBytes(ByteBuf dst, int dstIndex, int length);
/**
* Transfers this buffer's data to the specified destination starting at
* the current {@code readerIndex} and increases the {@code readerIndex}
* by the number of the transferred bytes (= {@code dst.length}).
*
* @throws IndexOutOfBoundsException
* if {@code dst.length} is greater than {@code this.readableBytes}
*/
public abstract ByteBuf readBytes(byte[] dst);
/**
* Transfers this buffer's data to the specified destination starting at
* the current {@code readerIndex} and increases the {@code readerIndex}
* by the number of the transferred bytes (= {@code length}).
*
* @param dstIndex the first index of the destination
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code dstIndex} is less than {@code 0},
* if {@code length} is greater than {@code this.readableBytes}, or
* if {@code dstIndex + length} is greater than {@code dst.length}
*/
public abstract ByteBuf readBytes(byte[] dst, int dstIndex, int length);
/**
* Transfers this buffer's data to the specified destination starting at
* the current {@code readerIndex} until the destination's position
* reaches its limit, and increases the {@code readerIndex} by the
* number of the transferred bytes.
*
* @throws IndexOutOfBoundsException
* if {@code dst.remaining()} is greater than
* {@code this.readableBytes}
*/
public abstract ByteBuf readBytes(ByteBuffer dst);
/**
* Transfers this buffer's data to the specified stream starting at the
* current {@code readerIndex}.
*
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if {@code length} is greater than {@code this.readableBytes}
* @throws IOException
* if the specified stream threw an exception during I/O
*/
public abstract ByteBuf readBytes(OutputStream out, int length) throws IOException;
/**
* Transfers this buffer's data to the specified stream starting at the
* current {@code readerIndex}.
*
* @param length the maximum number of bytes to transfer
*
* @return the actual number of bytes written out to the specified channel
*
* @throws IndexOutOfBoundsException
* if {@code length} is greater than {@code this.readableBytes}
* @throws IOException
* if the specified channel threw an exception during I/O
*/
public abstract int readBytes(GatheringByteChannel out, int length) throws IOException;
/**
* Gets a {@link CharSequence} with the given length at the current {@code readerIndex}
* and increases the {@code readerIndex} by the given length.
*
* @param length the length to read
* @param charset that should be used
* @return the sequence
* @throws IndexOutOfBoundsException
* if {@code length} is greater than {@code this.readableBytes}
*/
public abstract CharSequence readCharSequence(int length, Charset charset);
/**
* Gets a {@link String} with the given length at the current {@code readerIndex}
* and increases the {@code readerIndex} by the given length.
*
* @param length the length to read
* @param charset that should be used
* @return the string
* @throws IndexOutOfBoundsException
* if {@code length} is greater than {@code this.readableBytes}
*/
public String readString(int length, Charset charset) {
int readerIndex = readerIndex();
String string = toString(readerIndex, length, charset);
readerIndex(readerIndex + length);
return string;
}
/**
* Transfers this buffer's data starting at the current {@code readerIndex}
* to the specified channel starting at the given file position.
* This method does not modify the channel's position.
*
* @param position the file position at which the transfer is to begin
* @param length the maximum number of bytes to transfer
*
* @return the actual number of bytes written out to the specified channel
*
* @throws IndexOutOfBoundsException
* if {@code length} is greater than {@code this.readableBytes}
* @throws IOException
* if the specified channel threw an exception during I/O
*/
public abstract int readBytes(FileChannel out, long position, int length) throws IOException;
/**
* Increases the current {@code readerIndex} by the specified
* {@code length} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code length} is greater than {@code this.readableBytes}
*/
public abstract ByteBuf skipBytes(int length);
/**
* Sets the specified boolean at the current {@code writerIndex}
* and increases the {@code writerIndex} by {@code 1} in this buffer.
* If {@code this.writableBytes} is less than {@code 1}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*/
public abstract ByteBuf writeBoolean(boolean value);
/**
* Sets the specified byte at the current {@code writerIndex}
* and increases the {@code writerIndex} by {@code 1} in this buffer.
* The 24 high-order bits of the specified value are ignored.
* If {@code this.writableBytes} is less than {@code 1}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*/
public abstract ByteBuf writeByte(int value);
/**
* Sets the specified 16-bit short integer at the current
* {@code writerIndex} and increases the {@code writerIndex} by {@code 2}
* in this buffer. The 16 high-order bits of the specified value are ignored.
* If {@code this.writableBytes} is less than {@code 2}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*/
public abstract ByteBuf writeShort(int value);
/**
* Sets the specified 16-bit short integer in the Little Endian Byte
* Order at the current {@code writerIndex} and increases the
* {@code writerIndex} by {@code 2} in this buffer.
* The 16 high-order bits of the specified value are ignored.
* If {@code this.writableBytes} is less than {@code 2}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*/
public abstract ByteBuf writeShortLE(int value);
/**
* Sets the specified 24-bit medium integer at the current
* {@code writerIndex} and increases the {@code writerIndex} by {@code 3}
* in this buffer.
* If {@code this.writableBytes} is less than {@code 3}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*/
public abstract ByteBuf writeMedium(int value);
/**
* Sets the specified 24-bit medium integer at the current
* {@code writerIndex} in the Little Endian Byte Order and
* increases the {@code writerIndex} by {@code 3} in this
* buffer.
* If {@code this.writableBytes} is less than {@code 3}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*/
public abstract ByteBuf writeMediumLE(int value);
/**
* Sets the specified 32-bit integer at the current {@code writerIndex}
* and increases the {@code writerIndex} by {@code 4} in this buffer.
* If {@code this.writableBytes} is less than {@code 4}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*/
public abstract ByteBuf writeInt(int value);
/**
* Sets the specified 32-bit integer at the current {@code writerIndex}
* in the Little Endian Byte Order and increases the {@code writerIndex}
* by {@code 4} in this buffer.
* If {@code this.writableBytes} is less than {@code 4}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*/
public abstract ByteBuf writeIntLE(int value);
/**
* Sets the specified 64-bit long integer at the current
* {@code writerIndex} and increases the {@code writerIndex} by {@code 8}
* in this buffer.
* If {@code this.writableBytes} is less than {@code 8}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*/
public abstract ByteBuf writeLong(long value);
/**
* Sets the specified 64-bit long integer at the current
* {@code writerIndex} in the Little Endian Byte Order and
* increases the {@code writerIndex} by {@code 8}
* in this buffer.
* If {@code this.writableBytes} is less than {@code 8}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*/
public abstract ByteBuf writeLongLE(long value);
/**
* Sets the specified 2-byte UTF-16 character at the current
* {@code writerIndex} and increases the {@code writerIndex} by {@code 2}
* in this buffer. The 16 high-order bits of the specified value are ignored.
* If {@code this.writableBytes} is less than {@code 2}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*/
public abstract ByteBuf writeChar(int value);
/**
* Sets the specified 32-bit floating point number at the current
* {@code writerIndex} and increases the {@code writerIndex} by {@code 4}
* in this buffer.
* If {@code this.writableBytes} is less than {@code 4}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*/
public abstract ByteBuf writeFloat(float value);
/**
* Sets the specified 32-bit floating point number at the current
* {@code writerIndex} in Little Endian Byte Order and increases
* the {@code writerIndex} by {@code 4} in this buffer.
* If {@code this.writableBytes} is less than {@code 4}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*/
public ByteBuf writeFloatLE(float value) {
return writeIntLE(Float.floatToRawIntBits(value));
}
/**
* Sets the specified 64-bit floating point number at the current
* {@code writerIndex} and increases the {@code writerIndex} by {@code 8}
* in this buffer.
* If {@code this.writableBytes} is less than {@code 8}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*/
public abstract ByteBuf writeDouble(double value);
/**
* Sets the specified 64-bit floating point number at the current
* {@code writerIndex} in Little Endian Byte Order and increases
* the {@code writerIndex} by {@code 8} in this buffer.
* If {@code this.writableBytes} is less than {@code 8}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*/
public ByteBuf writeDoubleLE(double value) {
return writeLongLE(Double.doubleToRawLongBits(value));
}
/**
* Transfers the specified source buffer's data to this buffer starting at
* the current {@code writerIndex} until the source buffer becomes
* unreadable, and increases the {@code writerIndex} by the number of
* the transferred bytes. This method is basically same with
* {@link #writeBytes(ByteBuf, int, int)}, except that this method
* increases the {@code readerIndex} of the source buffer by the number of
* the transferred bytes while {@link #writeBytes(ByteBuf, int, int)}
* does not.
* If {@code this.writableBytes} is less than {@code src.readableBytes},
* {@link #ensureWritable(int)} will be called in an attempt to expand
* capacity to accommodate.
*/
public abstract ByteBuf writeBytes(ByteBuf src);
/**
* Transfers the specified source buffer's data to this buffer starting at
* the current {@code writerIndex} and increases the {@code writerIndex}
* by the number of the transferred bytes (= {@code length}). This method
* is basically same with {@link #writeBytes(ByteBuf, int, int)},
* except that this method increases the {@code readerIndex} of the source
* buffer by the number of the transferred bytes (= {@code length}) while
* {@link #writeBytes(ByteBuf, int, int)} does not.
* If {@code this.writableBytes} is less than {@code length}, {@link #ensureWritable(int)}
* will be called in an attempt to expand capacity to accommodate.
*
* @param length the number of bytes to transfer
     * @throws IndexOutOfBoundsException if {@code length} is greater than {@code src.readableBytes}
*/
public abstract ByteBuf writeBytes(ByteBuf src, int length);
    /**
     * Transfers the specified source buffer's data to this buffer starting at
     * the current {@code writerIndex} and increases the {@code writerIndex}
     * by the number of the transferred bytes (= {@code length}).
     * Unlike {@link #writeBytes(ByteBuf)}, the source buffer's
     * {@code readerIndex} is not modified.
     * If {@code this.writableBytes} is less than {@code length}, {@link #ensureWritable(int)}
     * will be called in an attempt to expand capacity to accommodate.
     *
     * @param srcIndex the first index of the source
     * @param length   the number of bytes to transfer
     *
     * @throws IndexOutOfBoundsException
     *         if the specified {@code srcIndex} is less than {@code 0}, or
     *         if {@code srcIndex + length} is greater than {@code src.capacity}
     */
    public abstract ByteBuf writeBytes(ByteBuf src, int srcIndex, int length);
    /**
     * Transfers the specified source array's data to this buffer starting at
     * the current {@code writerIndex} and increases the {@code writerIndex}
     * by the number of the transferred bytes (= {@code src.length}).
     * If {@code this.writableBytes} is less than {@code src.length}, {@link #ensureWritable(int)}
     * will be called in an attempt to expand capacity to accommodate.
     */
    public abstract ByteBuf writeBytes(byte[] src);
    /**
     * Transfers the specified source array's data to this buffer starting at
     * the current {@code writerIndex} and increases the {@code writerIndex}
     * by the number of the transferred bytes (= {@code length}).
     * If {@code this.writableBytes} is less than {@code length}, {@link #ensureWritable(int)}
     * will be called in an attempt to expand capacity to accommodate.
     *
     * @param srcIndex the first index of the source
     * @param length   the number of bytes to transfer
     *
     * @throws IndexOutOfBoundsException
     *         if the specified {@code srcIndex} is less than {@code 0}, or
     *         if {@code srcIndex + length} is greater than {@code src.length}
     */
    public abstract ByteBuf writeBytes(byte[] src, int srcIndex, int length);
    /**
     * Transfers the specified source buffer's data to this buffer starting at
     * the current {@code writerIndex} until the source buffer's position
     * reaches its limit, and increases the {@code writerIndex} by the
     * number of the transferred bytes.
     * If {@code this.writableBytes} is less than {@code src.remaining()},
     * {@link #ensureWritable(int)} will be called in an attempt to expand
     * capacity to accommodate.
     */
    public abstract ByteBuf writeBytes(ByteBuffer src);
    /**
     * Transfers the content of the specified stream to this buffer
     * starting at the current {@code writerIndex} and increases the
     * {@code writerIndex} by the number of the transferred bytes.
     * If {@code this.writableBytes} is less than {@code length}, {@link #ensureWritable(int)}
     * will be called in an attempt to expand capacity to accommodate.
     *
     * @param length the number of bytes to transfer
     *
     * @return the actual number of bytes read in from the specified stream.
     *         {@code -1} if the specified {@link InputStream} reached EOF.
     *
     * @throws IOException if the specified stream threw an exception during I/O
     */
    public abstract int writeBytes(InputStream in, int length) throws IOException;
    /**
     * Transfers the content of the specified channel to this buffer
     * starting at the current {@code writerIndex} and increases the
     * {@code writerIndex} by the number of the transferred bytes.
     * If {@code this.writableBytes} is less than {@code length}, {@link #ensureWritable(int)}
     * will be called in an attempt to expand capacity to accommodate.
     *
     * @param length the maximum number of bytes to transfer
     *
     * @return the actual number of bytes read in from the specified channel.
     *         {@code -1} if the specified channel is closed or reached EOF.
     *
     * @throws IOException
     *         if the specified channel threw an exception during I/O
     */
    public abstract int writeBytes(ScatteringByteChannel in, int length) throws IOException;
    /**
     * Transfers the content of the specified channel starting at the given file position
     * to this buffer starting at the current {@code writerIndex} and increases the
     * {@code writerIndex} by the number of the transferred bytes.
     * This method does not modify the channel's position.
     * If {@code this.writableBytes} is less than {@code length}, {@link #ensureWritable(int)}
     * will be called in an attempt to expand capacity to accommodate.
     *
     * @param position the file position at which the transfer is to begin
     * @param length the maximum number of bytes to transfer
     *
     * @return the actual number of bytes read in from the specified channel.
     *         {@code -1} if the specified channel is closed or reached EOF.
     *
     * @throws IOException
     *         if the specified channel threw an exception during I/O
     */
    public abstract int writeBytes(FileChannel in, long position, int length) throws IOException;
    /**
     * Fills this buffer with <tt>NUL (0x00)</tt> starting at the current
     * {@code writerIndex} and increases the {@code writerIndex} by the
     * specified {@code length}.
     * If {@code this.writableBytes} is less than {@code length}, {@link #ensureWritable(int)}
     * will be called in an attempt to expand capacity to accommodate.
     *
     * @param length the number of <tt>NUL</tt>s to write to the buffer
     */
    public abstract ByteBuf writeZero(int length);
    /**
     * Writes the specified {@link CharSequence} at the current {@code writerIndex} and increases
     * the {@code writerIndex} by the written bytes.
     * If {@code this.writableBytes} is not large enough to write the whole sequence,
     * {@link #ensureWritable(int)} will be called in an attempt to expand capacity to accommodate.
     *
     * @param sequence to write
     * @param charset that should be used
     * @return the written number of bytes
     */
    public abstract int writeCharSequence(CharSequence sequence, Charset charset);
    /**
     * Locates the first occurrence of the specified {@code value} in this
     * buffer.  The search takes place from the specified {@code fromIndex}
     * (inclusive) to the specified {@code toIndex} (exclusive).
     * <p>
     * If {@code fromIndex} is greater than {@code toIndex}, the search is
     * performed in a reversed order from {@code fromIndex} (exclusive)
     * down to {@code toIndex} (inclusive).
     * <p>
     * Note that the lower index is always included and the higher always excluded.
     * <p>
     * This method does not modify {@code readerIndex} or {@code writerIndex} of
     * this buffer.
     *
     * @return the absolute index of the first occurrence if found.
     *         {@code -1} otherwise.
     */
    public abstract int indexOf(int fromIndex, int toIndex, byte value);
    /**
     * Locates the first occurrence of the specified {@code value} in this
     * buffer.  The search takes place from the current {@code readerIndex}
     * (inclusive) to the current {@code writerIndex} (exclusive).
     * <p>
     * This method does not modify {@code readerIndex} or {@code writerIndex} of
     * this buffer.
     *
     * @return the number of bytes between the current {@code readerIndex}
     *         and the first occurrence if found.  {@code -1} otherwise.
     */
    public abstract int bytesBefore(byte value);
    /**
     * Locates the first occurrence of the specified {@code value} in this
     * buffer.  The search starts from the current {@code readerIndex}
     * (inclusive) and lasts for the specified {@code length}.
     * <p>
     * This method does not modify {@code readerIndex} or {@code writerIndex} of
     * this buffer.
     *
     * @return the number of bytes between the current {@code readerIndex}
     *         and the first occurrence if found.  {@code -1} otherwise.
     *
     * @throws IndexOutOfBoundsException
     *         if {@code length} is greater than {@code this.readableBytes}
     */
    public abstract int bytesBefore(int length, byte value);
    /**
     * Locates the first occurrence of the specified {@code value} in this
     * buffer.  The search starts from the specified {@code index} (inclusive)
     * and lasts for the specified {@code length}.
     * <p>
     * This method does not modify {@code readerIndex} or {@code writerIndex} of
     * this buffer.
     *
     * @return the number of bytes between the specified {@code index}
     *         and the first occurrence if found.  {@code -1} otherwise.
     *
     * @throws IndexOutOfBoundsException
     *         if {@code index + length} is greater than {@code this.capacity}
     */
    public abstract int bytesBefore(int index, int length, byte value);
    /**
     * Iterates over the readable bytes of this buffer with the specified {@code processor} in ascending order.
     *
     * @return {@code -1} if the processor iterated to or beyond the end of the readable bytes.
     *         The last-visited index if the {@link ByteProcessor#process(byte)} returned {@code false}.
     */
    public abstract int forEachByte(ByteProcessor processor);
    /**
     * Iterates over the specified area of this buffer with the specified {@code processor} in ascending order.
     * (i.e. {@code index}, {@code (index + 1)}, .. {@code (index + length - 1)})
     *
     * @return {@code -1} if the processor iterated to or beyond the end of the specified area.
     *         The last-visited index if the {@link ByteProcessor#process(byte)} returned {@code false}.
     */
    public abstract int forEachByte(int index, int length, ByteProcessor processor);
    /**
     * Iterates over the readable bytes of this buffer with the specified {@code processor} in descending order.
     *
     * @return {@code -1} if the processor iterated to or beyond the beginning of the readable bytes.
     *         The last-visited index if the {@link ByteProcessor#process(byte)} returned {@code false}.
     */
    public abstract int forEachByteDesc(ByteProcessor processor);
    /**
     * Iterates over the specified area of this buffer with the specified {@code processor} in descending order.
     * (i.e. {@code (index + length - 1)}, {@code (index + length - 2)}, ... {@code index})
     *
     * @return {@code -1} if the processor iterated to or beyond the beginning of the specified area.
     *         The last-visited index if the {@link ByteProcessor#process(byte)} returned {@code false}.
     */
    public abstract int forEachByteDesc(int index, int length, ByteProcessor processor);
    /**
     * Returns a copy of this buffer's readable bytes.  Modifying the content
     * of the returned buffer or this buffer does not affect each other at all.
     * This method is identical to {@code buf.copy(buf.readerIndex(), buf.readableBytes())}.
     * This method does not modify {@code readerIndex} or {@code writerIndex} of
     * this buffer.
     */
    public abstract ByteBuf copy();
    /**
     * Returns a copy of this buffer's sub-region.  Modifying the content of
     * the returned buffer or this buffer does not affect each other at all.
     * This method does not modify {@code readerIndex} or {@code writerIndex} of
     * this buffer.
     */
    public abstract ByteBuf copy(int index, int length);
    /**
     * Returns a slice of this buffer's readable bytes.  Modifying the content
     * of the returned buffer or this buffer affects each other's content
     * while they maintain separate indexes and marks.  This method is
     * identical to {@code buf.slice(buf.readerIndex(), buf.readableBytes())}.
     * This method does not modify {@code readerIndex} or {@code writerIndex} of
     * this buffer.
     * <p>
     * Also be aware that this method will NOT call {@link #retain()} and so the
     * reference count will NOT be increased.
     */
    public abstract ByteBuf slice();
    /**
     * Returns a retained slice of this buffer's readable bytes.  Modifying the content
     * of the returned buffer or this buffer affects each other's content
     * while they maintain separate indexes and marks.  This method is
     * identical to {@code buf.slice(buf.readerIndex(), buf.readableBytes())}.
     * This method does not modify {@code readerIndex} or {@code writerIndex} of
     * this buffer.
     * <p>
     * Note that this method returns a {@linkplain #retain() retained} buffer unlike {@link #slice()}.
     * This method behaves similarly to {@code slice().retain()} except that this method may return
     * a buffer implementation that produces less garbage.
     */
    public abstract ByteBuf retainedSlice();
    /**
     * Returns a slice of this buffer's sub-region. Modifying the content of
     * the returned buffer or this buffer affects each other's content while
     * they maintain separate indexes and marks.
     * This method does not modify {@code readerIndex} or {@code writerIndex} of
     * this buffer.
     * <p>
     * Also be aware that this method will NOT call {@link #retain()} and so the
     * reference count will NOT be increased.
     */
    public abstract ByteBuf slice(int index, int length);
    /**
     * Returns a retained slice of this buffer's sub-region. Modifying the content of
     * the returned buffer or this buffer affects each other's content while
     * they maintain separate indexes and marks.
     * This method does not modify {@code readerIndex} or {@code writerIndex} of
     * this buffer.
     * <p>
     * Note that this method returns a {@linkplain #retain() retained} buffer unlike {@link #slice(int, int)}.
     * This method behaves similarly to {@code slice(...).retain()} except that this method may return
     * a buffer implementation that produces less garbage.
     */
    public abstract ByteBuf retainedSlice(int index, int length);
    /**
     * Returns a buffer which shares the whole region of this buffer.
     * Modifying the content of the returned buffer or this buffer affects
     * each other's content while they maintain separate indexes and marks.
     * This method does not modify {@code readerIndex} or {@code writerIndex} of
     * this buffer.
     * <p>
     * The reader and writer marks will not be duplicated. Also be aware that this method will
     * NOT call {@link #retain()} and so the reference count will NOT be increased.
     * @return A buffer whose readable content is equivalent to the buffer returned by {@link #slice()}.
     * However this buffer will share the capacity of the underlying buffer, and therefore allows access to all of the
     * underlying content if necessary.
     */
    public abstract ByteBuf duplicate();
    /**
     * Returns a retained buffer which shares the whole region of this buffer.
     * Modifying the content of the returned buffer or this buffer affects
     * each other's content while they maintain separate indexes and marks.
     * This method is identical to {@code buf.slice(0, buf.capacity())}.
     * This method does not modify {@code readerIndex} or {@code writerIndex} of
     * this buffer.
     * <p>
     * Note that this method returns a {@linkplain #retain() retained} buffer unlike {@link #slice(int, int)}.
     * This method behaves similarly to {@code duplicate().retain()} except that this method may return
     * a buffer implementation that produces less garbage.
     */
    public abstract ByteBuf retainedDuplicate();
    /**
     * Returns the maximum number of NIO {@link ByteBuffer}s that consist this buffer.  Note that {@link #nioBuffers()}
     * or {@link #nioBuffers(int, int)} might return fewer {@link ByteBuffer}s.
     *
     * @return {@code -1} if this buffer has no underlying {@link ByteBuffer}.
     *         the number of the underlying {@link ByteBuffer}s if this buffer has at least one underlying
     *         {@link ByteBuffer}.  Note that this method does not return {@code 0} to avoid confusion.
     *
     * @see #nioBuffer()
     * @see #nioBuffer(int, int)
     * @see #nioBuffers()
     * @see #nioBuffers(int, int)
     */
    public abstract int nioBufferCount();
    /**
     * Exposes this buffer's readable bytes as an NIO {@link ByteBuffer}.  The returned buffer
     * either share or contains the copied content of this buffer, while changing the position
     * and limit of the returned NIO buffer does not affect the indexes and marks of this buffer.
     * This method is identical to {@code buf.nioBuffer(buf.readerIndex(), buf.readableBytes())}.
     * This method does not modify {@code readerIndex} or {@code writerIndex} of this buffer.
     * Please note that the returned NIO buffer will not see the changes of this buffer if this buffer
     * is a dynamic buffer and it adjusted its capacity.
     *
     * @throws UnsupportedOperationException
     *         if this buffer cannot create a {@link ByteBuffer} that shares the content with itself
     *
     * @see #nioBufferCount()
     * @see #nioBuffers()
     * @see #nioBuffers(int, int)
     */
    public abstract ByteBuffer nioBuffer();
    /**
     * Exposes this buffer's sub-region as an NIO {@link ByteBuffer}.  The returned buffer
     * either share or contains the copied content of this buffer, while changing the position
     * and limit of the returned NIO buffer does not affect the indexes and marks of this buffer.
     * This method does not modify {@code readerIndex} or {@code writerIndex} of this buffer.
     * Please note that the returned NIO buffer will not see the changes of this buffer if this buffer
     * is a dynamic buffer and it adjusted its capacity.
     *
     * @throws UnsupportedOperationException
     *         if this buffer cannot create a {@link ByteBuffer} that shares the content with itself
     *
     * @see #nioBufferCount()
     * @see #nioBuffers()
     * @see #nioBuffers(int, int)
     */
    public abstract ByteBuffer nioBuffer(int index, int length);
    /**
     * Internal use only: Exposes the internal NIO buffer.
     */
    public abstract ByteBuffer internalNioBuffer(int index, int length);
    /**
     * Exposes this buffer's readable bytes as an NIO {@link ByteBuffer}'s.  The returned buffer
     * either share or contains the copied content of this buffer, while changing the position
     * and limit of the returned NIO buffer does not affect the indexes and marks of this buffer.
     * This method does not modify {@code readerIndex} or {@code writerIndex} of this buffer.
     * Please note that the returned NIO buffer will not see the changes of this buffer if this buffer
     * is a dynamic buffer and it adjusted its capacity.
     *
     * @throws UnsupportedOperationException
     *         if this buffer cannot create a {@link ByteBuffer} that shares the content with itself
     *
     * @see #nioBufferCount()
     * @see #nioBuffer()
     * @see #nioBuffer(int, int)
     */
    public abstract ByteBuffer[] nioBuffers();
    /**
     * Exposes this buffer's bytes as an NIO {@link ByteBuffer}'s for the specified index and length
     * The returned buffer either share or contains the copied content of this buffer, while changing
     * the position and limit of the returned NIO buffer does not affect the indexes and marks of this buffer.
     * This method does not modify {@code readerIndex} or {@code writerIndex} of this buffer. Please note that the
     * returned NIO buffer will not see the changes of this buffer if this buffer is a dynamic
     * buffer and it adjusted its capacity.
     *
     * @throws UnsupportedOperationException
     *         if this buffer cannot create a {@link ByteBuffer} that shares the content with itself
     *
     * @see #nioBufferCount()
     * @see #nioBuffer()
     * @see #nioBuffer(int, int)
     */
    public abstract ByteBuffer[] nioBuffers(int index, int length);
    /**
     * Returns {@code true} if and only if this buffer has a backing byte array.
     * If this method returns true, you can safely call {@link #array()} and
     * {@link #arrayOffset()}.
     */
    public abstract boolean hasArray();
    /**
     * Returns the backing byte array of this buffer.
     *
     * @throws UnsupportedOperationException
     *         if there is no accessible backing byte array
     */
    public abstract byte[] array();
    /**
     * Returns the offset of the first byte within the backing byte array of
     * this buffer.
     *
     * @throws UnsupportedOperationException
     *         if there is no accessible backing byte array
     */
    public abstract int arrayOffset();
    /**
     * Returns {@code true} if and only if this buffer has a reference to the low-level memory address that points
     * to the backing data.
     */
    public abstract boolean hasMemoryAddress();
    /**
     * Returns the low-level memory address that points to the first byte of the backing data.
     *
     * @throws UnsupportedOperationException
     *         if this buffer does not support accessing the low-level memory address
     */
    public abstract long memoryAddress();
    /**
     * Returns {@code true} if this {@link ByteBuf} implementation is backed by a single memory region.
     * Composite buffer implementations must return false even if they currently hold &le; 1 components.
     * For buffers that return {@code true}, it's guaranteed that a successful call to {@link #discardReadBytes()}
     * will increase the value of {@link #maxFastWritableBytes()} by the current {@code readerIndex}.
     * <p>
     * This method will return {@code false} by default, and a {@code false} return value does not necessarily
     * mean that the implementation is composite or that it is <i>not</i> backed by a single memory region.
     */
    public boolean isContiguous() {
        // Conservative default: subclasses backed by one region override this.
        return false;
    }
    /**
     * A {@code ByteBuf} can turn into itself.
     * @return This {@code ByteBuf} instance.
     */
    @Override
    public ByteBuf asByteBuf() {
        // Identity conversion: no wrapping or copying is needed.
        return this;
    }
    /**
     * Decodes this buffer's readable bytes into a string with the specified
     * character set name.  This method is identical to
     * {@code buf.toString(buf.readerIndex(), buf.readableBytes(), charsetName)}.
     * This method does not modify {@code readerIndex} or {@code writerIndex} of
     * this buffer.
     *
     * @throws UnsupportedCharsetException
     *         if the specified character set name is not supported by the
     *         current VM
     */
    public abstract String toString(Charset charset);
    /**
     * Decodes this buffer's sub-region into a string with the specified
     * character set.  This method does not modify {@code readerIndex} or
     * {@code writerIndex} of this buffer.
     */
    public abstract String toString(int index, int length, Charset charset);
    /**
     * Returns a hash code which was calculated from the content of this
     * buffer.  If there's a byte array which is
     * {@linkplain #equals(Object) equal to} this array, both arrays should
     * return the same value.
     */
    @Override
    public abstract int hashCode();
    /**
     * Determines if the content of the specified buffer is identical to the
     * content of this array.  'Identical' here means:
     * <ul>
     * <li>the size of the contents of the two buffers are same and</li>
     * <li>every single byte of the content of the two buffers are same.</li>
     * </ul>
     * Please note that it does not compare {@link #readerIndex()} nor
     * {@link #writerIndex()}.  This method also returns {@code false} for
     * {@code null} and an object which is not an instance of
     * {@link ByteBuf} type.
     */
    @Override
    public abstract boolean equals(Object obj);
    /**
     * Compares the content of the specified buffer to the content of this
     * buffer. Comparison is performed in the same manner with the string
     * comparison functions of various languages such as {@code strcmp},
     * {@code memcmp} and {@link String#compareTo(String)}.
     */
    @Override
    public abstract int compareTo(ByteBuf buffer);
    /**
     * Returns the string representation of this buffer.  This method does not
     * necessarily return the whole content of the buffer but returns
     * the values of the key properties such as {@link #readerIndex()},
     * {@link #writerIndex()} and {@link #capacity()}.
     */
    @Override
    public abstract String toString();
    @Override
    public abstract ByteBuf retain(int increment);
    @Override
    public abstract ByteBuf retain();
    @Override
    public abstract ByteBuf touch();
    @Override
    public abstract ByteBuf touch(Object hint);
    /**
     * Used internally by {@link AbstractByteBuf#ensureAccessible()} to try to guard
     * against using the buffer after it was released (best-effort).
     */
    boolean isAccessible() {
        // A reference count of zero means the buffer has been deallocated.
        return refCnt() != 0;
    }
}
|
documentation
|
java
|
apache__camel
|
components/camel-jq/src/test/java/org/apache/camel/language/jq/JqExpressionPropertyFnTest.java
|
{
"start": 984,
"end": 1903
}
|
class ____ extends JqTestSupport {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.transform().jq(".foo = property(\"MyProperty\")")
.to("mock:result");
}
};
}
@Test
public void testExpression() throws Exception {
getMockEndpoint("mock:result")
.expectedBodiesReceived(MAPPER.createObjectNode().put("foo", "MyPropertyValue"));
fluentTemplate.to("direct:start")
.withProcessor(e -> {
e.setProperty("MyProperty", "MyPropertyValue");
e.getMessage().setBody(node("foo", "bar"));
})
.send();
MockEndpoint.assertIsSatisfied(context);
}
}
|
JqExpressionPropertyFnTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/usertypes/UserTypeMappingTest.java
|
{
"start": 790,
"end": 1851
}
|
class ____ {
private Configuration cfg;
private ServiceRegistry serviceRegistry;
@BeforeEach
public void setup() {
cfg = new Configuration();
serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( cfg.getProperties() );
}
@AfterEach
public void tearDown() {
if ( serviceRegistry != null ) {
ServiceRegistryBuilder.destroy( serviceRegistry );
}
}
@Test
public void testFirstTypeThenEntity() {
cfg.addResource( "org/hibernate/orm/test/mapping/usertypes/TestEnumType.hbm.xml" )
.addResource( "org/hibernate/orm/test/mapping/usertypes/TestEntity.hbm.xml" );
try (SessionFactory sessions = cfg.buildSessionFactory( serviceRegistry )) {
assertNotNull( sessions );
}
}
@Test
public void testFirstEntityThenType() {
cfg.addResource( "org/hibernate/orm/test/mapping/usertypes/TestEntity.hbm.xml" )
.addResource( "org/hibernate/orm/test/mapping/usertypes/TestEnumType.hbm.xml" );
try (SessionFactory sessions = cfg.buildSessionFactory( serviceRegistry )) {
assertNotNull( sessions );
}
}
}
|
UserTypeMappingTest
|
java
|
apache__avro
|
lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputFormat.java
|
{
"start": 1550,
"end": 2930
}
|
class ____ extends FileOutputFormat<TetherData, NullWritable> {
/** Enable output compression using the deflate codec and specify its level. */
public static void setDeflateLevel(JobConf job, int level) {
FileOutputFormat.setCompressOutput(job, true);
job.setInt(AvroOutputFormat.DEFLATE_LEVEL_KEY, level);
}
@SuppressWarnings("unchecked")
@Override
public RecordWriter<TetherData, NullWritable> getRecordWriter(FileSystem ignore, JobConf job, String name,
Progressable prog) throws IOException {
Schema schema = AvroJob.getOutputSchema(job);
final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter());
if (FileOutputFormat.getCompressOutput(job)) {
int level = job.getInt(AvroOutputFormat.DEFLATE_LEVEL_KEY, CodecFactory.DEFAULT_DEFLATE_LEVEL);
writer.setCodec(CodecFactory.deflateCodec(level));
}
Path path = FileOutputFormat.getTaskOutputPath(job, name + AvroOutputFormat.EXT);
writer.create(schema, path.getFileSystem(job).create(path));
return new RecordWriter<TetherData, NullWritable>() {
@Override
public void write(TetherData datum, NullWritable ignore) throws IOException {
writer.appendEncoded(datum.buffer());
}
@Override
public void close(Reporter reporter) throws IOException {
writer.close();
}
};
}
}
|
TetherOutputFormat
|
java
|
spring-projects__spring-boot
|
buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/build/BuildpackLayersMetadata.java
|
{
"start": 3669,
"end": 4406
}
|
class ____ {
private final Map<String, BuildpackVersions> buildpacks = new HashMap<>();
private @Nullable BuildpackLayerDetails getBuildpack(String id, @Nullable String version) {
if (this.buildpacks.containsKey(id)) {
return this.buildpacks.get(id).getBuildpack(version);
}
return null;
}
private void addBuildpackVersions(String id, BuildpackVersions versions) {
this.buildpacks.put(id, versions);
}
private static Buildpacks fromJson(JsonNode node) {
Buildpacks buildpacks = new Buildpacks();
node.properties()
.forEach((field) -> buildpacks.addBuildpackVersions(field.getKey(),
BuildpackVersions.fromJson(field.getValue())));
return buildpacks;
}
}
private static final
|
Buildpacks
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/util/concurrent/SimpleTimeLimiterTest.java
|
{
"start": 1388,
"end": 8890
}
|
class ____ extends TestCase {
private static final long DELAY_MS = 50;
private static final long ENOUGH_MS = 10000;
private static final long NOT_ENOUGH_MS = 5;
private static final String GOOD_CALLABLE_RESULT = "good callable result";
private static final Callable<String> GOOD_CALLABLE =
new Callable<String>() {
@Override
public String call() throws InterruptedException {
MILLISECONDS.sleep(DELAY_MS);
return GOOD_CALLABLE_RESULT;
}
};
private static final Callable<String> BAD_CALLABLE =
new Callable<String>() {
@Override
public String call() throws InterruptedException, SampleException {
MILLISECONDS.sleep(DELAY_MS);
throw new SampleException();
}
};
private static final Runnable GOOD_RUNNABLE =
new Runnable() {
@Override
public void run() {
try {
MILLISECONDS.sleep(DELAY_MS);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
};
private static final Runnable BAD_RUNNABLE =
new Runnable() {
@Override
public void run() {
try {
MILLISECONDS.sleep(DELAY_MS);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
throw new SampleRuntimeException();
}
};
private TimeLimiter service;
private static final ExecutorService executor = newFixedThreadPool(1);
@Override
protected void setUp() throws Exception {
super.setUp();
service = SimpleTimeLimiter.create(executor);
}
public void testNewProxy_goodMethodWithEnoughTime() throws Exception {
SampleImpl target = new SampleImpl(DELAY_MS);
Sample proxy = service.newProxy(target, Sample.class, ENOUGH_MS, MILLISECONDS);
Stopwatch stopwatch = Stopwatch.createStarted();
String result = proxy.sleepThenReturnInput("x");
assertThat(result).isEqualTo("x");
assertThat(stopwatch.elapsed(MILLISECONDS)).isIn(Range.closed(DELAY_MS, ENOUGH_MS));
assertThat(target.finished).isTrue();
}
public void testNewProxy_goodMethodWithNotEnoughTime() throws Exception {
SampleImpl target = new SampleImpl(9999);
Sample proxy = service.newProxy(target, Sample.class, NOT_ENOUGH_MS, MILLISECONDS);
Stopwatch stopwatch = Stopwatch.createStarted();
assertThrows(UncheckedTimeoutException.class, () -> proxy.sleepThenReturnInput("x"));
assertThat(stopwatch.elapsed(MILLISECONDS)).isIn(Range.closed(NOT_ENOUGH_MS, DELAY_MS * 2));
// Is it still computing away anyway?
assertThat(target.finished).isFalse();
MILLISECONDS.sleep(ENOUGH_MS);
assertThat(target.finished).isFalse();
}
public void testNewProxy_badMethodWithEnoughTime() throws Exception {
SampleImpl target = new SampleImpl(DELAY_MS);
Sample proxy = service.newProxy(target, Sample.class, ENOUGH_MS, MILLISECONDS);
Stopwatch stopwatch = Stopwatch.createStarted();
assertThrows(SampleException.class, () -> proxy.sleepThenThrowException());
assertThat(stopwatch.elapsed(MILLISECONDS)).isIn(Range.closed(DELAY_MS, ENOUGH_MS));
}
public void testNewProxy_badMethodWithNotEnoughTime() throws Exception {
SampleImpl target = new SampleImpl(9999);
Sample proxy = service.newProxy(target, Sample.class, NOT_ENOUGH_MS, MILLISECONDS);
Stopwatch stopwatch = Stopwatch.createStarted();
assertThrows(UncheckedTimeoutException.class, () -> proxy.sleepThenThrowException());
assertThat(stopwatch.elapsed(MILLISECONDS)).isIn(Range.closed(NOT_ENOUGH_MS, DELAY_MS * 2));
}
public void testCallWithTimeout_goodCallableWithEnoughTime() throws Exception {
Stopwatch stopwatch = Stopwatch.createStarted();
String result = service.callWithTimeout(GOOD_CALLABLE, ENOUGH_MS, MILLISECONDS);
assertThat(result).isEqualTo(GOOD_CALLABLE_RESULT);
assertThat(stopwatch.elapsed(MILLISECONDS)).isIn(Range.closed(DELAY_MS, ENOUGH_MS));
}
public void testCallWithTimeout_goodCallableWithNotEnoughTime() throws Exception {
assertThrows(
TimeoutException.class,
() -> service.callWithTimeout(GOOD_CALLABLE, NOT_ENOUGH_MS, MILLISECONDS));
}
public void testCallWithTimeout_badCallableWithEnoughTime() throws Exception {
ExecutionException expected =
assertThrows(
ExecutionException.class,
() -> service.callWithTimeout(BAD_CALLABLE, ENOUGH_MS, MILLISECONDS));
assertThat(expected).hasCauseThat().isInstanceOf(SampleException.class);
}
public void testCallUninterruptiblyWithTimeout_goodCallableWithEnoughTime() throws Exception {
Stopwatch stopwatch = Stopwatch.createStarted();
String result = service.callUninterruptiblyWithTimeout(GOOD_CALLABLE, ENOUGH_MS, MILLISECONDS);
assertThat(result).isEqualTo(GOOD_CALLABLE_RESULT);
assertThat(stopwatch.elapsed(MILLISECONDS)).isIn(Range.closed(DELAY_MS, ENOUGH_MS));
}
public void testCallUninterruptiblyWithTimeout_goodCallableWithNotEnoughTime() throws Exception {
assertThrows(
TimeoutException.class,
() -> service.callUninterruptiblyWithTimeout(GOOD_CALLABLE, NOT_ENOUGH_MS, MILLISECONDS));
}
public void testCallUninterruptiblyWithTimeout_badCallableWithEnoughTime() throws Exception {
ExecutionException expected =
assertThrows(
ExecutionException.class,
() -> service.callUninterruptiblyWithTimeout(BAD_CALLABLE, ENOUGH_MS, MILLISECONDS));
assertThat(expected).hasCauseThat().isInstanceOf(SampleException.class);
}
public void testRunWithTimeout_goodRunnableWithEnoughTime() throws Exception {
Stopwatch stopwatch = Stopwatch.createStarted();
service.runWithTimeout(GOOD_RUNNABLE, ENOUGH_MS, MILLISECONDS);
assertThat(stopwatch.elapsed(MILLISECONDS)).isIn(Range.closed(DELAY_MS, ENOUGH_MS));
}
public void testRunWithTimeout_goodRunnableWithNotEnoughTime() throws Exception {
assertThrows(
TimeoutException.class,
() -> service.runWithTimeout(GOOD_RUNNABLE, NOT_ENOUGH_MS, MILLISECONDS));
}
public void testRunWithTimeout_badRunnableWithEnoughTime() throws Exception {
UncheckedExecutionException expected =
assertThrows(
UncheckedExecutionException.class,
() -> service.runWithTimeout(BAD_RUNNABLE, ENOUGH_MS, MILLISECONDS));
assertThat(expected).hasCauseThat().isInstanceOf(SampleRuntimeException.class);
}
public void testRunUninterruptiblyWithTimeout_goodRunnableWithEnoughTime() throws Exception {
Stopwatch stopwatch = Stopwatch.createStarted();
service.runUninterruptiblyWithTimeout(GOOD_RUNNABLE, ENOUGH_MS, MILLISECONDS);
assertThat(stopwatch.elapsed(MILLISECONDS)).isIn(Range.closed(DELAY_MS, ENOUGH_MS));
}
public void testRunUninterruptiblyWithTimeout_goodRunnableWithNotEnoughTime() throws Exception {
assertThrows(
TimeoutException.class,
() -> service.runUninterruptiblyWithTimeout(GOOD_RUNNABLE, NOT_ENOUGH_MS, MILLISECONDS));
}
public void testRunUninterruptiblyWithTimeout_badRunnableWithEnoughTime() throws Exception {
UncheckedExecutionException expected =
assertThrows(
UncheckedExecutionException.class,
() -> service.runUninterruptiblyWithTimeout(BAD_RUNNABLE, ENOUGH_MS, MILLISECONDS));
assertThat(expected).hasCauseThat().isInstanceOf(SampleRuntimeException.class);
}
private
|
SimpleTimeLimiterTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-web-server/src/testFixtures/java/org/springframework/boot/web/server/autoconfigure/servlet/AbstractServletWebServerAutoConfigurationTests.java
|
{
"start": 7941,
"end": 8510
}
|
class ____ implements BeanPostProcessor {
@Override
public Object postProcessBeforeInitialization(Object bean, String beanName) {
if (bean instanceof ConfigurableServletWebServerFactory) {
MockServletWebServerFactory webServerFactory = (MockServletWebServerFactory) bean;
assertThat(webServerFactory.getServletContext()).isNull();
}
return bean;
}
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) {
return bean;
}
}
@Configuration(proxyBeanMethods = false)
static
|
EnsureWebServerHasNoServletContext
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
|
{
"start": 1661,
"end": 10749
}
|
class ____ implements Writable {
static { // register a ctor
WritableFactories.setFactory
(AccessControlList.class,
new WritableFactory() {
@Override
public Writable newInstance() { return new AccessControlList(); }
});
}
// Indicates an ACL string that represents access to all users
public static final String WILDCARD_ACL_VALUE = "*";
private static final int INITIAL_CAPACITY = 256;
public static final String USE_REAL_ACLS = "~";
// Set of users who are granted access.
private Collection<String> users;
// Set of groups which are granted access
private Collection<String> groups;
// Whether all users are granted access.
private boolean allAllowed;
private Groups groupsMapping = Groups.getUserToGroupsMappingService(new Configuration());
/**
* This constructor exists primarily for AccessControlList to be Writable.
*/
public AccessControlList() {
}
/**
* Construct a new ACL from a String representation of the same.
*
* The String is a a comma separated list of users and groups.
* The user list comes first and is separated by a space followed
* by the group list. For e.g. "user1,user2 group1,group2"
*
* @param aclString String representation of the ACL
*/
public AccessControlList(String aclString) {
buildACL(aclString.split(" ", 2));
}
/**
* Construct a new ACL from String representation of users and groups
*
* The arguments are comma separated lists
*
* @param users comma separated list of users
* @param groups comma separated list of groups
*/
public AccessControlList(String users, String groups) {
buildACL(new String[] {users, groups});
}
/**
* Build ACL from the given array of strings.
* The strings contain comma separated values.
*
* @param userGroupStrings build ACL from array of Strings
*/
private void buildACL(String[] userGroupStrings) {
users = new HashSet<>();
groups = new HashSet<>();
for (String aclPart : userGroupStrings) {
if (aclPart != null && isWildCardACLValue(aclPart)) {
allAllowed = true;
break;
}
}
if (!allAllowed) {
if (userGroupStrings.length >= 1 && userGroupStrings[0] != null) {
users = StringUtils.getTrimmedStringCollection(userGroupStrings[0]);
}
if (userGroupStrings.length == 2 && userGroupStrings[1] != null) {
groups = StringUtils.getTrimmedStringCollection(userGroupStrings[1]);
groupsMapping.cacheGroupsAdd(new LinkedList<String>(groups));
}
}
}
/**
* Checks whether ACL string contains wildcard
*
* @param aclString check this ACL string for wildcard
* @return true if ACL string contains wildcard false otherwise
*/
private boolean isWildCardACLValue(String aclString) {
if (aclString.contains(WILDCARD_ACL_VALUE) &&
aclString.trim().equals(WILDCARD_ACL_VALUE)) {
return true;
}
return false;
}
public boolean isAllAllowed() {
return allAllowed;
}
/**
* Add user to the names of users allowed for this service.
*
* @param user
* The user name
*/
public void addUser(String user) {
if (isWildCardACLValue(user)) {
throw new IllegalArgumentException("User " + user + " can not be added");
}
if (!isAllAllowed()) {
users.add(user);
}
}
/**
* Add group to the names of groups allowed for this service.
*
* @param group
* The group name
*/
public void addGroup(String group) {
if (isWildCardACLValue(group)) {
throw new IllegalArgumentException("Group " + group + " can not be added");
}
if (!isAllAllowed()) {
List<String> groupsList = new LinkedList<String>();
groupsList.add(group);
groupsMapping.cacheGroupsAdd(groupsList);
groups.add(group);
}
}
/**
* Remove user from the names of users allowed for this service.
*
* @param user
* The user name
*/
public void removeUser(String user) {
if (isWildCardACLValue(user)) {
throw new IllegalArgumentException("User " + user + " can not be removed");
}
if (!isAllAllowed()) {
users.remove(user);
}
}
/**
* Remove group from the names of groups allowed for this service.
*
* @param group
* The group name
*/
public void removeGroup(String group) {
if (isWildCardACLValue(group)) {
throw new IllegalArgumentException("Group " + group
+ " can not be removed");
}
if (!isAllAllowed()) {
groups.remove(group);
}
}
/**
* Get the names of users allowed for this service.
* @return the set of user names. the set must not be modified.
*/
public Collection<String> getUsers() {
return users;
}
/**
* Get the names of user groups allowed for this service.
* @return the set of group names. the set must not be modified.
*/
public Collection<String> getGroups() {
return groups;
}
/**
* Checks if a user represented by the provided {@link UserGroupInformation}
* is a member of the Access Control List. If user was proxied and
* USE_REAL_ACLS + the real user name is in the control list, then treat this
* case as if user were in the ACL list.
* @param ugi UserGroupInformation to check if contained in the ACL
* @return true if ugi is member of the list or if USE_REAL_ACLS + real user
* is in the list
*/
public final boolean isUserInList(UserGroupInformation ugi) {
if (allAllowed || users.contains(ugi.getShortUserName())) {
return true;
} else if (!groups.isEmpty()) {
Set<String> ugiGroups = ugi.getGroupsSet();
for (String group : groups) {
if (ugiGroups.contains(group)) {
return true;
}
}
}
UserGroupInformation realUgi = ugi.getRealUser();
return realUgi != null &&
users.contains(USE_REAL_ACLS + realUgi.getShortUserName());
}
public boolean isUserAllowed(UserGroupInformation ugi) {
return isUserInList(ugi);
}
/**
* Returns descriptive way of users and groups that are part of this ACL.
* Use {@link #getAclString()} to get the exact String that can be given to
* the constructor of AccessControlList to create a new instance.
*/
@Override
public String toString() {
String str = null;
if (allAllowed) {
str = "All users are allowed";
}
else if (users.isEmpty() && groups.isEmpty()) {
str = "No users are allowed";
}
else {
String usersStr = null;
String groupsStr = null;
if (!users.isEmpty()) {
usersStr = users.toString();
}
if (!groups.isEmpty()) {
groupsStr = groups.toString();
}
if (!users.isEmpty() && !groups.isEmpty()) {
str = "Users " + usersStr + " and members of the groups "
+ groupsStr + " are allowed";
}
else if (!users.isEmpty()) {
str = "Users " + usersStr + " are allowed";
}
else {// users is empty array and groups is nonempty
str = "Members of the groups "
+ groupsStr + " are allowed";
}
}
return str;
}
/**
* Returns the access control list as a String that can be used for building a
* new instance by sending it to the constructor of {@link AccessControlList}.
* @return acl string.
*/
public String getAclString() {
StringBuilder sb = new StringBuilder(INITIAL_CAPACITY);
if (allAllowed) {
sb.append('*');
}
else {
sb.append(getUsersString())
.append(" ")
.append(getGroupsString());
}
return sb.toString();
}
/**
* Serializes the AccessControlList object
*/
@Override
public void write(DataOutput out) throws IOException {
String aclString = getAclString();
Text.writeString(out, aclString);
}
/**
* Deserializes the AccessControlList object
*/
@Override
public void readFields(DataInput in) throws IOException {
String aclString = Text.readString(in);
buildACL(aclString.split(" ", 2));
}
/**
* Returns comma-separated concatenated single String of the set 'users'
*
* @return comma separated list of users
*/
private String getUsersString() {
return getString(users);
}
/**
* Returns comma-separated concatenated single String of the set 'groups'
*
* @return comma separated list of groups
*/
private String getGroupsString() {
return getString(groups);
}
/**
* Returns comma-separated concatenated single String of all strings of
* the given set
*
* @param strings set of strings to concatenate
*/
private String getString(Collection<String> strings) {
StringBuilder sb = new StringBuilder(INITIAL_CAPACITY);
boolean first = true;
for(String str: strings) {
if (!first) {
sb.append(",");
} else {
first = false;
}
sb.append(str);
}
return sb.toString();
}
}
|
AccessControlList
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/mappedBy/IsNullAndMappedByTest.java
|
{
"start": 1152,
"end": 9108
}
|
class ____ {
@BeforeAll
public void setUp(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final Person person1 = new Person( 1, "Luigi" );
final Person person2 = new Person( 2, "Andrea" );
final Person person3 = new Person( 3, "Max" );
final Account account1 = new Account( 1, null, null, person1 );
final Account account2 = new Account( 2, "Fab", null, person2 );
final Account account3 = new Account( 3, "And", null, null );
session.persist( person1 );
session.persist( person2 );
session.persist( person3 );
session.persist( account1 );
session.persist( account2 );
session.persist( account3 );
} );
}
@AfterAll
public void tearDown(SessionFactoryScope scope) {
scope.inTransaction( session -> {
session.createMutationQuery( "delete from Account" ).executeUpdate();
session.createMutationQuery( "delete from Person" ).executeUpdate();
} );
}
@Test
public void testAssociationDereferenceIsNullInWhereClause(SessionFactoryScope scope) {
final SQLStatementInspector inspector = scope.getCollectingStatementInspector();
scope.inTransaction( session -> {
inspector.clear();
// should produce an inner join to ACCOUNT_TABLE
final List<Integer> ids = session.createQuery(
"select p.id from Person p where p.account.code is null",
Integer.class
).getResultList();
assertEquals( 1, ids.size() );
assertEquals( 1, (int) ids.get( 0 ) );
assertThat( inspector.getSqlQueries() ).hasSize( 1 );
assertThat( inspector.getSqlQueries().get( 0 ) ).containsIgnoringCase( " join " );
assertThat( inspector.getSqlQueries().get( 0 ) ).doesNotContainIgnoringCase( " left " );
assertThat( inspector.getSqlQueries().get( 0 ) ).containsIgnoringCase( " ACCOUNT_TABLE " );
} );
}
@Test
public void testAssociationIsNullInWhereClause(SessionFactoryScope scope) {
final SQLStatementInspector inspector = scope.getCollectingStatementInspector();
scope.inTransaction( session -> {
inspector.clear();
// should produce a left join to ACCOUNT_TABLE and restrict based on the Account's id -
//
// ...
// from PERSON p
// left join ACCOUNT_TABLE a
// on p.account_id = a.id
// where a.id is null
final List<Integer> ids = session.createQuery(
"select distinct p.id from Person p where p.account is null",
Integer.class
).getResultList();
assertEquals( 1, ids.size() );
assertEquals( 3, (int) ids.get( 0 ) );
assertThat( inspector.getSqlQueries() ).hasSize( 1 );
assertThat( inspector.getSqlQueries().get( 0 ) ).containsIgnoringCase( " left join " );
assertThat( inspector.getSqlQueries().get( 0 ) ).containsIgnoringCase( ".id is null" );
} );
}
@Test
public void testFetchedAssociationIsNullInWhereClause(SessionFactoryScope scope) {
final SQLStatementInspector inspector = scope.getCollectingStatementInspector();
scope.inTransaction( session -> {
inspector.clear();
// should produce an inner join to ACCOUNT_TABLE since it's explicitly selected
//
// ...
// from PERSON p
// join ACCOUNT_TABLE a
// on p.account_id = a.id
// where a.id is null
final List<Account> results = session.createQuery(
"select p.account from Person p where p.account is null",
Account.class
).getResultList();
assertThat( results ).isEmpty();
assertThat( inspector.getSqlQueries() ).hasSize( 1 );
assertThat( inspector.getSqlQueries().get( 0 ) ).containsIgnoringCase( "join" );
assertThat( inspector.getSqlQueries().get( 0 ) ).doesNotContainIgnoringCase( " left join " );
} );
}
@Test
public void testIsNullInWhereClause3(SessionFactoryScope scope) {
final SQLStatementInspector inspector = scope.getCollectingStatementInspector();
scope.inTransaction( session -> {
inspector.clear();
final List<Integer> ids = session.createQuery(
"select distinct a.id from Account a where fk(a.person) is null",
Integer.class
).getResultList();
assertEquals( 1, ids.size() );
assertEquals( 3, (int) ids.get( 0 ) );
assertThat( inspector.getSqlQueries() ).hasSize( 1 );
assertThat( inspector.getSqlQueries().get( 0 ) ).doesNotContainIgnoringCase( " join " );
assertThat( inspector.getSqlQueries().get( 0 ) ).containsIgnoringCase( ".person_id is null" );
} );
}
@Test
public void testAssociationEqualsInWhereClause(SessionFactoryScope scope) {
final SQLStatementInspector inspector = scope.getCollectingStatementInspector();
scope.inTransaction( session -> {
inspector.clear();
// at the moment -
//
// select
// distinct p1_0.id
// from
// Person p1_0
// join
// ACCOUNT_TABLE a1_0
// on a1_0.id=p1_0.account_id
// where
// a1_0.id=?
final List<Integer> ids = session.createQuery(
"select distinct p.id from Person p where p.account = :acct",
Integer.class
).setParameter( "acct", new Account( 1, null, null, null ) ).getResultList();
assertThat( ids ).hasSize( 1 );
assertThat( ids.get( 0 ) ).isEqualTo( 1 );
assertThat( inspector.getSqlQueries() ).hasSize( 1 );
assertThat( inspector.getSqlQueries().get( 0 ) ).containsIgnoringCase( " join " );
assertThat( inspector.getSqlQueries().get( 0 ) ).containsIgnoringCase( ".id=?" );
} );
}
@Test
public void testIsNullInWhereClause5(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final List<Integer> ids = session.createQuery(
"select p.id from Person p where p.account.code is null or p.account.id is null",
Integer.class
)
.getResultList();
assertEquals( 1, ids.size() );
assertEquals( 1, (int) ids.get( 0 ) );
} );
}
@Test
public void testWhereClause(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final List<Integer> ids = session.createQuery(
"select p.id from Person p where p.account.code = :code and p.account.id = :id",
Integer.class
)
.setParameter( "code", "Fab" )
.setParameter( "id", 2 )
.getResultList();
assertEquals( 1, ids.size() );
assertEquals( 2, (int) ids.get( 0 ) );
} );
}
@Test
public void testDelete(SessionFactoryScope scope) {
final SQLStatementInspector inspector = scope.getCollectingStatementInspector();
inspector.clear();
scope.inTransaction( (entityManager) -> {
entityManager.createMutationQuery( "delete from Person p where p.account is null" ).executeUpdate();
assertThat( inspector.getSqlQueries() ).hasSize( 1 );
// could physically be a join or exists sub-query
assertThat( inspector.getSqlQueries()
.get( 0 ) ).matches( (sql) -> sql.contains( "left join" ) || sql.contains( "not exists" ) );
} );
}
@Test
public void testHqlUpdate(SessionFactoryScope scope) {
final SQLStatementInspector inspector = scope.getCollectingStatementInspector();
inspector.clear();
scope.inTransaction( (entityManager) -> {
entityManager.createMutationQuery( "update Person p set p.name = 'abc' where p.account is null" ).executeUpdate();
assertThat( inspector.getSqlQueries() ).hasSize( 1 );
// could physically be a join or exists sub-query
assertThat( inspector.getSqlQueries()
.get( 0 ) ).matches( (sql) -> sql.contains( "left join" ) || sql.contains( "not exists" ) );
} );
}
@Test
public void testHqlUpdateSet(SessionFactoryScope scope) {
final SQLStatementInspector inspector = scope.getCollectingStatementInspector();
inspector.clear();
scope.inTransaction( (entityManager) -> {
entityManager.createMutationQuery( "update Account a set a.person = null where id = 99" ).executeUpdate();
assertThat( inspector.getSqlQueries() ).hasSize( 1 );
assertThat( inspector.getSqlQueries().get( 0 ) ).doesNotContainIgnoringCase( " join " );
assertThat( inspector.getSqlQueries().get( 0 ) ).containsIgnoringCase( "person_id=null" );
} );
}
@Entity( name = "Person" )
public static
|
IsNullAndMappedByTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/orphan/onetomany/LazyOneToManyOrphanWithIdentityIdTest.java
|
{
"start": 2758,
"end": 3338
}
|
class ____ {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
Long id;
String name;
@OneToMany(mappedBy = "parent", orphanRemoval = true)
List<Child> children = new ArrayList<>();
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public List<Child> getChildren() {
return children;
}
public void setChildren(List<Child> children) {
this.children = children;
}
public void addChild(Child child) {
children.add( child );
child.setParent( this );
}
}
@Entity(name = "Child")
static
|
Parent
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/filter/CustomNullSerializationTest.java
|
{
"start": 1274,
"end": 2210
}
|
class ____ extends SerializationContexts
{
public MyNullSerializerContexts() { super(); }
public MyNullSerializerContexts(TokenStreamFactory tsf, SerializerFactory serializerFactory,
SerializerCache cache) {
super(tsf, serializerFactory, cache);
}
@Override
public SerializationContexts forMapper(Object mapper,
TokenStreamFactory tsf, SerializerFactory serializerFactory,
SerializerCache cache) {
return new MyNullSerializerContexts(tsf, serializerFactory, cache);
}
@Override
public SerializationContextExt createContext(SerializationConfig config,
GeneratorSettings genSettings) {
return new MyNullSerializerSerializationContext(_streamFactory, _cache,
config, genSettings, _serializerFactory);
}
}
static
|
MyNullSerializerContexts
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ShouldHaveSameHashCode.java
|
{
"start": 657,
"end": 1256
}
|
class ____ extends BasicErrorMessageFactory {
public static ErrorMessageFactory shouldHaveSameHashCode(Object actual, Object other) {
return new ShouldHaveSameHashCode(actual, other);
}
private ShouldHaveSameHashCode(Object actual, Object expected) {
super("%nExpecting%n" +
" %s%n" +
"to have the same hash code as:%n" +
" %s%n" +
"but actual hash code is%n" +
" %s%n" +
"while expected hash code was:%n" +
" %s",
actual, expected, actual.hashCode(), expected.hashCode());
}
}
|
ShouldHaveSameHashCode
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/metamodel/AttributeClassification.java
|
{
"start": 388,
"end": 1525
}
|
enum ____ {
/**
* @see jakarta.persistence.Basic
*/
BASIC,
/**
* @see jakarta.persistence.Embedded
*/
EMBEDDED,
/**
* @see org.hibernate.annotations.Any
*/
ANY,
/**
* @see jakarta.persistence.OneToOne
*/
ONE_TO_ONE,
/**
* @see jakarta.persistence.ManyToOne
*/
MANY_TO_ONE,
/**
* @see jakarta.persistence.ElementCollection
*/
ELEMENT_COLLECTION,
/**
* @see jakarta.persistence.OneToMany
*/
ONE_TO_MANY,
/**
* @see jakarta.persistence.ManyToMany
*/
MANY_TO_MANY;
/**
* The associated {@link PersistentAttributeType}, if one
*/
public PersistentAttributeType getJpaClassification() {
return switch ( this ) {
case BASIC -> PersistentAttributeType.BASIC;
case EMBEDDED -> PersistentAttributeType.EMBEDDED;
case ONE_TO_ONE -> PersistentAttributeType.ONE_TO_ONE;
case MANY_TO_ONE -> PersistentAttributeType.MANY_TO_ONE;
case ELEMENT_COLLECTION -> PersistentAttributeType.ELEMENT_COLLECTION;
case ONE_TO_MANY -> PersistentAttributeType.ONE_TO_MANY;
case MANY_TO_MANY -> PersistentAttributeType.MANY_TO_MANY;
case ANY -> null;
};
}
}
|
AttributeClassification
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/HazelcastTopicComponentBuilderFactory.java
|
{
"start": 1939,
"end": 6851
}
|
interface ____ extends ComponentBuilder<HazelcastTopicComponent> {
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default HazelcastTopicComponentBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default HazelcastTopicComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default HazelcastTopicComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* The hazelcast instance reference which can be used for hazelcast
* endpoint. If you don't specify the instance reference, camel use the
* default hazelcast instance from the camel-hazelcast instance.
*
* The option is a:
* <code>com.hazelcast.core.HazelcastInstance</code> type.
*
* Group: advanced
*
* @param hazelcastInstance the value to set
* @return the dsl builder
*/
default HazelcastTopicComponentBuilder hazelcastInstance(com.hazelcast.core.HazelcastInstance hazelcastInstance) {
doSetProperty("hazelcastInstance", hazelcastInstance);
return this;
}
/**
* The hazelcast mode reference which kind of instance should be used.
* If you don't specify the mode, then the node mode will be the
* default.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: node
* Group: advanced
*
* @param hazelcastMode the value to set
* @return the dsl builder
*/
default HazelcastTopicComponentBuilder hazelcastMode(java.lang.String hazelcastMode) {
doSetProperty("hazelcastMode", hazelcastMode);
return this;
}
}
|
HazelcastTopicComponentBuilder
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLAlterTableDisableConstraint.java
|
{
"start": 812,
"end": 1338
}
|
class ____ extends SQLObjectImpl implements SQLAlterTableItem {
private SQLName constraintName;
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, this.constraintName);
}
visitor.endVisit(this);
}
public SQLName getConstraintName() {
return constraintName;
}
public void setConstraintName(SQLName constraintName) {
this.constraintName = constraintName;
}
}
|
SQLAlterTableDisableConstraint
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/client/protocol/decoder/MapValueDecoder.java
|
{
"start": 851,
"end": 1469
}
|
class ____<T> implements MultiDecoder<Object> {
private final MultiDecoder<Object> decoder;
public MapValueDecoder(MultiDecoder<Object> decoder) {
this.decoder = decoder;
}
public MapValueDecoder() {
this(null);
}
@Override
public Decoder<Object> getDecoder(Codec codec, int paramNum, State state, long size) {
return codec.getMapValueDecoder();
}
@Override
public T decode(List<Object> parts, State state) {
if (decoder != null) {
return (T) decoder.decode(parts, state);
}
return (T) parts;
}
}
|
MapValueDecoder
|
java
|
apache__camel
|
core/camel-core-processor/src/main/java/org/apache/camel/processor/saga/SagaProcessor.java
|
{
"start": 1515,
"end": 6063
}
|
class ____ extends BaseDelegateProcessorSupport implements Traceable, IdAware, RouteIdAware {
protected final CamelSagaService sagaService;
protected final CamelSagaStep step;
protected final SagaCompletionMode completionMode;
private String id;
private String routeId;
public SagaProcessor(CamelContext camelContext, Processor childProcessor, CamelSagaService sagaService,
SagaCompletionMode completionMode, CamelSagaStep step) {
super(ObjectHelper.notNull(childProcessor, "childProcessor"));
this.sagaService = ObjectHelper.notNull(sagaService, "sagaService");
this.completionMode = ObjectHelper.notNull(completionMode, "completionMode");
this.step = ObjectHelper.notNull(step, "step");
}
protected CompletableFuture<CamelSagaCoordinator> getCurrentSagaCoordinator(Exchange exchange) {
String currentSaga = exchange.getIn().getHeader(Exchange.SAGA_LONG_RUNNING_ACTION, String.class);
if (currentSaga != null) {
return sagaService.getSaga(currentSaga);
}
return CompletableFuture.completedFuture(null);
}
protected void setCurrentSagaCoordinator(Exchange exchange, CamelSagaCoordinator coordinator) {
if (coordinator != null) {
exchange.getIn().setHeader(Exchange.SAGA_LONG_RUNNING_ACTION, coordinator.getId());
} else {
exchange.getIn().removeHeader(Exchange.SAGA_LONG_RUNNING_ACTION);
exchange.getMessage().removeHeader(Exchange.SAGA_LONG_RUNNING_ACTION);
}
}
protected void handleSagaCompletion(
Exchange exchange, CamelSagaCoordinator coordinator, CamelSagaCoordinator previousCoordinator,
AsyncCallback callback) {
if (this.completionMode == SagaCompletionMode.AUTO) {
if (exchange.getException() != null) {
if (coordinator != null) {
coordinator.compensate(exchange).whenComplete((done, ex) -> ifNotException(ex, exchange, callback, () -> {
setCurrentSagaCoordinator(exchange, previousCoordinator);
callback.done(false);
}));
} else {
// No coordinator available, so no saga available.
callback.done(false);
}
} else {
coordinator.complete(exchange).whenComplete((done, ex) -> ifNotException(ex, exchange, callback, () -> {
setCurrentSagaCoordinator(exchange, previousCoordinator);
callback.done(false);
}));
}
} else if (this.completionMode == SagaCompletionMode.MANUAL) {
// Completion will be handled manually by the user
callback.done(false);
} else {
throw new IllegalStateException("Unsupported completion mode: " + this.completionMode);
}
}
public CamelSagaService getSagaService() {
return sagaService;
}
@Override
public String getId() {
return id;
}
@Override
public void setId(String id) {
this.id = id;
}
@Override
public String getRouteId() {
return routeId;
}
@Override
public void setRouteId(String routeId) {
this.routeId = routeId;
}
@Override
public String toString() {
return "id";
}
@Override
public String getTraceLabel() {
return "saga";
}
protected void ifNotException(Throwable ex, Exchange exchange, AsyncCallback callback, Runnable code) {
ifNotException(ex, exchange, false, null, null, callback, code);
}
protected void ifNotException(
Throwable ex, Exchange exchange, boolean handleCompletion, CamelSagaCoordinator coordinator,
CamelSagaCoordinator previousCoordinator, AsyncCallback callback, Runnable code) {
if (ex != null) {
exchange.setException(ex);
if (handleCompletion) {
handleSagaCompletion(exchange, coordinator, previousCoordinator, callback);
} else {
callback.done(false);
}
} else {
code.run();
}
}
@Override
protected void doStart() throws Exception {
super.doStart();
ServiceHelper.startService(sagaService);
}
@Override
protected void doStop() throws Exception {
super.doStop();
ServiceHelper.stopService(sagaService);
}
}
|
SagaProcessor
|
java
|
apache__camel
|
components/camel-file/src/main/java/org/apache/camel/component/file/cluster/FileLockClusterView.java
|
{
"start": 13074,
"end": 13933
}
|
class ____ implements CamelClusterMember {
private final AtomicReference<ClusterMemberStatus> status = new AtomicReference<>(ClusterMemberStatus.STOPPED);
private final String uuid = UUID.randomUUID().toString();
@Override
public boolean isLeader() {
return getStatus().equals(ClusterMemberStatus.LEADER);
}
@Override
public boolean isLocal() {
return true;
}
@Override
public String getId() {
return getClusterService().getId();
}
public String getUuid() {
return uuid;
}
public ClusterMemberStatus getStatus() {
return status.get();
}
private void setStatus(ClusterMemberStatus status) {
this.status.set(status);
}
}
private
|
ClusterMember
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/util/ConcurrencyThrottleSupport.java
|
{
"start": 988,
"end": 1370
}
|
class ____ throttling concurrent access to a specific resource.
*
* <p>Designed for use as a base class, with the subclass invoking
* the {@link #beforeAccess()} and {@link #afterAccess()} methods at
* appropriate points of its workflow. Note that {@code afterAccess}
* should usually be called in a {@code finally} block!
*
* <p>The default concurrency limit of this support
|
for
|
java
|
elastic__elasticsearch
|
libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/common/ProjectIdBridge.java
|
{
"start": 696,
"end": 1342
}
|
interface ____ extends StableBridgeAPI<ProjectId> {
String id();
static ProjectIdBridge fromInternal(final ProjectId projectId) {
return new ProxyInternal(projectId);
}
static ProjectIdBridge fromId(final String id) {
final ProjectId internal = ProjectId.fromId(id);
return new ProxyInternal(internal);
}
static ProjectIdBridge getDefault() {
return ProxyInternal.DEFAULT;
}
/**
* An implementation of {@link ProjectIdBridge} that proxies calls to
* an internal {@link ProjectId} instance.
*
* @see StableBridgeAPI.ProxyInternal
*/
final
|
ProjectIdBridge
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/XTypesToStableStringTest.java
|
{
"start": 5115,
"end": 5425
}
|
interface ____ {
static TestComponent create(Elements elements, Types types) {
return DaggerXTypesToStableStringTest_TestComponent.builder()
.javacPluginModule(new JavacPluginModule(elements, types))
.build();
}
void inject(XTypesToStableStringTest test);
}
}
|
TestComponent
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/processor/src/main/java/org/jboss/resteasy/reactive/server/processor/scanning/FeatureScanner.java
|
{
"start": 549,
"end": 781
}
|
interface ____ {
FeatureScanResult integrate(IndexView application, ScannedApplication scannedApplication);
default void integrateWithIndexer(ServerEndpointIndexer.Builder builder, IndexView index) {
}
|
FeatureScanner
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/BindsMethodValidationTest.java
|
{
"start": 7571,
"end": 7971
}
|
class ____<T> extends MissingType {}");
CompilerTests.daggerCompiler(module, child, parent)
.compile(
subject -> {
switch (CompilerTests.backend(subject)) {
case JAVAC:
subject.hasErrorCount(3);
subject.hasErrorContaining(
"cannot find symbol"
+ "\n symbol:
|
Parent
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/ResourceManager.java
|
{
"start": 62972,
"end": 63583
}
|
class ____ implements ResourceEventListener {
@Override
public void notEnoughResourceAvailable(
JobID jobId, Collection<ResourceRequirement> acquiredResources) {
validateRunsInMainThread();
JobManagerRegistration jobManagerRegistration = jobManagerRegistrations.get(jobId);
if (jobManagerRegistration != null) {
jobManagerRegistration
.getJobManagerGateway()
.notifyNotEnoughResourcesAvailable(acquiredResources);
}
}
}
private
|
ResourceEventListenerImpl
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/consumer/internals/events/ApplicationEventProcessor.java
|
{
"start": 37627,
"end": 37689
}
|
interface ____ as a shim to support both.
*/
private
|
acts
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AllFirstTests.java
|
{
"start": 717,
"end": 1204
}
|
class ____ extends AbstractFirstLastTestCase {
public AllFirstTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
this.testCase = testCaseSupplier.get();
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
return parameters(true, true);
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new AllFirst(source, args.get(0), args.get(1));
}
}
|
AllFirstTests
|
java
|
micronaut-projects__micronaut-core
|
http-client/src/main/java/io/micronaut/http/client/netty/Pool49.java
|
{
"start": 33816,
"end": 42504
}
|
class ____ extends AtomicBoolean implements Pool.PendingRequest {
private static final AtomicInteger NEXT_DEBUG_ID = new AtomicInteger(1);
/**
* Hint for which thread is blocked waiting for this connection.
*/
final @Nullable BlockHint blockHint;
/**
* {@link ExecutionFlow} that completes with the connection.
*/
private final DelayedExecutionFlow<ConnectionManager.PoolHandle> sink = DelayedExecutionFlow.create();
/**
* Pool that we prefer based on the caller.
*/
private final LocalPoolPair preferredPool;
/**
* Pools other than {@link #preferredPool} may pick up this connection if and only if this
* field is {@code true}.
*/
private final boolean permitStealing;
/**
* The pool this request is currently assigned to. May change throughout the lifetime of
* the request.
*/
private volatile LocalPoolPair destPool;
private int debugId;
PendingRequest(@Nullable BlockHint blockHint) {
this.blockHint = blockHint;
preferredPool = pickPreferredPool();
permitStealing = preferredPool == null ||
connectionPoolConfiguration.getConnectionLocality() == HttpClientConfiguration.ConnectionPoolConfiguration.ConnectionLocality.PREFERRED;
}
private synchronized int debugId() {
if (debugId == 0) {
debugId = NEXT_DEBUG_ID.getAndIncrement();
}
return debugId;
}
/**
* Flow that completes when this request is assigned to a connection.
*
* @return The flow
*/
@Override
public @NonNull ExecutionFlow<ConnectionManager.PoolHandle> flow() {
return sink;
}
/**
* Kick off dispatching this request to a connection. Note that this must be called exactly
* once.
*/
@Override
public void dispatch() {
if (globalPending != null && globalPending.sum() >= connectionPoolConfiguration.getMaxPendingAcquires()) {
tryCompleteExceptionally(new HttpClientException("Cannot acquire connection, exceeded max pending acquires configuration"));
return;
}
if (log.isTraceEnabled()) {
log.trace("{}: Starting dispatch, preferred pool {}", this, preferredPool);
}
if (globalPending != null) {
globalPending.increment();
}
redispatch();
}
/**
* Attempt to redispatch this connection. Unlike {@link #dispatch()}, can be called
* multiple times, because it doesn't increase {@link #globalPending}.
*/
@Override
public void redispatch() {
if (preferredPool == null) {
destPool = localPools.get(ThreadLocalRandom.current().nextInt(localPools.size()));
if (log.isTraceEnabled()) {
log.trace("{}: Scheduling dispatch on {}", this, destPool);
}
} else {
destPool = preferredPool;
}
if (destPool.loop.inEventLoop()) {
dispatchLocal();
} else {
destPool.loop.execute(this::dispatchLocal);
}
}
/**
* Dispatch this request to the current {@link #destPool}.
*/
private void dispatchLocal() {
assert destPool.loop.inEventLoop();
boolean traceEnabled = log.isTraceEnabled();
if (traceEnabled) {
log.trace("{}: Attempting dispatch on {}", this, destPool);
}
// is there a connection already? use it.
PoolEntry available = destPool.findAvailablePoolEntry();
if (available != null) {
dispatchTo(available);
return;
}
if (permitStealing) {
// does another pool have an available connection? Move to that pool.
for (LocalPoolPair pool : RandomOffsetIterator.iterable(localPools)) {
if (pool != destPool && (pool.http1.firstAvailable != null || pool.http2.firstAvailable != null)) {
destPool = pool;
pool.loop.execute(this::dispatchLocal);
return;
}
}
}
// need to open a new connection.
if (preferredPool != null && destPool != preferredPool) {
if (traceEnabled) {
log.trace("{}: Moving back to preferred pool to open a new connection", this);
}
// move back to preferred pool first
destPool = preferredPool;
destPool.loop.execute(this::dispatchLocal);
return;
}
if (blockHint != null && blockHint.blocks((EventLoop) destPool.loop)) {
tryCompleteExceptionally(BlockHint.createException());
return;
}
boolean open = openConnectionStep1();
if (open) {
destPool.openConnectionStep2();
}
if (open || !permitStealing) {
if (traceEnabled) {
log.trace("{}: Adding to local pending requests", this);
}
destPool.addLocalPendingRequest(this);
} else {
// Limits are hit, can't open a new connection. Move this request to
// globalPendingRequests as fallback.
if (traceEnabled) {
log.trace("{}: Adding to global pending requests", this);
}
destPool = null;
globalPendingRequests.add(this);
for (LocalPoolPair pool : localPools) {
pool.notifyGlobalPendingRequestQueued();
}
}
if (open) {
if (traceEnabled) {
log.trace("{}: Opening a new connection", this);
}
destPool.openConnectionStep3();
}
}
/**
* Assign this request to the given connection.
*
* @param entry The connection
*/
private void dispatchTo(PoolEntry entry) {
if (log.isTraceEnabled()) {
log.trace("{}: Dispatching to connection {}", this, entry);
}
if (destPool == null) {
// from global pending request queue
destPool = entry.poolPair;
} else {
assert destPool.loop.inEventLoop();
assert destPool == entry.poolPair;
}
BlockHint blockHint = this.blockHint;
if (blockHint != null && blockHint.blocks(entry.poolPair.loop)) {
tryCompleteExceptionally(BlockHint.createException());
return;
}
entry.preDispatch(this);
dispatchSafe(entry.connection, this);
}
/**
* The event loop this request will <i>likely</i> run on. This is only best effort.
*
* @return The event loop, or {@code null} if unknown
*/
@Override
public @Nullable EventExecutor likelyEventLoop() {
LocalPoolPair pool = destPool;
return pool == null ? null : pool.loop;
}
// DelayedExecutionFlow does not allow concurrent completes, so this is a simple guard
boolean tryCompleteExceptionally(Throwable t) {
if (compareAndSet(false, true)) {
if (globalPending != null) {
globalPending.decrement();
}
sink.completeExceptionally(t);
return true;
} else {
return false;
}
}
@Override
public boolean tryComplete(ConnectionManager.PoolHandle value) {
if (compareAndSet(false, true)) {
if (globalPending != null) {
globalPending.decrement();
}
if (sink.isCancelled()) {
return false;
}
sink.complete(value);
return true;
} else {
return false;
}
}
@Override
public String toString() {
return "PendingRequest[" + debugId() + "]";
}
}
}
|
PendingRequest
|
java
|
quarkusio__quarkus
|
extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/webjar/WebJarUtil.java
|
{
"start": 6435,
"end": 9365
}
|
class ____ extends SimpleFileVisitor<Path> {
private final WebJarResourcesTargetVisitor visitor;
private final Path rootFolderToCopy;
private final ResolvedDependency resourcesArtifact;
private final ResolvedDependency userApplication;
private final WebJarResourcesFilter filter;
private final ClassLoader classLoader;
private final WebJarBuildItem webJar;
public ResourcesFileVisitor(WebJarResourcesTargetVisitor visitor, Path rootFolderToCopy,
ResolvedDependency resourcesArtifact, ResolvedDependency userApplication, WebJarResourcesFilter filter,
ClassLoader classLoader, WebJarBuildItem webJar) {
this.visitor = visitor;
this.rootFolderToCopy = rootFolderToCopy;
this.resourcesArtifact = resourcesArtifact;
this.userApplication = userApplication;
this.filter = filter;
this.classLoader = classLoader;
this.webJar = webJar;
}
@Override
public FileVisitResult preVisitDirectory(final Path dir,
final BasicFileAttributes attrs) throws IOException {
visitor.visitDirectory(rootFolderToCopy.relativize(dir).toString());
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult visitFile(final Path file,
final BasicFileAttributes attrs) throws IOException {
String fileName = rootFolderToCopy.relativize(file).toString();
String moduleName = getModuleOverrideName(resourcesArtifact, fileName);
boolean overrideFileCreated = false;
if (OVERRIDABLE_RESOURCES.contains(fileName)) {
try (WebJarResourcesFilter.FilterResult filterResult = filter.apply(fileName,
getOverride(userApplication, classLoader,
fileName, moduleName, webJar.getUseDefaultQuarkusBranding()))) {
if (filterResult.hasStream()) {
overrideFileCreated = true;
// Override (either developer supplied or Quarkus)
visitor.visitFile(fileName, filterResult.getStream());
}
}
}
if (!overrideFileCreated) {
try (WebJarResourcesFilter.FilterResult filterResult = filter.apply(fileName, Files.newInputStream(file))) {
if (!visitor.supportsOnlyCopyingNonArtifactFiles() || !webJar.getOnlyCopyNonArtifactFiles()
|| filterResult.isChanged()) {
if (filterResult.hasStream()) {
visitor.visitFile(fileName, filterResult.getStream());
}
}
}
}
return FileVisitResult.CONTINUE;
}
}
}
|
ResourcesFileVisitor
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/event/collection/ChildEntity.java
|
{
"start": 186,
"end": 449
}
|
class ____ extends ChildValue implements Entity {
private Long id;
public ChildEntity() {
super();
}
public ChildEntity(String name) {
super( name );
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
}
|
ChildEntity
|
java
|
google__dagger
|
hilt-android/main/java/dagger/hilt/android/lifecycle/RetainedLifecycle.java
|
{
"start": 729,
"end": 833
}
|
class ____ registered listeners on a retained lifecycle (generally backed up by a ViewModel).
*/
public
|
for
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/launcher/AbstractServiceLauncherTestBase.java
|
{
"start": 8300,
"end": 9615
}
|
class ____ create
* @param conf configuration
* @param expectedText expected text; may be "" or null
* @param errorCode error code
* @param args varargs launch arguments
* @return the exception returned if there was a match
* @throws AssertionError on a mismatch of expectation and actual
*/
protected ExitUtil.ExitException launchExpectingException(Class serviceClass,
Configuration conf,
String expectedText,
int errorCode,
String... args) {
try {
ServiceLauncher<Service> launch = launchService(serviceClass,
conf,
Arrays.asList(args),
true);
failf("Expected an exception with error code %d and text \"%s\" "
+ " -but the service completed with :%s",
errorCode, expectedText,
launch.getServiceException());
return null;
} catch (ExitUtil.ExitException e) {
int actualCode = e.getExitCode();
boolean condition = errorCode != actualCode ||
!StringUtils.contains(e.toString(), expectedText);
failif(condition,
"Expected an exception with error code %d and text \"%s\" "
+ " -but the service threw an exception with exit code %d: %s",
errorCode, expectedText,
actualCode, e);
return e;
}
}
}
|
to
|
java
|
micronaut-projects__micronaut-core
|
inject-groovy/src/main/groovy/io/micronaut/ast/groovy/visitor/GroovyPackageElement.java
|
{
"start": 1028,
"end": 2546
}
|
class ____ extends AbstractGroovyElement implements PackageElement {
private final PackageNode packageNode;
/**
* Default constructor.
*
* @param visitorContext The visitor context
* @param packageNode The package node
* @param annotationMetadataFactory The annotation metadata
*/
public GroovyPackageElement(GroovyVisitorContext visitorContext,
PackageNode packageNode,
ElementAnnotationMetadataFactory annotationMetadataFactory) {
super(visitorContext, new GroovyNativeElement.Package(packageNode), annotationMetadataFactory);
this.packageNode = packageNode;
}
@Override
protected @NonNull AbstractGroovyElement copyConstructor() {
return new GroovyPackageElement(visitorContext, packageNode, elementAnnotationMetadataFactory);
}
@NonNull
@Override
public String getName() {
final String n = packageNode.getName();
if (n.endsWith(".")) {
return n.substring(0, n.length() - 1);
}
return n;
}
@Override
public @NonNull String getSimpleName() {
String name = getName();
int index = name.lastIndexOf(".");
if (index > -1) {
return name.substring(index + 1);
}
return name;
}
@Override
public boolean isProtected() {
return false;
}
@Override
public boolean isPublic() {
return true;
}
}
|
GroovyPackageElement
|
java
|
elastic__elasticsearch
|
test/framework/src/main/java/org/elasticsearch/repositories/blobstore/RepositoryFileType.java
|
{
"start": 732,
"end": 2423
}
|
enum ____ {
ROOT_INDEX_N("index-NUM"),
ROOT_INDEX_LATEST("index.latest"),
SNAPSHOT_INFO("snap-UUID.dat"),
GLOBAL_METADATA("meta-UUID.dat"),
INDEX_METADATA("indices/UUID/meta-SHORTUUID.dat"),
SHARD_GENERATION("indices/UUID/NUM/index-UUID"),
SHARD_SNAPSHOT_INFO("indices/UUID/NUM/snap-UUID.dat"),
SHARD_DATA("indices/UUID/NUM/__UUID"),
// NB no support for legacy names (yet)
;
private final Pattern pattern;
RepositoryFileType(String regex) {
pattern = Pattern.compile(
"^("
+ regex
// decimal numbers
.replace("NUM", "(0|[1-9][0-9]*)")
// 15-byte UUIDS from TimeBasedUUIDGenerator
.replace("SHORTUUID", "[0-9a-zA-Z_-]{" + UUIDs.TIME_BASED_UUID_STRING_LENGTH + "}")
// 16-byte UUIDs from RandomBasedUUIDGenerator
.replace("UUID", "[0-9a-zA-Z_-]{" + UUIDs.RANDOM_BASED_UUID_STRING_LENGTH + "}")
+ ")$"
);
}
public static RepositoryFileType getRepositoryFileType(Path repositoryRoot, Path blobPath) {
final var relativePath = repositoryRoot.relativize(blobPath).toString().replace(repositoryRoot.getFileSystem().getSeparator(), "/");
for (final var repositoryFileType : RepositoryFileType.values()) {
if (repositoryFileType.pattern.matcher(relativePath).matches()) {
return repositoryFileType;
}
}
throw new IllegalArgumentException(
Strings.format("[%s] is not the path of a known blob type within [%s]", relativePath, repositoryRoot)
);
}
}
|
RepositoryFileType
|
java
|
netty__netty
|
common/src/main/java/io/netty/util/internal/MacAddressUtil.java
|
{
"start": 1546,
"end": 2165
}
|
interface ____ on public
* networks is better than a local network interface.
*
* @return byte array containing a MAC. null if no MAC can be found.
*/
public static byte[] bestAvailableMac() {
// Find the best MAC address available.
byte[] bestMacAddr = EMPTY_BYTES;
InetAddress bestInetAddr = NetUtil.LOCALHOST4;
// Retrieve the list of available network interfaces.
Map<NetworkInterface, InetAddress> ifaces = new LinkedHashMap<NetworkInterface, InetAddress>();
for (NetworkInterface iface: NetUtil.NETWORK_INTERFACES) {
// Use the
|
used
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/io/support/PropertySourceProcessorTests.java
|
{
"start": 3470,
"end": 4829
}
|
class ____ {
@Test
void processorIgnoresIllegalArgumentException() {
assertProcessorIgnoresFailure(PlaceholderResolutionExceptionPropertySourceFactory.class);
}
@Test
void processorIgnoresFileNotFoundException() {
assertProcessorIgnoresFailure(FileNotFoundExceptionPropertySourceFactory.class);
}
@Test
void processorIgnoresUnknownHostException() {
assertProcessorIgnoresFailure(UnknownHostExceptionPropertySourceFactory.class);
}
@Test
void processorIgnoresSocketException() {
assertProcessorIgnoresFailure(SocketExceptionPropertySourceFactory.class);
}
@Test
void processorIgnoresSupportedExceptionWrappedInIllegalStateException() {
assertProcessorIgnoresFailure(WrappedIOExceptionPropertySourceFactory.class);
}
@Test
void processorIgnoresSupportedExceptionWrappedInUncheckedIOException() {
assertProcessorIgnoresFailure(UncheckedIOExceptionPropertySourceFactory.class);
}
private void assertProcessorIgnoresFailure(Class<? extends PropertySourceFactory> factoryClass) {
PropertySourceDescriptor descriptor = new PropertySourceDescriptor(List.of(PROPS_FILE), true, null, factoryClass, null);
assertThatNoException().isThrownBy(() -> processor.processPropertySource(descriptor));
assertThat(environment.getPropertySources()).hasSize(2);
}
}
private static
|
IgnoreResourceNotFoundTests
|
java
|
spring-projects__spring-boot
|
core/spring-boot-test-autoconfigure/src/main/java/org/springframework/boot/test/autoconfigure/json/JsonTypeExcludeFilter.java
|
{
"start": 953,
"end": 1121
}
|
class ____ extends StandardAnnotationCustomizableTypeExcludeFilter<JsonTest> {
JsonTypeExcludeFilter(Class<?> testClass) {
super(testClass);
}
}
|
JsonTypeExcludeFilter
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/ExecutionGraphInfo.java
|
{
"start": 1231,
"end": 1319
}
|
class ____ provides different {@link
* ExecutionGraph}-related information.
*/
public
|
that
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/main/java/io/quarkus/resteasy/reactive/server/deployment/ResteasyReactiveResourceMethodEntriesBuildItem.java
|
{
"start": 1034,
"end": 2109
}
|
class ____ {
private final EndpointIndexer.BasicResourceClassInfo basicResourceClassInfo;
private final MethodInfo methodInfo;
private final ClassInfo actualClassInfo;
private final ResourceMethod resourceMethod;
public Entry(EndpointIndexer.BasicResourceClassInfo basicResourceClassInfo, MethodInfo methodInfo,
ClassInfo actualClassInfo, ResourceMethod resourceMethod) {
this.basicResourceClassInfo = basicResourceClassInfo;
this.methodInfo = methodInfo;
this.actualClassInfo = actualClassInfo;
this.resourceMethod = resourceMethod;
}
public EndpointIndexer.BasicResourceClassInfo getBasicResourceClassInfo() {
return basicResourceClassInfo;
}
public MethodInfo getMethodInfo() {
return methodInfo;
}
public ClassInfo getActualClassInfo() {
return actualClassInfo;
}
public ResourceMethod getResourceMethod() {
return resourceMethod;
}
}
}
|
Entry
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/onetoone/joined/Person.java
|
{
"start": 181,
"end": 270
}
|
class ____ extends Entity {
public Address address;
public Address mailingAddress;
}
|
Person
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/TestInstancePreDestroyCallbackTests.java
|
{
"start": 3087,
"end": 3294
}
|
class ____ {
boolean destroyed;
void setDestroyed() {
this.destroyed = true;
}
}
@SuppressWarnings("JUnitMalformedDeclaration")
@ExtendWith(FooInstancePreDestroyCallback.class)
static
|
Destroyable
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/authorization/OidcClientRegistrationTests.java
|
{
"start": 32263,
"end": 33765
}
|
class ____ extends AuthorizationServerConfiguration {
// @formatter:off
@Bean
@Override
public SecurityFilterChain authorizationServerSecurityFilterChain(HttpSecurity http) throws Exception {
http
.oauth2AuthorizationServer((authorizationServer) ->
authorizationServer
.oidc((oidc) ->
oidc
.clientRegistrationEndpoint((clientRegistration) ->
clientRegistration
.authenticationProviders(configureClientRegistrationConverters())
)
)
)
.authorizeHttpRequests((authorize) ->
authorize.anyRequest().authenticated()
);
return http.build();
}
// @formatter:on
private Consumer<List<AuthenticationProvider>> configureClientRegistrationConverters() {
// @formatter:off
return (authenticationProviders) ->
authenticationProviders.forEach((authenticationProvider) -> {
List<String> supportedCustomClientMetadata = List.of("custom-metadata-name-1", "custom-metadata-name-2");
if (authenticationProvider instanceof OidcClientRegistrationAuthenticationProvider provider) {
provider.setRegisteredClientConverter(new CustomRegisteredClientConverter(supportedCustomClientMetadata));
provider.setClientRegistrationConverter(new CustomClientRegistrationConverter(supportedCustomClientMetadata));
}
});
// @formatter:on
}
}
@EnableWebSecurity
@Configuration(proxyBeanMethods = false)
static
|
CustomClientMetadataConfiguration
|
java
|
apache__camel
|
core/camel-core-reifier/src/main/java/org/apache/camel/reifier/language/WasmExpressionReifier.java
|
{
"start": 1009,
"end": 1693
}
|
class ____ extends TypedExpressionReifier<WasmExpression> {
public WasmExpressionReifier(CamelContext camelContext, ExpressionDefinition definition) {
super(camelContext, definition);
}
@Override
public boolean isResolveOptionalExternalScriptEnabled() {
// simple language will handle to resolve external scripts as they can be dynamic using simple language itself
return false;
}
@Override
protected Object[] createProperties() {
Object[] properties = new Object[2];
properties[0] = asResultType();
properties[1] = parseString(definition.getModule());
return properties;
}
}
|
WasmExpressionReifier
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/GetClassOnEnum.java
|
{
"start": 1490,
"end": 2158
}
|
class ____ extends BugChecker implements BugChecker.MethodInvocationTreeMatcher {
private static final Matcher<ExpressionTree> ENUM_CLASS =
instanceMethod().onDescendantOf(Enum.class.getName()).named("getClass").withNoParameters();
@Override
public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
if (ENUM_CLASS.matches(tree, state)) {
return describeMatch(
tree,
SuggestedFix.replace(
state.getEndPosition(ASTHelpers.getReceiver(tree)),
state.getEndPosition(tree),
".getDeclaringClass()"));
}
return Description.NO_MATCH;
}
}
|
GetClassOnEnum
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/bug/Bug_for_Issue_535.java
|
{
"start": 211,
"end": 697
}
|
class ____ extends TestCase {
public void test_for_issue() throws Exception {
TestPOJO testPOJO = new TestPOJO();
testPOJO.setA("a");
testPOJO.setB(new BigDecimal("1234512312312312312312"));
String s = JSON.toJSONString(testPOJO);
System.out.println(s);
TestPOJO vo2 = JSON.parseObject(s, TestPOJO.class, Feature.UseBigDecimal);
Assert.assertEquals(testPOJO.getB(), vo2.getB());
}
public static
|
Bug_for_Issue_535
|
java
|
apache__camel
|
components/camel-servlet/src/test/java/org/apache/camel/component/servlet/ServletCamelRouterTestSupport.java
|
{
"start": 7448,
"end": 9812
}
|
class ____ {
HttpURLConnection con;
String text;
public WebResponse(HttpURLConnection con) {
this.con = con;
}
public int getResponseCode() throws IOException {
return con.getResponseCode();
}
public String getText(Charset charset) throws IOException {
if (text == null) {
if (con.getContentLength() != 0) {
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
IOHelper.copy(con.getInputStream(), baos);
text = baos.toString(charset.name());
} catch (IOException e) {
text = e.getMessage();
}
} else {
text = "";
}
}
return text;
}
public String getText() throws IOException {
return getText(Charset.defaultCharset());
}
public String getContentType() {
String content = con.getContentType();
return content != null && content.contains(";")
? content.substring(0, content.indexOf(";"))
: content;
}
public InputStream getInputStream() throws IOException {
try {
return con.getInputStream();
} catch (IOException e) {
try {
Field f = con.getClass().getDeclaredField("inputStream");
f.setAccessible(true);
return (InputStream) f.get(con);
} catch (Throwable t) {
e.addSuppressed(t);
throw e;
}
}
}
public String getResponseMessage() throws IOException {
return con.getResponseMessage();
}
public String getCharacterSet() {
String content = con.getContentType();
return content != null && content.contains(";charset=")
? content.substring(content.lastIndexOf(";charset=") + ";charset=".length())
: con.getContentEncoding();
}
public String getHeaderField(String key) {
return con.getHeaderField(key);
}
}
protected static
|
WebResponse
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServer.java
|
{
"start": 2237,
"end": 9290
}
|
class ____ {
private File secretFile;
private HttpFSServerWebServer webServer;
@BeforeEach
public void init() throws Exception {
File homeDir = GenericTestUtils.setupTestRootDir(TestHttpFSServerWebServer.class);
File confDir = new File(homeDir, "etc/hadoop");
File logsDir = new File(homeDir, "logs");
File tempDir = new File(homeDir, "temp");
confDir.mkdirs();
logsDir.mkdirs();
tempDir.mkdirs();
if (Shell.WINDOWS) {
File binDir = new File(homeDir, "bin");
binDir.mkdirs();
File winutils = Shell.getWinUtilsFile();
if (winutils.exists()) {
FileUtils.copyFileToDirectory(winutils, binDir);
}
}
System.setProperty("hadoop.home.dir", homeDir.getAbsolutePath());
System.setProperty("hadoop.log.dir", logsDir.getAbsolutePath());
System.setProperty("httpfs.home.dir", homeDir.getAbsolutePath());
System.setProperty("httpfs.log.dir", logsDir.getAbsolutePath());
System.setProperty("httpfs.config.dir", confDir.getAbsolutePath());
secretFile = new File(System.getProperty("httpfs.config.dir"),
"httpfs-signature-custom.secret");
}
@AfterEach
public void teardown() throws Exception {
if (webServer != null) {
webServer.stop();
}
}
@Test
public void testStartStop() throws Exception {
webServer = createWebServer(createConfigurationWithRandomSecret());
webServer.start();
webServer.stop();
}
@Test
public void testJustStop() throws Exception {
webServer = createWebServer(createConfigurationWithRandomSecret());
webServer.stop();
}
@Test
public void testDoubleStop() throws Exception {
webServer = createWebServer(createConfigurationWithRandomSecret());
webServer.start();
webServer.stop();
webServer.stop();
}
@Test
public void testDoubleStart() throws Exception {
webServer = createWebServer(createConfigurationWithRandomSecret());
webServer.start();
webServer.start();
webServer.stop();
}
@Test
public void testServiceWithSecretFile() throws Exception {
createSecretFile("foo");
webServer = createWebServer(createConfigurationWithSecretFile());
webServer.start();
assertServiceRespondsWithOK(webServer.getUrl());
assertSignerSecretProviderType(webServer.getHttpServer(),
FileSignerSecretProvider.class);
webServer.stop();
}
@Test
public void testServiceWithSecretFileWithDeprecatedConfigOnly()
throws Exception {
createSecretFile("foo");
Configuration conf = createConfiguration();
setDeprecatedSecretFile(conf, secretFile.getAbsolutePath());
webServer = createWebServer(conf);
webServer.start();
assertServiceRespondsWithOK(webServer.getUrl());
assertSignerSecretProviderType(webServer.getHttpServer(),
FileSignerSecretProvider.class);
webServer.stop();
}
@Test
public void testServiceWithSecretFileWithBothConfigOptions() throws Exception {
createSecretFile("foo");
Configuration conf = createConfigurationWithSecretFile();
setDeprecatedSecretFile(conf, secretFile.getAbsolutePath());
webServer = createWebServer(conf);
webServer.start();
assertServiceRespondsWithOK(webServer.getUrl());
assertSignerSecretProviderType(webServer.getHttpServer(),
FileSignerSecretProvider.class);
webServer.stop();
}
@Test
public void testServiceWithMissingSecretFile() throws Exception {
webServer = createWebServer(createConfigurationWithSecretFile());
webServer.start();
assertServiceRespondsWithOK(webServer.getUrl());
assertSignerSecretProviderType(webServer.getHttpServer(),
RandomSignerSecretProvider.class);
webServer.stop();
}
@Test
public void testServiceWithEmptySecretFile() throws Exception {
// The AuthenticationFilter.constructSecretProvider will do the fallback
// to the random secrets not the HttpFSAuthenticationFilter.
createSecretFile("");
webServer = createWebServer(createConfigurationWithSecretFile());
webServer.start();
assertServiceRespondsWithOK(webServer.getUrl());
assertSignerSecretProviderType(webServer.getHttpServer(),
RandomSignerSecretProvider.class);
webServer.stop();
}
private <T extends SignerSecretProvider> void assertSignerSecretProviderType(
HttpServer2 server, Class<T> expected) {
SignerSecretProvider secretProvider = (SignerSecretProvider)
server.getWebAppContext().getServletContext()
.getAttribute(SIGNER_SECRET_PROVIDER_ATTRIBUTE);
assertNotNull(secretProvider, "The secret provider must not be null");
assertEquals(expected, secretProvider.getClass(),
"The secret provider must match the following");
}
private void assertServiceRespondsWithOK(URL serviceURL)
throws Exception {
String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
URL url = new URL(serviceURL, MessageFormat.format(
"/webhdfs/v1/?user.name={0}&op=liststatus", user));
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
try (BufferedReader reader = new BufferedReader(
new InputStreamReader(conn.getInputStream()))) {
reader.readLine();
}
}
private void setDeprecatedSecretFile(Configuration conf, String path) {
conf.set(HttpFSAuthenticationFilter.CONF_PREFIX +
AuthenticationFilter.SIGNATURE_SECRET_FILE,
path);
}
private Configuration createConfigurationWithRandomSecret() {
Configuration conf = createConfiguration();
conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
AuthenticationFilter.SIGNER_SECRET_PROVIDER, "random");
return conf;
}
private Configuration createConfigurationWithSecretFile() {
Configuration conf = createConfiguration();
conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
AuthenticationFilter.SIGNATURE_SECRET_FILE,
secretFile.getAbsolutePath());
return conf;
}
private Configuration createConfiguration() {
Configuration conf = new Configuration(false);
conf.set(HttpFSServerWebServer.HTTP_HOSTNAME_KEY, "localhost");
conf.setInt(HttpFSServerWebServer.HTTP_PORT_KEY, 0);
return conf;
}
private HttpFSServerWebServer createWebServer(Configuration conf)
throws Exception {
Configuration sslConf = new Configuration(false);
// The configuration must be stored for the HttpFSAuthenticatorFilter, because
// it accesses the configuration from the webapp: HttpFSServerWebApp.get().getConfig()
try (FileOutputStream os = new FileOutputStream(
new File(System.getProperty("httpfs.config.dir"), "httpfs-site.xml"))) {
conf.writeXml(os);
}
return new HttpFSServerWebServer(conf, sslConf);
}
private void createSecretFile(String content) throws IOException {
assertTrue(secretFile.createNewFile());
FileUtils.writeStringToFile(secretFile, content, StandardCharsets.UTF_8);
}
}
|
TestHttpFSServerWebServer
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/activities/ActivitiesManager.java
|
{
"start": 24623,
"end": 26472
}
|
class ____ {
private boolean enabled = false;
private DiagnosticsCollector gdc;
public boolean isEnabled() {
return enabled;
}
public void enable() {
this.enabled = true;
}
public void disable() {
this.enabled = false;
}
public DiagnosticsCollectorManager(DiagnosticsCollector gdc) {
this.gdc = gdc;
}
public Optional<DiagnosticsCollector> getOptionalDiagnosticsCollector() {
if (enabled) {
return Optional.of(gdc);
} else {
return Optional.empty();
}
}
}
public Optional<DiagnosticsCollector> getOptionalDiagnosticsCollector() {
return diagnosticCollectorManager.get().getOptionalDiagnosticsCollector();
}
public String getResourceDiagnostics(ResourceCalculator rc, Resource required,
Resource available) {
Optional<DiagnosticsCollector> dcOpt = getOptionalDiagnosticsCollector();
if (dcOpt.isPresent()) {
dcOpt.get().collectResourceDiagnostics(rc, required, available);
return getDiagnostics(dcOpt.get());
}
return EMPTY_DIAGNOSTICS;
}
public static String getDiagnostics(Optional<DiagnosticsCollector> dcOpt) {
if (dcOpt != null && dcOpt.isPresent()) {
DiagnosticsCollector dc = dcOpt.get();
if (dc != null && dc.getDiagnostics() != null) {
return getDiagnostics(dc);
}
}
return EMPTY_DIAGNOSTICS;
}
private static String getDiagnostics(DiagnosticsCollector dc) {
StringBuilder sb = new StringBuilder();
sb.append(", ").append(dc.getDiagnostics());
if (dc.getDetails() != null) {
sb.append(DIAGNOSTICS_DETAILS_SEPARATOR).append(dc.getDetails());
}
return sb.toString();
}
@VisibleForTesting
public int getAppActivitiesMaxQueueLength() {
return appActivitiesMaxQueueLength;
}
}
|
DiagnosticsCollectorManager
|
java
|
grpc__grpc-java
|
interop-testing/src/main/java/io/grpc/testing/integration/XdsTestServer.java
|
{
"start": 12353,
"end": 13310
}
|
class ____ extends TestServiceGrpc.TestServiceImplBase {
private final String serverId;
private final String host;
private TestServiceImpl(String serverId, String host) {
this.serverId = serverId;
this.host = host;
}
@Override
public void emptyCall(
EmptyProtos.Empty req, StreamObserver<EmptyProtos.Empty> responseObserver) {
responseObserver.onNext(EmptyProtos.Empty.getDefaultInstance());
responseObserver.onCompleted();
}
@Override
public void unaryCall(SimpleRequest req, StreamObserver<SimpleResponse> responseObserver) {
responseObserver.onNext(SimpleResponse.newBuilder()
.setServerId(serverId)
.setHostname(host)
.setPayload(Payload.newBuilder()
.setBody(ByteString.copyFrom(new byte[req.getResponseSize()]))
.build())
.build());
responseObserver.onCompleted();
}
}
private static
|
TestServiceImpl
|
java
|
square__retrofit
|
retrofit/java-test/src/test/java/retrofit2/RequestFactoryTest.java
|
{
"start": 64609,
"end": 65509
}
|
class ____ {
@Multipart //
@POST("/foo/bar/") //
Call<ResponseBody> method(@Part MultipartBody.Part part) {
return null;
}
}
MultipartBody.Part part = MultipartBody.Part.createFormData("kit", "kat");
Request request = buildRequest(Example.class, part);
assertThat(request.method()).isEqualTo("POST");
assertThat(request.headers().size()).isEqualTo(0);
assertThat(request.url().toString()).isEqualTo("http://example.com/foo/bar/");
RequestBody body = request.body();
Buffer buffer = new Buffer();
body.writeTo(buffer);
String bodyString = buffer.readUtf8();
assertThat(bodyString).contains("Content-Disposition: form-data;");
assertThat(bodyString).contains("name=\"kit\"\r\n");
assertThat(bodyString).contains("\r\nkat\r\n--");
}
@Test
public void multipartOkHttpIterablePart() throws IOException {
|
Example
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bootstrap/binding/annotations/associationOverride/EmbeddedOverrideTests.java
|
{
"start": 3535,
"end": 4302
}
|
class ____ {
@Id
private Integer id;
private String name;
@Embedded
@AttributeOverride( name = "streetAddress", column = @Column( name = "home_street_addr" ) )
@AttributeOverride( name = "city", column = @Column( name = "home_city" ) )
@AssociationOverride( name = "state", joinColumns = @JoinColumn( name = "home_state_id" ) )
private EmbeddedOverrideAddress homeAddress;
@Embedded
@AttributeOverride( name = "streetAddress", column = @Column( name = "work_street_addr" ) )
@AttributeOverride( name = "city", column = @Column( name = "work_city" ) )
@AssociationOverride( name = "state", joinColumns = @JoinColumn( name = "work_state_id" ) )
private EmbeddedOverrideAddress workAddress;
}
@Embeddable
public static
|
EmbeddedOverrideContact
|
java
|
quarkusio__quarkus
|
integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftWithLocalDockerAndDeploymentResourceTest.java
|
{
"start": 957,
"end": 3498
}
|
class ____ {
@RegisterExtension
static final QuarkusProdModeTest config = new QuarkusProdModeTest()
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class).addClasses(GreetingResource.class))
.setApplicationName("openshift-with-local-docker-and-deployment-resource")
.setApplicationVersion("0.1-SNAPSHOT")
.overrideConfigKey("quarkus.container-image.builder", "docker")
.overrideConfigKey("quarkus.container-image.group", "testme")
.setLogFileName("k8s.log")
.setForcedDependencies(List.of(
Dependency.of("io.quarkus", "quarkus-openshift", Version.getVersion()),
Dependency.of("io.quarkus", "quarkus-container-image-docker", Version.getVersion())));
@ProdBuildResults
private ProdModeTestResults prodModeTestResults;
@Test
public void assertGeneratedResources() throws IOException {
final Path kubernetesDir = prodModeTestResults.getBuildDir().resolve("kubernetes");
assertThat(kubernetesDir)
.isDirectoryContaining(p -> p.getFileName().endsWith("openshift.json"))
.isDirectoryContaining(p -> p.getFileName().endsWith("openshift.yml"));
List<HasMetadata> kubernetesList = DeserializationUtil.deserializeAsList(kubernetesDir.resolve("openshift.yml"));
assertThat(kubernetesList.get(0)).isInstanceOfSatisfying(Deployment.class, d -> {
assertThat(d.getMetadata()).satisfies(m -> {
assertThat(m.getName()).isEqualTo("openshift-with-local-docker-and-deployment-resource");
});
assertThat(kubernetesList).filteredOn(h -> "BuildConfig".equals(h.getKind())).hasSize(0);
assertThat(kubernetesList).filteredOn(h -> "ImageStream".equals(h.getKind())).hasSize(0);
assertThat(d.getSpec()).satisfies(deploymentSpec -> {
assertThat(deploymentSpec.getTemplate()).satisfies(t -> {
assertThat(t.getSpec()).satisfies(podSpec -> {
assertThat(podSpec.getContainers()).singleElement().satisfies(container -> {
assertThat(container.getImage())
.isEqualTo(
"docker.io/testme/openshift-with-local-docker-and-deployment-resource:0.1-SNAPSHOT");
});
});
});
});
});
}
}
|
OpenshiftWithLocalDockerAndDeploymentResourceTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/exception/User.java
|
{
"start": 256,
"end": 951
}
|
class ____ {
private Long id;
private String username;
private Set memberships = new HashSet();
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public Set getMemberships() {
return memberships;
}
public void setMemberships(Set memberships) {
this.memberships = memberships;
}
public void addMembership(Group membership) {
if (membership == null) {
throw new IllegalArgumentException("Membership to add cannot be null");
}
this.memberships.add(membership);
membership.getMembers().add(this);
}
}
|
User
|
java
|
apache__thrift
|
lib/java/src/test/java/org/apache/thrift/async/TestTAsyncClientManager.java
|
{
"start": 11763,
"end": 13635
}
|
class ____ implements Runnable {
private final int numCalls_;
private int numSuccesses_ = 0;
private final Srv.AsyncClient client_;
public JankyRunnable(int numCalls) throws Exception {
numCalls_ = numCalls;
client_ = getClient();
client_.setTimeout(20000);
}
public int getNumSuccesses() {
return numSuccesses_;
}
public void run() {
for (int i = 0; i < numCalls_ && !client_.hasError(); i++) {
final int iteration = i;
try {
// connect an async client
final CountDownLatch latch = new CountDownLatch(1);
final AtomicBoolean returned = new AtomicBoolean(false);
client_.Janky(
1,
new AsyncMethodCallback<Integer>() {
@Override
public void onComplete(Integer result) {
assertEquals(3, result.intValue());
returned.set(true);
latch.countDown();
}
@Override
public void onError(Exception exception) {
try {
StringWriter sink = new StringWriter();
exception.printStackTrace(new PrintWriter(sink, true));
Assertions.fail(
"unexpected onError on iteration " + iteration + ": " + sink.toString());
} finally {
latch.countDown();
}
}
});
boolean calledBack = latch.await(30, TimeUnit.SECONDS);
assertTrue(calledBack, "wasn't called back in time on iteration " + iteration);
assertTrue(returned.get(), "onComplete not called on iteration " + iteration);
this.numSuccesses_++;
} catch (Exception e) {
fail(e);
}
}
}
}
}
|
JankyRunnable
|
java
|
quarkusio__quarkus
|
extensions/security/deployment/src/test/java/io/quarkus/security/test/permissionsallowed/RolesAllowedWithPermissionsAllowedTest.java
|
{
"start": 2494,
"end": 2650
}
|
class ____ {
@PermissionsAllowed(value = "permission")
public String securedBean() {
return SUCCESS;
}
}
}
|
SecuredBean
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/schema/ColumnDefaultTest.java
|
{
"start": 1485,
"end": 2210
}
|
class ____ {
@Id
private Long id;
@ColumnDefault("'N/A'")
private String name;
@ColumnDefault("-1")
private Long clientId;
//Getter and setters omitted for brevity
//end::schema-generation-column-default-value-mapping-example[]
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Long getClientId() {
return clientId;
}
public void setClientId(Long clientId) {
this.clientId = clientId;
}
//tag::schema-generation-column-default-value-mapping-example[]
}
//end::schema-generation-column-default-value-mapping-example[]
}
|
Person
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-datajoin/src/main/java/org/apache/hadoop/contrib/utils/join/ArrayListBackedIterator.java
|
{
"start": 1075,
"end": 1778
}
|
class ____ implements ResetableIterator {
private Iterator iter;
private ArrayList<Object> data;
public ArrayListBackedIterator() {
this(new ArrayList<Object>());
}
public ArrayListBackedIterator(ArrayList<Object> data) {
this.data = data;
this.iter = this.data.iterator();
}
public void add(Object item) {
this.data.add(item);
}
public boolean hasNext() {
return this.iter.hasNext();
}
public Object next() {
return this.iter.next();
}
public void remove() {
}
public void reset() {
this.iter = this.data.iterator();
}
public void close() throws IOException {
this.iter = null;
this.data = null;
}
}
|
ArrayListBackedIterator
|
java
|
apache__camel
|
core/camel-management-api/src/main/java/org/apache/camel/api/management/mbean/ManagedAsyncProcessorAwaitManagerMBean.java
|
{
"start": 1021,
"end": 2982
}
|
interface ____ extends ManagedServiceMBean {
@ManagedAttribute(description = "Whether to interrupt any blocking threads during stopping.")
boolean isInterruptThreadsWhileStopping();
@ManagedAttribute(description = "Whether to interrupt any blocking threads during stopping.")
void setInterruptThreadsWhileStopping(boolean interruptThreadsWhileStopping);
@ManagedAttribute(description = "Number of threads that are blocked waiting for other threads to trigger the callback when they are done processing the exchange")
int getSize();
@ManagedOperation(description = "Lists all the exchanges which are currently inflight, having a blocked thread awaiting for other threads to trigger the callback when they are done")
TabularData browse();
@ManagedOperation(description = "To interrupt an exchange which may seem as stuck, to force the exchange to continue, allowing any blocking thread to be released.")
void interrupt(String exchangeId);
@ManagedAttribute(description = "Number of threads that has been blocked")
long getThreadsBlocked();
@ManagedAttribute(description = "Number of threads that has been interrupted")
long getThreadsInterrupted();
@ManagedAttribute(description = "Total wait time in msec.")
long getTotalDuration();
@ManagedAttribute(description = "The minimum wait time in msec.")
long getMinDuration();
@ManagedAttribute(description = "The maximum wait time in msec.")
long getMaxDuration();
@ManagedAttribute(description = "The average wait time in msec.")
long getMeanDuration();
@ManagedOperation(description = "Resets the statistics")
void resetStatistics();
@ManagedAttribute(description = "Utilization statistics enabled")
boolean isStatisticsEnabled();
@ManagedAttribute(description = "Utilization statistics enabled")
void setStatisticsEnabled(boolean statisticsEnabled);
}
|
ManagedAsyncProcessorAwaitManagerMBean
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/testkit/Player.java
|
{
"start": 695,
"end": 2052
}
|
class ____ {
private Name name;
public Name nickname;
private int pointsPerGame;
private int assistsPerGame;
private int reboundsPerGame;
private String team;
// used to test private field access
@SuppressWarnings("unused")
private int highestScore;
public Player() {}
public Player(Name name, String team) {
setName(name);
setTeam(team);
}
public Name getName() {
return name;
}
public void setName(Name name) {
this.name = name;
}
public int getPointsPerGame() {
return pointsPerGame;
}
public void setPointsPerGame(int pointsPerGame) {
this.pointsPerGame = pointsPerGame;
}
public int getAssistsPerGame() {
return assistsPerGame;
}
public void setAssistsPerGame(int assistsPerGame) {
this.assistsPerGame = assistsPerGame;
}
public int getReboundsPerGame() {
return reboundsPerGame;
}
public void setReboundsPerGame(int reboundsPerGame) {
this.reboundsPerGame = reboundsPerGame;
}
public String getTeam() {
return team;
}
public void setTeam(String team) {
this.team = team;
}
public void setHighestScore(int highestScore) {
this.highestScore = highestScore;
}
@Override
public String toString() {
return "%s[%s %s, team=%s]".formatted(getClass().getSimpleName(), name.getFirst(), name.getLast(), team);
}
}
|
Player
|
java
|
apache__dubbo
|
dubbo-metrics/dubbo-tracing/src/main/java/org/apache/dubbo/tracing/metrics/ObservationMeter.java
|
{
"start": 1067,
"end": 1500
}
|
class ____ {
public static void addMeterRegistry(ObservationRegistry registry, ApplicationModel applicationModel) {
MeterRegistry meterRegistry = MetricsGlobalRegistry.getCompositeRegistry(applicationModel);
registry.observationConfig()
.observationHandler(
new io.micrometer.core.instrument.observation.DefaultMeterObservationHandler(meterRegistry));
}
}
|
ObservationMeter
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/kstream/internals/KStreamPrint.java
|
{
"start": 1434,
"end": 1844
}
|
class ____ implements Processor<K, V, Void, Void> {
@Override
public void process(final Record<K, V> record) {
action.apply(record.key(), record.value());
}
@Override
public void close() {
if (action instanceof PrintForeachAction) {
((PrintForeachAction<K, V>) action).close();
}
}
}
}
|
KStreamPrintProcessor
|
java
|
quarkusio__quarkus
|
integration-tests/main/src/test/java/io/quarkus/it/main/CustomConfigSourceTestCase.java
|
{
"start": 204,
"end": 387
}
|
class ____ {
@Test
public void testCustomConfig() {
RestAssured.when().get("/core/config-test").then()
.body(is("OK"));
}
}
|
CustomConfigSourceTestCase
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/jdbc/SqlGroup.java
|
{
"start": 1238,
"end": 1488
}
|
class ____ method, implicitly generating this container annotation.
*
* <p>This annotation may be used as a <em>meta-annotation</em> to create custom
* <em>composed annotations</em>.
*
* <p>This annotation will be inherited from an enclosing test
|
or
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/support/BootstrapTestUtilsMergedConfigTests.java
|
{
"start": 21115,
"end": 21308
}
|
class ____ extends DuplicateConfigAppleConfigTestCase {
}
@ContextConfiguration(classes = AppleConfig.class)
@ActiveProfiles({"oranges", "apples"})
static
|
SubDuplicateConfigAppleConfigTestCase
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/search/DistanceMetric.java
|
{
"start": 118,
"end": 169
}
|
enum ____ {
L2,
IP,
COSINE
}
|
DistanceMetric
|
java
|
quarkusio__quarkus
|
independent-projects/qute/core/src/main/java/io/quarkus/qute/Expression.java
|
{
"start": 3326,
"end": 3422
}
|
interface ____ extends Part {
List<Expression> getParameters();
}
}
|
VirtualMethodPart
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestIngressPortBasedResolver.java
|
{
"start": 1032,
"end": 1079
}
|
class ____ IngressPortBasedResolver.
*/
public
|
for
|
java
|
elastic__elasticsearch
|
x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeFieldBlockLoaderTests.java
|
{
"start": 1382,
"end": 4339
}
|
class ____ extends BlockLoaderTestCase {
public GeoShapeFieldBlockLoaderTests(Params params) {
super("geo_shape", List.of(new GeoShapeDataSourceHandler()), params);
}
@Override
public void testBlockLoaderOfMultiField() throws IOException {
// Multi fields are noop for geo_shape.
}
@Override
@SuppressWarnings("unchecked")
protected Object expected(Map<String, Object> fieldMapping, Object value, TestContext testContext) {
if (value instanceof List<?> == false) {
return convert(value);
}
// TODO FieldExtractPreference.EXTRACT_SPATIAL_BOUNDS is currently not covered, it needs special logic
// As a result we always load from source (stored or fallback synthetic) and they should work the same.
var resultList = ((List<Object>) value).stream().map(this::convert).filter(Objects::nonNull).toList();
return maybeFoldList(resultList);
}
private Object convert(Object value) {
if (value instanceof String s) {
return toWKB(fromWKT(s));
}
if (value instanceof Map<?, ?> m) {
return toWKB(fromGeoJson(m));
}
// Malformed values are excluded
return null;
}
private Geometry fromWKT(String s) {
try {
var geometry = WellKnownText.fromWKT(GeographyValidator.instance(true), false, s);
return normalize(geometry);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@SuppressWarnings("unchecked")
private Geometry fromGeoJson(Map<?, ?> map) {
try {
var parser = new MapXContentParser(
xContentRegistry(),
LoggingDeprecationHandler.INSTANCE,
(Map<String, Object>) map,
XContentType.JSON
);
parser.nextToken();
var geometry = GeoJson.fromXContent(GeographyValidator.instance(true), false, true, parser);
return normalize(geometry);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private Geometry normalize(Geometry geometry) {
if (GeometryNormalizer.needsNormalize(Orientation.RIGHT, geometry)) {
return GeometryNormalizer.apply(Orientation.RIGHT, geometry);
}
return geometry;
}
private BytesRef toWKB(Geometry geometry) {
return new BytesRef(WellKnownBinary.toWKB(geometry, ByteOrder.LITTLE_ENDIAN));
}
@Override
protected Collection<? extends Plugin> getPlugins() {
var plugin = new LocalStateSpatialPlugin();
plugin.loadExtensions(new ExtensiblePlugin.ExtensionLoader() {
@Override
public <T> List<T> loadExtensions(Class<T> extensionPointType) {
return List.of();
}
});
return Collections.singletonList(plugin);
}
}
|
GeoShapeFieldBlockLoaderTests
|
java
|
apache__camel
|
components/camel-mina/src/test/java/org/apache/camel/component/mina/MinaConverterTest.java
|
{
"start": 1306,
"end": 3541
}
|
class ____ {
@Test
public void testToByteArray() {
byte[] in = "Hello World".getBytes();
IoBuffer bb = IoBuffer.wrap(in);
byte[] out = MinaConverter.toByteArray(bb);
for (int i = 0; i < out.length; i++) {
assertEquals(in[i], out[i]);
}
}
@Test
public void testToString() throws UnsupportedEncodingException {
String in = "Hello World \u4f60\u597d";
IoBuffer bb = IoBuffer.wrap(in.getBytes(StandardCharsets.UTF_8));
Exchange exchange = new DefaultExchange(new DefaultCamelContext());
exchange.setProperty(Exchange.CHARSET_NAME, "UTF-8");
String out = MinaConverter.toString(bb, exchange);
assertEquals("Hello World \u4f60\u597d", out);
}
@Test
public void testToStringTwoTimes() throws UnsupportedEncodingException {
String in = "Hello World \u4f60\u597d";
IoBuffer bb = IoBuffer.wrap(in.getBytes(StandardCharsets.UTF_8));
Exchange exchange = new DefaultExchange(new DefaultCamelContext());
exchange.setProperty(Exchange.CHARSET_NAME, "UTF-8");
String out = MinaConverter.toString(bb, exchange);
assertEquals("Hello World \u4f60\u597d", out);
// should NOT be possible to convert to string without affecting the ByteBuffer
out = MinaConverter.toString(bb, exchange);
assertEquals("", out);
}
@Test
public void testToInputStream() throws Exception {
byte[] in = "Hello World".getBytes();
IoBuffer bb = IoBuffer.wrap(in);
try (InputStream is = MinaConverter.toInputStream(bb)) {
for (byte b : in) {
int out = is.read();
assertEquals(b, out);
}
}
}
@Test
public void testToByteBuffer() {
byte[] in = "Hello World".getBytes();
IoBuffer bb = MinaConverter.toIoBuffer(in);
assertNotNull(bb);
// convert back to byte[] and see if the bytes are equal
bb.flip(); // must flip to change direction to read
byte[] out = MinaConverter.toByteArray(bb);
for (int i = 0; i < out.length; i++) {
assertEquals(in[i], out[i]);
}
}
}
|
MinaConverterTest
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/bean/override/BeanOverrideHandlerTests.java
|
{
"start": 10824,
"end": 10929
}
|
class ____ {
@DummyBean
String message;
@DummyBean
Integer counter;
}
static
|
MultipleAnnotations
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.