language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
apache__dubbo
dubbo-remoting/dubbo-remoting-netty4/src/test/java/org/apache/dubbo/remoting/transport/netty4/NettyBackedChannelBufferTest.java
{ "start": 1123, "end": 2040 }
class ____ { private static final int CAPACITY = 4096; private ChannelBuffer buffer; @BeforeEach public void init() { buffer = new NettyBackedChannelBuffer(Unpooled.buffer(CAPACITY, CAPACITY * 2)); } @AfterEach public void dispose() { buffer = null; } @Test void testBufferTransfer() { byte[] tmp1 = {1, 2}; byte[] tmp2 = {3, 4}; ChannelBuffer source = new NettyBackedChannelBuffer(Unpooled.buffer(2, 4)); source.writeBytes(tmp1); buffer.writeBytes(tmp2); assertEquals(2, buffer.readableBytes()); source.setBytes(0, tmp1, 0, 2); buffer.setBytes(0, source, 0, 2); assertEquals(2, buffer.readableBytes()); byte[] actual = new byte[2]; buffer.getBytes(0, actual); assertEquals(1, actual[0]); assertEquals(2, actual[1]); } }
NettyBackedChannelBufferTest
java
spring-projects__spring-framework
spring-jdbc/src/main/java/org/springframework/jdbc/core/simple/AbstractJdbcCall.java
{ "start": 1935, "end": 2080 }
class ____ { /** Logger available to subclasses. */ protected final Log logger = LogFactory.getLog(getClass()); /** Lower-level
AbstractJdbcCall
java
apache__rocketmq
common/src/main/java/org/apache/rocketmq/common/PopAckConstants.java
{ "start": 903, "end": 1951 }
class ____ { public static long ackTimeInterval = 1000; public static final long SECOND = 1000; public static long lockTime = 5000; public static int retryQueueNum = 1; public static final String REVIVE_GROUP = MixAll.CID_RMQ_SYS_PREFIX + "REVIVE_GROUP"; public static final String LOCAL_HOST = "127.0.0.1"; public static final String REVIVE_TOPIC = TopicValidator.SYSTEM_TOPIC_PREFIX + "REVIVE_LOG_"; public static final String CK_TAG = "ck"; public static final String ACK_TAG = "ack"; public static final String BATCH_ACK_TAG = "bAck"; public static final String SPLIT = "@"; /** * Build cluster revive topic * * @param clusterName cluster name * @return revive topic */ public static String buildClusterReviveTopic(String clusterName) { return PopAckConstants.REVIVE_TOPIC + clusterName; } public static boolean isStartWithRevivePrefix(String topicName) { return topicName != null && topicName.startsWith(REVIVE_TOPIC); } }
PopAckConstants
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/state/StateBackendTestBase.java
{ "start": 232592, "end": 235605 }
class ____ implements Serializable { private String strField; private Integer intField; private TestNestedPojoClassA kryoClassAField; private TestNestedPojoClassB kryoClassBField; public TestPojo() {} public TestPojo(String strField, Integer intField) { this.strField = strField; this.intField = intField; this.kryoClassAField = null; this.kryoClassBField = null; } public TestPojo( String strField, Integer intField, TestNestedPojoClassA classAField, TestNestedPojoClassB classBfield) { this.strField = strField; this.intField = intField; this.kryoClassAField = classAField; this.kryoClassBField = classBfield; } public String getStrField() { return strField; } public void setStrField(String strField) { this.strField = strField; } public Integer getIntField() { return intField; } public void setIntField(Integer intField) { this.intField = intField; } public TestNestedPojoClassA getKryoClassAField() { return kryoClassAField; } public void setKryoClassAField(TestNestedPojoClassA kryoClassAField) { this.kryoClassAField = kryoClassAField; } public TestNestedPojoClassB getKryoClassBField() { return kryoClassBField; } public void setKryoClassBField(TestNestedPojoClassB kryoClassBField) { this.kryoClassBField = kryoClassBField; } @Override public String toString() { return "TestPojo{" + "strField='" + strField + '\'' + ", intField=" + intField + '}'; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TestPojo testPojo = (TestPojo) o; return strField.equals(testPojo.strField) && intField.equals(testPojo.intField) && ((kryoClassAField == null && testPojo.kryoClassAField == null) || kryoClassAField.equals(testPojo.kryoClassAField)) && ((kryoClassBField == null && testPojo.kryoClassBField == null) || kryoClassBField.equals(testPojo.kryoClassBField)); } @Override public int hashCode() { int result = strField.hashCode(); result = 31 * result + intField.hashCode(); if (kryoClassAField != null) { result = 
31 * result + kryoClassAField.hashCode(); } if (kryoClassBField != null) { result = 31 * result + kryoClassBField.hashCode(); } return result; } } public static
TestPojo
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/MultiSingleTableLoadTest.java
{ "start": 3782, "end": 4209 }
class ____ implements Serializable { @Id private long id; @ManyToOne(optional = false, cascade = CascadeType.ALL) @JoinColumn(name = "a_id") private A a; public Holder() { } public Holder(long id, A a) { this.id = id; this.a = a; } public A getA() { return a; } } @Entity(name = "A") @Table(name = "tbl_a") @Inheritance(strategy = InheritanceType.SINGLE_TABLE) public static abstract
Holder
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/io/network/netty/InboundChannelHandlerFactory.java
{ "start": 1554, "end": 2424 }
interface ____ { /** * Gives back priority of the {@link ChannelHandler}. The bigger the value is, the earlier it is * executed. If multiple handlers have the same priority then the order is not defined. * * @return the priority of the {@link ChannelHandler}. */ int priority(); /** * Creates new instance of {@link ChannelHandler} * * @param configuration The Flink {@link Configuration}. * @param responseHeaders The response headers. * @return {@link ChannelHandler} or null if no custom handler need to be created. * @throws ConfigurationException Thrown, if the handler configuration is incorrect. */ Optional<ChannelHandler> createHandler( Configuration configuration, Map<String, String> responseHeaders) throws ConfigurationException; }
InboundChannelHandlerFactory
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/query/internal/DelegatingDomainQueryExecutionContext.java
{ "start": 445, "end": 1197 }
class ____ implements DomainQueryExecutionContext { private final DomainQueryExecutionContext delegate; public DelegatingDomainQueryExecutionContext(DomainQueryExecutionContext delegate) { this.delegate = delegate; } @Override public QueryOptions getQueryOptions() { return delegate.getQueryOptions(); } @Override public QueryParameterBindings getQueryParameterBindings() { return delegate.getQueryParameterBindings(); } @Override public Callback getCallback() { return delegate.getCallback(); } @Override public boolean hasCallbackActions() { return delegate.hasCallbackActions(); } @Override public SharedSessionContractImplementor getSession() { return delegate.getSession(); } }
DelegatingDomainQueryExecutionContext
java
grpc__grpc-java
api/src/context/java/io/grpc/Deadline.java
{ "start": 1494, "end": 9787 }
class ____ implements Comparable<Deadline> { private static final Ticker SYSTEM_TICKER = new SystemTicker(); // nanoTime has a range of just under 300 years. Only allow up to 100 years in the past or future // to prevent wraparound as long as process runs for less than ~100 years. private static final long MAX_OFFSET = TimeUnit.DAYS.toNanos(100 * 365); private static final long MIN_OFFSET = -MAX_OFFSET; private static final long NANOS_PER_SECOND = TimeUnit.SECONDS.toNanos(1); /** * Returns the ticker that's based on system clock. * * <p>This is <strong>EXPERIMENTAL</strong> API and may subject to change. If you'd like it to be * stabilized or have any feedback, please * <a href="https://github.com/grpc/grpc-java/issues/6030">let us know</a>. * * @since 1.24.0 */ public static Ticker getSystemTicker() { return SYSTEM_TICKER; } /** * Create a deadline that will expire at the specified offset based on the {@link #getSystemTicker * system ticker}. * * <p>If the given offset is extraordinarily long, say 100 years, the actual deadline created * might saturate. * * @param duration A non-negative duration. * @param units The time unit for the duration. * @return A new deadline. */ public static Deadline after(long duration, TimeUnit units) { return after(duration, units, SYSTEM_TICKER); } /** * Create a deadline that will expire at the specified offset based on the given {@link Ticker}. * * <p>If the given offset is extraordinarily long, say 100 years, the actual deadline created * might saturate. * * <p><strong>CAUTION</strong>: Only deadlines created with the same {@link Ticker} instance can * be compared by methods like {@link #minimum}, {@link #isBefore} and {@link #compareTo}. Custom * Tickers should only be used in tests where you fake out the clock. Always use the {@link * #getSystemTicker system ticker} in production, or serious errors may occur. * * <p>This is <strong>EXPERIMENTAL</strong> API and may subject to change. 
If you'd like it to be * stabilized or have any feedback, please * <a href="https://github.com/grpc/grpc-java/issues/6030">let us know</a>. * * @param duration A non-negative duration. * @param units The time unit for the duration. * @param ticker Where this deadline refer the current time * @return A new deadline. * * @since 1.24.0 */ public static Deadline after(long duration, TimeUnit units, Ticker ticker) { requireNonNull(units, "units"); return new Deadline(ticker, units.toNanos(duration), true); } private final Ticker ticker; private final long deadlineNanos; private volatile boolean expired; private Deadline(Ticker ticker, long offset, boolean baseInstantAlreadyExpired) { this(ticker, ticker.nanoTime(), offset, baseInstantAlreadyExpired); } private Deadline(Ticker ticker, long baseInstant, long offset, boolean baseInstantAlreadyExpired) { this.ticker = ticker; // Clamp to range [MIN_OFFSET, MAX_OFFSET] offset = Math.min(MAX_OFFSET, Math.max(MIN_OFFSET, offset)); deadlineNanos = baseInstant + offset; expired = baseInstantAlreadyExpired && offset <= 0; } /** * Returns whether this has deadline expired. * * @return {@code true} if it has, otherwise {@code false}. */ public boolean isExpired() { if (!expired) { if (deadlineNanos - ticker.nanoTime() <= 0) { expired = true; } else { return false; } } return true; } /** * Is {@code this} deadline before another. Two deadlines must be created using the same {@link * Ticker}. */ public boolean isBefore(Deadline other) { checkTicker(other); return this.deadlineNanos - other.deadlineNanos < 0; } /** * Return the minimum deadline of {@code this} or an other deadline. They must be created using * the same {@link Ticker}. * * @param other deadline to compare with {@code this}. */ public Deadline minimum(Deadline other) { checkTicker(other); return isBefore(other) ? this : other; } /** * Create a new deadline that is offset from {@code this}. 
* * <p>If the given offset is extraordinarily long, say 100 years, the actual deadline created * might saturate. */ // TODO(ejona): This method can cause deadlines to grow too far apart. For example: // Deadline.after(100 * 365, DAYS).offset(100 * 365, DAYS) would be less than // Deadline.after(-100 * 365, DAYS) public Deadline offset(long offset, TimeUnit units) { // May already be expired if (offset == 0) { return this; } return new Deadline(ticker, deadlineNanos, units.toNanos(offset), isExpired()); } /** * How much time is remaining in the specified time unit. Internal units are maintained as * nanoseconds and conversions are subject to the constraints documented for * {@link TimeUnit#convert}. If there is no time remaining, the returned duration is how * long ago the deadline expired. */ public long timeRemaining(TimeUnit unit) { final long nowNanos = ticker.nanoTime(); if (!expired && deadlineNanos - nowNanos <= 0) { expired = true; } return unit.convert(deadlineNanos - nowNanos, TimeUnit.NANOSECONDS); } /** * Schedule a task to be run when the deadline expires. * * <p>Note if this deadline was created with a custom {@link Ticker}, the {@code scheduler}'s * underlying clock should be synchronized with that Ticker. Otherwise the task won't be run at * the expected point of time. 
* * @param task to run on expiration * @param scheduler used to execute the task * @return {@link ScheduledFuture} which can be used to cancel execution of the task */ public ScheduledFuture<?> runOnExpiration(Runnable task, ScheduledExecutorService scheduler) { requireNonNull(task, "task"); requireNonNull(scheduler, "scheduler"); return scheduler.schedule(task, deadlineNanos - ticker.nanoTime(), TimeUnit.NANOSECONDS); } @Override public String toString() { long remainingNanos = timeRemaining(TimeUnit.NANOSECONDS); long seconds = Math.abs(remainingNanos) / NANOS_PER_SECOND; long nanos = Math.abs(remainingNanos) % NANOS_PER_SECOND; StringBuilder buf = new StringBuilder(); if (remainingNanos < 0) { buf.append('-'); } buf.append(seconds); if (nanos > 0) { buf.append(String.format(Locale.US, ".%09d", nanos)); } buf.append("s from now"); if (ticker != SYSTEM_TICKER) { buf.append(" (ticker=" + ticker + ")"); } return buf.toString(); } /** * {@inheritDoc} * * <p>Both deadlines must be created with the same {@link Ticker}. */ @Override public int compareTo(Deadline that) { checkTicker(that); return Long.compare(this.deadlineNanos, that.deadlineNanos); } @Override public int hashCode() { return Objects.hash(this.ticker, this.deadlineNanos); } @Override public boolean equals(final Object object) { if (object == this) { return true; } if (!(object instanceof Deadline)) { return false; } final Deadline that = (Deadline) object; if (this.ticker == null ? that.ticker != null : this.ticker != that.ticker) { return false; } return this.deadlineNanos == that.deadlineNanos; } /** * Time source representing nanoseconds since fixed but arbitrary point in time. * * <p>DO NOT use custom {@link Ticker} implementations in production, because deadlines created * with custom tickers are incompatible with those created with the system ticker. Always use * the {@link #getSystemTicker system ticker} whenever you need to provide one in production code. 
* * <p>This is <strong>EXPERIMENTAL</strong> API and may subject to change. If you'd like it to be * stabilized or have any feedback, please * <a href="https://github.com/grpc/grpc-java/issues/6030">let us know</a>. * * <p>In general implementations should be thread-safe, unless it's implemented and used in a * localized environment (like unit tests) where you are sure the usages are synchronized. * * @since 1.24.0 */ public abstract static
Deadline
java
apache__dubbo
dubbo-common/src/main/java/org/apache/dubbo/common/utils/ReflectionUtils.java
{ "start": 4367, "end": 6214 }
class ____ implement the * given interface */ public static List<Class<?>> getClassGenerics(Class<?> clazz, Class<?> interfaceClass) { List<Class<?>> generics = new ArrayList<>(); Type[] genericInterfaces = clazz.getGenericInterfaces(); for (Type genericInterface : genericInterfaces) { if (genericInterface instanceof ParameterizedType) { ParameterizedType parameterizedType = (ParameterizedType) genericInterface; Type rawType = parameterizedType.getRawType(); if (rawType instanceof Class && interfaceClass.isAssignableFrom((Class<?>) rawType)) { Type[] actualTypeArguments = parameterizedType.getActualTypeArguments(); for (Type actualTypeArgument : actualTypeArguments) { if (actualTypeArgument instanceof Class) { generics.add((Class<?>) actualTypeArgument); } } } } } Type genericSuperclass = clazz.getGenericSuperclass(); if (genericSuperclass instanceof ParameterizedType) { ParameterizedType parameterizedType = (ParameterizedType) genericSuperclass; Type[] actualTypeArguments = parameterizedType.getActualTypeArguments(); for (Type actualTypeArgument : actualTypeArguments) { if (actualTypeArgument instanceof Class) { generics.add((Class<?>) actualTypeArgument); } } } Class<?> superclass = clazz.getSuperclass(); if (superclass != null) { generics.addAll(getClassGenerics(superclass, interfaceClass)); } return generics.stream().distinct().collect(Collectors.toList()); } public static
that
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/issue_1400/Issue1474.java
{ "start": 741, "end": 1363 }
class ____{ private String name; private String id; @JSONField(unwrapped=true) private Object extraData; public String getName() { return name; } public void setName(String name) { this.name = name; } public String getId() { return id; } public void setId(String id) { this.id = id; } public Object getExtraData() { return extraData; } public void setExtraData(Object extraData) { this.extraData = extraData; } } }
People
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java
{ "start": 645, "end": 1559 }
class ____ implements AggregatorFunctionSupplier { public MaxIntAggregatorFunctionSupplier() { } @Override public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { return MaxIntAggregatorFunction.intermediateStateDesc(); } @Override public List<IntermediateStateDesc> groupingIntermediateStateDesc() { return MaxIntGroupingAggregatorFunction.intermediateStateDesc(); } @Override public MaxIntAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) { return MaxIntAggregatorFunction.create(driverContext, channels); } @Override public MaxIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext, List<Integer> channels) { return MaxIntGroupingAggregatorFunction.create(channels, driverContext); } @Override public String describe() { return "max of ints"; } }
MaxIntAggregatorFunctionSupplier
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/ComponentHealthThresholdMonitor.java
{ "start": 1272, "end": 6875 }
class ____ implements Runnable { private static final Logger LOG = LoggerFactory .getLogger(ComponentHealthThresholdMonitor.class); private final Component component; private final int healthThresholdPercent; private final long healthThresholdWindowSecs; private final long healthThresholdWindowNanos; private long firstOccurrenceTimestamp = 0; // Sufficient logging happens when component health is below threshold. // However, there has to be some logging when it is above threshold, otherwise // service owners have no idea how the health is fluctuating. So let's log // whenever there is a change in component health, thereby preventing // excessive logging on every poll. private float prevReadyContainerFraction = 0; public ComponentHealthThresholdMonitor(Component component, int healthThresholdPercent, long healthThresholdWindowSecs) { this.component = component; this.healthThresholdPercent = healthThresholdPercent; this.healthThresholdWindowSecs = healthThresholdWindowSecs; this.healthThresholdWindowNanos = TimeUnit.NANOSECONDS .convert(healthThresholdWindowSecs, TimeUnit.SECONDS); } @Override public void run() { LOG.debug("ComponentHealthThresholdMonitor run method"); // Perform container health checks against desired threshold long desiredContainerCount = component.getNumDesiredInstances(); // If desired container count for this component is 0 then nothing to do if (desiredContainerCount == 0) { return; } long readyContainerCount = component.getNumReadyInstances(); float thresholdFraction = (float) healthThresholdPercent / 100; // No possibility of div by 0 since desiredContainerCount won't be 0 here float readyContainerFraction = (float) readyContainerCount / desiredContainerCount; boolean healthChanged = false; if (Math.abs( readyContainerFraction - prevReadyContainerFraction) > .0000001) { prevReadyContainerFraction = readyContainerFraction; healthChanged = true; } String readyContainerPercentStr = String.format("%.2f", readyContainerFraction * 100); // Check if 
the current ready container percent is less than the // threshold percent if (readyContainerFraction < thresholdFraction) { // Check if it is the first occurrence and if yes set the timestamp long currentTimestamp = System.nanoTime(); if (firstOccurrenceTimestamp == 0) { firstOccurrenceTimestamp = currentTimestamp; Date date = new Date(); LOG.info( "[COMPONENT {}] Health has gone below threshold. Starting health " + "threshold timer at ts = {} ({})", component.getName(), date.getTime(), date); } long elapsedTime = currentTimestamp - firstOccurrenceTimestamp; long elapsedTimeSecs = TimeUnit.SECONDS.convert(elapsedTime, TimeUnit.NANOSECONDS); LOG.warn( "[COMPONENT {}] Current health {}% is below health threshold of " + "{}% for {} secs (threshold window = {} secs)", component.getName(), readyContainerPercentStr, healthThresholdPercent, elapsedTimeSecs, healthThresholdWindowSecs); if (elapsedTime > healthThresholdWindowNanos) { LOG.warn( "[COMPONENT {}] Current health {}% has been below health " + "threshold of {}% for {} secs (threshold window = {} secs)", component.getName(), readyContainerPercentStr, healthThresholdPercent, elapsedTimeSecs, healthThresholdWindowSecs); // Trigger service stop String exitDiag = String.format( "Service is being killed because container health for component " + "%s was %s%% (health threshold = %d%%) for %d secs " + "(threshold window = %d secs)", component.getName(), readyContainerPercentStr, healthThresholdPercent, elapsedTimeSecs, healthThresholdWindowSecs); // Append to global diagnostics that will be reported to RM. component.getScheduler().getDiagnostics().append(exitDiag); LOG.warn(exitDiag); // Sleep for 5 seconds in hope that the state can be recorded in ATS. // In case there's a client polling the component state, it can be // notified. 
try { Thread.sleep(5000); } catch (InterruptedException e) { LOG.error("Interrupted on sleep while exiting.", e); } ExitUtil.terminate(-1); } } else { String logMsg = "[COMPONENT {}] Health threshold = {}%, Current health " + "= {}% (Current Ready count = {}, Desired count = {})"; if (healthChanged) { LOG.info(logMsg, component.getName(), healthThresholdPercent, readyContainerPercentStr, readyContainerCount, desiredContainerCount); } else { LOG.debug(logMsg, component.getName(), healthThresholdPercent, readyContainerPercentStr, readyContainerCount, desiredContainerCount); } // The container health might have recovered above threshold after being // below for less than the threshold window amount of time. So we need // to reset firstOccurrenceTimestamp to 0. if (firstOccurrenceTimestamp != 0) { Date date = new Date(); LOG.info( "[COMPONENT {}] Health recovered above threshold at ts = {} ({})", component.getName(), date.getTime(), date); firstOccurrenceTimestamp = 0; } } } }
ComponentHealthThresholdMonitor
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/jobmaster/JobManagerRunnerResultTest.java
{ "start": 1262, "end": 3272 }
class ____ { private final ExecutionGraphInfo executionGraphInfo = new ExecutionGraphInfo(new ArchivedExecutionGraphBuilder().build()); private final FlinkException testException = new FlinkException("test exception"); @Test void testSuccessfulJobManagerResult() { final JobManagerRunnerResult jobManagerRunnerResult = JobManagerRunnerResult.forSuccess(executionGraphInfo); assertThat(jobManagerRunnerResult.isSuccess()).isTrue(); assertThat(jobManagerRunnerResult.isInitializationFailure()).isFalse(); } @Test void testInitializationFailureJobManagerResult() { final JobManagerRunnerResult jobManagerRunnerResult = JobManagerRunnerResult.forInitializationFailure(executionGraphInfo, testException); assertThat(jobManagerRunnerResult.isInitializationFailure()).isTrue(); assertThat(jobManagerRunnerResult.isSuccess()).isFalse(); } @Test void testGetArchivedExecutionGraphFromSuccessfulJobManagerResult() { final JobManagerRunnerResult jobManagerRunnerResult = JobManagerRunnerResult.forSuccess(executionGraphInfo); assertThat(jobManagerRunnerResult.getExecutionGraphInfo()).isEqualTo(executionGraphInfo); } @Test void testGetInitializationFailureFromFailedJobManagerResult() { final JobManagerRunnerResult jobManagerRunnerResult = JobManagerRunnerResult.forInitializationFailure(executionGraphInfo, testException); assertThat(jobManagerRunnerResult.getInitializationFailure()).isEqualTo(testException); } @Test void testGetInitializationFailureFromSuccessfulJobManagerResult() { final JobManagerRunnerResult jobManagerRunnerResult = JobManagerRunnerResult.forSuccess(executionGraphInfo); assertThatThrownBy(jobManagerRunnerResult::getInitializationFailure) .isInstanceOf(IllegalStateException.class); } }
JobManagerRunnerResultTest
java
netty__netty
example/src/main/java/io/netty/example/http2/helloworld/server/HelloWorldHttp1Handler.java
{ "start": 1892, "end": 3690 }
class ____ extends SimpleChannelInboundHandler<FullHttpRequest> { private final String establishApproach; public HelloWorldHttp1Handler(String establishApproach) { this.establishApproach = checkNotNull(establishApproach, "establishApproach"); } @Override public void channelRead0(ChannelHandlerContext ctx, FullHttpRequest req) throws Exception { if (HttpUtil.is100ContinueExpected(req)) { ctx.write(new DefaultFullHttpResponse(HTTP_1_1, CONTINUE, Unpooled.EMPTY_BUFFER)); } ByteBuf content = ctx.alloc().buffer(); content.writeBytes(HelloWorldHttp2Handler.RESPONSE_BYTES.duplicate()); ByteBufUtil.writeAscii(content, " - via " + req.protocolVersion() + " (" + establishApproach + ")"); FullHttpResponse response = new DefaultFullHttpResponse(HTTP_1_1, OK, content); response.headers().set(CONTENT_TYPE, "text/plain; charset=UTF-8"); response.headers().setInt(CONTENT_LENGTH, response.content().readableBytes()); boolean keepAlive = HttpUtil.isKeepAlive(req); if (keepAlive) { if (req.protocolVersion().equals(HTTP_1_0)) { response.headers().set(CONNECTION, KEEP_ALIVE); } ctx.write(response); } else { // Tell the client we're going to close the connection. response.headers().set(CONNECTION, CLOSE); ctx.write(response).addListener(ChannelFutureListener.CLOSE); } } @Override public void channelReadComplete(ChannelHandlerContext ctx) throws Exception { ctx.flush(); } @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { cause.printStackTrace(); ctx.close(); } }
HelloWorldHttp1Handler
java
google__dagger
javatests/dagger/internal/codegen/MapRequestRepresentationWithGuavaTest.java
{ "start": 9388, "end": 10381 }
interface ____ {", " ListenableFuture<Map<String, String>> stringMap();", "}"); CompilerTests.daggerCompiler(mapModuleFile, componentFile) .withProcessingOptions(compilerMode.processorOptions()) .compile( subject -> { subject.hasErrorCount(0); subject.generatedSource(goldenFileRule.goldenSource("test/DaggerTestComponent")); }); } @Test public void setAndMapBindings() throws Exception { Source moduleFile = CompilerTests.javaSource( "test.MapModule", "package test;", "", "import dagger.Module;", "import dagger.Provides;", "import dagger.multibindings.IntoMap;", "import dagger.multibindings.IntoSet;", "import dagger.multibindings.IntKey;", "import java.util.Map;", "import java.util.Set;", "", "@Module", "
TestComponent
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/entrypoint/ClusterEntrypoint.java
{ "start": 5260, "end": 5334 }
class ____ the Flink cluster entry points. * * <p>Specialization of this
for
java
apache__flink
flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/tasks/SourceStreamTaskTest.java
{ "start": 40014, "end": 41011 }
class ____<SRC extends SourceFunction<String>> extends StreamSource<String, SRC> implements BoundedOneInput { private final String name; public OutputRecordInCloseTestSource(String name, SRC sourceFunction) { super(sourceFunction); this.name = name; } @Override public void endInput() { output("[" + name + "]: End of input"); } @Override public void finish() throws Exception { ProcessingTimeService timeService = getProcessingTimeService(); timeService.registerTimer( timeService.getCurrentProcessingTime(), t -> output("[" + name + "]: Timer registered in close")); output("[" + name + "]: Finish"); super.finish(); } private void output(String record) { output.collect(new StreamRecord<>(record)); } } private static
OutputRecordInCloseTestSource
java
apache__camel
core/camel-support/src/main/java/org/apache/camel/saga/CamelSagaService.java
{ "start": 1068, "end": 1317 }
interface ____ extends Service, CamelContextAware { CompletableFuture<CamelSagaCoordinator> newSaga(Exchange exchange); CompletableFuture<CamelSagaCoordinator> getSaga(String id); void registerStep(CamelSagaStep step); }
CamelSagaService
java
google__dagger
hilt-compiler/main/java/dagger/hilt/android/processor/internal/androidentrypoint/AndroidEntryPointMetadata.java
{ "start": 2799, "end": 2844 }
class ____ { /** The
AndroidEntryPointMetadata
java
spring-projects__spring-security
saml2/saml2-service-provider/src/test/java/org/springframework/security/saml2/provider/service/servlet/HttpSessionSaml2AuthenticationRequestRepositoryTests.java
{ "start": 1453, "end": 6476 }
class ____ { private static final String IDP_SSO_URL = "https://sso-url.example.com/IDP/SSO"; private MockHttpServletRequest request; private MockHttpServletResponse response; private HttpSessionSaml2AuthenticationRequestRepository authenticationRequestRepository; @BeforeEach public void setup() { this.request = new MockHttpServletRequest(); this.response = new MockHttpServletResponse(); this.authenticationRequestRepository = new HttpSessionSaml2AuthenticationRequestRepository(); } @Test public void loadAuthenticationRequestWhenInvalidSessionThenNull() { AbstractSaml2AuthenticationRequest authenticationRequest = this.authenticationRequestRepository .loadAuthenticationRequest(this.request); assertThat(authenticationRequest).isNull(); } @Test public void loadAuthenticationRequestWhenNoAttributeInSessionThenNull() { this.request.getSession(); AbstractSaml2AuthenticationRequest authenticationRequest = this.authenticationRequestRepository .loadAuthenticationRequest(this.request); assertThat(authenticationRequest).isNull(); } @Test public void loadAuthenticationRequestWhenAttributeInSessionThenReturnsAuthenticationRequest() { AbstractSaml2AuthenticationRequest mockAuthenticationRequest = mock(AbstractSaml2AuthenticationRequest.class); given(mockAuthenticationRequest.getAuthenticationRequestUri()).willReturn(IDP_SSO_URL); this.request.getSession(); this.authenticationRequestRepository.saveAuthenticationRequest(mockAuthenticationRequest, this.request, this.response); AbstractSaml2AuthenticationRequest authenticationRequest = this.authenticationRequestRepository .loadAuthenticationRequest(this.request); assertThat(authenticationRequest.getAuthenticationRequestUri()).isEqualTo(IDP_SSO_URL); } @Test public void saveAuthenticationRequestWhenSessionDontExistsThenCreateAndSave() { AbstractSaml2AuthenticationRequest mockAuthenticationRequest = mock(AbstractSaml2AuthenticationRequest.class); this.authenticationRequestRepository.saveAuthenticationRequest(mockAuthenticationRequest, 
this.request, this.response); AbstractSaml2AuthenticationRequest authenticationRequest = this.authenticationRequestRepository .loadAuthenticationRequest(this.request); assertThat(authenticationRequest).isNotNull(); } @Test public void saveAuthenticationRequestWhenSessionExistsThenSave() { AbstractSaml2AuthenticationRequest mockAuthenticationRequest = mock(AbstractSaml2AuthenticationRequest.class); this.request.getSession(); this.authenticationRequestRepository.saveAuthenticationRequest(mockAuthenticationRequest, this.request, this.response); AbstractSaml2AuthenticationRequest authenticationRequest = this.authenticationRequestRepository .loadAuthenticationRequest(this.request); assertThat(authenticationRequest).isNotNull(); } @Test public void saveAuthenticationRequestWhenNullAuthenticationRequestThenDontSave() { this.request.getSession(); this.authenticationRequestRepository.saveAuthenticationRequest(null, this.request, this.response); AbstractSaml2AuthenticationRequest authenticationRequest = this.authenticationRequestRepository .loadAuthenticationRequest(this.request); assertThat(authenticationRequest).isNull(); } @Test public void removeAuthenticationRequestWhenInvalidSessionThenReturnNull() { AbstractSaml2AuthenticationRequest authenticationRequest = this.authenticationRequestRepository .removeAuthenticationRequest(this.request, this.response); assertThat(authenticationRequest).isNull(); } @Test public void removeAuthenticationRequestWhenAttributeInSessionThenRemoveAuthenticationRequest() { AbstractSaml2AuthenticationRequest mockAuthenticationRequest = mock(AbstractSaml2AuthenticationRequest.class); given(mockAuthenticationRequest.getAuthenticationRequestUri()).willReturn(IDP_SSO_URL); this.request.getSession(); this.authenticationRequestRepository.saveAuthenticationRequest(mockAuthenticationRequest, this.request, this.response); AbstractSaml2AuthenticationRequest authenticationRequest = this.authenticationRequestRepository 
.removeAuthenticationRequest(this.request, this.response); AbstractSaml2AuthenticationRequest authenticationRequestAfterRemove = this.authenticationRequestRepository .loadAuthenticationRequest(this.request); assertThat(authenticationRequest.getAuthenticationRequestUri()).isEqualTo(IDP_SSO_URL); assertThat(authenticationRequestAfterRemove).isNull(); } @Test public void removeAuthenticationRequestWhenValidSessionNoAttributeThenReturnsNull() { MockHttpSession session = mock(MockHttpSession.class); MockHttpServletRequest request = new MockHttpServletRequest(); request.setSession(session); AbstractSaml2AuthenticationRequest authenticationRequest = this.authenticationRequestRepository .removeAuthenticationRequest(request, this.response); verify(session).getAttribute(anyString()); assertThat(authenticationRequest).isNull(); } }
HttpSessionSaml2AuthenticationRequestRepositoryTests
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/contract/hdfs/TestHDFSContractRename.java
{ "start": 1148, "end": 1558 }
class ____ extends AbstractContractRenameTest { @BeforeAll public static void createCluster() throws IOException { HDFSContract.createCluster(); } @AfterAll public static void teardownCluster() throws IOException { HDFSContract.destroyCluster(); } @Override protected AbstractFSContract createContract(Configuration conf) { return new HDFSContract(conf); } }
TestHDFSContractRename
java
grpc__grpc-java
binder/src/main/java/io/grpc/binder/BinderChannelBuilder.java
{ "start": 1580, "end": 16507 }
class ____ extends ForwardingChannelBuilder<BinderChannelBuilder> { /** * Creates a channel builder that will bind to a remote Android service. * * <p>The underlying Android binding will be torn down when the channel becomes idle. This happens * after 30 minutes without use by default but can be configured via {@link * ManagedChannelBuilder#idleTimeout(long, TimeUnit)} or triggered manually with {@link * ManagedChannel#enterIdle()}. * * <p>You the caller are responsible for managing the lifecycle of any channels built by the * resulting builder. They will not be shut down automatically. * * @param directAddress the {@link AndroidComponentAddress} referencing the service to bind to. * @param sourceContext the context to bind from (e.g. The current Activity or Application). * @return a new builder */ public static BinderChannelBuilder forAddress( AndroidComponentAddress directAddress, Context sourceContext) { return new BinderChannelBuilder( checkNotNull(directAddress, "directAddress"), null, sourceContext, BinderChannelCredentials.forDefault()); } /** * Creates a channel builder that will bind to a remote Android service with provided * BinderChannelCredentials. * * <p>The underlying Android binding will be torn down when the channel becomes idle. This happens * after 30 minutes without use by default but can be configured via {@link * ManagedChannelBuilder#idleTimeout(long, TimeUnit)} or triggered manually with {@link * ManagedChannel#enterIdle()}. * * <p>You the caller are responsible for managing the lifecycle of any channels built by the * resulting builder. They will not be shut down automatically. * * @param directAddress the {@link AndroidComponentAddress} referencing the service to bind to. * @param sourceContext the context to bind from (e.g. The current Activity or Application). * @param channelCredentials the arbitrary binder specific channel credentials to be used to * establish a binder connection. 
* @return a new builder */ @ExperimentalApi("https://github.com/grpc/grpc-java/issues/10173") public static BinderChannelBuilder forAddress( AndroidComponentAddress directAddress, Context sourceContext, BinderChannelCredentials channelCredentials) { return new BinderChannelBuilder( checkNotNull(directAddress, "directAddress"), null, sourceContext, channelCredentials); } /** * Creates a channel builder that will bind to a remote Android service, via a string target name * which will be resolved. * * <p>The underlying Android binding will be torn down when the channel becomes idle. This happens * after 30 minutes without use by default but can be configured via {@link * ManagedChannelBuilder#idleTimeout(long, TimeUnit)} or triggered manually with {@link * ManagedChannel#enterIdle()}. * * <p>You the caller are responsible for managing the lifecycle of any channels built by the * resulting builder. They will not be shut down automatically. * * @param target A target uri which should resolve into an {@link AndroidComponentAddress} * referencing the service to bind to. * @param sourceContext the context to bind from (e.g. The current Activity or Application). * @return a new builder */ public static BinderChannelBuilder forTarget(String target, Context sourceContext) { return new BinderChannelBuilder( null, checkNotNull(target, "target"), sourceContext, BinderChannelCredentials.forDefault()); } /** * Creates a channel builder that will bind to a remote Android service, via a string target name * which will be resolved. * * <p>The underlying Android binding will be torn down when the channel becomes idle. This happens * after 30 minutes without use by default but can be configured via {@link * ManagedChannelBuilder#idleTimeout(long, TimeUnit)} or triggered manually with {@link * ManagedChannel#enterIdle()}. * * <p>You the caller are responsible for managing the lifecycle of any channels built by the * resulting builder. They will not be shut down automatically. 
* * @param target A target uri which should resolve into an {@link AndroidComponentAddress} * referencing the service to bind to. * @param sourceContext the context to bind from (e.g. The current Activity or Application). * @param channelCredentials the arbitrary binder specific channel credentials to be used to * establish a binder connection. * @return a new builder */ @ExperimentalApi("https://github.com/grpc/grpc-java/issues/10173") public static BinderChannelBuilder forTarget( String target, Context sourceContext, BinderChannelCredentials channelCredentials) { return new BinderChannelBuilder( null, checkNotNull(target, "target"), sourceContext, channelCredentials); } /** Always fails. Call {@link #forAddress(AndroidComponentAddress, Context)} instead. */ @DoNotCall("Unsupported. Use forAddress(AndroidComponentAddress, Context) instead") public static BinderChannelBuilder forAddress(String name, int port) { throw new UnsupportedOperationException( "call forAddress(AndroidComponentAddress, Context) instead"); } /** Always fails. Call {@link #forAddress(AndroidComponentAddress, Context)} instead. */ @DoNotCall("Unsupported. 
Use forTarget(String, Context) instead") public static BinderChannelBuilder forTarget(String target) { throw new UnsupportedOperationException( "call forAddress(AndroidComponentAddress, Context) instead"); } private final ManagedChannelImplBuilder managedChannelImplBuilder; private final BinderClientTransportFactory.Builder transportFactoryBuilder; private boolean strictLifecycleManagement; private BinderChannelBuilder( @Nullable AndroidComponentAddress directAddress, @Nullable String target, Context sourceContext, BinderChannelCredentials channelCredentials) { transportFactoryBuilder = new BinderClientTransportFactory.Builder() .setSourceContext(sourceContext) .setChannelCredentials(channelCredentials); if (directAddress != null) { managedChannelImplBuilder = new ManagedChannelImplBuilder( directAddress, directAddress.getAuthority(), transportFactoryBuilder, null); } else { managedChannelImplBuilder = new ManagedChannelImplBuilder(target, transportFactoryBuilder, null); } idleTimeout(60, TimeUnit.SECONDS); } @Override @SuppressWarnings("deprecation") // Not extending ForwardingChannelBuilder2 to preserve ABI. protected ManagedChannelBuilder<?> delegate() { return managedChannelImplBuilder; } /** Specifies certain optional aspects of the underlying Android Service binding. */ public BinderChannelBuilder setBindServiceFlags(BindServiceFlags bindServiceFlags) { transportFactoryBuilder.setBindServiceFlags(bindServiceFlags); return this; } /** * Provides a custom scheduled executor service. * * <p>This is an optional parameter. If the user has not provided a scheduled executor service * when the channel is built, the builder will use a static cached thread pool. 
* * @return this */ public BinderChannelBuilder scheduledExecutorService( ScheduledExecutorService scheduledExecutorService) { transportFactoryBuilder.setScheduledExecutorPool( new FixedObjectPool<>(checkNotNull(scheduledExecutorService, "scheduledExecutorService"))); return this; } /** * Provides a custom {@link Executor} for accessing this application's main thread. * * <p>Optional. A default implementation will be used if no custom Executor is provided. * * @return this */ public BinderChannelBuilder mainThreadExecutor(Executor mainThreadExecutor) { transportFactoryBuilder.setMainThreadExecutor(mainThreadExecutor); return this; } /** * Provides a custom security policy. * * <p>This is optional. If the user has not provided a security policy, this channel will only * communicate with the same application UID. * * @return this */ public BinderChannelBuilder securityPolicy(SecurityPolicy securityPolicy) { transportFactoryBuilder.setSecurityPolicy(securityPolicy); return this; } /** Sets the policy for inbound parcelable objects. */ public BinderChannelBuilder inboundParcelablePolicy( InboundParcelablePolicy inboundParcelablePolicy) { transportFactoryBuilder.setInboundParcelablePolicy(inboundParcelablePolicy); return this; } /** * Disables the channel idle timeout and prevents it from being enabled. This allows a centralized * application method to configure the channel builder and return it, without worrying about * another part of the application accidentally enabling the idle timeout. */ public BinderChannelBuilder strictLifecycleManagement() { strictLifecycleManagement = true; super.idleTimeout(1000, TimeUnit.DAYS); // >30 days disables timeouts entirely. return this; } /** * Checks servers against this Channel's {@link SecurityPolicy} *before* binding. * * <p>Android users can be tricked into installing a malicious app with the same package name as a * legitimate server. 
That's why we don't send calls to a server until it has been authorized by * an appropriate {@link SecurityPolicy}. But merely binding to a malicious server can enable * "keep-alive" and "background activity launch" abuse, even if it's ultimately unauthorized. * Pre-authorization mitigates these threats by performing a preliminary {@link SecurityPolicy} * check against a server app's PackageManager-registered identity without actually creating an * instance of it. This is especially important for security when the server's direct address * isn't known in advance but rather resolved via target URI or discovered by other means. * * <p>Note that, unlike ordinary authorization, pre-authorization is performed against the server * app's UID, not the UID of the process hosting the bound Service. These can be different, most * commonly due to services that set `android:isolatedProcess=true`. * * <p>Pre-authorization is strongly recommended but it remains optional for now because of this * behavior change and the small performance cost. * * <p>The default value of this property is false but it will become true in a future release. * Clients that require a particular behavior should configure it explicitly using this method * rather than relying on the default. */ @ExperimentalApi("https://github.com/grpc/grpc-java/issues/12191") public BinderChannelBuilder preAuthorizeServers(boolean preAuthorize) { transportFactoryBuilder.setPreAuthorizeServers(preAuthorize); return this; } /** * Specifies how and when to authorize a server against this Channel's {@link SecurityPolicy}. * * <p>This method selects the original "legacy" authorization strategy, which is no longer * preferred for two reasons: First, the legacy strategy considers the UID of the server *process* * we connect to. This is problematic for services using the `android:isolatedProcess` attribute, * which runs them under a different "ephemeral" UID. 
This UID lacks all the privileges of the * hosting app -- any non-trivial SecurityPolicy would fail to authorize it. Second, the legacy * authorization strategy performs SecurityPolicy checks later in the connection handshake, which * means the calling UID must be rechecked on every subsequent RPC. For these reasons, prefer * {@link #useV2AuthStrategy} instead. * * <p>The server does not know which authorization strategy a client is using. Both strategies * work with all versions of the grpc-binder server. * * <p>Callers need not specify an authorization strategy, but the default is unspecified and will * eventually become {@link #useV2AuthStrategy()}. Clients that require the legacy strategy should * configure it explicitly using this method. Eventually, however, legacy support will be * deprecated and removed. * * @return this */ @ExperimentalApi("https://github.com/grpc/grpc-java/issues/12397") public BinderChannelBuilder useLegacyAuthStrategy() { transportFactoryBuilder.setUseLegacyAuthStrategy(true); return this; } /** * Specifies how and when to authorize a server against this Channel's {@link SecurityPolicy}. * * <p>This method selects the v2 authorization strategy. It improves on the original strategy * ({@link #useLegacyAuthStrategy}), by considering the UID of the server *app* we connect to, * rather than the server *process*. This allows clients to connect to services configured with * the `android:isolatedProcess` attribute, which run with the same authority as the hosting app, * but under a different "ephemeral" UID that any non-trivial SecurityPolicy would fail to * authorize. * * <p>Furthermore, the v2 authorization strategy performs SecurityPolicy checks earlier in the * connection handshake, which allows subsequent RPCs over that connection to proceed securely * without further UID checks. For these reasons, clients should prefer the v2 strategy. * * <p>The server does not know which authorization strategy a client is using. 
Both strategies * work with all versions of the grpc-binder server. * * <p>Callers need not specify an authorization strategy, but the default is unspecified and can * change over time. Clients that require the v2 strategy should configure it explicitly using * this method. Eventually, this strategy will become the default and legacy support will be * removed. * * <p>If moving to the new authorization strategy causes a robolectric test to fail, ensure your * fake Service component is registered with `ShadowPackageManager` using `addOrUpdateService()`. * * @return this */ @ExperimentalApi("https://github.com/grpc/grpc-java/issues/12397") public BinderChannelBuilder useV2AuthStrategy() { transportFactoryBuilder.setUseLegacyAuthStrategy(false); return this; } @Override public BinderChannelBuilder idleTimeout(long value, TimeUnit unit) { checkState( !strictLifecycleManagement, "Idle timeouts are not supported when strict lifecycle management is enabled"); super.idleTimeout(value, unit); return this; } @Override public ManagedChannel build() { transportFactoryBuilder.setOffloadExecutorPool( managedChannelImplBuilder.getOffloadExecutorPool()); setNameResolverArg( ApiConstants.SOURCE_ANDROID_CONTEXT, transportFactoryBuilder.getSourceContext()); return super.build(); } }
BinderChannelBuilder
java
spring-projects__spring-boot
module/spring-boot-data-commons/src/test/java/org/springframework/boot/data/autoconfigure/metrics/DataRepositoryMetricsAutoConfigurationIntegrationTests.java
{ "start": 3626, "end": 3734 }
class ____ extends AbstractJdbcConfiguration { } @Configuration(proxyBeanMethods = false) static
TestConfig
java
elastic__elasticsearch
x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/FirstLineWithLettersCharFilterTests.java
{ "start": 464, "end": 5514 }
class ____ extends ESTestCase { public void testEmpty() throws IOException { String input = ""; FirstLineWithLettersCharFilter filter = new FirstLineWithLettersCharFilter(new StringReader(input)); assertThat(filter.read(), equalTo(-1)); } public void testAllBlankOneLine() throws IOException { String input = randomFrom("!@#$%^&*()1234567890{}[]\\;':,./<>?`~", "\t", " ", ""); if (randomBoolean()) { input = " " + input; } if (randomBoolean()) { input = input + " "; } FirstLineWithLettersCharFilter filter = new FirstLineWithLettersCharFilter(new StringReader(input)); assertThat(filter.read(), equalTo(-1)); } public void testNonBlankNoNewlines() throws IOException { String input = "the quick brown fox jumped over the lazy dog"; if (randomBoolean()) { input = " " + input; } if (randomBoolean()) { input = input + " "; } FirstLineWithLettersCharFilter filter = new FirstLineWithLettersCharFilter(new StringReader(input)); char[] output = new char[input.length()]; assertThat(filter.read(output, 0, output.length), equalTo(input.length())); assertThat(filter.read(), equalTo(-1)); assertThat(new String(output), equalTo(input)); } public void testNonBlankMultiline() throws IOException { StringBuilder input = new StringBuilder(); String lineEnding = randomBoolean() ? "\n" : "\r\n"; for (int lineBeforeNum = randomIntBetween(2, 5); lineBeforeNum > 0; --lineBeforeNum) { for (int charNum = randomIntBetween(0, 5); charNum > 0; --charNum) { input.append(randomBoolean() ? " " : "\t"); } input.append(lineEnding); } String lineToKeep = "the quick brown fox jumped over the lazy dog"; if (randomBoolean()) { lineToKeep = " " + lineToKeep; } if (randomBoolean()) { lineToKeep = lineToKeep + " "; } input.append(lineToKeep).append(lineEnding); for (int lineAfterNum = randomIntBetween(2, 5); lineAfterNum > 0; --lineAfterNum) { for (int charNum = randomIntBetween(0, 5); charNum > 0; --charNum) { input.append(randomBoolean() ? 
" " : "more"); } if (lineAfterNum > 1 || randomBoolean()) { input.append(lineEnding); } } FirstLineWithLettersCharFilter filter = new FirstLineWithLettersCharFilter(new StringReader(input.toString())); char[] output = new char[lineToKeep.length()]; assertThat(filter.read(output, 0, output.length), equalTo(lineToKeep.length())); assertThat(filter.read(), equalTo(-1)); assertThat(new String(output), equalTo(lineToKeep)); } public void testNoLinesWithLetters() throws IOException { StringBuilder input = new StringBuilder(); String lineEnding = randomBoolean() ? "\n" : "\r\n"; for (int lineNum = randomIntBetween(2, 5); lineNum > 0; --lineNum) { for (int charNum = randomIntBetween(0, 5); charNum > 0; --charNum) { input.append(randomFrom("!@#$%^&*()1234567890{}[]\\;':,./<>?`~", "\t", " ", "")); } if (lineNum > 1 || randomBoolean()) { input.append(lineEnding); } } FirstLineWithLettersCharFilter filter = new FirstLineWithLettersCharFilter(new StringReader(input.toString())); assertThat(filter.read(), equalTo(-1)); } public void testCorrect() throws IOException { String input = """ -------------------------------------------------------------------------------- Alias 'foo' already exists and this prevents setting up ILM for logs --------------------------------------------------------------------------------"""; FirstLineWithLettersCharFilter filter = new FirstLineWithLettersCharFilter(new StringReader(input)); String expectedOutput = "Alias 'foo' already exists and this prevents setting up ILM for logs"; char[] output = new char[expectedOutput.length()]; assertThat(filter.read(output, 0, output.length), equalTo(expectedOutput.length())); assertThat(filter.read(), equalTo(-1)); assertThat(new String(output), equalTo(expectedOutput)); int expectedOutputIndex = input.indexOf(expectedOutput); for (int i = 0; i < expectedOutput.length(); ++i) { assertThat(filter.correctOffset(i), equalTo(expectedOutputIndex + i)); } // When the input gets chopped by a char filter immediately 
after a token, that token must be reported as // ending at the very end of the original input, otherwise multi-message analysis will have incorrect offsets assertThat(filter.correctOffset(expectedOutput.length()), equalTo(input.length())); } }
FirstLineWithLettersCharFilterTests
java
apache__maven
impl/maven-core/src/main/java/org/apache/maven/lifecycle/internal/BuildListCalculator.java
{ "start": 1325, "end": 2640 }
class ____ { public ProjectBuildList calculateProjectBuilds(MavenSession session, List<TaskSegment> taskSegments) { List<ProjectSegment> projectBuilds = new ArrayList<>(); MavenProject rootProject = session.getTopLevelProject(); for (TaskSegment taskSegment : taskSegments) { List<MavenProject> projects; if (taskSegment.isAggregating()) { projects = Collections.singletonList(rootProject); } else { projects = session.getProjects(); } for (MavenProject project : projects) { ClassLoader tccl = Thread.currentThread().getContextClassLoader(); MavenProject currentProject = session.getCurrentProject(); try { BuilderCommon.attachToThread(project); // Not totally sure if this is needed for anything session.setCurrentProject(project); projectBuilds.add(new ProjectSegment(project, taskSegment, session)); } finally { session.setCurrentProject(currentProject); Thread.currentThread().setContextClassLoader(tccl); } } } return new ProjectBuildList(projectBuilds); } }
BuildListCalculator
java
apache__hadoop
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/NativeAzureFileSystemBaseTest.java
{ "start": 12968, "end": 15002 }
enum ____ { NormalFileName, SourceInAFolder, SourceWithSpace, SourceWithPlusAndPercent } @Test public void testRename() throws Exception { for (RenameVariation variation : RenameVariation.values()) { System.out.printf("Rename variation: %s\n", variation); Path originalFile; switch (variation) { case NormalFileName: originalFile = new Path("fileToRename"); break; case SourceInAFolder: originalFile = new Path("file/to/rename"); break; case SourceWithSpace: originalFile = new Path("file to rename"); break; case SourceWithPlusAndPercent: originalFile = new Path("file+to%rename"); break; default: throw new Exception("Unknown variation"); } Path destinationFile = new Path("file/resting/destination"); assertTrue(fs.createNewFile(originalFile)); assertTrue(fs.exists(originalFile)); assertFalse(fs.rename(originalFile, destinationFile)); // Parent directory // doesn't exist assertTrue(fs.mkdirs(destinationFile.getParent())); boolean result = fs.rename(originalFile, destinationFile); assertTrue(result); assertTrue(fs.exists(destinationFile)); assertFalse(fs.exists(originalFile)); fs.delete(destinationFile.getParent(), true); } } @Test public void testRenameImplicitFolder() throws Exception { Path testFile = new Path("deep/file/rename/test"); FsPermission permission = FsPermission.createImmutable((short) 644); createEmptyFile(testFile, permission); boolean renameResult = fs.rename(new Path("deep/file"), new Path("deep/renamed")); assertTrue(renameResult); assertFalse(fs.exists(testFile)); FileStatus newStatus = fs.getFileStatus(new Path("deep/renamed/rename/test")); assertNotNull(newStatus); assertEqualsIgnoreStickyBit(permission, newStatus.getPermission()); assertTrue(fs.delete(new Path("deep"), true)); } private
RenameVariation
java
elastic__elasticsearch
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ValidateYamlAgainstSchemaTask.java
{ "start": 719, "end": 977 }
class ____ extends ValidateJsonAgainstSchemaTask { @Override protected String getFileType() { return "YAML"; } protected ObjectMapper getMapper() { return new ObjectMapper(new YAMLFactory()); } }
ValidateYamlAgainstSchemaTask
java
quarkusio__quarkus
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/builtin/beans/InjectionPointBuiltInBeanTest.java
{ "start": 1198, "end": 2265 }
class ____ { @Produces @Dependent public MyPojo produce(InjectionPoint injectionPoint, Instance<Object> lookup) { Field field = ((AnnotatedField<?>) injectionPoint.getAnnotated()).getJavaMember(); String f = field.getDeclaringClass().getSimpleName() + "." + field.getName(); // producer method parameters are injection points, so looking up `InjectionPoint` from `lookup` // must return the injection point corresponding to the `lookup` producer method parameter InjectionPoint lookupInjectionPoint = lookup.select(InjectionPoint.class).get(); AnnotatedParameter<?> parameter = (AnnotatedParameter<?>) lookupInjectionPoint.getAnnotated(); Executable method = parameter.getJavaParameter().getDeclaringExecutable(); String m = method.getDeclaringClass().getSimpleName() + "." + method.getName() + "(" + parameter.getPosition() + ")"; return new MyPojo(f + "|" + m); } } @Singleton static
MyProducer
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/internal/util/collections/LockModeEnumMap.java
{ "start": 608, "end": 955 }
class ____<V> extends LazyIndexedMap<LockMode,V> { private static final int ENUM_DIMENSION = LockMode.values().length; public LockModeEnumMap() { super( ENUM_DIMENSION ); } public V computeIfAbsent(LockMode key, Function<LockMode,V> valueGenerator) { return super.computeIfAbsent( key.ordinal(), key, valueGenerator ); } }
LockModeEnumMap
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsIgnoringWhitespaces_Test.java
{ "start": 1764, "end": 7308 }
class ____ { @ParameterizedTest @ValueSource(strings = { "Yo", "a n dLuke", "YodaandLuke", "Yoda\tand\nLuke" }) void should_pass_if_actual_contains_value_when_whitespaces_are_ignored(String value) { // GIVEN String actual = "Yoda and Luke"; // WHEN / THEN assertThat(actual).containsIgnoringWhitespaces(value); } @Test void should_pass_if_actual_contains_all_given_strings() { // GIVEN String actual = "Yoda and Luke"; String[] values = array("Yo", "da", "a n d", "L u k e"); // WHEN / THEN assertThat(actual).containsIgnoringWhitespaces(values); } @Test void should_fail_if_actual_does_not_contain_value() { // GIVEN String actual = "Yoda"; String value = "Luke"; // WHEN var assertionError = expectAssertionError(() -> assertThat(actual).containsIgnoringWhitespaces(value)); // THEN then(assertionError).hasMessage(shouldContainIgnoringWhitespaces("Yoda", "Luke", StandardComparisonStrategy.instance()).create()); } @Test void should_fail_if_actual_contains_value_but_in_different_case() { // GIVEN String actual = "Yoda"; String value = "yo"; // WHEN var assertionError = expectAssertionError(() -> assertThat(actual).containsIgnoringWhitespaces(value)); // THEN then(assertionError).hasMessage(shouldContainIgnoringWhitespaces("Yoda", "yo", StandardComparisonStrategy.instance()).create()); } @Test void should_fail_if_actual_contains_value_with_whitespaces_but_in_different_case() { // GIVEN String actual = "Yoda and Luke"; String value = "a n dluke"; // WHEN var assertionError = expectAssertionError(() -> assertThat(actual).containsIgnoringWhitespaces(value)); // THEN then(assertionError).hasMessage(shouldContainIgnoringWhitespaces("Yoda and Luke", "a n dluke", StandardComparisonStrategy.instance()).create()); } @Test void should_throw_error_if_value_is_null() { // GIVEN String actual = "Yoda"; String value = null; // WHEN / THEN assertThatNullPointerException().isThrownBy(() -> assertThat(actual).containsIgnoringWhitespaces(value)) .withMessage(charSequenceToLookForIsNull()); } 
@Test void should_fail_if_actual_is_null() { // GIVEN String actual = null; String value = "Yoda"; // WHEN var assertionError = expectAssertionError(() -> assertThat(actual).containsIgnoringWhitespaces(value)); // THEN then(assertionError).hasMessage(actualIsNull()); } @Test void should_fail_if_actual_does_not_contain_all_given_strings() { // GIVEN String actual = "Yoda"; String[] values = array("Yo", "da", "Han"); // WHEN var assertionError = expectAssertionError(() -> assertThat(actual).containsIgnoringWhitespaces(values)); // THEN then(assertionError).hasMessage(shouldContainIgnoringWhitespaces(actual, values, newLinkedHashSet("Han"), StandardComparisonStrategy.instance()).create()); } @ParameterizedTest @ValueSource(strings = { "Yo", "yo", "YO", "a n dluke", "A N Dluke", "and L u k" }) void should_pass_if_actual_contains_value_according_to_custom_comparison_strategy(String value) { // GIVEN String actual = "Yoda and Luke"; // WHEN / THEN assertThat(actual).usingComparator(CaseInsensitiveStringComparator.INSTANCE) .containsIgnoringWhitespaces(value); } @Test void should_pass_if_actual_contains_all_given_strings_according_to_custom_comparison_strategy() { // GIVEN String actual = "Yoda and Luke"; String[] values = array("YO", "dA", "Aa", " n d l"); // WHEN / THEN assertThat(actual).usingComparator(CaseInsensitiveStringComparator.INSTANCE) .containsIgnoringWhitespaces(values); } @Test void should_fail_if_actual_does_not_contain_value_according_to_custom_comparison_strategy() { // GIVEN String actual = "Yoda"; String value = "Luke"; Comparator<String> comparator = CASE_INSENSITIVE_ORDER; // WHEN var assertionError = expectAssertionError(() -> assertThat(actual).usingComparator(comparator) .containsIgnoringWhitespaces(value)); // THEN then(assertionError).hasMessage(shouldContainIgnoringWhitespaces("Yoda", "Luke", new ComparatorBasedComparisonStrategy(comparator)).create()); } @Test void 
should_fail_if_actual_does_not_contain_all_given_strings_according_to_custom_comparison_strategy() { // GIVEN String actual = "Yoda"; String[] values = array("Yo", "da", "Han"); Comparator<String> comparator = CASE_INSENSITIVE_ORDER; // WHEN var assertionError = expectAssertionError(() -> assertThat(actual).usingComparator(comparator) .containsIgnoringWhitespaces(values)); // THEN then(assertionError).hasMessage(shouldContainIgnoringWhitespaces(actual, values, newLinkedHashSet("Han"), new ComparatorBasedComparisonStrategy(comparator)).create()); } }
CharSequenceAssert_containsIgnoringWhitespaces_Test
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/memory/MemoryManagerConcurrentModReleaseTest.java
{ "start": 1116, "end": 2676 }
class ____ { @Test void testConcurrentModificationOnce() throws MemoryAllocationException { final int numSegments = 10000; final int segmentSize = 4096; MemoryManager memMan = MemoryManagerBuilder.newBuilder() .setMemorySize(numSegments * segmentSize) .setPageSize(segmentSize) .build(); ArrayList<MemorySegment> segs = new ListWithConcModExceptionOnFirstAccess<>(); memMan.allocatePages(this, segs, numSegments); memMan.release(segs); } @Test void testConcurrentModificationWhileReleasing() throws Exception { final int numSegments = 10000; final int segmentSize = 4096; MemoryManager memMan = MemoryManagerBuilder.newBuilder() .setMemorySize(numSegments * segmentSize) .setPageSize(segmentSize) .build(); ArrayList<MemorySegment> segs = new ArrayList<>(numSegments); memMan.allocatePages(this, segs, numSegments); // start a thread that performs concurrent modifications Modifier mod = new Modifier(segs); Thread modRunner = new Thread(mod); modRunner.start(); // give the thread some time to start working Thread.sleep(500); try { memMan.release(segs); } finally { mod.cancel(); } modRunner.join(); } private static
MemoryManagerConcurrentModReleaseTest
java
greenrobot__greendao
DaoCore/src/main/java/org/greenrobot/greendao/DaoLog.java
{ "start": 779, "end": 850 }
class ____ a static Log Tag. * * @author markus * */ public
providing
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/SwitchDefaultTest.java
{ "start": 1726, "end": 2160 }
class ____ { void f(int i) { switch (i) { case 1: return; case 0: default: return; } } } """) .doTest(); } @Test public void refactoring_case() { testHelper .addInputLines( "in/Test.java", """
Test
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/processor/enricher/PollEnrichExpressionTest.java
{ "start": 982, "end": 1944 }
class ____ extends ContextTestSupport { @Test public void testPollEnrichExpression() throws Exception { getMockEndpoint("mock:result").expectedBodiesReceived("Hello World", "Bye World", "Hi World"); template.sendBody("seda:foo", "Hello World"); template.sendBody("seda:bar", "Bye World"); template.sendBody("seda:foo", "Hi World"); template.sendBodyAndHeader("direct:start", null, "source", "seda:foo"); template.sendBodyAndHeader("direct:start", null, "source", "seda:bar"); template.sendBodyAndHeader("direct:start", null, "source", "seda:foo"); assertMockEndpointsSatisfied(); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { @Override public void configure() { from("direct:start").pollEnrich().header("source").to("mock:result"); } }; } }
PollEnrichExpressionTest
java
apache__hadoop
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthJob.java
{ "start": 2086, "end": 3171 }
class ____ implements JobStory { @SuppressWarnings("StaticVariableName") private static Logger LOG = LoggerFactory.getLogger(SynthJob.class); private static final long MIN_MEMORY = 1024; private static final long MIN_VCORES = 1; private final Configuration conf; private final int id; @SuppressWarnings("ConstantName") private static final AtomicInteger sequence = new AtomicInteger(0); private final String name; private final String queueName; private final SynthTraceJobProducer.JobDefinition jobDef; private String type; // job timing private final long submitTime; private final long duration; private final long deadline; private Map<String, String> params; private long totalSlotTime = 0; // task information private List<SynthTask> tasks = new ArrayList<>(); private Map<String, List<SynthTask>> taskByType = new HashMap<>(); private Map<String, Integer> taskCounts = new HashMap<>(); private Map<String, Long> taskMemory = new HashMap<>(); private Map<String, Long> taskVcores = new HashMap<>(); /** * Nested
SynthJob
java
quarkusio__quarkus
integration-tests/reactive-messaging-context-propagation/src/main/java/io/quarkus/it/kafka/FlowerProducer.java
{ "start": 485, "end": 1213 }
class ____ { List<String> received = new CopyOnWriteArrayList<>(); @Channel("flowers-out") MutinyEmitter<String> emitter; @Inject RequestBean reqBean; @POST @Path("/produce") @Consumes(MediaType.TEXT_PLAIN) public void produce(String flower) { reqBean.setName(flower); Log.infof("bean: %s, id: %s", reqBean, reqBean.getId()); emitter.sendAndAwait(flower); } void addReceived(String flower) { received.add(flower); } public List<String> getReceived() { return received; } @GET @Path("/received") @Produces(MediaType.APPLICATION_JSON) public List<String> received() { return received; } }
FlowerProducer
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/internal/SessionLogging.java
{ "start": 1097, "end": 6299 }
interface ____ extends BasicLogger { String NAME = SubSystemLogging.BASE + ".session"; SessionLogging SESSION_LOGGER = Logger.getMessageLogger( MethodHandles.lookup(), SessionLogging.class, NAME ); @LogMessage(level = DEBUG) @Message("Session creation specified 'autoJoinTransactions', " + "which is invalid in conjunction with sharing JDBC connection between sessions; ignoring") void invalidAutoJoinTransactionsWithSharedConnection(); @LogMessage(level = DEBUG) @Message("Session creation specified a 'PhysicalConnectionHandlingMode', " + "which is invalid in conjunction with sharing JDBC connection between sessions; ignoring") void invalidPhysicalConnectionHandlingModeWithSharedConnection(); @LogMessage(level = TRACE) @Message("Opened Session [%s] at timestamp: %s") void openedSession(UUID sessionIdentifier, long timestamp); @LogMessage(level = TRACE) @Message("Already closed") void alreadyClosed(); @LogMessage(level = TRACE) @Message("Closing session [%s]") void closingSession(UUID sessionIdentifier); @LogMessage(level = WARN) @Message(id = 90010101, value = "Closing shared session with unprocessed transaction completion actions") void closingSharedSessionWithUnprocessedTxCompletions(); @LogMessage(level = TRACE) @Message("Forcing-closing session since factory is already closed") void forcingCloseBecauseFactoryClosed(); @LogMessage(level = TRACE) @Message("Skipping auto-flush since the session is closed") void skippingAutoFlushSessionClosed(); @LogMessage(level = TRACE) @Message("Automatically flushing session") void automaticallyFlushingSession(); @LogMessage(level = TRACE) @Message("Automatically flushing child session") void automaticallyFlushingChildSession(); @LogMessage(level = TRACE) @Message("Automatically closing session") void automaticallyClosingSession(); @LogMessage(level = TRACE) @Message("Automatically closing child session") void automaticallyClosingChildSession(); @LogMessage(level = TRACE) @Message("%s remove orphan before updates: [%s]") void 
removeOrphanBeforeUpdates(String timing, String entityInfo); @LogMessage(level = TRACE) @Message("Initializing proxy: %s") void initializingProxy(String entityInfo); @LogMessage(level = TRACE) @Message("Clearing effective entity graph for subsequent select") void clearingEffectiveEntityGraph(); @LogMessage(level = TRACE) @Message("Flushing to force deletion of re-saved object: %s") void flushingToForceDeletion(String entityInfo); @LogMessage(level = TRACE) @Message("Before transaction completion processing") void beforeTransactionCompletion(); @LogMessage(level = TRACE) @Message("After transaction completion processing (successful=%s, delayed=%s)") void afterTransactionCompletion(boolean successful, boolean delayed); @LogMessage(level = ERROR) @Message(id = 90010102, value = "JDBC exception executing SQL; transaction rolled back") void jdbcExceptionThrownWithTransactionRolledBack(@Cause Exception e); @LogMessage(level = DEBUG) @Message(id = 90010103, value = "Ignoring EntityNotFoundException for '%s.%s'") void ignoringEntityNotFound(String entityName, Object id); @LogMessage(level = WARN) @Message(id = 90010104, value = "Property '%s' is not serializable, value won't be set") void nonSerializableProperty(String propertyName); @LogMessage(level = WARN) @Message(id = 90010105, value = "Property having key null is illegal, value won't be set") void nullPropertyKey(); @LogMessage(level = TRACE) @Message("Serializing Session [%s]") void serializingSession(UUID sessionIdentifier); @LogMessage(level = TRACE) @Message("Deserializing Session [%s]") void deserializingSession(UUID sessionIdentifier); @LogMessage(level = ERROR) @Message(id = 90010106, value = "Exception in interceptor beforeTransactionCompletion()") void exceptionInBeforeTransactionCompletionInterceptor(@Cause Throwable e); @LogMessage(level = ERROR) @Message(id = 90010107, value = "Exception in interceptor afterTransactionCompletion()") void exceptionInAfterTransactionCompletionInterceptor(@Cause Throwable 
e); // StatelessSession-specific @LogMessage(level = TRACE) @Message("Refreshing transient %s") void refreshingTransient(String entityInfo); @LogMessage(level = TRACE) @Message("Initializing collection %s") void initializingCollection(String collectionInfo); @LogMessage(level = TRACE) @Message("Collection initialized from cache") void collectionInitializedFromCache(); @LogMessage(level = TRACE) @Message("Collection initialized") void collectionInitialized(); @LogMessage(level = TRACE) @Message("Entity proxy found in session cache") void entityProxyFoundInSessionCache(); @LogMessage(level = DEBUG) @Message("Ignoring NO_PROXY to honor laziness") void ignoringNoProxyToHonorLaziness(); @LogMessage(level = TRACE) @Message("Creating a HibernateProxy for to-one association with subclasses to honor laziness") void creatingHibernateProxyToHonorLaziness(); @LogMessage(level = TRACE) @Message("Collection fetched from cache") void collectionFetchedFromCache(); @LogMessage(level = TRACE) @Message("Collection fetched") void collectionFetched(); }
SessionLogging
java
micronaut-projects__micronaut-core
management/src/main/java/io/micronaut/management/endpoint/beans/BeansEndpoint.java
{ "start": 997, "end": 1580 }
class ____ { private final BeanDefinitionDataCollector<Object> beanDefinitionDataCollector; /** * @param beanDefinitionDataCollector The {@link BeanDefinitionDataCollector} */ public BeansEndpoint(BeanDefinitionDataCollector<Object> beanDefinitionDataCollector) { this.beanDefinitionDataCollector = beanDefinitionDataCollector; } /** * @return A {@link org.reactivestreams.Publisher} with the beans */ @Read @SingleResult public Object getBeans() { return beanDefinitionDataCollector.getData(); } }
BeansEndpoint
java
spring-projects__spring-framework
spring-web/src/main/java/org/springframework/http/client/OutputStreamPublisher.java
{ "start": 3870, "end": 10064 }
class ____<T> extends OutputStream implements Flow.Subscription { private static final Object READY = new Object(); private final Flow.Subscriber<? super T> actual; private final OutputStreamHandler outputStreamHandler; private final ByteMapper<T> byteMapper; private final int chunkSize; private final AtomicLong requested = new AtomicLong(); private final AtomicReference<@Nullable Object> parkedThread = new AtomicReference<>(); private volatile @Nullable Throwable error; private long produced; OutputStreamSubscription( Flow.Subscriber<? super T> actual, OutputStreamHandler outputStreamHandler, ByteMapper<T> byteMapper, int chunkSize) { this.actual = actual; this.outputStreamHandler = outputStreamHandler; this.byteMapper = byteMapper; this.chunkSize = chunkSize; } @Override public void write(int b) throws IOException { checkDemandAndAwaitIfNeeded(); T next = this.byteMapper.map(b); this.actual.onNext(next); this.produced++; } @Override public void write(byte[] b) throws IOException { write(b, 0, b.length); } @Override public void write(byte[] b, int off, int len) throws IOException { checkDemandAndAwaitIfNeeded(); T next = this.byteMapper.map(b, off, len); this.actual.onNext(next); this.produced++; } private void checkDemandAndAwaitIfNeeded() throws IOException { long r = this.requested.get(); if (isTerminated(r) || isCancelled(r)) { throw new IOException("Subscription has been terminated"); } long p = this.produced; if (p == r) { if (p > 0) { r = tryProduce(p); this.produced = 0; } while (true) { if (isTerminated(r) || isCancelled(r)) { throw new IOException("Subscription has been terminated"); } if (r != 0) { return; } await(); r = this.requested.get(); } } } private void invokeHandler() { // assume sync write within try-with-resource block // use BufferedOutputStream, so that written bytes are buffered // before publishing as byte buffer try (OutputStream outputStream = new BufferedOutputStream(this, this.chunkSize)) { 
this.outputStreamHandler.handle(outputStream); } catch (Exception ex) { long previousState = tryTerminate(); if (isCancelled(previousState)) { return; } if (isTerminated(previousState)) { // failure due to illegal requestN Throwable error = this.error; if (error != null) { this.actual.onError(error); return; } } this.actual.onError(ex); return; } long previousState = tryTerminate(); if (isCancelled(previousState)) { return; } if (isTerminated(previousState)) { // failure due to illegal requestN Throwable error = this.error; if (error != null) { this.actual.onError(error); return; } } this.actual.onComplete(); } @Override public void request(long n) { if (n <= 0) { this.error = new IllegalArgumentException("request should be a positive number"); long previousState = tryTerminate(); if (isTerminated(previousState) || isCancelled(previousState)) { return; } if (previousState > 0) { // error should eventually be observed and propagated return; } // resume parked thread, so it can observe error and propagate it resume(); return; } if (addCap(n) == 0) { // resume parked thread so it can continue the work resume(); } } @Override public void cancel() { long previousState = tryCancel(); if (isCancelled(previousState) || previousState > 0) { return; } // resume parked thread, so it can be unblocked and close all the resources resume(); } private void await() { Thread toUnpark = Thread.currentThread(); while (true) { Object current = this.parkedThread.get(); if (current == READY) { break; } if (current != null && current != toUnpark) { throw new IllegalStateException("Only one (Virtual)Thread can await!"); } if (this.parkedThread.compareAndSet(null, toUnpark)) { LockSupport.park(); // we don't just break here because park() can wake up spuriously // if we got a proper resume, get() == READY and the loop will quit above } } // clear the resume indicator so that the next await call will park without a resume() this.parkedThread.lazySet(null); } private void resume() { if 
(this.parkedThread.get() != READY) { Object old = this.parkedThread.getAndSet(READY); if (old != READY) { LockSupport.unpark((Thread)old); } } } private long tryCancel() { while (true) { long r = this.requested.get(); if (isCancelled(r)) { return r; } if (this.requested.compareAndSet(r, Long.MIN_VALUE)) { return r; } } } private long tryTerminate() { while (true) { long r = this.requested.get(); if (isCancelled(r) || isTerminated(r)) { return r; } if (this.requested.compareAndSet(r, Long.MIN_VALUE | Long.MAX_VALUE)) { return r; } } } private long tryProduce(long n) { while (true) { long current = this.requested.get(); if (isTerminated(current) || isCancelled(current)) { return current; } if (current == Long.MAX_VALUE) { return Long.MAX_VALUE; } long update = current - n; if (update < 0L) { update = 0L; } if (this.requested.compareAndSet(current, update)) { return update; } } } private long addCap(long n) { while (true) { long r = this.requested.get(); if (isTerminated(r) || isCancelled(r) || r == Long.MAX_VALUE) { return r; } long u = addCap(r, n); if (this.requested.compareAndSet(r, u)) { return r; } } } private static boolean isTerminated(long state) { return state == (Long.MIN_VALUE | Long.MAX_VALUE); } private static boolean isCancelled(long state) { return state == Long.MIN_VALUE; } private static long addCap(long a, long b) { long res = a + b; if (res < 0L) { return Long.MAX_VALUE; } return res; } } }
OutputStreamSubscription
java
hibernate__hibernate-orm
tooling/hibernate-gradle-plugin/src/main/java/org/hibernate/orm/tooling/gradle/misc/TransformationNaming.java
{ "start": 651, "end": 1646 }
class ____ { public TransformationNaming() { } /** * A prefix to apply to the file name. * <p> * E.g. given an {@code `hbm.xml`} file named {@code `my-mappings.hbm.xml`} * and a configured prefix of {@code `transformed-`}, the copy file's * name would be {@code `transformed-my-mappings.hbm.xml`} * * @see #getExtension() */ @Input @Optional abstract public Property<String> getPrefix(); /** * A suffix to apply to the file name. * <p> * E.g. given an {@code `hbm.xml`} file named {@code `my-mappings.hbm.xml`} * and a configured suffix of {@code `-transformed`}, the copy file's * name would be {@code `my-mappings-transformed.hbm.xml`} * * @see #getExtension() */ @Input @Optional abstract public Property<String> getSuffix(); @Input @Optional abstract public Property<String> getExtension(); public boolean areNoneDefined() { return !getPrefix().isPresent() && !getSuffix().isPresent() && !getExtension().isPresent(); } }
TransformationNaming
java
spring-projects__spring-framework
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/MethodValidationTests.java
{ "start": 18739, "end": 21016 }
class ____ implements jakarta.validation.Validator, Validator { private final SpringValidatorAdapter delegate; private int maxInvocationsExpected = 1; private int validationCount; private int methodValidationCount; /** * Constructor with maxCount=1. */ private InvocationCountingValidator(SpringValidatorAdapter delegate) { this.delegate = delegate; } public void setMaxInvocationsExpected(int maxInvocationsExpected) { this.maxInvocationsExpected = maxInvocationsExpected; } /** * Total number of times Bean Validation was invoked. */ public int getValidationCount() { return this.validationCount; } /** * Number of times method level Bean Validation was invoked. */ public int getMethodValidationCount() { return this.methodValidationCount; } @Override public <T> Set<ConstraintViolation<T>> validate(T object, Class<?>... groups) { throw new UnsupportedOperationException(); } @Override public <T> Set<ConstraintViolation<T>> validateProperty(T object, String propertyName, Class<?>... groups) { throw new UnsupportedOperationException(); } @Override public <T> Set<ConstraintViolation<T>> validateValue(Class<T> beanType, String propertyName, Object value, Class<?>... 
groups) { throw new UnsupportedOperationException(); } @Override public BeanDescriptor getConstraintsForClass(Class<?> clazz) { throw new UnsupportedOperationException(); } @Override public <T> T unwrap(Class<T> type) { throw new UnsupportedOperationException(); } @Override public ExecutableValidator forExecutables() { this.methodValidationCount++; assertCountAndIncrement(); return this.delegate.forExecutables(); } @Override public boolean supports(Class<?> clazz) { return true; } @Override public void validate(Object target, Errors errors) { assertCountAndIncrement(); this.delegate.validate(target, errors); } private void assertCountAndIncrement() { assertThat(this.validationCount++).as("Too many calls to Bean Validation").isLessThan(this.maxInvocationsExpected); } } @Constraint(validatedBy = TestConstraintValidator.class) @Target({ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) public @
InvocationCountingValidator
java
ReactiveX__RxJava
src/test/java/io/reactivex/rxjava3/internal/operators/single/SingleFlatMapBiSelectorTest.java
{ "start": 1027, "end": 6654 }
class ____ extends RxJavaTest { BiFunction<Integer, Integer, String> stringCombine() { return new BiFunction<Integer, Integer, String>() { @Override public String apply(Integer a, Integer b) throws Exception { return a + ":" + b; } }; } @Test public void normal() { Single.just(1) .flatMap(new Function<Integer, SingleSource<Integer>>() { @Override public SingleSource<Integer> apply(Integer v) throws Exception { return Single.just(2); } }, stringCombine()) .test() .assertResult("1:2"); } @Test public void errorWithJust() { final int[] call = { 0 }; Single.<Integer>error(new TestException()) .flatMap(new Function<Integer, SingleSource<Integer>>() { @Override public SingleSource<Integer> apply(Integer v) throws Exception { call[0]++; return Single.just(1); } }, stringCombine()) .test() .assertFailure(TestException.class); assertEquals(0, call[0]); } @Test public void justWithError() { final int[] call = { 0 }; Single.just(1) .flatMap(new Function<Integer, SingleSource<Integer>>() { @Override public SingleSource<Integer> apply(Integer v) throws Exception { call[0]++; return Single.<Integer>error(new TestException()); } }, stringCombine()) .test() .assertFailure(TestException.class); assertEquals(1, call[0]); } @Test public void dispose() { TestHelper.checkDisposed(SingleSubject.create() .flatMap(new Function<Object, SingleSource<Integer>>() { @Override public SingleSource<Integer> apply(Object v) throws Exception { return Single.just(1); } }, new BiFunction<Object, Integer, Object>() { @Override public Object apply(Object a, Integer b) throws Exception { return b; } })); } @Test public void doubleOnSubscribe() { TestHelper.checkDoubleOnSubscribeSingle(new Function<Single<Object>, SingleSource<Object>>() { @Override public SingleSource<Object> apply(Single<Object> v) throws Exception { return v.flatMap(new Function<Object, SingleSource<Integer>>() { @Override public SingleSource<Integer> apply(Object v) throws Exception { return Single.just(1); } }, new 
BiFunction<Object, Integer, Object>() { @Override public Object apply(Object a, Integer b) throws Exception { return b; } }); } }); } @Test public void mapperThrows() { Single.just(1) .flatMap(new Function<Integer, SingleSource<Integer>>() { @Override public SingleSource<Integer> apply(Integer v) throws Exception { throw new TestException(); } }, stringCombine()) .test() .assertFailure(TestException.class); } @Test public void mapperReturnsNull() { Single.just(1) .flatMap(new Function<Integer, SingleSource<Integer>>() { @Override public SingleSource<Integer> apply(Integer v) throws Exception { return null; } }, stringCombine()) .test() .assertFailure(NullPointerException.class); } @Test public void resultSelectorThrows() { Single.just(1) .flatMap(new Function<Integer, SingleSource<Integer>>() { @Override public SingleSource<Integer> apply(Integer v) throws Exception { return Single.just(2); } }, new BiFunction<Integer, Integer, Object>() { @Override public Object apply(Integer a, Integer b) throws Exception { throw new TestException(); } }) .test() .assertFailure(TestException.class); } @Test public void resultSelectorReturnsNull() { Single.just(1) .flatMap(new Function<Integer, SingleSource<Integer>>() { @Override public SingleSource<Integer> apply(Integer v) throws Exception { return Single.just(2); } }, new BiFunction<Integer, Integer, Object>() { @Override public Object apply(Integer a, Integer b) throws Exception { return null; } }) .test() .assertFailure(NullPointerException.class); } @Test public void mapperCancels() { final TestObserver<Integer> to = new TestObserver<>(); Single.just(1) .flatMap(new Function<Integer, SingleSource<Integer>>() { @Override public SingleSource<Integer> apply(Integer v) throws Exception { to.dispose(); return Single.just(2); } }, new BiFunction<Integer, Integer, Integer>() { @Override public Integer apply(Integer a, Integer b) throws Exception { throw new IllegalStateException(); } }) .subscribeWith(to) .assertEmpty(); } }
SingleFlatMapBiSelectorTest
java
apache__camel
components/camel-sql/src/main/java/org/apache/camel/component/sql/stored/BatchCallableStatementCreatorFactory.java
{ "start": 1381, "end": 4150 }
class ____ { final CallableStatementCreatorFactory callableStatementCreatorFactory; final List<SqlParameter> sqlParameterList; final Template template; public BatchCallableStatementCreatorFactory(Template template) { this.template = template; this.sqlParameterList = createParams(); this.callableStatementCreatorFactory = new CallableStatementCreatorFactory(formatSql(), createParams()); } public void addParameter(CallableStatement callableStatement, Map<String, ?> batchRow) throws SQLException { int i = 1; for (SqlParameter parameter : getSqlParameterList()) { StatementCreatorUtils.setParameterValue(callableStatement, i, parameter, batchRow.get(parameter.getName())); i++; } } private String formatSql() { return "{call " + this.template.getProcedureName() + "(" + repeatParameter(this.template.getParameterList() .size()) + ")}"; } private String repeatParameter(int size) { StringBuilder ret = new StringBuilder(); for (int i = 0; i < size; i++) { ret.append('?'); if (i + 1 < size) { ret.append(','); } } return ret.toString(); } private List<SqlParameter> createParams() { List<SqlParameter> params = new ArrayList<>(); for (Object parameter : template.getParameterList()) { if (parameter instanceof InParameter inputParameter) { SqlParameter sqlParameter; if (inputParameter.getScale() != null) { sqlParameter = new SqlParameter( inputParameter.getName(), inputParameter.getSqlType(), inputParameter.getScale()); } else if (inputParameter.getTypeName() != null) { sqlParameter = new SqlParameter( inputParameter.getName(), inputParameter.getSqlType(), inputParameter.getTypeName()); } else { sqlParameter = new SqlParameter(inputParameter.getName(), inputParameter.getSqlType()); } params.add(sqlParameter); } else { throw new UnsupportedOperationException("Only IN parameters supported by batch!"); } } return params; } public CallableStatementCreator newCallableStatementCreator(Map<String, ?> params) { return this.callableStatementCreatorFactory.newCallableStatementCreator(params); } 
public List<SqlParameter> getSqlParameterList() { return sqlParameterList; } public Template getTemplate() { return template; } }
BatchCallableStatementCreatorFactory
java
micronaut-projects__micronaut-core
core/src/main/java/io/micronaut/core/util/StreamUtils.java
{ "start": 4644, "end": 6813 }
class ____ { A acc; T obj; boolean hasAny; /** * Constructor. * @param acc accumulator */ Container(A acc) { this.acc = acc; } } Supplier<Container> supplier = () -> new Container(downstreamSupplier.get()); BiConsumer<Container, T> accumulator = (acc, t) -> { if (!acc.hasAny) { downstreamAccumulator.accept(acc.acc, t); acc.obj = t; acc.hasAny = true; } else { int cmp = comparator.compare(t, acc.obj); if (cmp < 0) { acc.acc = downstreamSupplier.get(); acc.obj = t; } if (cmp <= 0) { downstreamAccumulator.accept(acc.acc, t); } } }; BinaryOperator<Container> combiner = (acc1, acc2) -> { if (!acc2.hasAny) { return acc1; } if (!acc1.hasAny) { return acc2; } int cmp = comparator.compare(acc1.obj, acc2.obj); if (cmp < 0) { return acc1; } if (cmp > 0) { return acc2; } acc1.acc = downstreamCombiner.apply(acc1.acc, acc2.acc); return acc1; }; Function<Container, D> finisher = acc -> downstream.finisher().apply(acc.acc); return Collector.of(supplier, accumulator, combiner, finisher); } /** * @param collectionFactory The collection factory * @param <T> The type of the input elements * @param <A> The accumulation type * @return An immutable collection */ public static <T, A extends Collection<T>> Collector<T, A, Collection<T>> toImmutableCollection(Supplier<A> collectionFactory) { return Collector.of(collectionFactory, Collection::add, (left, right) -> { left.addAll(right); return left; }, Collections::unmodifiableCollection); } }
Container
java
spring-projects__spring-security
saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/core/Saml2X509Credential.java
{ "start": 8189, "end": 8286 }
enum ____ { VERIFICATION, ENCRYPTION, SIGNING, DECRYPTION, } }
Saml2X509CredentialType
java
mapstruct__mapstruct
core/src/main/java/org/mapstruct/Mapper.java
{ "start": 1286, "end": 1608 }
class ____ { * public String mapMark(String mark) { * return mark.toUpperCase(); * } * } * // we have CarMapper * &#64;Mapper( * componentModel = MappingConstants.ComponentModel.SPRING, * uses = MarkMapper.class, * injectionStrategy = InjectionStrategy.CONSTRUCTOR) * public
MarkMapper
java
apache__camel
dsl/camel-componentdsl/src/test/java/org/apache/camel/builder/component/ComponentsBuilderFactoryTest.java
{ "start": 1399, "end": 3754 }
class ____ extends CamelTestSupport { @Test public void testIfCreateComponentCorrectlyWithoutContextProvided() { final TimerComponent timerComponent = ComponentsBuilderFactory.timer().build(); assertNotNull(timerComponent); } @Test public void testNegativeDelay() throws Exception { final MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedMessageCount(10); context.getRouteController().startAllRoutes(); MockEndpoint.assertIsSatisfied(context); } @Test public void testIfResolvePlaceholdersCorrectly() { context.getPropertiesComponent().setLocation("classpath:application.properties"); final KafkaComponent kafkaComponent = ComponentsBuilderFactory.kafka() .brokers("{{kafka.host}}:{{kafka.port}}") .build(context); assertNotNull(kafkaComponent.getConfiguration()); assertEquals("localhost:9092", kafkaComponent.getConfiguration().getBrokers()); } @Test public void testIfSetsSettingsCorrectly() { final KafkaConfiguration kafkaConfiguration = new KafkaConfiguration(); kafkaConfiguration.setGroupId("testGroup"); kafkaConfiguration.setConsumerRequestTimeoutMs(5000); kafkaConfiguration.setBrokers("localhost:9092"); final KafkaComponent kafkaComponent = ComponentsBuilderFactory.kafka() .configuration(kafkaConfiguration) .allowManualCommit(true) .build(); assertNotNull(kafkaComponent); assertEquals("localhost:9092", kafkaComponent.getConfiguration().getBrokers()); assertTrue(kafkaComponent.getConfiguration().isAllowManualCommit()); assertEquals("testGroup", kafkaComponent.getConfiguration().getGroupId()); assertEquals(5000, kafkaComponent.getConfiguration().getConsumerRequestTimeoutMs().intValue()); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { @Override public void configure() throws Exception { ComponentsBuilderFactory.timer().register(context, "awesomeTimer"); from("awesomeTimer:foo?delay=-1&repeatCount=10") .to("mock:result"); } }; } }
ComponentsBuilderFactoryTest
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/aot/nativex/ReflectionHintsAttributes.java
{ "start": 2127, "end": 6260 }
class ____ { private static final Comparator<JdkProxyHint> JDK_PROXY_HINT_COMPARATOR = (left, right) -> { String leftSignature = left.getProxiedInterfaces().stream() .map(TypeReference::getCanonicalName).collect(Collectors.joining(",")); String rightSignature = right.getProxiedInterfaces().stream() .map(TypeReference::getCanonicalName).collect(Collectors.joining(",")); return leftSignature.compareTo(rightSignature); }; public List<Map<String, Object>> reflection(RuntimeHints hints) { List<Map<String, Object>> reflectionHints = new ArrayList<>(); reflectionHints.addAll(hints.reflection().typeHints() .sorted(Comparator.comparing(TypeHint::getType)) .map(this::toAttributes).toList()); reflectionHints.addAll(hints.proxies().jdkProxyHints() .sorted(JDK_PROXY_HINT_COMPARATOR) .map(this::toAttributes).toList()); return reflectionHints; } public List<Map<String, Object>> jni(RuntimeHints hints) { List<Map<String, Object>> jniHints = new ArrayList<>(); jniHints.addAll(hints.jni().typeHints() .sorted(Comparator.comparing(TypeHint::getType)) .map(this::toAttributes).toList()); return jniHints; } private Map<String, Object> toAttributes(TypeHint hint) { Map<String, Object> attributes = new LinkedHashMap<>(); attributes.put("type", hint.getType()); handleCondition(attributes, hint); handleCategories(attributes, hint.getMemberCategories()); handleFields(attributes, hint.fields()); handleExecutables(attributes, Stream.concat( hint.constructors(), hint.methods()).sorted().toList()); return attributes; } private void handleCondition(Map<String, Object> attributes, ConditionalHint hint) { if (hint.getReachableType() != null) { attributes.put("condition", Map.of("typeReached", hint.getReachableType())); } } private void handleFields(Map<String, Object> attributes, Stream<FieldHint> fields) { addIfNotEmpty(attributes, "fields", fields .sorted(Comparator.comparing(FieldHint::getName, String::compareToIgnoreCase)) .map(fieldHint -> Map.of("name", fieldHint.getName())) .toList()); } 
private void handleExecutables(Map<String, Object> attributes, List<ExecutableHint> hints) { addIfNotEmpty(attributes, "methods", hints.stream() .filter(h -> h.getMode().equals(ExecutableMode.INVOKE)) .map(this::toAttributes).toList()); } private Map<String, Object> toAttributes(ExecutableHint hint) { Map<String, Object> attributes = new LinkedHashMap<>(); attributes.put("name", hint.getName()); attributes.put("parameterTypes", hint.getParameterTypes()); return attributes; } @SuppressWarnings("removal") private void handleCategories(Map<String, Object> attributes, Set<MemberCategory> categories) { categories.stream().sorted().forEach(category -> { switch (category) { case ACCESS_PUBLIC_FIELDS, PUBLIC_FIELDS -> attributes.put("allPublicFields", true); case ACCESS_DECLARED_FIELDS, DECLARED_FIELDS -> attributes.put("allDeclaredFields", true); case INVOKE_PUBLIC_CONSTRUCTORS -> attributes.put("allPublicConstructors", true); case INVOKE_DECLARED_CONSTRUCTORS -> attributes.put("allDeclaredConstructors", true); case INVOKE_PUBLIC_METHODS -> attributes.put("allPublicMethods", true); case INVOKE_DECLARED_METHODS -> attributes.put("allDeclaredMethods", true); case PUBLIC_CLASSES -> attributes.put("allPublicClasses", true); case DECLARED_CLASSES -> attributes.put("allDeclaredClasses", true); case UNSAFE_ALLOCATED -> attributes.put("unsafeAllocated", true); } } ); } private void addIfNotEmpty(Map<String, Object> attributes, String name, @Nullable Object value) { if (value != null && (value instanceof Collection<?> collection && !collection.isEmpty())) { attributes.put(name, value); } } private Map<String, Object> toAttributes(JdkProxyHint hint) { Map<String, Object> attributes = new LinkedHashMap<>(); handleCondition(attributes, hint); attributes.put("type", Map.of("proxy", hint.getProxiedInterfaces())); return attributes; } }
ReflectionHintsAttributes
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/ext/javatime/ser/YearMonthSerializationTest.java
{ "start": 1216, "end": 1296 }
class ____ extends DateTimeTestBase { private static
YearMonthSerializationTest
java
quarkusio__quarkus
integration-tests/virtual-threads/resteasy-reactive-virtual-threads/src/test/java/io/quarkus/virtual/rr/RunOnVirtualThreadTest.java
{ "start": 391, "end": 599 }
class ____ { @Test void testGet() { // test all variations: // - MyResource ("/"): simple JAX-RS bean // - ResourceImpl ("/itf"): bean implementing a JAX-RS
RunOnVirtualThreadTest
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/StaticAssignmentOfThrowableTest.java
{ "start": 1210, "end": 1709 }
class ____ { static Throwable foo; public Test(int foo) {} public void foo() { // BUG: Diagnostic contains: [StaticAssignmentOfThrowable] foo = new NullPointerException("assign in method"); } } """) .doTest(); } @Test public void staticWithThrowableDuringInitialization_error() { helper .addSourceLines( "Test.java", """
Test
java
mockito__mockito
mockito-core/src/main/java/org/mockito/DoNotMock.java
{ "start": 1199, "end": 1297 }
class ____ {} * </code></pre> */ @Target({TYPE}) @Retention(RUNTIME) @Documented public @
DoNotMockMe
java
apache__camel
components/camel-jpa/src/main/java/org/apache/camel/component/jpa/JpaPollingConsumer.java
{ "start": 1674, "end": 9721 }
class ____ extends PollingConsumerSupport { private static final Logger LOG = LoggerFactory.getLogger(JpaPollingConsumer.class); private volatile ExecutorService executorService; private final EntityManagerFactory entityManagerFactory; private final TransactionStrategy transactionStrategy; private String query; private String namedQuery; private String nativeQuery; private LockModeType lockModeType = LockModeType.PESSIMISTIC_WRITE; private Class<?> resultClass; private QueryFactory queryFactory; private Map<String, Object> parameters; public JpaPollingConsumer(JpaEndpoint endpoint) { super(endpoint); this.entityManagerFactory = endpoint.getEntityManagerFactory(); this.transactionStrategy = endpoint.getTransactionStrategy(); } @Override public JpaEndpoint getEndpoint() { return (JpaEndpoint) super.getEndpoint(); } public String getQuery() { return query; } public void setQuery(String query) { this.query = query; } public String getNamedQuery() { return namedQuery; } public void setNamedQuery(String namedQuery) { this.namedQuery = namedQuery; } public String getNativeQuery() { return nativeQuery; } public void setNativeQuery(String nativeQuery) { this.nativeQuery = nativeQuery; } public LockModeType getLockModeType() { return lockModeType; } public void setLockModeType(LockModeType lockModeType) { this.lockModeType = lockModeType; } public Class<?> getResultClass() { return resultClass; } public void setResultClass(Class<?> resultClass) { this.resultClass = resultClass; } public QueryFactory getQueryFactory() { return queryFactory; } public void setQueryFactory(QueryFactory queryFactory) { this.queryFactory = queryFactory; } public Map<String, Object> getParameters() { return parameters; } public void setParameters(Map<String, Object> parameters) { this.parameters = parameters; } @Override public Exchange receive() { // resolve the entity manager before evaluating the expression final EntityManager entityManager = getTargetEntityManager(null, entityManagerFactory, 
getEndpoint().isUsePassedInEntityManager(), getEndpoint().isSharedEntityManager(), true); Exchange exchange = getEndpoint().createExchange(); exchange.getIn().setHeader(JpaConstants.ENTITY_MANAGER, entityManager); transactionStrategy.executeInTransaction(new Runnable() { @Override public void run() { if (getEndpoint().isJoinTransaction()) { entityManager.joinTransaction(); } Query innerQuery = getQueryFactory().createQuery(entityManager); configureParameters(innerQuery); if (getEndpoint().isConsumeLockEntity()) { innerQuery.setLockMode(getLockModeType()); } LOG.trace("Created query {}", innerQuery); Object answer; try { List<?> results = innerQuery.getResultList(); if (results != null && results.size() == 1) { // we only have 1 entity so return that answer = results.get(0); } else { // we have more data so return a list answer = results; } // commit LOG.debug("Flushing EntityManager"); entityManager.flush(); // must clear after flush entityManager.clear(); } catch (PersistenceException e) { LOG.info("Disposing EntityManager {} on {} due to coming transaction rollback", entityManager, this); entityManager.close(); throw e; } exchange.getIn().setBody(answer); } }); return exchange; } @Override public Exchange receiveNoWait() { // call receive as-is return receive(); } @Override public Exchange receive(long timeout) { // need to use a thread pool to perform the task so we can support timeout if (executorService == null) { executorService = getEndpoint().getComponent().getOrCreatePollingConsumerExecutorService(); } // the task is the receive method Future<Exchange> future = executorService.submit((Callable<Exchange>) this::receive); try { return future.get(timeout, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw RuntimeCamelException.wrapRuntimeCamelException(e); } catch (ExecutionException e) { throw RuntimeCamelException.wrapRuntimeCamelException(e); } catch (TimeoutException e) { // ignore as we hit timeout then 
return null } return null; } @Override protected void doInit() throws Exception { super.doInit(); if (queryFactory == null) { queryFactory = createQueryFactory(); if (queryFactory == null) { throw new IllegalArgumentException( "No queryType property configured on this consumer, nor an entityType configured on the endpoint so cannot consume"); } } } @Override protected void doStop() throws Exception { // noop } protected void configureParameters(Query query) { int maxResults = getEndpoint().getMaximumResults(); if (maxResults > 0) { query.setMaxResults(maxResults); } // setup the parameter if (parameters != null) { for (Map.Entry<String, Object> entry : parameters.entrySet()) { query.setParameter(entry.getKey(), entry.getValue()); } } } protected Exchange createExchange(Object result, EntityManager entityManager) { Exchange exchange = getEndpoint().createExchange(); exchange.getIn().setBody(result); exchange.getIn().setHeader(JpaConstants.ENTITY_MANAGER, entityManager); return exchange; } protected QueryFactory createQueryFactory() { if (query != null) { return QueryBuilder.query(query); } else if (namedQuery != null) { return QueryBuilder.namedQuery(namedQuery); } else if (nativeQuery != null) { if (resultClass != null) { return QueryBuilder.nativeQuery(nativeQuery, resultClass); } else { return QueryBuilder.nativeQuery(nativeQuery); } } else { Class<?> entityType = getEndpoint().getEntityType(); if (entityType == null) { return null; } else { // Check if we have a property name on the @Entity annotation String name = getEntityName(entityType); if (name != null) { return QueryBuilder.query("select x from " + name + " x"); } else { // Remove package name of the entity to be conform with JPA 1.0 spec return QueryBuilder.query("select x from " + entityType.getSimpleName() + " x"); } } } } protected String getEntityName(Class<?> clazz) { Entity entity = clazz.getAnnotation(Entity.class); // Check if the property name has been defined for Entity annotation if (entity != 
null && !entity.name().isEmpty()) { return entity.name(); } else { return null; } } }
JpaPollingConsumer
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/component/properties/PropertiesComponentOnlyUseDefaultValuesTest.java
{ "start": 1087, "end": 2543 }
class ____ extends ContextTestSupport { @Override public boolean isUseRouteBuilder() { return false; } @Test public void testOnlyDefaults() throws Exception { context.addRoutes(new RouteBuilder() { @Override public void configure() { from("direct:start").to("{{foo:mock:foo}}").to("{{bar:mock:bar}}"); } }); context.start(); getMockEndpoint("mock:foo").expectedMessageCount(1); getMockEndpoint("mock:bar").expectedMessageCount(1); template.sendBody("direct:start", "Hello World"); assertMockEndpointsSatisfied(); } @Test public void testOneMissing() throws Exception { context.addRoutes(new RouteBuilder() { @Override public void configure() { from("direct:start").to("{{foo:mock:foo}}").to("{{bar}}"); } }); assertThrows(Exception.class, () -> context.start(), "Should have thrown exception"); } @Test public void testAllMissing() throws Exception { context.addRoutes(new RouteBuilder() { @Override public void configure() { from("direct:start").to("{{foo:mock:foo}}").to("{{bar}}"); } }); Assertions.assertThrows(Exception.class, () -> context.start(), "Should have thrown exception"); } }
PropertiesComponentOnlyUseDefaultValuesTest
java
apache__camel
components/camel-ai/camel-langchain4j-embeddingstore/src/main/java/org/apache/camel/component/langchain4j/embeddingstore/LangChain4jEmbeddingStoreEndpoint.java
{ "start": 1757, "end": 3096 }
class ____ extends DefaultEndpoint { @Metadata(required = true) @UriPath(description = "The id of the embedding store") private final String embeddingStoreId; @UriParam private LangChain4jEmbeddingStoreConfiguration configuration; public LangChain4jEmbeddingStoreEndpoint(String endpointUri, Component component, String embeddingStoreId, LangChain4jEmbeddingStoreConfiguration configuration) { super(endpointUri, component); this.embeddingStoreId = embeddingStoreId; this.configuration = configuration; } public LangChain4jEmbeddingStoreConfiguration getConfiguration() { return configuration; } public String getEmbeddingId() { return this.embeddingStoreId; } @Override public Producer createProducer() throws Exception { return new LangChain4jEmbeddingStoreProducer(this); } @Override public Consumer createConsumer(Processor processor) throws Exception { throw new UnsupportedOperationException("Consumer is not implemented for this component"); } @Override public void doStart() throws Exception { super.doStart(); } @Override public void doStop() throws Exception { super.doStop(); } }
LangChain4jEmbeddingStoreEndpoint
java
apache__rocketmq
remoting/src/main/java/org/apache/rocketmq/remoting/protocol/body/KVTable.java
{ "start": 956, "end": 1234 }
class ____ extends RemotingSerializable { private HashMap<String, String> table = new HashMap<>(); public HashMap<String, String> getTable() { return table; } public void setTable(HashMap<String, String> table) { this.table = table; } }
KVTable
java
quarkusio__quarkus
integration-tests/grpc-interceptors/src/main/java/io/quarkus/grpc/examples/interceptors/ServerInterceptors.java
{ "start": 670, "end": 1038 }
class ____ implements ServerInterceptor { @Override public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> call, Metadata metadata, ServerCallHandler<ReqT, RespT> next) { HelloWorldEndpoint.invoked.add(getClass().getName()); return next.startCall(call, metadata); } } }
Base
java
quarkusio__quarkus
extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/codecs/Codec.java
{ "start": 149, "end": 371 }
interface ____ serialize and deserialize data to and from Redis. * A set of default codecs are provided for Strings, Integers, Doubles and Byte arrays. * For custom types, either there is a specific implementation of this
to
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/type/EnumUpdateTest.java
{ "start": 1625, "end": 1711 }
enum ____ { BLACK, BLONDE, BROWN; } @Entity(name = "Person") public static
HairColor
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/temporal/MySQLTimestampPropertyTest.java
{ "start": 5781, "end": 6200 }
class ____ { @GeneratedValue @Id private long id; @Temporal(value = TemporalType.TIMESTAMP) private Date ts; @Temporal(value = TemporalType.TIMESTAMP) @Generated @ColumnDefault(value = "CURRENT_TIMESTAMP(6)") private Date tsColumnDefault; @Temporal(value = TemporalType.TIMESTAMP) @Generated @Column(columnDefinition = "datetime(6) default NOW(6)") private Date tsColumnDefinition; } }
Entity
java
apache__dubbo
dubbo-remoting/dubbo-remoting-http12/src/main/java/org/apache/dubbo/remoting/http12/h1/Http1ServerTransportListenerFactory.java
{ "start": 1137, "end": 1299 }
interface ____ { Http1ServerTransportListener newInstance(HttpChannel httpChannel, URL url, FrameworkModel frameworkModel); }
Http1ServerTransportListenerFactory
java
apache__flink
flink-formats/flink-avro/src/test/java/org/apache/flink/formats/avro/AvroOutputFormatITCase.java
{ "start": 7036, "end": 8478 }
class ____ extends RichMapFunction<Tuple3<String, Integer, String>, User> { @Override public User map(Tuple3<String, Integer, String> value) { User user = new User(); user.setName(value.f0); user.setFavoriteNumber(value.f1); user.setFavoriteColor(value.f2); user.setTypeBoolTest(true); user.setTypeArrayString(Collections.emptyList()); user.setTypeArrayBoolean(Collections.emptyList()); user.setTypeEnum(Colors.BLUE); user.setTypeMap(Collections.emptyMap()); user.setTypeBytes(ByteBuffer.allocate(10)); user.setTypeDate(LocalDate.parse("2014-03-01")); user.setTypeTimeMillis(LocalTime.parse("12:12:12")); user.setTypeTimeMicros(LocalTime.ofSecondOfDay(0).plus(123456L, ChronoUnit.MICROS)); user.setTypeTimestampMillis(Instant.parse("2014-03-01T12:12:12.321Z")); user.setTypeTimestampMicros(Instant.ofEpochSecond(0).plus(123456L, ChronoUnit.MICROS)); // 20.00 user.setTypeDecimalBytes( ByteBuffer.wrap(BigDecimal.valueOf(2000, 2).unscaledValue().toByteArray())); // 20.00 user.setTypeDecimalFixed( new Fixed2(BigDecimal.valueOf(2000, 2).unscaledValue().toByteArray())); return user; } } private static final
ConvertToUser
java
apache__flink
flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/source/datagen/SequenceGenerator.java
{ "start": 1679, "end": 7058 }
class ____<T> implements DataGenerator<T> { private final long start; private final long end; private transient ListState<Long> checkpointedState; protected transient Deque<Long> valuesToEmit; /** * Creates a DataGenerator that emits all numbers from the given interval exactly once. * * @param start Start of the range of numbers to emit. * @param end End of the range of numbers to emit. */ public SequenceGenerator(long start, long end) { this.start = start; this.end = end; } @Override public void open( String name, FunctionInitializationContext context, RuntimeContext runtimeContext) throws Exception { Preconditions.checkState( this.checkpointedState == null, "The " + getClass().getSimpleName() + " has already been initialized."); this.checkpointedState = context.getOperatorStateStore() .getListState( new ListStateDescriptor<>( name + "-sequence-state", LongSerializer.INSTANCE)); this.valuesToEmit = new ArrayDeque<>(); if (context.isRestored()) { // upon restoring for (Long v : this.checkpointedState.get()) { this.valuesToEmit.add(v); } } else { // the first time the job is executed final int stepSize = runtimeContext.getTaskInfo().getNumberOfParallelSubtasks(); final int taskIdx = runtimeContext.getTaskInfo().getIndexOfThisSubtask(); final long congruence = start + taskIdx; long totalNoOfElements = Math.abs(end - start + 1); final int baseSize = safeDivide(totalNoOfElements, stepSize); final int toCollect = (totalNoOfElements % stepSize > taskIdx) ? 
baseSize + 1 : baseSize; for (long collected = 0; collected < toCollect; collected++) { this.valuesToEmit.add(collected * stepSize + congruence); } } } @Override public void snapshotState(FunctionSnapshotContext context) throws Exception { Preconditions.checkState( this.checkpointedState != null, "The " + getClass().getSimpleName() + " state has not been properly initialized."); this.checkpointedState.update(new ArrayList<>(this.valuesToEmit)); } @Override public boolean hasNext() { return !this.valuesToEmit.isEmpty(); } private static int safeDivide(long left, long right) { Preconditions.checkArgument(right > 0); Preconditions.checkArgument(left >= 0); Preconditions.checkArgument(left <= Integer.MAX_VALUE * right); return (int) (left / right); } public static SequenceGenerator<Long> longGenerator(long start, long end) { return new SequenceGenerator<Long>(start, end) { @Override public Long next() { return valuesToEmit.poll(); } }; } public static SequenceGenerator<Integer> intGenerator(int start, int end) { return new SequenceGenerator<Integer>(start, end) { @Override public Integer next() { return valuesToEmit.poll().intValue(); } }; } public static SequenceGenerator<Short> shortGenerator(short start, short end) { return new SequenceGenerator<Short>(start, end) { @Override public Short next() { return valuesToEmit.poll().shortValue(); } }; } public static SequenceGenerator<Byte> byteGenerator(byte start, byte end) { return new SequenceGenerator<Byte>(start, end) { @Override public Byte next() { return valuesToEmit.poll().byteValue(); } }; } public static SequenceGenerator<Float> floatGenerator(short start, short end) { return new SequenceGenerator<Float>(start, end) { @Override public Float next() { return valuesToEmit.poll().floatValue(); } }; } public static SequenceGenerator<Double> doubleGenerator(int start, int end) { return new SequenceGenerator<Double>(start, end) { @Override public Double next() { return valuesToEmit.poll().doubleValue(); } }; } public 
static SequenceGenerator<BigDecimal> bigDecimalGenerator( int start, int end, int precision, int scale) { return new SequenceGenerator<BigDecimal>(start, end) { @Override public BigDecimal next() { BigDecimal decimal = new BigDecimal( valuesToEmit.poll().doubleValue(), new MathContext(precision)); return decimal.setScale(scale, RoundingMode.DOWN); } }; } public static SequenceGenerator<String> stringGenerator(long start, long end) { return new SequenceGenerator<String>(start, end) { @Override public String next() { return valuesToEmit.poll().toString(); } }; } }
SequenceGenerator
java
apache__camel
core/camel-support/src/main/java/org/apache/camel/support/PropertyConfigurerHelper.java
{ "start": 4078, "end": 5435 }
class ____ first, then simple name, and root key last String[] names = new String[] { targetType.getName(), targetType.getSimpleName(), targetType.getName() + "-configurer", targetType.getSimpleName() + "-configurer" }; for (String n : names) { PropertyConfigurer configurer = PluginHelper.getConfigurerResolver(context).resolvePropertyConfigurer(n, context); if (configurer != null) { return configurer; } } return null; } /** * Resolves the given configurer. * * @param context the camel context * @param target the target object for which we need a {@link org.apache.camel.spi.PropertyConfigurer} * @param type the specific type of {@link org.apache.camel.spi.PropertyConfigurer} * @return the resolved configurer, or <tt>null</tt> if no configurer could be found */ public static <T> T resolvePropertyConfigurer(CamelContext context, Object target, Class<T> type) { ObjectHelper.notNull(target, "target"); ObjectHelper.notNull(context, "context"); PropertyConfigurer configurer = resolvePropertyConfigurer(context, target); if (type.isInstance(configurer)) { return type.cast(configurer); } return null; } }
name
java
google__guava
guava-gwt/src-super/com/google/common/cache/super/com/google/common/cache/LocalCache.java
{ "start": 18759, "end": 18874 }
class ____ by EntryIterator.next(), that relays setValue changes to the underlying * map. */ private final
used
java
elastic__elasticsearch
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java
{ "start": 41369, "end": 41962 }
class ____ extends FoldingRule<EsQueryExec> { @Override protected PhysicalPlan rule(EsQueryExec exec) { QueryContainer qContainer = exec.queryContainer(); // references (aka aggs) are in place if (qContainer.hasColumns()) { return exec; } for (Attribute attr : exec.output()) { qContainer = qContainer.addColumn(attr); } // after all attributes have been resolved return exec.with(qContainer); } } private static
PlanOutputToQueryRef
java
apache__flink
flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/windowing/functions/InternalWindowFunctionTest.java
{ "start": 28909, "end": 29474 }
class ____ extends ProcessAllWindowFunction<Map<Long, Long>, String, TimeWindow> implements OutputTypeConfigurable<String> { private static final long serialVersionUID = 1L; @Override public void setOutputType( TypeInformation<String> outTypeInfo, ExecutionConfig executionConfig) {} @Override public void process(Context context, Iterable<Map<Long, Long>> input, Collector<String> out) throws Exception {} } private static
AggregateProcessAllWindowFunctionMock
java
quarkusio__quarkus
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/ExceptionUtil.java
{ "start": 64, "end": 352 }
class ____ { private static final StackTraceElement[] EMPTY_STACK_TRACE = new StackTraceElement[0]; private ExceptionUtil() { } public static <T extends Throwable> T removeStackTrace(T t) { t.setStackTrace(EMPTY_STACK_TRACE); return t; } }
ExceptionUtil
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/core/env/CompositePropertySource.java
{ "start": 1272, "end": 1848 }
class ____ {@link EnumerablePropertySource} instead * of plain {@link PropertySource}, exposing {@link #getPropertyNames()} based on the * accumulated property names from all contained sources - and failing with an * {@code IllegalStateException} against any non-{@code EnumerablePropertySource}. * <b>When used through the {@code EnumerablePropertySource} contract, all contained * sources are expected to be of type {@code EnumerablePropertySource} as well.</b> * * @author Chris Beams * @author Juergen Hoeller * @author Phillip Webb * @since 3.1.1 */ public
extends
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/id/PostInsertIdentifierGenerator.java
{ "start": 809, "end": 1417 }
interface ____ extends OnExecutionGenerator, Configurable { /** * @return {@link EventTypeSets#INSERT_ONLY} */ @Override default EnumSet<EventType> getEventTypes() { return INSERT_ONLY; } /** * @return {@code false}, since we don't usually have a meaningful property value * for generated identifiers */ @Override default boolean writePropertyValue() { return false; } /** * Noop default implementation. May be overridden by subtypes. */ @Override default void configure(Type type, Properties parameters, ServiceRegistry serviceRegistry) {} }
PostInsertIdentifierGenerator
java
apache__flink
flink-python/src/main/java/org/apache/flink/table/runtime/typeutils/serializers/python/MapDataSerializer.java
{ "start": 2128, "end": 6965 }
class ____ extends org.apache.flink.table.runtime.typeutils.MapDataSerializer { private static final long serialVersionUID = 1L; private final LogicalType keyType; private final LogicalType valueType; private final TypeSerializer keyTypeSerializer; private final TypeSerializer valueTypeSerializer; private final BinaryWriter.ValueSetter keySetter; private final BinaryWriter.ValueSetter valueSetter; private final ArrayData.ElementGetter keyGetter; private final ArrayData.ElementGetter valueGetter; private final BinaryArrayWriter.NullSetter nullValueSetter; private final int keySize; private final int valueSize; public MapDataSerializer( LogicalType keyType, LogicalType valueType, TypeSerializer keyTypeSerializer, TypeSerializer valueTypeSerializer) { super(keyType, valueType); this.keyType = keyType; this.valueType = valueType; this.keyTypeSerializer = keyTypeSerializer; this.valueTypeSerializer = valueTypeSerializer; this.keySize = BinaryArrayData.calculateFixLengthPartSize(this.keyType); this.valueSize = BinaryArrayData.calculateFixLengthPartSize(this.valueType); this.keyGetter = ArrayData.createElementGetter(keyType); this.valueGetter = ArrayData.createElementGetter(valueType); this.nullValueSetter = BinaryArrayWriter.createNullSetter(valueType); this.keySetter = BinaryWriter.createValueSetter(keyType); this.valueSetter = BinaryWriter.createValueSetter(valueType); } @Override public void serialize(MapData map, DataOutputView target) throws IOException { BinaryMapData binaryMap = toBinaryMap(map); final int size = binaryMap.size(); target.writeInt(size); BinaryArrayData keyArray = binaryMap.keyArray(); BinaryArrayData valueArray = binaryMap.valueArray(); for (int i = 0; i < size; i++) { if (keyArray.isNullAt(i)) { throw new IllegalArgumentException("The key of BinaryMapData must not be null."); } Object key = keyGetter.getElementOrNull(keyArray, i); keyTypeSerializer.serialize(key, target); if (valueArray.isNullAt(i)) { target.writeBoolean(true); } else { 
target.writeBoolean(false); Object value = valueGetter.getElementOrNull(valueArray, i); valueTypeSerializer.serialize(value, target); } } } @Override public MapData deserialize(DataInputView source) throws IOException { BinaryArrayData keyArray = new BinaryArrayData(); BinaryArrayData valueArray = new BinaryArrayData(); return deserializeInternal(source, keyArray, valueArray); } @Override public MapData deserialize(MapData reuse, DataInputView source) throws IOException { if (reuse instanceof GenericMapData) { return deserialize(source); } BinaryMapData binaryMap = (BinaryMapData) reuse; return deserializeInternal(source, binaryMap.keyArray(), binaryMap.valueArray()); } private MapData deserializeInternal( DataInputView source, BinaryArrayData keyArray, BinaryArrayData valueArray) throws IOException { final int size = source.readInt(); BinaryArrayWriter keyWriter = new BinaryArrayWriter(keyArray, size, keySize); BinaryArrayWriter valueWriter = new BinaryArrayWriter(valueArray, size, valueSize); for (int i = 0; i < size; i++) { Object key = keyTypeSerializer.deserialize(source); keySetter.setValue(keyWriter, i, key); boolean isNull = source.readBoolean(); if (isNull) { nullValueSetter.setNull(valueWriter, i); } else { Object value = valueTypeSerializer.deserialize(source); valueSetter.setValue(valueWriter, i, value); } } keyWriter.complete(); valueWriter.complete(); return BinaryMapData.valueOf(keyArray, valueArray); } @Override public void copy(DataInputView source, DataOutputView target) throws IOException { serialize(deserialize(source), target); } @Override public TypeSerializer<MapData> duplicate() { return new MapDataSerializer(keyType, valueType, keyTypeSerializer, valueTypeSerializer); } @Override public TypeSerializerSnapshot<MapData> snapshotConfiguration() { return new BaseMapSerializerSnapshot( keyType, valueType, keyTypeSerializer, valueTypeSerializer); } /** {@link TypeSerializerSnapshot} for {@link MapDataSerializer}. */ public static final
MapDataSerializer
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestXAttrCLI.java
{ "start": 1422, "end": 3174 }
class ____ extends CLITestHelperDFS { protected MiniDFSCluster dfsCluster = null; protected FileSystem fs = null; protected String namenode = null; @BeforeEach @Override public void setUp() throws Exception { super.setUp(); conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_XATTRS_ENABLED_KEY, true); conf.setClass(PolicyProvider.POLICY_PROVIDER_CONFIG, HDFSPolicyProvider.class, PolicyProvider.class); conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1); dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build(); dfsCluster.waitClusterUp(); namenode = conf.get(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "file:///"); username = System.getProperty("user.name"); fs = dfsCluster.getFileSystem(); assertTrue(fs instanceof DistributedFileSystem, "Not a HDFS: " + fs.getUri()); } @Override protected String getTestFile() { return "testXAttrConf.xml"; } @AfterEach @Override public void tearDown() throws Exception { if (fs != null) { fs.close(); fs = null; } if (dfsCluster != null) { dfsCluster.shutdown(); dfsCluster = null; } Thread.sleep(2000); super.tearDown(); } @Override protected String expandCommand(final String cmd) { String expCmd = cmd; expCmd = expCmd.replaceAll("NAMENODE", namenode); expCmd = expCmd.replaceAll("#LF#", System.getProperty("line.separator")); expCmd = super.expandCommand(expCmd); return expCmd; } @Override protected Result execute(CLICommand cmd) throws Exception { return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd()); } @Test @Override public void testAll () { super.testAll(); } }
TestXAttrCLI
java
alibaba__nacos
common/src/test/java/com/alibaba/nacos/common/http/BaseHttpMethodTest.java
{ "start": 886, "end": 3521 }
class ____ { @Test void testHttpGet() { BaseHttpMethod method = BaseHttpMethod.GET; HttpUriRequestBase request = method.init("http://example.com"); assertEquals("GET", request.getMethod()); } @Test void testHttpGetLarge() { BaseHttpMethod method = BaseHttpMethod.GET_LARGE; HttpUriRequestBase request = method.init("http://example.com"); assertEquals("GET", request.getMethod()); } @Test void testHttpPost() { BaseHttpMethod method = BaseHttpMethod.POST; HttpUriRequestBase request = method.init("http://example.com"); assertEquals("POST", request.getMethod()); } @Test void testHttpPut() { BaseHttpMethod method = BaseHttpMethod.PUT; HttpUriRequestBase request = method.init("http://example.com"); assertEquals("PUT", request.getMethod()); } @Test void testHttpDelete() { BaseHttpMethod method = BaseHttpMethod.DELETE; HttpUriRequestBase request = method.init("http://example.com"); assertEquals("DELETE", request.getMethod()); } @Test void testHttpDeleteLarge() { BaseHttpMethod method = BaseHttpMethod.DELETE_LARGE; HttpUriRequestBase request = method.init("http://example.com"); assertEquals("DELETE", request.getMethod()); } @Test void testHttpHead() { BaseHttpMethod method = BaseHttpMethod.HEAD; HttpUriRequestBase request = method.init("http://example.com"); assertEquals("HEAD", request.getMethod()); } @Test void testHttpTrace() { BaseHttpMethod method = BaseHttpMethod.TRACE; HttpUriRequestBase request = method.init("http://example.com"); assertEquals("TRACE", request.getMethod()); } @Test void testHttpPatch() { BaseHttpMethod method = BaseHttpMethod.PATCH; HttpUriRequestBase request = method.init("http://example.com"); assertEquals("PATCH", request.getMethod()); } @Test void testHttpOptions() { BaseHttpMethod method = BaseHttpMethod.OPTIONS; HttpUriRequestBase request = method.init("http://example.com"); assertEquals("TRACE", request.getMethod()); } @Test void testSourceOf() { BaseHttpMethod method = BaseHttpMethod.sourceOf("GET"); assertEquals(BaseHttpMethod.GET, method); } 
@Test void testSourceOfNotFound() { assertThrows(IllegalArgumentException.class, () -> { BaseHttpMethod.sourceOf("Not Found"); }); } }
BaseHttpMethodTest
java
playframework__playframework
transport/client/play-ahc-ws/src/main/java/play/libs/ws/ahc/AhcWSModule.java
{ "start": 1684, "end": 2124 }
class ____ implements Provider<StandaloneWSClient> { private final StandaloneWSClient standaloneWSClient; @Inject public StandaloneWSClientProvider(AsyncHttpClient asyncHttpClient, Materializer materializer) { this.standaloneWSClient = new StandaloneAhcWSClient(asyncHttpClient, materializer); } @Override public StandaloneWSClient get() { return standaloneWSClient; } } }
StandaloneWSClientProvider
java
eclipse-vertx__vert.x
vertx-core/src/main/java/io/vertx/core/net/SSLEngineOptions.java
{ "start": 695, "end": 1982 }
class ____ { /** * The default thread pool type for SSL blocking operations. */ public static final boolean DEFAULT_USE_WORKER_POOL = false; private boolean useWorkerThread; public abstract SSLEngineOptions copy(); public SSLEngineOptions() { this.useWorkerThread = DEFAULT_USE_WORKER_POOL; } public SSLEngineOptions(SSLEngineOptions that) { this.useWorkerThread = that.useWorkerThread; } public SSLEngineOptions(JsonObject json) { this.useWorkerThread = json.getBoolean("useWorkerThread", DEFAULT_USE_WORKER_POOL); } /** * @return a {@link SslContextFactory} that will be used to produce the Netty {@code SslContext} */ public abstract SslContextFactory sslContextFactory(); /** * @return whether to use the worker pool for SSL blocking operationsg */ public boolean getUseWorkerThread() { return useWorkerThread; } /** * Set the thread pool to use for SSL blocking operations. * * @param useWorkerThread whether to use the vertx internal worker pool for SSL blocking operations * @return a reference to this, so the API can be used fluently */ public SSLEngineOptions setUseWorkerThread(boolean useWorkerThread) { this.useWorkerThread = useWorkerThread; return this; } }
SSLEngineOptions
java
apache__camel
components/camel-huawei/camel-huaweicloud-frs/src/test/java/org/apache/camel/component/huaweicloud/frs/mock/FaceDetectionWithImageBae64AndMockClientTest.java
{ "start": 1476, "end": 3473 }
class ____ extends CamelTestSupport { TestConfiguration testConfiguration = new TestConfiguration(); @BindToRegistry("frsClient") FrsClientMock frsClient = new FrsClientMock(null); protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { from("direct:trigger_route") .setProperty(FaceRecognitionProperties.FACE_IMAGE_BASE64, constant(testConfiguration.getProperty("imageBase64"))) .to("hwcloud-frs:faceDetection?" + "accessKey=" + testConfiguration.getProperty("accessKey") + "&secretKey=" + testConfiguration.getProperty("secretKey") + "&projectId=" + testConfiguration.getProperty("projectId") + "&region=" + testConfiguration.getProperty("region") + "&ignoreSslVerification=true" + "&frsClient=#frsClient") .log("perform faceDetection successful") .to("mock:perform_face_detection_result"); } }; } /** * use imageBase64 to perform faceDetection * * @throws Exception */ @Test public void testFaceDetection() throws Exception { MockEndpoint mock = getMockEndpoint("mock:perform_face_detection_result"); mock.expectedMinimumMessageCount(1); template.sendBody("direct:trigger_route", ""); Exchange responseExchange = mock.getExchanges().get(0); mock.assertIsSatisfied(); assertTrue(responseExchange.getIn().getBody() instanceof DetectFaceByBase64Response); DetectFaceByBase64Response response = (DetectFaceByBase64Response) responseExchange.getIn().getBody(); assertEquals(response.getFaces(), MockResult.getFaceDetectionResult()); } }
FaceDetectionWithImageBae64AndMockClientTest
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/boot/model/source/spi/ToolingHint.java
{ "start": 226, "end": 1186 }
class ____ { private final String name; private final boolean inheritable; private final MetaAttribute metaAttribute; public ToolingHint(String name, boolean inheritable) { this.name = name; this.inheritable = inheritable; this.metaAttribute = new MetaAttribute( name ); } public String getName() { return name; } public boolean isInheritable() { return inheritable; } public java.util.List getValues() { return metaAttribute.getValues(); } public void addValue(String value) { metaAttribute.addValue( value ); } public String getValue() { return metaAttribute.getValue(); } public boolean isMultiValued() { return metaAttribute.isMultiValued(); } @Override public String toString() { return "ToolingHint{" + "name='" + name + '\'' + ", inheritable=" + inheritable + ", values=" + metaAttribute.getValues() + '}'; } public MetaAttribute asMetaAttribute() { return metaAttribute; } }
ToolingHint
java
hibernate__hibernate-orm
tooling/metamodel-generator/src/main/java/org/hibernate/processor/annotation/RepositoryConstructor.java
{ "start": 647, "end": 6452 }
class ____ implements MetaAttribute { private final AnnotationMetaEntity annotationMetaEntity; private final String constructorName; private final String methodName; private final String sessionTypeName; private final String sessionVariableName; private final @Nullable String dataStore; private final boolean addInjectAnnotation; private final boolean addNonnullAnnotation; private final boolean addOverrideAnnotation; private final boolean dataRepository; private final boolean quarkusInjection; public RepositoryConstructor( AnnotationMetaEntity annotationMetaEntity, String constructorName, String methodName, String sessionTypeName, String sessionVariableName, @Nullable String dataStore, boolean addInjectAnnotation, boolean addNonnullAnnotation, boolean addOverrideAnnotation, boolean dataRepository, boolean quarkusInjection) { this.annotationMetaEntity = annotationMetaEntity; this.constructorName = constructorName; this.methodName = methodName; this.sessionTypeName = sessionTypeName; this.sessionVariableName = sessionVariableName; this.dataStore = dataStore; this.addInjectAnnotation = addInjectAnnotation; this.addNonnullAnnotation = addNonnullAnnotation; this.addOverrideAnnotation = addOverrideAnnotation; this.dataRepository = dataRepository; this.quarkusInjection = quarkusInjection; } @Override public boolean hasTypedAttribute() { return true; } @Override public boolean hasStringAttribute() { return false; } @Override public String getAttributeDeclarationString() { final StringBuilder declaration = new StringBuilder(); declaration .append('\n'); if ( annotationMetaEntity.getSuperTypeElement() == null ) { declaration .append("protected "); if ( !dataRepository ) { // don't mark the field final // because it will be initialized // in @PostConstruct declaration .append("final "); } notNull( declaration ); declaration .append(annotationMetaEntity.importType(sessionTypeName)) .append(" ") .append(sessionVariableName) .append(";\n\n"); } inject( declaration ); declaration 
.append("public ") .append(constructorName) .append("("); notNull( declaration ); qualifier( declaration ); declaration .append(annotationMetaEntity.importType(sessionTypeName)) .append(" ") .append(sessionVariableName) .append(") {\n"); if ( annotationMetaEntity.getSuperTypeElement() != null ) { declaration .append("\tsuper(") .append(sessionVariableName) .append(");\n"); } else { declaration .append("\tthis.") .append(sessionVariableName) .append(" = ") .append(sessionVariableName) .append(";\n"); } declaration .append("}"); // resource accessor method a.k.a. session getter if ( annotationMetaEntity.getSuperTypeElement() == null ) { declaration .append("\n\n"); if (addOverrideAnnotation) { declaration.append("@Override\n"); } declaration .append("public "); notNull( declaration ); declaration .append(annotationMetaEntity.importType(providedSessionType())) .append(" ") .append(methodName) .append("() {") .append("\n\treturn ") .append(sessionVariableName); if ( annotationMetaEntity.isProvidedSessionAccess() ) { declaration .append( ".getObject()" ); } declaration .append(";\n}"); } return declaration.toString(); } private String providedSessionType() { return annotationMetaEntity.isProvidedSessionAccess() //TODO: assuming provided sessions are always StatelessSessions for now ? HIB_STATELESS_SESSION : sessionTypeName; } /** * In Quarkus we use the Quarkus-specific {@code @PersistenceUnit} * CDI qualifier annotation to inject the {@code StatelessSession} * directly. */ private void qualifier(StringBuilder declaration) { if ( addInjectAnnotation && quarkusInjection && dataStore != null ) { declaration .append('@') .append(annotationMetaEntity.importType("io.quarkus.hibernate.orm.PersistenceUnit")) .append("(\"") .append(dataStore) .append("\") "); } } /** * In Quarkus we inject the {@code StatelessSession} * directly via the constructor. 
But this doesn't work * in other CDI implementations, where we need to use * the JPA {@code @PersistenceUnit} annotation for * field injection of an {@code EntityManager}. In * that case, CDI will instantiate the repository via * a {@link DefaultConstructor default constructor}, * so we don't need to mark this one {@code @Inject}. */ private void inject(StringBuilder declaration) { // Jakarta Data repositories are instantiated // via the default constructor, so in that // case, this one is just for testing, unless // we are in Quarkus where we can use // constructor injection if ( addInjectAnnotation && !annotationMetaEntity.needsDefaultConstructor() ) { declaration .append('@') .append(annotationMetaEntity.importType(INJECT)) .append('\n'); } } private void notNull(StringBuilder declaration) { if ( addNonnullAnnotation ) { declaration .append('@') .append(annotationMetaEntity.importType(NONNULL)) .append(' '); } } @Override public String getAttributeNameDeclarationString() { throw new UnsupportedOperationException("operation not supported"); } @Override public String getMetaType() { throw new UnsupportedOperationException("operation not supported"); } @Override public String getPropertyName() { return methodName; } @Override public String getTypeDeclaration() { return Constants.ENTITY_MANAGER; } @Override public Metamodel getHostingEntity() { return annotationMetaEntity; } }
RepositoryConstructor
java
elastic__elasticsearch
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestResetJobAction.java
{ "start": 1285, "end": 3164 }
class ____ extends BaseRestHandler { @Override public List<Route> routes() { return List.of(new Route(POST, BASE_PATH + "anomaly_detectors/{" + Job.ID + "}/_reset")); } @Override public String getName() { return "ml_reset_job_action"; } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { ResetJobAction.Request request = new ResetJobAction.Request(restRequest.param(Job.ID.getPreferredName())); request.ackTimeout(getAckTimeout(restRequest)); request.masterNodeTimeout(getMasterNodeTimeout(restRequest)); request.setDeleteUserAnnotations(restRequest.paramAsBoolean("delete_user_annotations", false)); if (restRequest.paramAsBoolean("wait_for_completion", true)) { return channel -> client.execute(ResetJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); } else { request.setShouldStoreResult(true); Task task = client.executeLocally( ResetJobAction.INSTANCE, request, /* * We do not want to log anything due to a delete action. The response or error will be returned to the client when called * synchronously or it will be stored in the task result when called asynchronously. */ ActionListener.noop() ); return channel -> { try (XContentBuilder builder = channel.newBuilder()) { builder.startObject(); builder.field("task", client.getLocalNodeId() + ":" + task.getId()); builder.endObject(); channel.sendResponse(new RestResponse(RestStatus.OK, builder)); } }; } } }
RestResetJobAction
java
alibaba__nacos
common/src/test/java/com/alibaba/nacos/common/http/client/NacosRestTemplateTest.java
{ "start": 2065, "end": 18238 }
class ____ { @Mock private HttpClientRequest requestClient; @Mock private Logger logger; @Mock private HttpClientResponse mockResponse; @Mock private HttpClientRequestInterceptor interceptor; private NacosRestTemplate restTemplate; @BeforeEach void setUp() throws Exception { restTemplate = new NacosRestTemplate(logger, requestClient); when(logger.isDebugEnabled()).thenReturn(true); when(mockResponse.getHeaders()).thenReturn(Header.EMPTY); when(interceptor.isIntercept(any(), any(), any())).thenReturn(true); when(interceptor.intercept()).thenReturn(mockResponse); } @AfterEach void tearDown() throws Exception { restTemplate.close(); } @Test void testGetWithDefaultConfig() throws Exception { when(requestClient.execute(any(), eq("GET"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); HttpRestResult<String> result = restTemplate.get("http://127.0.0.1:8848/nacos/test", Header.EMPTY, Query.EMPTY, String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); } @Test void testGetWithCustomConfig() throws Exception { when(requestClient.execute(any(), eq("GET"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); HttpClientConfig config = HttpClientConfig.builder().setConTimeOutMillis(1000).build(); HttpRestResult<String> result = restTemplate.get("http://127.0.0.1:8848/nacos/test", config, Header.EMPTY, Query.EMPTY, String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); } @Test void testGetWithInterceptor() throws Exception { when(mockResponse.getStatusCode()).thenReturn(300); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test interceptor".getBytes())); 
restTemplate.setInterceptors(Collections.singletonList(interceptor)); HttpRestResult<String> result = restTemplate.get("http://127.0.0.1:8848/nacos/test", Header.EMPTY, Query.EMPTY, String.class); assertFalse(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test interceptor", result.getMessage()); } @Test void testGetWithException() throws Exception { assertThrows(RuntimeException.class, () -> { when(requestClient.execute(any(), eq("GET"), any())).thenThrow(new RuntimeException("test")); restTemplate.get("http://127.0.0.1:8848/nacos/test", Header.EMPTY, Query.EMPTY, String.class); }); } @Test void testGetLarge() throws Exception { when(requestClient.execute(any(), eq("GET-LARGE"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); HttpRestResult<String> result = restTemplate.getLarge("http://127.0.0.1:8848/nacos/test", Header.EMPTY, Query.EMPTY, new Object(), String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); } @Test void testDeleteWithDefaultConfig() throws Exception { when(requestClient.execute(any(), eq("DELETE"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); HttpRestResult<String> result = restTemplate.delete("http://127.0.0.1:8848/nacos/test", Header.EMPTY, Query.EMPTY, String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); } @Test void testDeleteWithCustomConfig() throws Exception { when(requestClient.execute(any(), eq("DELETE"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); HttpClientConfig config = 
HttpClientConfig.builder().setConTimeOutMillis(1000).build(); HttpRestResult<String> result = restTemplate.delete("http://127.0.0.1:8848/nacos/test", config, Header.EMPTY, Query.EMPTY, String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); } @Test void testPut() throws Exception { when(requestClient.execute(any(), eq("PUT"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); HttpRestResult<String> result = restTemplate.put("http://127.0.0.1:8848/nacos/test", Header.EMPTY, Query.EMPTY, new Object(), String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); } @Test void testPutJson() throws Exception { when(requestClient.execute(any(), eq("PUT"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); Header header = Header.newInstance().setContentType(MediaType.APPLICATION_XML); HttpRestResult<String> result = restTemplate.putJson("http://127.0.0.1:8848/nacos/test", header, "body", String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); assertEquals(MediaType.APPLICATION_JSON, header.getValue(HttpHeaderConsts.CONTENT_TYPE)); } @Test void testPutJsonWithQuery() throws Exception { when(requestClient.execute(any(), eq("PUT"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); Header header = Header.newInstance().setContentType(MediaType.APPLICATION_XML); HttpRestResult<String> result = restTemplate.putJson("http://127.0.0.1:8848/nacos/test", header, Query.EMPTY, "body", String.class); 
assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); assertEquals(MediaType.APPLICATION_JSON, header.getValue(HttpHeaderConsts.CONTENT_TYPE)); } @Test void testPutForm() throws Exception { when(requestClient.execute(any(), eq("PUT"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); Header header = Header.newInstance().setContentType(MediaType.APPLICATION_XML); HttpRestResult<String> result = restTemplate.putForm("http://127.0.0.1:8848/nacos/test", header, new HashMap<>(), String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); assertEquals(MediaType.APPLICATION_FORM_URLENCODED, header.getValue(HttpHeaderConsts.CONTENT_TYPE)); } @Test void testPutFormWithQuery() throws Exception { when(requestClient.execute(any(), eq("PUT"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); Header header = Header.newInstance().setContentType(MediaType.APPLICATION_XML); HttpRestResult<String> result = restTemplate.putForm("http://127.0.0.1:8848/nacos/test", header, Query.EMPTY, new HashMap<>(), String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); assertEquals(MediaType.APPLICATION_FORM_URLENCODED, header.getValue(HttpHeaderConsts.CONTENT_TYPE)); } @Test void testPutFormWithConfig() throws Exception { when(requestClient.execute(any(), eq("PUT"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); HttpClientConfig config = HttpClientConfig.builder().setConTimeOutMillis(1000).build(); Header header = 
Header.newInstance().setContentType(MediaType.APPLICATION_XML); HttpRestResult<String> result = restTemplate.putForm("http://127.0.0.1:8848/nacos/test", config, header, new HashMap<>(), String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); assertEquals(MediaType.APPLICATION_FORM_URLENCODED, header.getValue(HttpHeaderConsts.CONTENT_TYPE)); } @Test void testPost() throws Exception { when(requestClient.execute(any(), eq("POST"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); HttpRestResult<String> result = restTemplate.post("http://127.0.0.1:8848/nacos/test", Header.EMPTY, Query.EMPTY, new Object(), String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); } @Test void testPostJson() throws Exception { when(requestClient.execute(any(), eq("POST"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); Header header = Header.newInstance().setContentType(MediaType.APPLICATION_XML); HttpRestResult<String> result = restTemplate.postJson("http://127.0.0.1:8848/nacos/test", header, "body", String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); assertEquals(MediaType.APPLICATION_JSON, header.getValue(HttpHeaderConsts.CONTENT_TYPE)); } @Test void testPostJsonWithQuery() throws Exception { when(requestClient.execute(any(), eq("POST"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); Header header = Header.newInstance().setContentType(MediaType.APPLICATION_XML); HttpRestResult<String> result = 
restTemplate.postJson("http://127.0.0.1:8848/nacos/test", header, Query.EMPTY, "body", String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); assertEquals(MediaType.APPLICATION_JSON, header.getValue(HttpHeaderConsts.CONTENT_TYPE)); } @Test void testPostForm() throws Exception { when(requestClient.execute(any(), eq("POST"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); Header header = Header.newInstance().setContentType(MediaType.APPLICATION_XML); HttpRestResult<String> result = restTemplate.postForm("http://127.0.0.1:8848/nacos/test", header, new HashMap<>(), String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); assertEquals(MediaType.APPLICATION_FORM_URLENCODED, header.getValue(HttpHeaderConsts.CONTENT_TYPE)); } @Test void testPostFormWithQuery() throws Exception { when(requestClient.execute(any(), eq("POST"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); Header header = Header.newInstance().setContentType(MediaType.APPLICATION_XML); HttpRestResult<String> result = restTemplate.postForm("http://127.0.0.1:8848/nacos/test", header, Query.EMPTY, new HashMap<>(), String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); assertEquals(MediaType.APPLICATION_FORM_URLENCODED, header.getValue(HttpHeaderConsts.CONTENT_TYPE)); } @Test void testPostFormWithConfig() throws Exception { when(requestClient.execute(any(), eq("POST"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); HttpClientConfig 
config = HttpClientConfig.builder().setConTimeOutMillis(1000).build(); Header header = Header.newInstance().setContentType(MediaType.APPLICATION_XML); HttpRestResult<String> result = restTemplate.postForm("http://127.0.0.1:8848/nacos/test", config, header, new HashMap<>(), String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); assertEquals(MediaType.APPLICATION_FORM_URLENCODED, header.getValue(HttpHeaderConsts.CONTENT_TYPE)); } @Test void testExchangeForm() throws Exception { when(requestClient.execute(any(), eq("PUT"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); Header header = Header.newInstance().setContentType(MediaType.APPLICATION_XML); HttpRestResult<String> result = restTemplate.exchangeForm("http://127.0.0.1:8848/nacos/test", header, Query.EMPTY, new HashMap<>(), "PUT", String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); assertEquals(MediaType.APPLICATION_FORM_URLENCODED, header.getValue(HttpHeaderConsts.CONTENT_TYPE)); } @Test void testExchange() throws Exception { when(requestClient.execute(any(), eq("PUT"), any())).thenReturn(mockResponse); when(mockResponse.getStatusCode()).thenReturn(200); when(mockResponse.getBody()).thenReturn(new ByteArrayInputStream("test".getBytes())); HttpClientConfig config = HttpClientConfig.builder().setConTimeOutMillis(1000).build(); HttpRestResult<String> result = restTemplate.exchange("http://127.0.0.1:8848/nacos/test", config, Header.EMPTY, Query.EMPTY, new Object(), "PUT", String.class); assertTrue(result.ok()); assertEquals(Header.EMPTY, result.getHeader()); assertEquals("test", result.getData()); } @Test void testGetInterceptors() { assertTrue(restTemplate.getInterceptors().isEmpty()); 
restTemplate.setInterceptors(Collections.singletonList(interceptor)); assertEquals(1, restTemplate.getInterceptors().size()); } }
NacosRestTemplateTest
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/ql/TreatKeywordTest.java
{ "start": 11798, "end": 12205 }
class ____ extends JoinedEntitySubclass { public JoinedEntitySubSubclass() { } public JoinedEntitySubSubclass(Integer id, String name) { super( id, name ); } public JoinedEntitySubSubclass(Integer id, String name, JoinedEntity other) { super( id, name, other ); } } @Entity( name = "JoinedEntitySubclass2" ) @Table( name = "JoinedEntitySubclass2" ) public static
JoinedEntitySubSubclass
java
quarkusio__quarkus
extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/reactive/ReactiveMongoCollection.java
{ "start": 67346, "end": 73529 }
class ____ decode each document into * @param <D> the target document type of the iterable. * @return the stream of indexes */ <D> Multi<D> listIndexes(ClientSession clientSession, Class<D> clazz); /** * Drops the index given the keys used to create it. * * @param indexName the name of the index to remove * @return a {@link Uni} completed when the operation is done. */ Uni<Void> dropIndex(String indexName); /** * Drops the index given the keys used to create it. * * @param keys the keys of the index to remove * @return a {@link Uni} completed when the operation is done. */ Uni<Void> dropIndex(Bson keys); /** * Drops the index given the keys used to create it. * * @param indexName the name of the index to remove * @param dropIndexOptions options to use when dropping indexes * @return a {@link Uni} completed when the operation is done. */ Uni<Void> dropIndex(String indexName, DropIndexOptions dropIndexOptions); /** * Drops the index given the keys used to create it. * * @param keys the keys of the index to remove * @param dropIndexOptions options to use when dropping indexes * @return a {@link Uni} completed when the operation is done. */ Uni<Void> dropIndex(Bson keys, DropIndexOptions dropIndexOptions); /** * Drops the index given the keys used to create it. * * @param clientSession the client session with which to associate this operation * @param indexName the name of the index to remove * @return a {@link Uni} completed when the operation is done. */ Uni<Void> dropIndex(ClientSession clientSession, String indexName); /** * Drops the index given the keys used to create it. * * @param clientSession the client session with which to associate this operation * @param keys the keys of the index to remove * @return a {@link Uni} completed when the operation is done. */ Uni<Void> dropIndex(ClientSession clientSession, Bson keys); /** * Drops the index given the keys used to create it. 
* * @param clientSession the client session with which to associate this operation * @param indexName the name of the index to remove * @param dropIndexOptions options to use when dropping indexes * @return a {@link Uni} completed when the operation is done. */ Uni<Void> dropIndex(ClientSession clientSession, String indexName, DropIndexOptions dropIndexOptions); /** * Drops the index given the keys used to create it. * * @param clientSession the client session with which to associate this operation * @param keys the keys of the index to remove * @param dropIndexOptions options to use when dropping indexes * @return a {@link Uni} completed when the operation is done. */ Uni<Void> dropIndex(ClientSession clientSession, Bson keys, DropIndexOptions dropIndexOptions); /** * Drop all the indexes on this collection, except for the default on _id. * * @return a {@link Uni} completed when the operation is done. */ Uni<Void> dropIndexes(); /** * Drop all the indexes on this collection, except for the default on _id. * * @param dropIndexOptions options to use when dropping indexes * @return a {@link Uni} completed when the operation is done. */ Uni<Void> dropIndexes(DropIndexOptions dropIndexOptions); /** * Drop all the indexes on this collection, except for the default on _id. * * @param clientSession the client session with which to associate this operation * @return a {@link Uni} completed when the operation is done. */ Uni<Void> dropIndexes(ClientSession clientSession); /** * Drop all the indexes on this collection, except for the default on _id. * * @param clientSession the client session with which to associate this operation * @param dropIndexOptions options to use when dropping indexes * @return a {@link Uni} completed when the operation is done. */ Uni<Void> dropIndexes(ClientSession clientSession, DropIndexOptions dropIndexOptions); /** * Rename the collection with oldCollectionName to the newCollectionName. 
* * @param newCollectionNamespace the name the collection will be renamed to * @return a {@link Uni} completed when the operation is done. */ Uni<Void> renameCollection(MongoNamespace newCollectionNamespace); /** * Rename the collection with oldCollectionName to the newCollectionName. * * @param newCollectionNamespace the name the collection will be renamed to * @param options the options for renaming a collection * @return a {@link Uni} completed when the operation is done. */ Uni<Void> renameCollection(MongoNamespace newCollectionNamespace, RenameCollectionOptions options); /** * Rename the collection with oldCollectionName to the newCollectionName. * * @param clientSession the client session with which to associate this operation * @param newCollectionNamespace the name the collection will be renamed to * @return a {@link Uni} completed when the operation is done. */ Uni<Void> renameCollection(ClientSession clientSession, MongoNamespace newCollectionNamespace); /** * Rename the collection with oldCollectionName to the newCollectionName. * * @param clientSession the client session with which to associate this operation * @param newCollectionNamespace the name the collection will be renamed to * @param options the options for renaming a collection * @return a {@link Uni} completed when the operation is done. */ Uni<Void> renameCollection(ClientSession clientSession, MongoNamespace newCollectionNamespace, RenameCollectionOptions options); /** * Gets the codec registry of this collection. * * @return the codec registry */ CodecRegistry getCodecRegistry(); /** * Create a new ReactiveMongoCollection instance with a different default
to
java
apache__hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
{ "start": 38636, "end": 38879 }
class ____ the job. */ public Class<? extends MapRunnable> getMapRunnerClass() { return getClass("mapred.map.runner.class", MapRunner.class, MapRunnable.class); } /** * Expert: Set the {@link MapRunnable}
for
java
spring-projects__spring-security
oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/authentication/OAuth2AuthorizationCodeAuthenticationToken.java
{ "start": 1637, "end": 5856 }
class ____ extends AbstractAuthenticationToken { private static final long serialVersionUID = 620L; private Map<String, Object> additionalParameters = new HashMap<>(); private ClientRegistration clientRegistration; private OAuth2AuthorizationExchange authorizationExchange; private OAuth2AccessToken accessToken; private OAuth2RefreshToken refreshToken; /** * This constructor should be used when the Authorization Request/Response is * complete. * @param clientRegistration the client registration * @param authorizationExchange the authorization exchange */ public OAuth2AuthorizationCodeAuthenticationToken(ClientRegistration clientRegistration, OAuth2AuthorizationExchange authorizationExchange) { super(Collections.emptyList()); Assert.notNull(clientRegistration, "clientRegistration cannot be null"); Assert.notNull(authorizationExchange, "authorizationExchange cannot be null"); this.clientRegistration = clientRegistration; this.authorizationExchange = authorizationExchange; } /** * This constructor should be used when the Access Token Request/Response is complete, * which indicates that the Authorization Code Grant flow has fully completed. * @param clientRegistration the client registration * @param authorizationExchange the authorization exchange * @param accessToken the access token credential */ public OAuth2AuthorizationCodeAuthenticationToken(ClientRegistration clientRegistration, OAuth2AuthorizationExchange authorizationExchange, OAuth2AccessToken accessToken) { this(clientRegistration, authorizationExchange, accessToken, null); } /** * This constructor should be used when the Access Token Request/Response is complete, * which indicates that the Authorization Code Grant flow has fully completed. 
* @param clientRegistration the client registration * @param authorizationExchange the authorization exchange * @param accessToken the access token credential * @param refreshToken the refresh token credential */ public OAuth2AuthorizationCodeAuthenticationToken(ClientRegistration clientRegistration, OAuth2AuthorizationExchange authorizationExchange, OAuth2AccessToken accessToken, @Nullable OAuth2RefreshToken refreshToken) { this(clientRegistration, authorizationExchange, accessToken, refreshToken, Collections.emptyMap()); } public OAuth2AuthorizationCodeAuthenticationToken(ClientRegistration clientRegistration, OAuth2AuthorizationExchange authorizationExchange, OAuth2AccessToken accessToken, OAuth2RefreshToken refreshToken, Map<String, Object> additionalParameters) { this(clientRegistration, authorizationExchange); Assert.notNull(accessToken, "accessToken cannot be null"); this.accessToken = accessToken; this.refreshToken = refreshToken; this.setAuthenticated(true); this.additionalParameters.putAll(additionalParameters); } @Override public Object getPrincipal() { return this.clientRegistration.getClientId(); } @Override public Object getCredentials() { return (this.accessToken != null) ? this.accessToken.getTokenValue() : this.authorizationExchange.getAuthorizationResponse().getCode(); } /** * Returns the {@link ClientRegistration client registration}. * @return the {@link ClientRegistration} */ public ClientRegistration getClientRegistration() { return this.clientRegistration; } /** * Returns the {@link OAuth2AuthorizationExchange authorization exchange}. * @return the {@link OAuth2AuthorizationExchange} */ public OAuth2AuthorizationExchange getAuthorizationExchange() { return this.authorizationExchange; } /** * Returns the {@link OAuth2AccessToken access token}. * @return the {@link OAuth2AccessToken} */ public OAuth2AccessToken getAccessToken() { return this.accessToken; } /** * Returns the {@link OAuth2RefreshToken refresh token}. 
* @return the {@link OAuth2RefreshToken} */ public @Nullable OAuth2RefreshToken getRefreshToken() { return this.refreshToken; } /** * Returns the additional parameters * @return the additional parameters */ public Map<String, Object> getAdditionalParameters() { return this.additionalParameters; } }
OAuth2AuthorizationCodeAuthenticationToken
java
quarkusio__quarkus
integration-tests/main/src/main/java/io/quarkus/it/corestuff/serialization/ExternalizablePerson.java
{ "start": 279, "end": 811 }
class ____ implements Externalizable { private String name; public ExternalizablePerson() { } public String getName() { return name; } public void setName(String name) { this.name = name; } @Override public void writeExternal(ObjectOutput out) throws IOException { out.writeUTF(name); } @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { this.name = in.readUTF(); } }
ExternalizablePerson
java
apache__kafka
server/src/test/java/org/apache/kafka/server/ReconfigurableQuorumIntegrationTest.java
{ "start": 2035, "end": 15155 }
class ____ { static void checkKRaftVersions(Admin admin, short finalized) throws Exception { FeatureMetadata featureMetadata = admin.describeFeatures().featureMetadata().get(); if (finalized > 0) { assertTrue(featureMetadata.finalizedFeatures().containsKey(KRaftVersion.FEATURE_NAME), "finalizedFeatures does not contain " + KRaftVersion.FEATURE_NAME + ", finalizedFeatures: " + featureMetadata.finalizedFeatures()); assertEquals(finalized, featureMetadata.finalizedFeatures(). get(KRaftVersion.FEATURE_NAME).minVersionLevel()); assertEquals(finalized, featureMetadata.finalizedFeatures(). get(KRaftVersion.FEATURE_NAME).maxVersionLevel()); } else { assertFalse(featureMetadata.finalizedFeatures().containsKey(KRaftVersion.FEATURE_NAME)); } assertEquals((short) 0, featureMetadata.supportedFeatures(). get(KRaftVersion.FEATURE_NAME).minVersion()); assertEquals((short) 1, featureMetadata.supportedFeatures(). get(KRaftVersion.FEATURE_NAME).maxVersion()); } @Test public void testCreateAndDestroyNonReconfigurableCluster() throws Exception { try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder( new TestKitNodes.Builder(). setNumBrokerNodes(1). setNumControllerNodes(1). build() ).build()) { cluster.format(); cluster.startup(); try (var admin = Admin.create(cluster.clientProperties())) { TestUtils.retryOnExceptionWithTimeout(30_000, () -> { checkKRaftVersions(admin, KRaftVersion.KRAFT_VERSION_0.featureLevel()); }); } } } @Test public void testCreateAndDestroyReconfigurableCluster() throws Exception { try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder( new TestKitNodes.Builder(). setNumBrokerNodes(1). setNumControllerNodes(1). 
build() ).setStandalone(true).build()) { cluster.format(); cluster.startup(); try (var admin = Admin.create(cluster.clientProperties())) { TestUtils.retryOnExceptionWithTimeout(30_000, () -> { checkKRaftVersions(admin, KRaftVersion.KRAFT_VERSION_1.featureLevel()); }); } } } static Map<Integer, Uuid> findVoterDirs(Admin admin) throws Exception { QuorumInfo quorumInfo = admin.describeMetadataQuorum().quorumInfo().get(); Map<Integer, Uuid> result = new TreeMap<>(); quorumInfo.voters().forEach(v -> { result.put(v.replicaId(), v.replicaDirectoryId()); }); return result; } @Test public void testRemoveController() throws Exception { final var nodes = new TestKitNodes.Builder(). setNumBrokerNodes(1). setNumControllerNodes(3). build(); final Map<Integer, Uuid> initialVoters = new HashMap<>(); for (final var controllerNode : nodes.controllerNodes().values()) { initialVoters.put( controllerNode.id(), controllerNode.metadataDirectoryId() ); } try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder(nodes). setInitialVoterSet(initialVoters). build() ) { cluster.format(); cluster.startup(); try (var admin = Admin.create(cluster.clientProperties())) { TestUtils.retryOnExceptionWithTimeout(30_000, 10, () -> { Map<Integer, Uuid> voters = findVoterDirs(admin); assertEquals(Set.of(3000, 3001, 3002), voters.keySet()); for (int replicaId : new int[] {3000, 3001, 3002}) { assertNotEquals(Uuid.ZERO_UUID, voters.get(replicaId)); } }); admin.removeRaftVoter(3000, cluster.nodes(). controllerNodes().get(3000).metadataDirectoryId()).all().get(); } } } @Test public void testRemoveAndAddSameController() throws Exception { final var nodes = new TestKitNodes.Builder(). setNumBrokerNodes(1). setNumControllerNodes(4). 
build(); final Map<Integer, Uuid> initialVoters = new HashMap<>(); for (final var controllerNode : nodes.controllerNodes().values()) { initialVoters.put( controllerNode.id(), controllerNode.metadataDirectoryId() ); } try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder(nodes). setInitialVoterSet(initialVoters). build() ) { cluster.format(); cluster.startup(); try (var admin = Admin.create(cluster.clientProperties())) { TestUtils.retryOnExceptionWithTimeout(30_000, 10, () -> { Map<Integer, Uuid> voters = findVoterDirs(admin); assertEquals(Set.of(3000, 3001, 3002, 3003), voters.keySet()); for (int replicaId : new int[] {3000, 3001, 3002, 3003}) { assertNotEquals(Uuid.ZERO_UUID, voters.get(replicaId)); } }); Uuid dirId = cluster.nodes().controllerNodes().get(3000).metadataDirectoryId(); admin.removeRaftVoter(3000, dirId).all().get(); TestUtils.retryOnExceptionWithTimeout(30_000, 10, () -> { Map<Integer, Uuid> voters = findVoterDirs(admin); assertEquals(Set.of(3001, 3002, 3003), voters.keySet()); for (int replicaId : new int[] {3001, 3002, 3003}) { assertNotEquals(Uuid.ZERO_UUID, voters.get(replicaId)); } }); admin.addRaftVoter( 3000, dirId, Set.of(new RaftVoterEndpoint("CONTROLLER", "example.com", 8080)) ).all().get(); } } } @Test public void testControllersAutoJoinStandaloneVoter() throws Exception { final var nodes = new TestKitNodes.Builder(). setNumBrokerNodes(1). setNumControllerNodes(3). build(); try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder(nodes). setConfigProp(QuorumConfig.QUORUM_AUTO_JOIN_ENABLE_CONFIG, true). setStandalone(true). 
build() ) { cluster.format(); cluster.startup(); try (var admin = Admin.create(cluster.clientProperties())) { TestUtils.retryOnExceptionWithTimeout(30_000, 10, () -> { Map<Integer, Uuid> voters = findVoterDirs(admin); assertEquals(Set.of(3000, 3001, 3002), voters.keySet()); for (int replicaId : new int[] {3000, 3001, 3002}) { assertEquals(nodes.controllerNodes().get(replicaId).metadataDirectoryId(), voters.get(replicaId)); } }); } } } @Test public void testNewVoterAutoRemovesAndAdds() throws Exception { final var nodes = new TestKitNodes.Builder(). setNumBrokerNodes(1). setNumControllerNodes(3). build(); // Configure the initial voters with one voter having a different directory ID. // This simulates the case where the controller failed and is brought back up with a different directory ID. final Map<Integer, Uuid> initialVoters = new HashMap<>(); final var oldDirectoryId = Uuid.randomUuid(); for (final var controllerNode : nodes.controllerNodes().values()) { initialVoters.put( controllerNode.id(), controllerNode.id() == TestKitDefaults.CONTROLLER_ID_OFFSET ? oldDirectoryId : controllerNode.metadataDirectoryId() ); } try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder(nodes). setConfigProp(QuorumConfig.QUORUM_AUTO_JOIN_ENABLE_CONFIG, true). setInitialVoterSet(initialVoters). 
build() ) { cluster.format(); cluster.startup(); try (var admin = Admin.create(cluster.clientProperties())) { TestUtils.retryOnExceptionWithTimeout(30_000, 10, () -> { Map<Integer, Uuid> voters = findVoterDirs(admin); assertEquals(Set.of(3000, 3001, 3002), voters.keySet()); for (int replicaId : new int[] {3000, 3001, 3002}) { assertEquals(nodes.controllerNodes().get(replicaId).metadataDirectoryId(), voters.get(replicaId)); } }); } } } @Test public void testRemoveAndAddVoterWithValidClusterId() throws Exception { final var nodes = new TestKitNodes.Builder() .setClusterId("test-cluster") .setNumBrokerNodes(1) .setNumControllerNodes(3) .build(); final Map<Integer, Uuid> initialVoters = new HashMap<>(); for (final var controllerNode : nodes.controllerNodes().values()) { initialVoters.put( controllerNode.id(), controllerNode.metadataDirectoryId() ); } try (var cluster = new KafkaClusterTestKit.Builder(nodes).setInitialVoterSet(initialVoters).build()) { cluster.format(); cluster.startup(); try (var admin = Admin.create(cluster.clientProperties())) { TestUtils.retryOnExceptionWithTimeout(30_000, 10, () -> { Map<Integer, Uuid> voters = findVoterDirs(admin); assertEquals(Set.of(3000, 3001, 3002), voters.keySet()); for (int replicaId : new int[] {3000, 3001, 3002}) { assertNotEquals(Uuid.ZERO_UUID, voters.get(replicaId)); } }); Uuid dirId = cluster.nodes().controllerNodes().get(3000).metadataDirectoryId(); admin.removeRaftVoter( 3000, dirId, new RemoveRaftVoterOptions().setClusterId(Optional.of("test-cluster")) ).all().get(); TestUtils.retryOnExceptionWithTimeout(30_000, 10, () -> { Map<Integer, Uuid> voters = findVoterDirs(admin); assertEquals(Set.of(3001, 3002), voters.keySet()); for (int replicaId : new int[] {3001, 3002}) { assertNotEquals(Uuid.ZERO_UUID, voters.get(replicaId)); } }); admin.addRaftVoter( 3000, dirId, Set.of(new RaftVoterEndpoint("CONTROLLER", "example.com", 8080)), new AddRaftVoterOptions().setClusterId(Optional.of("test-cluster")) ).all().get(); } } } 
@Test public void testRemoveAndAddVoterWithInconsistentClusterId() throws Exception { final var nodes = new TestKitNodes.Builder() .setClusterId("test-cluster") .setNumBrokerNodes(1) .setNumControllerNodes(3) .build(); final Map<Integer, Uuid> initialVoters = new HashMap<>(); for (final var controllerNode : nodes.controllerNodes().values()) { initialVoters.put( controllerNode.id(), controllerNode.metadataDirectoryId() ); } try (var cluster = new KafkaClusterTestKit.Builder(nodes).setInitialVoterSet(initialVoters).build()) { cluster.format(); cluster.startup(); try (var admin = Admin.create(cluster.clientProperties())) { Uuid dirId = cluster.nodes().controllerNodes().get(3000).metadataDirectoryId(); var removeFuture = admin.removeRaftVoter( 3000, dirId, new RemoveRaftVoterOptions().setClusterId(Optional.of("inconsistent")) ).all(); TestUtils.assertFutureThrows(InconsistentClusterIdException.class, removeFuture); var addFuture = admin.addRaftVoter( 3000, dirId, Set.of(new RaftVoterEndpoint("CONTROLLER", "example.com", 8080)), new AddRaftVoterOptions().setClusterId(Optional.of("inconsistent")) ).all(); TestUtils.assertFutureThrows(InconsistentClusterIdException.class, addFuture); } } } }
ReconfigurableQuorumIntegrationTest
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/capacity/PreemptableResourceCalculator.java
{ "start": 1539, "end": 11164 }
class ____ extends AbstractPreemptableResourceCalculator { private static final Logger LOG = LoggerFactory.getLogger(PreemptableResourceCalculator.class); /** * PreemptableResourceCalculator constructor. * * @param preemptionContext context. * @param isReservedPreemptionCandidatesSelector this will be set by * different implementation of candidate selectors, please refer to * TempQueuePerPartition#offer for details. * @param allowQueuesBalanceAfterAllQueuesSatisfied * Should resources be preempted from an over-served queue when the * requesting queues are all at or over their guarantees? * An example is, there're 10 queues under root, guaranteed resource * of them are all 10%. * Assume there're two queues are using resources, queueA uses 10% * queueB uses 90%. For all queues are guaranteed, but it's not fair * for queueA. * We wanna make this behavior can be configured. By default it is * not allowed. */ public PreemptableResourceCalculator( CapacitySchedulerPreemptionContext preemptionContext, boolean isReservedPreemptionCandidatesSelector, boolean allowQueuesBalanceAfterAllQueuesSatisfied) { super(preemptionContext, isReservedPreemptionCandidatesSelector, allowQueuesBalanceAfterAllQueuesSatisfied); } /** * This method computes (for a single level in the tree, passed as a {@code * List<TempQueue>}) the ideal assignment of resources. This is done * recursively to allocate capacity fairly across all queues with pending * demands. It terminates when no resources are left to assign, or when all * demand is satisfied. 
* * @param rc resource calculator * @param queues a list of cloned queues to be assigned capacity to (this is * an out param) * @param totalPreemptionAllowed total amount of preemption we allow * @param tot_guarant the amount of capacity assigned to this pool of queues */ protected void computeIdealResourceDistribution(ResourceCalculator rc, List<TempQueuePerPartition> queues, Resource totalPreemptionAllowed, Resource tot_guarant) { // qAlloc tracks currently active queues (will decrease progressively as // demand is met) List<TempQueuePerPartition> qAlloc = new ArrayList<>(queues); // unassigned tracks how much resources are still to assign, initialized // with the total capacity for this set of queues Resource unassigned = Resources.clone(tot_guarant); // group queues based on whether they have non-zero guaranteed capacity Set<TempQueuePerPartition> nonZeroGuarQueues = new HashSet<>(); Set<TempQueuePerPartition> zeroGuarQueues = new HashSet<>(); for (TempQueuePerPartition q : qAlloc) { if (Resources.greaterThan(rc, tot_guarant, q.getGuaranteed(), Resources.none())) { nonZeroGuarQueues.add(q); } else { zeroGuarQueues.add(q); } } // first compute the allocation as a fixpoint based on guaranteed capacity computeFixpointAllocation(tot_guarant, new HashSet<>(nonZeroGuarQueues), unassigned, false); // if any capacity is left unassigned, distributed among zero-guarantee // queues uniformly (i.e., not based on guaranteed capacity, as this is zero) if (!zeroGuarQueues.isEmpty() && Resources.greaterThan(rc, tot_guarant, unassigned, Resources.none())) { computeFixpointAllocation(tot_guarant, zeroGuarQueues, unassigned, true); } // based on ideal assignment computed above and current assignment we derive // how much preemption is required overall Resource totPreemptionNeeded = Resource.newInstance(0, 0); for (TempQueuePerPartition t:queues) { if (Resources.greaterThan(rc, tot_guarant, t.getUsed(), t.idealAssigned)) { Resources.addTo(totPreemptionNeeded, Resources 
.subtract(t.getUsed(), t.idealAssigned)); } } /** * if we need to preempt more than is allowed, compute a factor (0<f<1) * that is used to scale down how much we ask back from each queue */ float scalingFactor = 1.0F; if (Resources.greaterThan(rc, tot_guarant, totPreemptionNeeded, totalPreemptionAllowed)) { scalingFactor = Resources.divide(rc, tot_guarant, totalPreemptionAllowed, totPreemptionNeeded); } // assign to each queue the amount of actual preemption based on local // information of ideal preemption and scaling factor for (TempQueuePerPartition t : queues) { t.assignPreemption(scalingFactor, rc, tot_guarant); } } /** * This method recursively computes the ideal assignment of resources to each * level of the hierarchy. This ensures that leafs that are over-capacity but * with parents within capacity will not be preemptionCandidates. Preemptions * are allowed within each subtree according to local over/under capacity. * * @param root the root of the cloned queue hierachy * @param totalPreemptionAllowed maximum amount of preemption allowed */ protected void recursivelyComputeIdealAssignment( TempQueuePerPartition root, Resource totalPreemptionAllowed) { if (root.getChildren() != null && root.getChildren().size() > 0) { // compute ideal distribution at this level computeIdealResourceDistribution(rc, root.getChildren(), totalPreemptionAllowed, root.idealAssigned); // compute recursively for lower levels and build list of leafs for (TempQueuePerPartition t : root.getChildren()) { recursivelyComputeIdealAssignment(t, totalPreemptionAllowed); } } } private void calculateResToObtainByPartitionForLeafQueues( Set<String> leafQueueNames, Resource clusterResource) { // Loop all leaf queues for (String queueName : leafQueueNames) { // check if preemption disabled for the queue if (context.getQueueByPartition(queueName, RMNodeLabelsManager.NO_LABEL).preemptionDisabled) { LOG.debug("skipping from queue={} because it's a non-preemptable" + " queue", queueName); continue; } 
// compute resToObtainByPartition considered inter-queue preemption for (TempQueuePerPartition qT : context.getQueuePartitions(queueName)) { // we act only if we are violating balance by more than // maxIgnoredOverCapacity if (Resources.greaterThan(rc, clusterResource, qT.getUsed(), Resources .multiply(qT.getGuaranteed(), 1.0 + context.getMaxIgnoreOverCapacity()))) { /* * We introduce a dampening factor naturalTerminationFactor that * accounts for natural termination of containers. * * This is added to control pace of preemption, let's say: * If preemption policy calculated a queue *should be* preempted 20 GB * And the nature_termination_factor set to 0.1. As a result, preemption * policy will select 20 GB * 0.1 = 2GB containers to be preempted. * * However, it doesn't work for YARN-4390: * For example, if a queue needs to be preempted 20GB for *one single* * large container, preempt 10% of such resource isn't useful. * So to make it simple, only apply nature_termination_factor when * selector is not reservedPreemptionCandidatesSelector. 
*/ Resource resToObtain = qT.toBePreempted; if (!isReservedPreemptionCandidatesSelector) { if (Resources.greaterThan(rc, clusterResource, resToObtain, Resource.newInstance(0, 0))) { resToObtain = Resources.multiplyAndNormalizeUp(rc, qT.toBePreempted, context.getNaturalTerminationFactor(), Resource.newInstance(1, 1)); } } // Only add resToObtain when it >= 0 if (Resources.greaterThan(rc, clusterResource, resToObtain, Resources.none())) { LOG.debug("Queue={} partition={} resource-to-obtain={}", queueName, qT.partition, resToObtain); } qT.setActuallyToBePreempted(Resources.clone(resToObtain)); } else { qT.setActuallyToBePreempted(Resources.none()); } LOG.debug("{}", qT); } } } private void updatePreemptableExtras(TempQueuePerPartition cur) { if (cur.children == null || cur.children.isEmpty()) { cur.updatePreemptableExtras(rc); } else { for (TempQueuePerPartition child : cur.children) { updatePreemptableExtras(child); } cur.updatePreemptableExtras(rc); } } public void computeIdealAllocation(Resource clusterResource, Resource totalPreemptionAllowed) { for (String partition : context.getAllPartitions()) { TempQueuePerPartition tRoot = context.getQueueByPartition( CapacitySchedulerConfiguration.ROOT, partition); updatePreemptableExtras(tRoot); // compute the ideal distribution of resources among queues // updates cloned queues state accordingly tRoot.initializeRootIdealWithGuarangeed(); recursivelyComputeIdealAssignment(tRoot, totalPreemptionAllowed); } // based on ideal allocation select containers to be preempted from each // calculate resource-to-obtain by partition for each leaf queues calculateResToObtainByPartitionForLeafQueues(context.getLeafQueueNames(), clusterResource); } }
PreemptableResourceCalculator
java
apache__flink
flink-tests/src/test/java/org/apache/flink/test/accumulators/AccumulatorErrorITCase.java
{ "start": 5383, "end": 5850 }
class ____ extends RichMapFunction<Long, Long> { private static final long serialVersionUID = 42; @Override public void open(OpenContext openContext) throws Exception { getRuntimeContext().addAccumulator(INCOMPATIBLE_ACCUMULATORS_NAME, new LongCounter()); } @Override public Long map(Long value) throws Exception { return -1L; } } private static
IncompatibleAccumulatorTypesMapper
java
apache__flink
flink-clients/src/main/java/org/apache/flink/client/cli/CliFrontend.java
{ "start": 60326, "end": 60451 }
class ____ the classpath that implements the CustomCommandLine interface. * * @param className The fully-qualified
from
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java
{ "start": 5230, "end": 10485 }
class ____, because we should never be able to reach it. throw new AggregationExecutionException("Dense ords don't know how to collect from many buckets"); } return new DenseGlobalOrds<>(this.resultStrategy, excludeDeletedDocs); }); } } String descriptCollectionStrategy() { return collectionStrategy.describe(); } @Override public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, LeafBucketCollector sub) throws IOException { SortedSetDocValues globalOrds = valuesSource.globalOrdinalsValues(aggCtx.getLeafReaderContext()); collectionStrategy.globalOrdsReady(globalOrds); SortedDocValues singleValues = DocValues.unwrapSingleton(globalOrds); if (singleValues != null) { segmentsWithSingleValuedOrds++; if (acceptedGlobalOrdinals == ALWAYS_TRUE) { /* * Optimize when there isn't a filter because that is very * common and marginally faster. */ return resultStrategy.wrapCollector(new LeafBucketCollectorBase(sub, globalOrds) { @Override public void collect(int doc, long owningBucketOrd) throws IOException { if (false == singleValues.advanceExact(doc)) { return; } int globalOrd = singleValues.ordValue(); collectionStrategy.collectGlobalOrd(owningBucketOrd, doc, globalOrd, sub); } }); } return resultStrategy.wrapCollector(new LeafBucketCollectorBase(sub, globalOrds) { @Override public void collect(int doc, long owningBucketOrd) throws IOException { if (false == singleValues.advanceExact(doc)) { return; } int globalOrd = singleValues.ordValue(); if (false == acceptedGlobalOrdinals.test(globalOrd)) { return; } collectionStrategy.collectGlobalOrd(owningBucketOrd, doc, globalOrd, sub); } }); } segmentsWithMultiValuedOrds++; if (acceptedGlobalOrdinals == ALWAYS_TRUE) { /* * Optimize when there isn't a filter because that is very * common and marginally faster. 
*/ return resultStrategy.wrapCollector(new LeafBucketCollectorBase(sub, globalOrds) { @Override public void collect(int doc, long owningBucketOrd) throws IOException { if (false == globalOrds.advanceExact(doc)) { return; } for (int i = 0; i < globalOrds.docValueCount(); i++) { long globalOrd = globalOrds.nextOrd(); collectionStrategy.collectGlobalOrd(owningBucketOrd, doc, globalOrd, sub); } } }); } return resultStrategy.wrapCollector(new LeafBucketCollectorBase(sub, globalOrds) { @Override public void collect(int doc, long owningBucketOrd) throws IOException { if (false == globalOrds.advanceExact(doc)) { return; } for (int i = 0; i < globalOrds.docValueCount(); i++) { long globalOrd = globalOrds.nextOrd(); if (false == acceptedGlobalOrdinals.test(globalOrd)) { continue; } collectionStrategy.collectGlobalOrd(owningBucketOrd, doc, globalOrd, sub); } } }); } @Override public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { if (valueCount == 0) { // no context in this reader return GlobalOrdinalsStringTermsAggregator.this.buildAggregations( Math.toIntExact(owningBucketOrds.size()), ordIdx -> resultStrategy.buildNoValuesResult(owningBucketOrds.get(ordIdx)) ); } return collectionStrategy.buildAggregations(owningBucketOrds); } @Override public InternalAggregation buildEmptyAggregation() { return resultStrategy.buildEmptyResult(); } @Override public void collectDebugInfo(BiConsumer<String, Object> add) { super.collectDebugInfo(add); add.accept("collection_strategy", collectionStrategy.describe()); add.accept("total_buckets", collectionStrategy.totalBuckets()); add.accept("result_strategy", resultStrategy.describe()); add.accept("segments_with_single_valued_ords", segmentsWithSingleValuedOrds); add.accept("segments_with_multi_valued_ords", segmentsWithMultiValuedOrds); add.accept("has_filter", acceptedGlobalOrdinals != ALWAYS_TRUE); } /** * This is used internally only, just for compare using global ordinal instead of term bytes in 
the PQ */ static
error
java
spring-projects__spring-framework
spring-expression/src/test/java/org/springframework/expression/spel/SpelDocumentationTests.java
{ "start": 9910, "end": 17383 }
class ____ { @Test void standardRelationalOperators() { boolean result = parser.parseExpression("2 == 2").getValue(Boolean.class); assertThat(result).isTrue(); // evaluates to false result = parser.parseExpression("2 < -5.0").getValue(Boolean.class); assertThat(result).isFalse(); // evaluates to true result = parser.parseExpression("'black' < 'block'").getValue(Boolean.class); assertThat(result).isTrue(); } @Test void otherRelationalOperators() { boolean result; // evaluates to true result = parser.parseExpression( "1 between {1, 5}").getValue(Boolean.class); assertThat(result).isTrue(); // evaluates to false result = parser.parseExpression( "1 between {10, 15}").getValue(Boolean.class); assertThat(result).isFalse(); // evaluates to true result = parser.parseExpression( "'elephant' between {'aardvark', 'zebra'}").getValue(Boolean.class); assertThat(result).isTrue(); // evaluates to false result = parser.parseExpression( "'elephant' between {'aardvark', 'cobra'}").getValue(Boolean.class); assertThat(result).isFalse(); // evaluates to true result = parser.parseExpression( "123 instanceof T(Integer)").getValue(Boolean.class); assertThat(result).isTrue(); // evaluates to false result = parser.parseExpression( "'xyz' instanceof T(Integer)").getValue(Boolean.class); assertThat(result).isFalse(); // evaluates to true result = parser.parseExpression( "'5.00' matches '^-?\\d+(\\.\\d{2})?$'").getValue(Boolean.class); assertThat(result).isTrue(); // evaluates to false result = parser.parseExpression( "'5.0067' matches '^-?\\d+(\\.\\d{2})?$'").getValue(Boolean.class); assertThat(result).isFalse(); } @Test void logicalOperators() { StandardEvaluationContext societyContext = new StandardEvaluationContext(); societyContext.setRootObject(new IEEE()); // -- AND -- // evaluates to false boolean falseValue = parser.parseExpression("true and false").getValue(Boolean.class); assertThat(falseValue).isFalse(); // evaluates to true String expression = "isMember('Nikola Tesla') and 
isMember('Mihajlo Pupin')"; boolean trueValue = parser.parseExpression(expression).getValue(societyContext, Boolean.class); // -- OR -- // evaluates to true trueValue = parser.parseExpression("true or false").getValue(Boolean.class); assertThat(trueValue).isTrue(); // evaluates to true expression = "isMember('Nikola Tesla') or isMember('Albert Einstien')"; trueValue = parser.parseExpression(expression).getValue(societyContext, Boolean.class); assertThat(trueValue).isTrue(); // -- NOT -- // evaluates to false falseValue = parser.parseExpression("!true").getValue(Boolean.class); assertThat(falseValue).isFalse(); // -- AND and NOT -- expression = "isMember('Nikola Tesla') and !isMember('Mihajlo Pupin')"; falseValue = parser.parseExpression(expression).getValue(societyContext, Boolean.class); assertThat(falseValue).isFalse(); } @Test void stringOperators() { // -- Concatenation -- // evaluates to "hello world" String helloWorld = parser.parseExpression("'hello' + ' ' + 'world'").getValue(String.class); assertThat(helloWorld).isEqualTo("hello world"); // -- Subtraction -- // evaluates to 'a' char ch = parser.parseExpression("'d' - 3").getValue(char.class); assertThat(ch).isEqualTo('a'); // -- Repeat -- // evaluates to "abcabc" String repeated = parser.parseExpression("'abc' * 2").getValue(String.class); assertThat(repeated).isEqualTo("abcabc"); } @Test void mathematicalOperators() { Inventor inventor = new Inventor(); EvaluationContext context = SimpleEvaluationContext.forReadWriteDataBinding().build(); // -- Addition -- int two = parser.parseExpression("1 + 1").getValue(int.class); // 2 assertThat(two).isEqualTo(2); // -- Subtraction -- int four = parser.parseExpression("1 - -3").getValue(int.class); // 4 assertThat(four).isEqualTo(4); double d = parser.parseExpression("1000.00 - 1e4").getValue(double.class); // -9000 assertThat(d).isCloseTo(-9000.0d, within((double) 0)); // -- Increment -- // The counter property in Inventor has an initial value of 0. 
// evaluates to 2; counter is now 1 two = parser.parseExpression("counter++ + 2").getValue(context, inventor, int.class); assertThat(two).isEqualTo(2); // evaluates to 5; counter is now 2 int five = parser.parseExpression("3 + ++counter").getValue(context, inventor, int.class); assertThat(five).isEqualTo(5); // -- Decrement -- // The counter property in Inventor has a value of 2. // evaluates to 6; counter is now 1 int six = parser.parseExpression("counter-- + 4").getValue(context, inventor, int.class); assertThat(six).isEqualTo(6); // evaluates to 5; counter is now 0 five = parser.parseExpression("5 + --counter").getValue(context, inventor, int.class); assertThat(five).isEqualTo(5); // -- Multiplication -- six = parser.parseExpression("-2 * -3").getValue(int.class); // 6 assertThat(six).isEqualTo(6); double twentyFour = parser.parseExpression("2.0 * 3e0 * 4").getValue(double.class); // 24.0 assertThat(twentyFour).isCloseTo(24.0d, within((double) 0)); // -- Division -- int minusTwo = parser.parseExpression("6 / -3").getValue(int.class); // -2 assertThat(minusTwo).isEqualTo(-2); double one = parser.parseExpression("8.0 / 4e0 / 2").getValue(double.class); // 1.0 assertThat(one).isCloseTo(1.0d, within((double) 0)); // -- Modulus -- int three = parser.parseExpression("7 % 4").getValue(int.class); // 3 assertThat(three).isEqualTo(3); int oneInt = parser.parseExpression("8 / 5 % 2").getValue(int.class); // 1 assertThat(oneInt).isEqualTo(1); // -- Exponential power -- int maxInt = parser.parseExpression("(2^31) - 1").getValue(int.class); // Integer.MAX_VALUE assertThat(maxInt).isEqualTo(Integer.MAX_VALUE); int minInt = parser.parseExpression("-2^31").getValue(int.class); // Integer.MIN_VALUE assertThat(minInt).isEqualTo(Integer.MIN_VALUE); // -- Operator precedence -- int minusTwentyOne = parser.parseExpression("1+2-3*8").getValue(int.class); // -21 assertThat(minusTwentyOne).isEqualTo(-21); } @Test void assignment() { Inventor inventor = new Inventor(); EvaluationContext 
context = SimpleEvaluationContext.forReadWriteDataBinding().build(); parser.parseExpression("foo").setValue(context, inventor, "Alexander Seovic2"); assertThat(parser.parseExpression("foo").getValue(context, inventor, String.class)).isEqualTo("Alexander Seovic2"); // alternatively String aleks = parser.parseExpression("foo = 'Alexandar Seovic'").getValue(context, inventor, String.class); assertThat(parser.parseExpression("foo").getValue(context, inventor, String.class)).isEqualTo("Alexandar Seovic"); assertThat(aleks).isEqualTo("Alexandar Seovic"); } @Test @SuppressWarnings("unchecked") void overloadingOperators() { StandardEvaluationContext context = new StandardEvaluationContext(); context.setOperatorOverloader(new ListConcatenation()); // evaluates to [1, 2, 3, 4, 5] List list = parser.parseExpression("{1, 2, 3} + {2 + 2, 5}").getValue(context, List.class); assertThat(list).containsExactly(1, 2, 3, 4, 5); } } @Nested
Operators
java
alibaba__druid
core/src/main/java/com/alibaba/druid/support/json/JSONParser.java
{ "start": 8913, "end": 9378 }
enum ____ { INT, // DOUBLE, // STRING, // BOOLEAN, // TRUE, // FALSE, // NULL, // EOF, // LBRACE("{"), // RBRACE("}"), // LBRACKET("["), // RBRACKET("]"), // COMMA(","), // COLON(":"); public final String name; Token() { this(null); } Token(String name) { this.name = name; } } }
Token
java
spring-projects__spring-security
core/src/test/java/org/springframework/security/core/token/KeyBasedPersistenceTokenServiceTests.java
{ "start": 1000, "end": 3315 }
class ____ { private KeyBasedPersistenceTokenService getService() { SecureRandomFactoryBean fb = new SecureRandomFactoryBean(); KeyBasedPersistenceTokenService service = new KeyBasedPersistenceTokenService(); service.setServerSecret("MY:SECRET$$$#"); service.setServerInteger(454545); try { SecureRandom rnd = fb.getObject(); service.setSecureRandom(rnd); service.afterPropertiesSet(); } catch (Exception ex) { throw new RuntimeException(ex); } return service; } @Test public void testOperationWithSimpleExtendedInformation() { KeyBasedPersistenceTokenService service = getService(); Token token = service.allocateToken("Hello world"); Token result = service.verifyToken(token.getKey()); assertThat(result).isEqualTo(token); } @Test public void testOperationWithComplexExtendedInformation() { KeyBasedPersistenceTokenService service = getService(); Token token = service.allocateToken("Hello:world:::"); Token result = service.verifyToken(token.getKey()); assertThat(result).isEqualTo(token); } @Test public void testOperationWithEmptyRandomNumber() { KeyBasedPersistenceTokenService service = getService(); service.setPseudoRandomNumberBytes(0); Token token = service.allocateToken("Hello:world:::"); Token result = service.verifyToken(token.getKey()); assertThat(result).isEqualTo(token); } @Test public void testOperationWithNoExtendedInformation() { KeyBasedPersistenceTokenService service = getService(); Token token = service.allocateToken(""); Token result = service.verifyToken(token.getKey()); assertThat(result).isEqualTo(token); } @Test public void testOperationWithMissingKey() { KeyBasedPersistenceTokenService service = getService(); assertThatIllegalArgumentException().isThrownBy(() -> { Token token = new DefaultToken("", new Date().getTime(), ""); service.verifyToken(token.getKey()); }); } @Test public void testOperationWithTamperedKey() { KeyBasedPersistenceTokenService service = getService(); Token goodToken = service.allocateToken(""); String fake = 
goodToken.getKey().toUpperCase(); Token token = new DefaultToken(fake, new Date().getTime(), ""); assertThatIllegalArgumentException().isThrownBy(() -> service.verifyToken(token.getKey())); } }
KeyBasedPersistenceTokenServiceTests
java
apache__dubbo
dubbo-common/src/main/java/org/apache/dubbo/rpc/model/ConsumerModel.java
{ "start": 1276, "end": 6802 }
class ____ extends ServiceModel { private final Set<String> apps = new TreeSet<>(); private final Map<String, AsyncMethodInfo> methodConfigs; private Map<Method, ConsumerMethodModel> methodModels = new HashMap<>(); /** * This constructor creates an instance of ConsumerModel and passed objects should not be null. * If service name, service instance, proxy object,methods should not be null. If these are null * then this constructor will throw {@link IllegalArgumentException} * * @param serviceKey Name of the service. * @param proxyObject Proxy object. */ public ConsumerModel( String serviceKey, Object proxyObject, ServiceDescriptor serviceDescriptor, Map<String, AsyncMethodInfo> methodConfigs, ClassLoader interfaceClassLoader) { super(proxyObject, serviceKey, serviceDescriptor, null, interfaceClassLoader); Assert.notEmptyString(serviceKey, "Service name can't be null or blank"); this.methodConfigs = methodConfigs == null ? new HashMap<>() : methodConfigs; } public ConsumerModel( String serviceKey, Object proxyObject, ServiceDescriptor serviceDescriptor, ServiceMetadata metadata, Map<String, AsyncMethodInfo> methodConfigs, ClassLoader interfaceClassLoader) { super(proxyObject, serviceKey, serviceDescriptor, null, metadata, interfaceClassLoader); Assert.notEmptyString(serviceKey, "Service name can't be null or blank"); this.methodConfigs = methodConfigs == null ? new HashMap<>() : methodConfigs; } public ConsumerModel( String serviceKey, Object proxyObject, ServiceDescriptor serviceDescriptor, ModuleModel moduleModel, ServiceMetadata metadata, Map<String, AsyncMethodInfo> methodConfigs, ClassLoader interfaceClassLoader) { super(proxyObject, serviceKey, serviceDescriptor, moduleModel, metadata, interfaceClassLoader); Assert.notEmptyString(serviceKey, "Service name can't be null or blank"); this.methodConfigs = methodConfigs == null ? 
new HashMap<>() : methodConfigs; } public AsyncMethodInfo getMethodConfig(String methodName) { return methodConfigs.get(methodName); } public Set<String> getApps() { return apps; } public AsyncMethodInfo getAsyncInfo(String methodName) { return methodConfigs.get(methodName); } public void initMethodModels() { Class<?>[] interfaceList; if (getProxyObject() == null) { Class<?> serviceInterfaceClass = getServiceInterfaceClass(); if (serviceInterfaceClass != null) { interfaceList = new Class[] {serviceInterfaceClass}; } else { interfaceList = new Class[0]; } } else { interfaceList = getProxyObject().getClass().getInterfaces(); } for (Class<?> interfaceClass : interfaceList) { for (Method method : interfaceClass.getMethods()) { methodModels.put(method, new ConsumerMethodModel(method)); } } } /** * Return method model for the given method on consumer side * * @param method method object * @return method model */ public ConsumerMethodModel getMethodModel(Method method) { return methodModels.get(method); } /** * Return method model for the given method on consumer side * * @param method method object * @return method model */ public ConsumerMethodModel getMethodModel(String method) { Optional<Map.Entry<Method, ConsumerMethodModel>> consumerMethodModelEntry = methodModels.entrySet().stream() .filter(entry -> entry.getKey().getName().equals(method)) .findFirst(); return consumerMethodModelEntry.map(Map.Entry::getValue).orElse(null); } /** * @param method methodName * @param argsType method arguments type * @return */ public ConsumerMethodModel getMethodModel(String method, String[] argsType) { Optional<ConsumerMethodModel> consumerMethodModel = methodModels.entrySet().stream() .filter(entry -> entry.getKey().getName().equals(method)) .map(Map.Entry::getValue) .filter(methodModel -> Arrays.equals(argsType, methodModel.getParameterTypes())) .findFirst(); return consumerMethodModel.orElse(null); } /** * Return all method models for the current service * * @return method model 
list */ public List<ConsumerMethodModel> getAllMethodModels() { return new ArrayList<>(methodModels.values()); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } if (!super.equals(o)) { return false; } ConsumerModel that = (ConsumerModel) o; return Objects.equals(apps, that.apps) && Objects.equals(methodConfigs, that.methodConfigs) && Objects.equals(methodModels, that.methodModels); } @Override public int hashCode() { return Objects.hash(super.hashCode(), apps, methodConfigs, methodModels); } }
ConsumerModel
java
apache__dubbo
dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/mapping/condition/MediaTypeExpression.java
{ "start": 1180, "end": 8915 }
class ____ implements Comparable<MediaTypeExpression> { public static final MediaTypeExpression ALL = new MediaTypeExpression(MediaType.WILDCARD, MediaType.WILDCARD); public static final List<MediaTypeExpression> ALL_LIST = Collections.singletonList(ALL); public static final Comparator<MediaTypeExpression> COMPARATOR = (m1, m2) -> { int comparison = compareQuality(m1, m2); if (comparison != 0) { return comparison; } comparison = compareType(m1.type, m2.type); if (comparison != Integer.MIN_VALUE) { return comparison; } comparison = compareType(m1.subType, m2.subType); return comparison == Integer.MIN_VALUE ? 0 : comparison; }; public static final Comparator<MediaTypeExpression> QUALITY_COMPARATOR = MediaTypeExpression::compareQuality; private final String type; private final String subType; private final boolean negated; private final float quality; private MediaTypeExpression(String type, String subType, float quality, boolean negated) { this.type = type; this.subType = subType; this.quality = quality; this.negated = negated; } public MediaTypeExpression(String type, String subType) { this.type = type; this.subType = subType; quality = 1.0F; negated = false; } public static List<MediaType> toMediaTypes(List<MediaTypeExpression> expressions) { int size = expressions.size(); List<MediaType> mediaTypes = new ArrayList<>(size); for (int i = 0; i < size; i++) { MediaTypeExpression expr = expressions.get(i); mediaTypes.add(new MediaType(expr.getType(), expr.getSubType())); } return mediaTypes; } public static MediaTypeExpression parse(String expr) { boolean negated; if (expr.indexOf('!') == 0) { negated = true; expr = expr.substring(1); } else { negated = false; } if (StringUtils.isEmpty(expr)) { return null; } int index = expr.indexOf(';'); String mimeType = (index == -1 ? 
expr : expr.substring(0, index)).trim(); if (MediaType.WILDCARD.equals(mimeType)) { mimeType = "*/*"; } int subIndex = mimeType.indexOf('/'); if (subIndex == -1 || subIndex == mimeType.length() - 1) { return null; } String type = mimeType.substring(0, subIndex); String subType = mimeType.substring(subIndex + 1); if (MediaType.WILDCARD.equals(type) && !MediaType.WILDCARD.equals(subType)) { return null; } return new MediaTypeExpression(type, subType, HttpUtils.parseQuality(expr, index), negated); } private static int compareType(String type1, String type2) { boolean type1IsWildcard = MediaType.WILDCARD.equals(type1); boolean type2IsWildcard = MediaType.WILDCARD.equals(type2); if (type1IsWildcard && !type2IsWildcard) { return 1; } if (type2IsWildcard && !type1IsWildcard) { return -1; } if (!type1.equals(type2)) { return 0; } return Integer.MIN_VALUE; } public String getType() { return type; } public String getSubType() { return subType; } public float getQuality() { return quality; } private static int compareQuality(MediaTypeExpression m1, MediaTypeExpression m2) { return Float.compare(m2.quality, m1.quality); } public boolean typesEquals(MediaTypeExpression other) { return type.equalsIgnoreCase(other.type) && subType.equalsIgnoreCase(other.subType); } public boolean match(MediaTypeExpression other) { return matchMediaType(other) != negated; } private boolean matchMediaType(MediaTypeExpression other) { if (other == null) { return false; } if (isWildcardType()) { return true; } if (type.equals(other.type)) { if (subType.equals(other.subType)) { return true; } if (isWildcardSubtype()) { int plusIdx = subType.lastIndexOf('+'); if (plusIdx == -1) { return true; } int otherPlusIdx = other.subType.indexOf('+'); if (otherPlusIdx != -1) { String subTypeNoSuffix = subType.substring(0, plusIdx); String subTypeSuffix = subType.substring(plusIdx + 1); String otherSubtypeSuffix = other.subType.substring(otherPlusIdx + 1); return subTypeSuffix.equals(otherSubtypeSuffix) && 
MediaType.WILDCARD.equals(subTypeNoSuffix); } } } return false; } public boolean compatibleWith(MediaTypeExpression other) { return compatibleWithMediaType(other) != negated; } private boolean compatibleWithMediaType(MediaTypeExpression other) { if (other == null) { return false; } if (isWildcardType() || other.isWildcardType()) { return true; } if (type.equals(other.type)) { if (subType.equalsIgnoreCase(other.subType)) { return true; } if (isWildcardSubtype() || other.isWildcardSubtype()) { if (subType.equals(MediaType.WILDCARD) || other.subType.equals(MediaType.WILDCARD)) { return true; } String thisSuffix = getSubtypeSuffix(); String otherSuffix = other.getSubtypeSuffix(); if (isWildcardSubtype() && thisSuffix != null) { return (thisSuffix.equals(other.subType) || thisSuffix.equals(otherSuffix)); } if (other.isWildcardSubtype() && otherSuffix != null) { return (subType.equals(otherSuffix) || otherSuffix.equals(thisSuffix)); } } } return false; } private boolean isWildcardType() { return MediaType.WILDCARD.equals(type); } private boolean isWildcardSubtype() { return MediaType.WILDCARD.equals(subType) || subType.startsWith("*+"); } private String getSubtypeSuffix() { int suffixIndex = subType.lastIndexOf('+'); if (suffixIndex != -1) { return subType.substring(suffixIndex + 1); } return null; } @Override public int compareTo(MediaTypeExpression other) { return COMPARATOR.compare(this, other); } @Override public int hashCode() { return Objects.hash(type, subType, negated, quality); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || obj.getClass() != MediaTypeExpression.class) { return false; } MediaTypeExpression other = (MediaTypeExpression) obj; return negated == other.negated && Float.compare(quality, other.quality) == 0 && Objects.equals(type, other.type) && Objects.equals(subType, other.subType); } @Override public String toString() { StringBuilder sb = new StringBuilder(); if (negated) { sb.append('!'); } 
sb.append(type).append('/').append(subType); if (quality != 1.0F) { sb.append(";q=").append(quality); } return sb.toString(); } }
MediaTypeExpression
java
spring-projects__spring-boot
smoke-test/spring-boot-smoke-test-kafka/src/dockerTest/java/smoketest/kafka/ssl/SampleKafkaSslApplicationTests.java
{ "start": 1846, "end": 2570 }
class ____ { @Container @ServiceConnection @JksTrustStore(location = "classpath:ssl/test-ca.p12", password = "password") @JksKeyStore(location = "classpath:ssl/test-client.p12", password = "password") public static ConfluentKafkaContainer kafka = TestImage.container(SecureKafkaContainer.class); @Autowired private Producer producer; @Autowired private Consumer consumer; @Test void testVanillaExchange() { this.producer.send(new SampleMessage(1, "A simple test message")); Awaitility.waitAtMost(Duration.ofSeconds(30)).until(this.consumer::getMessages, not(empty())); assertThat(this.consumer.getMessages()).extracting("message").containsOnly("A simple test message"); } }
SampleKafkaSslApplicationTests
java
apache__camel
components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/NettyHttpClientChunkedTest.java
{ "start": 1055, "end": 1825 }
class ____ extends BaseNettyTest { @Test public void testHttpSimple() throws Exception { getMockEndpoint("mock:input").expectedBodiesReceived("Hello World"); String out = template.requestBody("netty-http:http://localhost:{{port}}/foo", "Hello World", String.class); assertEquals("Bye World", out); MockEndpoint.assertIsSatisfied(context); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { @Override public void configure() { from("netty-http:http://0.0.0.0:{{port}}/foo") .to("mock:input") .transform().constant("Bye World"); } }; } }
NettyHttpClientChunkedTest
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/CheckReturnValueTest.java
{ "start": 25438, "end": 26020 }
class ____ { void f() { try { new Foo(); org.junit.Assert.fail(); } catch (Exception expected) { } org.junit.Assert.assertThrows(IllegalArgumentException.class, () -> new Foo()); } } """) .doTest(); } @Test public void constructor_reference() { compilationHelper .addSourceLines( "Foo.java", """ @com.google.errorprone.annotations.CheckReturnValue public
Test
java
apache__flink
flink-core/src/test/java/org/apache/flink/api/common/accumulators/AverageAccumulatorTest.java
{ "start": 1073, "end": 3724 }
class ____ { @Test void testGet() { AverageAccumulator average = new AverageAccumulator(); assertThat(average.getLocalValue()).isCloseTo(0.0, within(0.0)); } @Test void testAdd() { AverageAccumulator average = new AverageAccumulator(); int i1; for (i1 = 0; i1 < 10; i1++) { average.add(i1); } assertThat(average.getLocalValue()).isCloseTo(4.5, within(0.0)); average.resetLocal(); Integer i2; for (i2 = 0; i2 < 10; i2++) { average.add(i2); } assertThat(average.getLocalValue()).isCloseTo(4.5, within(0.0)); average.resetLocal(); long i3; for (i3 = 0; i3 < 10; i3++) { average.add(i3); } assertThat(average.getLocalValue()).isCloseTo(4.5, within(0.0)); average.resetLocal(); Long i4; for (i4 = 0L; i4 < 10; i4++) { average.add(i4); } assertThat(average.getLocalValue()).isCloseTo(4.5, within(0.0)); average.resetLocal(); double i5; for (i5 = 0; i5 < 10; i5++) { average.add(i5); } assertThat(average.getLocalValue()).isCloseTo(4.5, within(0.0)); average.resetLocal(); Double i6; for (i6 = 0.0; i6 < 10; i6++) { average.add(i6); } assertThat(average.getLocalValue()).isCloseTo(4.5, within(0.0)); average.resetLocal(); assertThat(average.getLocalValue()).isCloseTo(0.0, within(0.0)); } @Test void testMergeSuccess() { AverageAccumulator avg1 = new AverageAccumulator(); for (int i = 0; i < 5; i++) { avg1.add(i); } AverageAccumulator avg2 = new AverageAccumulator(); for (int i = 5; i < 10; i++) { avg2.add(i); } avg1.merge(avg2); assertThat(avg1.getLocalValue()).isCloseTo(4.5, within(0.0)); } @Test void testMergeFailed() { AverageAccumulator average = new AverageAccumulator(); Accumulator<Double, Double> averageNew = null; average.add(1); assertThatThrownBy(() -> average.merge(averageNew)) .isInstanceOf(IllegalArgumentException.class) .hasMessageContaining("The merged accumulator must be AverageAccumulator."); } @Test void testClone() { AverageAccumulator average = new AverageAccumulator(); average.add(1); AverageAccumulator averageNew = average.clone(); 
assertThat(averageNew.getLocalValue()).isCloseTo(1, within(0.0)); } }
AverageAccumulatorTest
java
apache__kafka
metadata/src/main/java/org/apache/kafka/controller/PartitionChangeBuilder.java
{ "start": 2726, "end": 7724 }
/**
 * The leader-election strategies this builder can apply when it has to pick a
 * new partition leader. (This span is the interior of PartitionChangeBuilder;
 * the class header is outside this view.)
 */
enum ____ {
    /**
     * Perform leader election to keep the partition online. Elect the preferred replica if it is in the ISR.
     */
    PREFERRED,
    /**
     * Perform leader election from the ISR to keep the partition online.
     */
    ONLINE,
    /**
     * Prefer replicas in the ISR but keep the partition online even if it requires picking a leader that is not in the ISR.
     */
    UNCLEAN
}

// Immutable inputs captured at construction time.
private final PartitionRegistration partition;
private final Uuid topicId;
private final int partitionId;
private final IntPredicate isAcceptableLeader;  // filters which broker ids may become leader
private final MetadataVersion metadataVersion;
private final int minISR;
private final Map<Integer, Uuid> targetDirectories;  // broker id -> directory id (see setDirectory)

// Mutable target state: each list is seeded in the constructor from the
// current partition registration and then adjusted via the fluent setters.
private List<Integer> targetIsr;
private List<Integer> targetReplicas;
private List<Integer> targetRemoving;
private List<Integer> targetAdding;
private List<Integer> targetElr;
private List<Integer> targetLastKnownElr;
private List<Integer> uncleanShutdownReplicas;
private Election election = Election.ONLINE;  // default strategy unless setElection overrides it
private LeaderRecoveryState targetLeaderRecoveryState;
private boolean eligibleLeaderReplicasEnabled;
private DefaultDirProvider defaultDirProvider;

// Whether allow electing last known leader in a Balanced recovery. Note, the
// last known leader will be stored in the lastKnownElr field if enabled.
private boolean useLastKnownLeaderInBalancedRecovery = true;

/**
 * Creates a builder whose target state starts as a copy of the partition's
 * current registration; callers then mutate the targets via the setters.
 *
 * @param partition          current partition registration to start from
 * @param topicId            id of the topic the partition belongs to
 * @param partitionId        index of the partition within the topic
 * @param isAcceptableLeader predicate over broker ids eligible for leadership
 * @param metadataVersion    active metadata version
 * @param minISR             configured minimum in-sync replica count
 */
public PartitionChangeBuilder(
    PartitionRegistration partition,
    Uuid topicId,
    int partitionId,
    IntPredicate isAcceptableLeader,
    MetadataVersion metadataVersion,
    int minISR
) {
    this.partition = partition;
    this.topicId = topicId;
    this.partitionId = partitionId;
    this.isAcceptableLeader = isAcceptableLeader;
    this.metadataVersion = metadataVersion;
    // ELR handling is opt-in; see setEligibleLeaderReplicasEnabled.
    this.eligibleLeaderReplicasEnabled = false;
    this.minISR = minISR;
    // Seed every target list from the current registration state.
    this.targetIsr = Replicas.toList(partition.isr);
    this.targetReplicas = Replicas.toList(partition.replicas);
    this.targetRemoving = Replicas.toList(partition.removingReplicas);
    this.targetAdding = Replicas.toList(partition.addingReplicas);
    this.targetElr = Replicas.toList(partition.elr);
    this.targetLastKnownElr = Replicas.toList(partition.lastKnownElr);
    this.targetLeaderRecoveryState = partition.leaderRecoveryState;
    this.targetDirectories = DirectoryId.createAssignmentMap(partition.replicas, partition.directories);
    // Fail fast if a default directory is requested before a provider is set.
    this.defaultDirProvider = uuid -> {
        throw new IllegalStateException("DefaultDirProvider is not set");
    };
}

/** Replaces the target ISR wholesale; returns this builder. */
public PartitionChangeBuilder setTargetIsr(List<Integer> targetIsr) {
    this.targetIsr = targetIsr;
    return this;
}

/** Convenience overload: extracts broker ids from the given broker states. */
public PartitionChangeBuilder setTargetIsrWithBrokerStates(List<BrokerState> targetIsrWithEpoch) {
    return setTargetIsr(
        targetIsrWithEpoch
            .stream()
            .map(BrokerState::brokerId)
            .collect(Collectors.toList())
    );
}

/** Replaces the full target replica assignment; returns this builder. */
public PartitionChangeBuilder setTargetReplicas(List<Integer> targetReplicas) {
    this.targetReplicas = targetReplicas;
    return this;
}

/** Records replicas that went through an unclean shutdown; returns this builder. */
public PartitionChangeBuilder setUncleanShutdownReplicas(List<Integer> uncleanShutdownReplicas) {
    this.uncleanShutdownReplicas = uncleanShutdownReplicas;
    return this;
}

/** Selects the election strategy to apply; returns this builder. */
public PartitionChangeBuilder setElection(Election election) {
    this.election = election;
    return this;
}

/** Sets the replicas scheduled for removal; returns this builder. */
public PartitionChangeBuilder setTargetRemoving(List<Integer> targetRemoving) {
    this.targetRemoving = targetRemoving;
    return this;
}

/** Sets the replicas scheduled for addition; returns this builder. */
public PartitionChangeBuilder setTargetAdding(List<Integer> targetAdding) {
    this.targetAdding = targetAdding;
    return this;
}

/** Sets the target leader recovery state; returns this builder. */
public PartitionChangeBuilder setTargetLeaderRecoveryState(LeaderRecoveryState targetLeaderRecoveryState) {
    this.targetLeaderRecoveryState = targetLeaderRecoveryState;
    return this;
}

/** Enables or disables eligible-leader-replica handling; returns this builder. */
public PartitionChangeBuilder setEligibleLeaderReplicasEnabled(boolean eligibleLeaderReplicasEnabled) {
    this.eligibleLeaderReplicasEnabled = eligibleLeaderReplicasEnabled;
    return this;
}

/** Toggles last-known-leader election during balanced recovery; returns this builder. */
public PartitionChangeBuilder setUseLastKnownLeaderInBalancedRecovery(boolean useLastKnownLeaderInBalancedRecovery) {
    this.useLastKnownLeaderInBalancedRecovery = useLastKnownLeaderInBalancedRecovery;
    return this;
}

/** Assigns a directory id to the given broker; returns this builder. */
public PartitionChangeBuilder setDirectory(int brokerId, Uuid dir) {
    this.targetDirectories.put(brokerId, dir);
    return this;
}

/** Installs the provider used for unassigned directories; returns this builder. */
public PartitionChangeBuilder setDefaultDirProvider(DefaultDirProvider defaultDirProvider) {
    this.defaultDirProvider = defaultDirProvider;
    return this;
}

// VisibleForTesting
static
Election