language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStore.java | {
"start": 4927,
"end": 9633
} | class ____ only used in template configuration. It wraps the fields of {@link DataStreamFailureStore} with {@link ResettableValue}
* to allow a user to signal when they want to reset any previously encountered values during template composition.
*/
public record Template(ResettableValue<Boolean> enabled, ResettableValue<DataStreamLifecycle.Template> lifecycle)
implements
Writeable,
ToXContentObject {
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<Template, Void> PARSER = new ConstructingObjectParser<>(
"failure_store_template",
false,
(args, unused) -> new Template(
args[0] == null ? ResettableValue.undefined() : (ResettableValue<Boolean>) args[0],
args[1] == null ? ResettableValue.undefined() : (ResettableValue<DataStreamLifecycle.Template>) args[1]
)
);
static {
PARSER.declareField(
ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL
? ResettableValue.reset()
: ResettableValue.create(p.booleanValue()),
ENABLED_FIELD,
ObjectParser.ValueType.BOOLEAN_OR_NULL
);
PARSER.declareField(
ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL
? ResettableValue.reset()
: ResettableValue.create(DataStreamLifecycle.Template.failuresLifecycleTemplatefromXContent(p)),
LIFECYCLE_FIELD,
ObjectParser.ValueType.OBJECT_OR_NULL
);
}
public Template(@Nullable Boolean enabled, @Nullable DataStreamLifecycle.Template lifecycle) {
this(ResettableValue.create(enabled), ResettableValue.create(lifecycle));
}
public Template {
if (enabled.isDefined() == false && lifecycle.isDefined() == false) {
throw new IllegalArgumentException(EMPTY_FAILURE_STORE_ERROR_MESSAGE);
}
assert lifecycle.get() == null || lifecycle.mapAndGet(l -> l.toDataStreamLifecycle().targetsFailureStore())
: "Invalid lifecycle type in failure store template";
}
@Override
public void writeTo(StreamOutput out) throws IOException {
ResettableValue.write(out, enabled, StreamOutput::writeBoolean);
if (out.getTransportVersion().supports(INTRODUCE_FAILURES_LIFECYCLE)) {
ResettableValue.write(out, lifecycle, (o, v) -> v.writeTo(o));
}
}
public static Template read(StreamInput in) throws IOException {
ResettableValue<Boolean> enabled = ResettableValue.read(in, StreamInput::readBoolean);
ResettableValue<DataStreamLifecycle.Template> lifecycle = ResettableValue.undefined();
if (in.getTransportVersion().supports(INTRODUCE_FAILURES_LIFECYCLE)) {
lifecycle = ResettableValue.read(in, DataStreamLifecycle.Template::read);
}
return new Template(enabled, lifecycle);
}
/**
* Converts the template to XContent, depending on the XContent.Params set by {@link ResettableValue#hideResetValues(Params)}
* it may or may not display any explicit nulls when the value is to be reset.
*/
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
enabled.toXContent(builder, params, ENABLED_FIELD.getPreferredName());
lifecycle.toXContent(builder, params, LIFECYCLE_FIELD.getPreferredName());
builder.endObject();
return builder;
}
public static Template fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
@Override
public String toString() {
return Strings.toString(this, true, true);
}
}
public static Builder builder() {
return new Builder();
}
public static Builder builder(Template template) {
return new Builder(template);
}
public static Builder builder(DataStreamFailureStore failureStore) {
return new Builder(failureStore);
}
/**
* Builder that is able to create either a DataStreamFailureStore or its respective Template.
* Furthermore, its composeTemplate method during template composition.
*/
public static | is |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/EntityValuedInSubqueryGroupAndOrderTest.java | {
"start": 1073,
"end": 3408
} | class ____ {
@BeforeAll
public void setUp(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final EntityA entityA = new EntityA( "entity_a" );
session.persist( entityA );
session.persist( new EntityB( entityA, 1 ) );
session.persist( new EntityB( entityA, 2 ) );
} );
}
@AfterAll
public void tearDown(SessionFactoryScope scope) {
scope.inTransaction( session -> {
session.createMutationQuery( "delete from EntityB" ).executeUpdate();
session.createMutationQuery( "delete from EntityA" ).executeUpdate();
} );
}
@Test
@SkipForDialect(dialectClass = OracleDialect.class, majorVersion = 23, reason = "Oracle 23c bug")
public void testInSubqueryGroupBy(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final EntityB result = session.createQuery(
"select b from EntityB b " +
"where (b.entityA, b.amount) in " +
" (select b2.entityA, max(b2.amount) from EntityB b2 " +
" where b2.entityA.unlisted = false " +
" group by b2.entityA)",
EntityB.class
).getSingleResult();
assertThat( result.getAmount() ).isEqualTo( 2 );
} );
}
@Test
@Jira( "https://hibernate.atlassian.net/browse/HHH-17231" )
public void testInSubqueryGroupByProp(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final EntityB result = session.createQuery(
"select b from EntityB b " +
"where (b.entityA.name, b.amount) in " +
" (select b2.entityA.name, max(b2.amount) from EntityB b2 " +
" where b2.entityA.unlisted = false " +
" group by b2.entityA)",
EntityB.class
).getSingleResult();
assertThat( result.getAmount() ).isEqualTo( 2 );
} );
}
@Test
public void testTopLevelSelect(SessionFactoryScope scope) {
scope.inTransaction( session -> {
// Here, the selection is top level so the entity valued path will be expanded
final Tuple result = session.createQuery(
"select b.entityA, max(b.amount) from EntityB b " +
" where b.entityA.unlisted = false " +
" group by b.entityA ",
Tuple.class
).getSingleResult();
assertThat( result.get( 0, EntityA.class ).getName() ).isEqualTo( "entity_a" );
assertThat( result.get( 1, Integer.class ) ).isEqualTo( 2 );
} );
}
@Entity( name = "EntityA" )
public static | EntityValuedInSubqueryGroupAndOrderTest |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/multipart/MultipartParser.java | {
"start": 1165,
"end": 1936
} | interface ____ {
void beginPart(final CaseInsensitiveMap<String> headers);
void data(final ByteBuffer buffer) throws IOException;
void endPart();
}
public static ParseState beginParse(final PartHandler handler, final byte[] boundary, final String requestCharset) {
// We prepend CR/LF to the boundary to chop trailing CR/LF from
// body-data tokens.
byte[] boundaryToken = new byte[boundary.length + BOUNDARY_PREFIX.length];
System.arraycopy(BOUNDARY_PREFIX, 0, boundaryToken, 0, BOUNDARY_PREFIX.length);
System.arraycopy(boundary, 0, boundaryToken, BOUNDARY_PREFIX.length, boundary.length);
return new ParseState(handler, requestCharset, boundaryToken);
}
public static | PartHandler |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java | {
"start": 1516,
"end": 2114
} | class ____ text using standard UTF8 encoding. It provides methods
* to serialize, deserialize, and compare texts at byte level. The type of
* length is integer and is serialized using zero-compressed format. <p>In
* addition, it provides methods for string traversal without converting the
* byte array to a string. <p>Also includes utilities for
* serializing/deserialing a string, coding/decoding a string, checking if a
* byte array contains valid UTF8 code, calculating the length of an encoded
* string.
*/
@Stringable
@InterfaceAudience.Public
@InterfaceStability.Stable
public | stores |
java | apache__camel | core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedRollback.java | {
"start": 1171,
"end": 1883
} | class ____ extends ManagedProcessor implements ManagedRollbackMBean {
public ManagedRollback(CamelContext context, RollbackProcessor processor, ProcessorDefinition<?> definition) {
super(context, processor, definition);
}
@Override
public RollbackProcessor getProcessor() {
return (RollbackProcessor) super.getProcessor();
}
@Override
public String getMessage() {
return getProcessor().getMessage();
}
@Override
public Boolean isMarkRollbackOnly() {
return getProcessor().isMarkRollbackOnly();
}
@Override
public Boolean isMarkRollbackOnlyLast() {
return getProcessor().isMarkRollbackOnlyLast();
}
}
| ManagedRollback |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/argument/TypeConverter.java | {
"start": 934,
"end": 1099
} | interface ____ {
@Nullable
<T> T convert(Object source, Class<T> targetClass);
@Nullable
<T> T convert(Object source, Type targetType);
}
| TypeConverter |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/common/beanutil/JavaBeanAccessor.java | {
"start": 852,
"end": 1321
} | enum ____ {
/**
* Field accessor.
*/
FIELD,
/**
* Method accessor.
*/
METHOD,
/**
* Method prefer to field.
*/
ALL;
public static boolean isAccessByMethod(JavaBeanAccessor accessor) {
return METHOD.equals(accessor) || ALL.equals(accessor);
}
public static boolean isAccessByField(JavaBeanAccessor accessor) {
return FIELD.equals(accessor) || ALL.equals(accessor);
}
}
| JavaBeanAccessor |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/runtime/io/benchmark/StreamNetworkThroughputBenchmark.java | {
"start": 1439,
"end": 5652
} | class ____ {
protected StreamNetworkBenchmarkEnvironment<LongValue> environment;
protected ReceiverThread receiver;
protected LongRecordWriterThread[] writerThreads;
public void executeBenchmark(long records) throws Exception {
executeBenchmark(records, Long.MAX_VALUE);
}
/**
* Executes the throughput benchmark with the given number of records.
*
* @param records to pass through the network stack
*/
public void executeBenchmark(long records, long timeout) throws Exception {
final LongValue value = new LongValue();
value.setValue(0);
long lastRecord = records / writerThreads.length;
CompletableFuture<?> recordsReceived = receiver.setExpectedRecord(lastRecord);
for (LongRecordWriterThread writerThread : writerThreads) {
writerThread.setRecordsToSend(lastRecord);
}
recordsReceived.get(timeout, TimeUnit.MILLISECONDS);
}
public void setUp(int recordWriters, int channels, int flushTimeout) throws Exception {
setUp(recordWriters, channels, flushTimeout, false);
}
public void setUp(int recordWriters, int channels, int flushTimeout, boolean localMode)
throws Exception {
setUp(recordWriters, channels, flushTimeout, localMode, -1, -1);
}
public void setUp(
int recordWriters,
int channels,
int flushTimeout,
boolean localMode,
int senderBufferPoolSize,
int receiverBufferPoolSize)
throws Exception {
setUp(
recordWriters,
channels,
flushTimeout,
false,
localMode,
senderBufferPoolSize,
receiverBufferPoolSize,
new Configuration());
}
/**
* Initializes the throughput benchmark with the given parameters.
*
* @param recordWriters number of senders, i.e. {@link
* org.apache.flink.runtime.io.network.api.writer.RecordWriter} instances
* @param channels number of outgoing channels / receivers
*/
public void setUp(
int recordWriters,
int channels,
int flushTimeout,
boolean broadcastMode,
boolean localMode,
int senderBufferPoolSize,
int receiverBufferPoolSize,
Configuration config)
throws Exception {
environment = new StreamNetworkBenchmarkEnvironment<>();
environment.setUp(
recordWriters,
channels,
localMode,
senderBufferPoolSize,
receiverBufferPoolSize,
config);
writerThreads = new LongRecordWriterThread[recordWriters];
for (int writer = 0; writer < recordWriters; writer++) {
ResultPartitionWriter resultPartitionWriter =
environment.createResultPartitionWriter(writer);
RecordWriterBuilder recordWriterBuilder =
new RecordWriterBuilder().setTimeout(flushTimeout);
setChannelSelector(recordWriterBuilder, broadcastMode);
writerThreads[writer] =
new LongRecordWriterThread(
recordWriterBuilder.build(resultPartitionWriter), broadcastMode);
writerThreads[writer].start();
}
receiver = environment.createReceiver();
}
protected void setChannelSelector(
RecordWriterBuilder recordWriterBuilder, boolean broadcastMode) {
if (broadcastMode) {
recordWriterBuilder.setChannelSelector(new BroadcastPartitioner());
}
}
/**
* Shuts down a benchmark previously set up via {@link #setUp}.
*
* <p>This will wait for all senders to finish but timeout with an exception after 5 seconds.
*/
public void tearDown() throws Exception {
for (LongRecordWriterThread writerThread : writerThreads) {
writerThread.shutdown();
writerThread.sync(5000);
}
environment.tearDown();
receiver.shutdown();
}
}
| StreamNetworkThroughputBenchmark |
java | hibernate__hibernate-orm | hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/function/json/SingleStoreJsonExistsFunction.java | {
"start": 658,
"end": 2442
} | class ____ extends JsonExistsFunction {
public SingleStoreJsonExistsFunction(TypeConfiguration typeConfiguration) {
super( typeConfiguration, true, false );
}
@Override
protected void render(
SqlAppender sqlAppender,
JsonExistsArguments arguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> walker) {
if ( arguments.errorBehavior() != null && arguments.errorBehavior() != JsonExistsErrorBehavior.ERROR ) {
throw new QueryException( "Can't emulate on error clause on SingleStore" );
}
final String jsonPath;
try {
jsonPath = walker.getLiteralValue( arguments.jsonPath() );
}
catch (Exception ex) {
throw new QueryException( "SingleStore json_exists only support literal json paths, but got " + arguments.jsonPath() );
}
final List<JsonPathHelper.JsonPathElement> jsonPathElements = JsonPathHelper.parseJsonPathElements( jsonPath );
sqlAppender.appendSql( "json_match_any_exists(" );
arguments.jsonDocument().accept( walker );
for ( JsonPathHelper.JsonPathElement pathElement : jsonPathElements ) {
sqlAppender.appendSql( ',' );
if ( pathElement instanceof JsonPathHelper.JsonAttribute attribute ) {
sqlAppender.appendSingleQuoteEscapedString( attribute.attribute() );
}
else if ( pathElement instanceof JsonPathHelper.JsonParameterIndexAccess jsonParameterIndexAccess) {
final String parameterName = jsonParameterIndexAccess.parameterName();
throw new QueryException( "JSON path [" + jsonPath + "] uses parameter [" + parameterName + "] that is not passed" );
}
else {
sqlAppender.appendSql( '\'' );
sqlAppender.appendSql( ( (JsonPathHelper.JsonIndexAccess) pathElement ).index() );
sqlAppender.appendSql( '\'' );
}
}
sqlAppender.appendSql( ')' );
}
}
| SingleStoreJsonExistsFunction |
java | apache__kafka | raft/src/test/java/org/apache/kafka/raft/ValidOffsetAndEpochTest.java | {
"start": 984,
"end": 2025
} | class ____ {
@Test
void diverging() {
ValidOffsetAndEpoch validOffsetAndEpoch = ValidOffsetAndEpoch.diverging(new OffsetAndEpoch(0, 0));
assertEquals(ValidOffsetAndEpoch.Kind.DIVERGING, validOffsetAndEpoch.kind());
}
@Test
void snapshot() {
ValidOffsetAndEpoch validOffsetAndEpoch = ValidOffsetAndEpoch.snapshot(new OffsetAndEpoch(0, 0));
assertEquals(ValidOffsetAndEpoch.Kind.SNAPSHOT, validOffsetAndEpoch.kind());
}
@Test
void valid() {
ValidOffsetAndEpoch validOffsetAndEpoch = ValidOffsetAndEpoch.valid(new OffsetAndEpoch(0, 0));
assertEquals(ValidOffsetAndEpoch.Kind.VALID, validOffsetAndEpoch.kind());
}
@Test
void testValidWithoutSpecifyingOffsetAndEpoch() {
ValidOffsetAndEpoch validOffsetAndEpoch = ValidOffsetAndEpoch.valid();
assertEquals(ValidOffsetAndEpoch.Kind.VALID, validOffsetAndEpoch.kind());
assertEquals(new OffsetAndEpoch(-1, -1), validOffsetAndEpoch.offsetAndEpoch());
}
} | ValidOffsetAndEpochTest |
java | bumptech__glide | instrumentation/src/androidTest/java/com/bumptech/glide/MultiRequestTest.java | {
"start": 6192,
"end": 6470
} | class ____ extends CustomTarget<Drawable> {
@Override
public void onResourceReady(
@NonNull Drawable resource, @Nullable Transition<? super Drawable> transition) {}
@Override
public void onLoadCleared(@Nullable Drawable placeholder) {}
}
}
| DoNothingTarget |
java | spring-projects__spring-framework | spring-websocket/src/main/java/org/springframework/web/socket/config/annotation/WebSocketMessageBrokerConfigurationSupport.java | {
"start": 2865,
"end": 7044
} | class ____ extends AbstractMessageBrokerConfiguration {
/**
* Scope identifier for WebSocket scope: "websocket".
* @since 7.0
*/
public static final String SCOPE_WEBSOCKET = "websocket";
private @Nullable WebSocketTransportRegistration transportRegistration;
@Override
protected SimpAnnotationMethodMessageHandler createAnnotationMethodMessageHandler(
AbstractSubscribableChannel clientInboundChannel,AbstractSubscribableChannel clientOutboundChannel,
SimpMessagingTemplate brokerMessagingTemplate) {
WebSocketAnnotationMethodMessageHandler handler = new WebSocketAnnotationMethodMessageHandler(
clientInboundChannel, clientOutboundChannel, brokerMessagingTemplate);
handler.setPhase(getPhase());
return handler;
}
@Override
protected SimpUserRegistry createLocalUserRegistry(@Nullable Integer order) {
DefaultSimpUserRegistry registry = new DefaultSimpUserRegistry();
if (order != null) {
registry.setOrder(order);
}
return registry;
}
@Bean
public HandlerMapping stompWebSocketHandlerMapping(
WebSocketHandler subProtocolWebSocketHandler, TaskScheduler messageBrokerTaskScheduler,
AbstractSubscribableChannel clientInboundChannel) {
WebSocketHandler handler = decorateWebSocketHandler(subProtocolWebSocketHandler);
WebMvcStompEndpointRegistry registry =
new WebMvcStompEndpointRegistry(handler, getTransportRegistration(), messageBrokerTaskScheduler);
ApplicationContext applicationContext = getApplicationContext();
if (applicationContext != null) {
registry.setApplicationContext(applicationContext);
}
registerStompEndpoints(registry);
OrderedMessageChannelDecorator.configureInterceptor(clientInboundChannel, registry.isPreserveReceiveOrder());
AbstractHandlerMapping handlerMapping = registry.getHandlerMapping();
if (handlerMapping instanceof WebSocketHandlerMapping webSocketMapping) {
webSocketMapping.setPhase(getPhase());
}
return handlerMapping;
}
@Bean
public WebSocketHandler subProtocolWebSocketHandler(
AbstractSubscribableChannel clientInboundChannel, AbstractSubscribableChannel clientOutboundChannel) {
SubProtocolWebSocketHandler handler =
new SubProtocolWebSocketHandler(clientInboundChannel, clientOutboundChannel);
handler.setPhase(getPhase());
return handler;
}
protected WebSocketHandler decorateWebSocketHandler(WebSocketHandler handler) {
for (WebSocketHandlerDecoratorFactory factory : getTransportRegistration().getDecoratorFactories()) {
handler = factory.decorate(handler);
}
return handler;
}
protected final WebSocketTransportRegistration getTransportRegistration() {
if (this.transportRegistration == null) {
this.transportRegistration = new WebSocketTransportRegistration();
configureWebSocketTransport(this.transportRegistration);
}
return this.transportRegistration;
}
protected void configureWebSocketTransport(WebSocketTransportRegistration registry) {
}
protected abstract void registerStompEndpoints(StompEndpointRegistry registry);
@Bean
public static CustomScopeConfigurer webSocketScopeConfigurer() {
CustomScopeConfigurer configurer = new CustomScopeConfigurer();
configurer.addScope(SCOPE_WEBSOCKET, new SimpSessionScope());
return configurer;
}
@Bean
public WebSocketMessageBrokerStats webSocketMessageBrokerStats(
@Nullable AbstractBrokerMessageHandler stompBrokerRelayMessageHandler,
WebSocketHandler subProtocolWebSocketHandler,
@Qualifier("clientInboundChannelExecutor") TaskExecutor inboundExecutor,
@Qualifier("clientOutboundChannelExecutor") TaskExecutor outboundExecutor,
@Qualifier("messageBrokerTaskScheduler") TaskScheduler scheduler) {
WebSocketMessageBrokerStats stats = new WebSocketMessageBrokerStats();
stats.setSubProtocolWebSocketHandler((SubProtocolWebSocketHandler) subProtocolWebSocketHandler);
if (stompBrokerRelayMessageHandler instanceof StompBrokerRelayMessageHandler sbrmh) {
stats.setStompBrokerRelay(sbrmh);
}
stats.setInboundChannelExecutor(inboundExecutor);
stats.setOutboundChannelExecutor(outboundExecutor);
stats.setSockJsTaskScheduler(scheduler);
return stats;
}
}
| WebSocketMessageBrokerConfigurationSupport |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/converter/HttpMessageConverterTests.java | {
"start": 1023,
"end": 2710
} | class ____ {
@Test
void canRead() {
MediaType mediaType = new MediaType("foo", "bar");
HttpMessageConverter<MyType> converter = new MyHttpMessageConverter<>(mediaType);
assertThat(converter.canRead(MyType.class, mediaType)).isTrue();
assertThat(converter.canRead(MyType.class, new MediaType("foo", "*"))).isFalse();
assertThat(converter.canRead(MyType.class, MediaType.ALL)).isFalse();
}
@Test
void canReadWithWildcardSubtype() {
MediaType mediaType = new MediaType("foo");
HttpMessageConverter<MyType> converter = new MyHttpMessageConverter<>(mediaType);
assertThat(converter.canRead(MyType.class, new MediaType("foo", "bar"))).isTrue();
assertThat(converter.canRead(MyType.class, new MediaType("foo", "*"))).isTrue();
assertThat(converter.canRead(MyType.class, MediaType.ALL)).isFalse();
}
@Test
void canWrite() {
MediaType mediaType = new MediaType("foo", "bar");
HttpMessageConverter<MyType> converter = new MyHttpMessageConverter<>(mediaType);
assertThat(converter.canWrite(MyType.class, mediaType)).isTrue();
assertThat(converter.canWrite(MyType.class, new MediaType("foo", "*"))).isTrue();
assertThat(converter.canWrite(MyType.class, MediaType.ALL)).isTrue();
}
@Test
void canWriteWithWildcardInSupportedSubtype() {
MediaType mediaType = new MediaType("foo");
HttpMessageConverter<MyType> converter = new MyHttpMessageConverter<>(mediaType);
assertThat(converter.canWrite(MyType.class, new MediaType("foo", "bar"))).isTrue();
assertThat(converter.canWrite(MyType.class, new MediaType("foo", "*"))).isTrue();
assertThat(converter.canWrite(MyType.class, MediaType.ALL)).isTrue();
}
private static | HttpMessageConverterTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/StaticAssignmentOfThrowableTest.java | {
"start": 1709,
"end": 2135
} | class ____ {
// BUG: Diagnostic contains: [StaticAssignmentOfThrowable]
static Throwable foo = new NullPointerException("message");
public Test(int foo) {}
}
""")
.doTest();
}
@Test
public void staticWithThrowableDuringInitializationFromMethod_error() {
helper
.addSourceLines(
"Test.java",
"""
| Test |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/test/ReachabilityChecker.java | {
"start": 1551,
"end": 3551
} | class ____ {
private final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
private final Queue<Registered> references = ConcurrentCollections.newQueue();
public ReachabilityChecker() {
memoryMXBean.gc();
}
/**
* Register the given target object for reachability checks.
*
* @return the given target object.
*/
public <T> T register(T target) {
var referenceQueue = new ReferenceQueue<>();
references.add(
new Registered(target.toString(), new PhantomReference<>(Objects.requireNonNull(target), referenceQueue), referenceQueue)
);
return target;
}
/**
* Ensure that all registered objects have become unreachable.
*/
public void ensureUnreachable() {
ensureUnreachable(TimeUnit.SECONDS.toMillis(10));
}
void ensureUnreachable(long timeoutMillis) {
Registered registered;
while ((registered = references.poll()) != null) {
registered.assertReferenceEnqueuedForCollection(memoryMXBean, timeoutMillis);
}
}
/**
* From the objects registered since the most recent call to {@link #ensureUnreachable()} (or since the construction of this {@link
* ReachabilityChecker} if {@link #ensureUnreachable()} has not been called) this method chooses one at random and verifies that it has
* not yet become unreachable.
*/
public void checkReachable() {
if (references.peek() == null) {
throw new AssertionError("no references registered");
}
var target = Randomness.get().nextInt(references.size());
var iterator = references.iterator();
for (int i = 0; i < target; i++) {
assertTrue(iterator.hasNext());
assertNotNull(iterator.next());
}
assertTrue(iterator.hasNext());
iterator.next().assertReferenceNotEnqueuedForCollection(memoryMXBean);
}
private static final | ReachabilityChecker |
java | google__dagger | javatests/dagger/functional/generated/NeedsProviderOfFactory.java | {
"start": 1075,
"end": 1176
} | interface ____ {
InjectsProviderOfFactory injectsProviderOfFactory();
}
| ExposesFactoryAsProvider |
java | spring-projects__spring-framework | spring-websocket/src/main/java/org/springframework/web/socket/client/ConnectionManagerSupport.java | {
"start": 1399,
"end": 4968
} | class ____ implements SmartLifecycle {
protected final Log logger = LogFactory.getLog(getClass());
private final URI uri;
private boolean autoStartup = false;
private int phase = DEFAULT_PHASE;
private volatile boolean running;
private final Object lifecycleMonitor = new Object();
/**
* Constructor with a URI template and variables.
*/
public ConnectionManagerSupport(String uriTemplate, @Nullable Object... uriVariables) {
this.uri = UriComponentsBuilder.fromUriString(uriTemplate).buildAndExpand(uriVariables).encode().toUri();
}
/**
* Constructor with a prepared {@link URI}.
* @param uri the url to connect to
* @since 6.0.5
*/
public ConnectionManagerSupport(URI uri) {
this.uri = uri;
}
protected URI getUri() {
return this.uri;
}
/**
* Set whether to auto-connect to the remote endpoint after this connection manager
* has been initialized and the Spring context has been refreshed.
* <p>Default is "false".
*/
public void setAutoStartup(boolean autoStartup) {
this.autoStartup = autoStartup;
}
/**
* Return the value for the 'autoStartup' property. If "true", this endpoint
* connection manager will connect to the remote endpoint upon a
* ContextRefreshedEvent.
*/
@Override
public boolean isAutoStartup() {
return this.autoStartup;
}
/**
* Specify the phase in which a connection should be established to the remote
* endpoint and subsequently closed. The startup order proceeds from lowest to
* highest, and the shutdown order is the reverse of that. By default, this value is
* Integer.MAX_VALUE meaning that this endpoint connection factory connects as late as
* possible and is closed as soon as possible.
*/
public void setPhase(int phase) {
this.phase = phase;
}
/**
* Return the phase in which this endpoint connection factory will be auto-connected
* and stopped.
*/
@Override
public int getPhase() {
return this.phase;
}
/**
* Start the WebSocket connection. If already connected, the method has no impact.
*/
@Override
public final void start() {
synchronized (this.lifecycleMonitor) {
if (!isRunning()) {
startInternal();
}
}
}
protected void startInternal() {
synchronized (this.lifecycleMonitor) {
if (logger.isInfoEnabled()) {
logger.info("Starting " + getClass().getSimpleName());
}
this.running = true;
openConnection();
}
}
@Override
public final void stop() {
synchronized (this.lifecycleMonitor) {
if (isRunning()) {
if (logger.isInfoEnabled()) {
logger.info("Stopping " + getClass().getSimpleName());
}
try {
stopInternal();
}
catch (Throwable ex) {
logger.error("Failed to stop WebSocket connection", ex);
}
finally {
this.running = false;
}
}
}
}
@Override
public final void stop(Runnable callback) {
synchronized (this.lifecycleMonitor) {
stop();
callback.run();
}
}
protected void stopInternal() throws Exception {
if (isConnected()) {
closeConnection();
}
}
/**
* Return whether this ConnectionManager has been started.
*/
@Override
public boolean isRunning() {
return this.running;
}
/**
* Whether the connection is open/{@code true} or closed/{@code false}.
*/
public abstract boolean isConnected();
/**
* Subclasses implement this to actually establish the connection.
*/
protected abstract void openConnection();
/**
* Subclasses implement this to close the connection.
*/
protected abstract void closeConnection() throws Exception;
}
| ConnectionManagerSupport |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowSchemas.java | {
"start": 872,
"end": 1731
} | class ____ extends Command {
public ShowSchemas(Source source) {
super(source);
}
@Override
protected NodeInfo<ShowSchemas> info() {
return NodeInfo.create(this);
}
@Override
public List<Attribute> output() {
return singletonList(new FieldAttribute(source(), "schema", new KeywordEsField("schema")));
}
@Override
public void execute(SqlSession session, ActionListener<Page> listener) {
listener.onResponse(Page.last(Rows.empty(output())));
}
@Override
public int hashCode() {
return getClass().hashCode();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
return true;
}
}
| ShowSchemas |
java | apache__camel | test-infra/camel-test-infra-opensearch/src/test/java/org/apache/camel/test/infra/opensearch/services/OpenSearchServiceFactory.java | {
"start": 3072,
"end": 3178
} | class ____ extends RemoteOpenSearchInfraService implements OpenSearchService {
}
}
| RemoteOpenSearchService |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/JUnitParameterMethodNotFoundTest.java | {
"start": 879,
"end": 1665
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(JUnitParameterMethodNotFound.class, getClass());
@Test
public void negativeCase_noErrorsFound() {
compilationHelper
.addSourceLines(
"JUnitParameterMethodNotFoundNegativeCase.java",
"""
package com.google.errorprone.bugpatterns.testdata;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import junitparams.JUnitParamsRunner;
import junitparams.Parameters;
import junitparams.naming.TestCaseName;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Negative cases for {@link com.google.errorprone.bugpatterns.JUnitParameterMethodNotFound} */
@RunWith(JUnitParamsRunner.class)
public | JUnitParameterMethodNotFoundTest |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/AssertTimeout.java | {
"start": 922,
"end": 2868
} | class ____ {
private AssertTimeout() {
/* no-op */
}
static void assertTimeout(Duration timeout, Executable executable) {
assertTimeout(timeout, executable, (String) null);
}
static void assertTimeout(Duration timeout, Executable executable, @Nullable String message) {
AssertTimeout.<@Nullable Object> assertTimeout(timeout, () -> {
executable.execute();
return null;
}, message);
}
static void assertTimeout(Duration timeout, Executable executable, Supplier<@Nullable String> messageSupplier) {
AssertTimeout.<@Nullable Object> assertTimeout(timeout, () -> {
executable.execute();
return null;
}, messageSupplier);
}
static <T extends @Nullable Object> T assertTimeout(Duration timeout, ThrowingSupplier<T> supplier) {
return assertTimeout(timeout, supplier, (Object) null);
}
static <T extends @Nullable Object> T assertTimeout(Duration timeout, ThrowingSupplier<T> supplier,
@Nullable String message) {
return assertTimeout(timeout, supplier, (Object) message);
}
static <T extends @Nullable Object> T assertTimeout(Duration timeout, ThrowingSupplier<T> supplier,
Supplier<@Nullable String> messageSupplier) {
return assertTimeout(timeout, supplier, (Object) messageSupplier);
}
private static <T extends @Nullable Object> T assertTimeout(Duration timeout, ThrowingSupplier<T> supplier,
@Nullable Object messageOrSupplier) {
long timeoutInMillis = timeout.toMillis();
long start = System.currentTimeMillis();
T result;
try {
result = supplier.get();
}
catch (Throwable ex) {
throw throwAsUncheckedException(ex);
}
long timeElapsed = System.currentTimeMillis() - start;
if (timeElapsed > timeoutInMillis) {
assertionFailure() //
.message(messageOrSupplier) //
.reason("execution exceeded timeout of " + timeoutInMillis + " ms by "
+ (timeElapsed - timeoutInMillis) + " ms") //
.buildAndThrow();
}
return result;
}
}
| AssertTimeout |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/scripting/defaults/RawLanguageDriver.java | {
"start": 1289,
"end": 2130
} | class ____ extends XMLLanguageDriver {
@Override
public SqlSource createSqlSource(Configuration configuration, XNode script, Class<?> parameterType) {
SqlSource source = super.createSqlSource(configuration, script, parameterType);
checkIsNotDynamic(source);
return source;
}
@Override
public SqlSource createSqlSource(Configuration configuration, String script, Class<?> parameterType,
ParamNameResolver paramNameResolver) {
SqlSource source = super.createSqlSource(configuration, script, parameterType, paramNameResolver);
checkIsNotDynamic(source);
return source;
}
private void checkIsNotDynamic(SqlSource source) {
if (!RawSqlSource.class.equals(source.getClass())) {
throw new BuilderException("Dynamic content is not allowed when using RAW language");
}
}
}
| RawLanguageDriver |
java | spring-projects__spring-boot | loader/spring-boot-loader-tools/src/test/java/org/springframework/boot/loader/tools/MainClassFinderTests.java | {
"start": 6137,
"end": 10191
} | class ____ the following candidates [a.B, a.b.c.E]");
}
@Test
void findSingleDirectorySearchPrefersAnnotatedMainClass() throws Exception {
this.testJarFile.addClass("a/B.class", ClassWithMainMethod.class);
this.testJarFile.addClass("a/b/c/E.class", AnnotatedClassWithMainMethod.class);
String mainClass = MainClassFinder.findSingleMainClass(this.testJarFile.getJarSource(),
"org.springframework.boot.loader.tools.sample.SomeApplication");
assertThat(mainClass).isEqualTo("a.b.c.E");
}
@Test
void doWithDirectoryMainMethods() throws Exception {
this.testJarFile.addClass("a/b/c/D.class", ClassWithMainMethod.class);
this.testJarFile.addClass("a/b/c/E.class", ClassWithoutMainMethod.class);
this.testJarFile.addClass("a/b/F.class", ClassWithoutMainMethod.class);
this.testJarFile.addClass("a/b/G.class", ClassWithMainMethod.class);
ClassNameCollector callback = new ClassNameCollector();
MainClassFinder.doWithMainClasses(this.testJarFile.getJarSource(), callback);
assertThat(callback.getClassNames()).hasToString("[a.b.G, a.b.c.D]");
}
@Test
void doWithJarMainMethods() throws Exception {
this.testJarFile.addClass("a/b/c/D.class", ClassWithMainMethod.class);
this.testJarFile.addClass("a/b/c/E.class", ClassWithoutMainMethod.class);
this.testJarFile.addClass("a/b/F.class", ClassWithoutMainMethod.class);
this.testJarFile.addClass("a/b/G.class", ClassWithMainMethod.class);
ClassNameCollector callback = new ClassNameCollector();
try (JarFile jarFile = this.testJarFile.getJarFile()) {
MainClassFinder.doWithMainClasses(jarFile, null, callback);
assertThat(callback.getClassNames()).hasToString("[a.b.G, a.b.c.D]");
}
}
@Test
void packagePrivateMainMethod() throws Exception {
this.testJarFile.addFile("a/b/c/D.class", packagePrivateMainMethod(ClassFileVersion.JAVA_V25));
ClassNameCollector callback = new ClassNameCollector();
try (JarFile jarFile = this.testJarFile.getJarFile()) {
MainClassFinder.doWithMainClasses(jarFile, null, callback);
assertThat(callback.getClassNames()).hasToString("[a.b.c.D]");
}
}
@Test
void packagePrivateMainMethodBeforeJava25() throws Exception {
this.testJarFile.addFile("a/b/c/D.class", packagePrivateMainMethod(ClassFileVersion.JAVA_V24));
ClassNameCollector callback = new ClassNameCollector();
try (JarFile jarFile = this.testJarFile.getJarFile()) {
MainClassFinder.doWithMainClasses(jarFile, null, callback);
assertThat(callback.getClassNames()).isEmpty();
}
}
@Test
void parameterlessMainMethod() throws Exception {
this.testJarFile.addFile("a/b/c/D.class", parameterlessMainMethod(ClassFileVersion.JAVA_V25));
ClassNameCollector callback = new ClassNameCollector();
try (JarFile jarFile = this.testJarFile.getJarFile()) {
MainClassFinder.doWithMainClasses(jarFile, null, callback);
assertThat(callback.getClassNames()).hasToString("[a.b.c.D]");
}
}
@Test
void parameterlessMainMethodBeforeJava25() throws Exception {
this.testJarFile.addFile("a/b/c/D.class", parameterlessMainMethod(ClassFileVersion.JAVA_V24));
ClassNameCollector callback = new ClassNameCollector();
try (JarFile jarFile = this.testJarFile.getJarFile()) {
MainClassFinder.doWithMainClasses(jarFile, null, callback);
assertThat(callback.getClassNames()).isEmpty();
}
}
private ByteArrayInputStream packagePrivateMainMethod(ClassFileVersion classFileVersion) {
byte[] bytecode = new ByteBuddy(classFileVersion).subclass(Object.class)
.defineMethod("main", void.class, Modifier.STATIC)
.withParameter(String[].class)
.intercept(new EmptyBodyImplementation())
.make()
.getBytes();
return new ByteArrayInputStream(bytecode);
}
private ByteArrayInputStream parameterlessMainMethod(ClassFileVersion classFileVersion) {
byte[] bytecode = new ByteBuddy(classFileVersion).subclass(Object.class)
.defineMethod("main", void.class, Modifier.STATIC | Modifier.PUBLIC)
.intercept(new EmptyBodyImplementation())
.make()
.getBytes();
return new ByteArrayInputStream(bytecode);
}
static | from |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/basic/bitset/BitSetJdbcTypeRegistrationTests.java | {
"start": 1174,
"end": 2660
} | class ____ {
@Test
public void verifyMappings(SessionFactoryScope scope) {
final MappingMetamodelImplementor mappingMetamodel = scope.getSessionFactory()
.getRuntimeMetamodels()
.getMappingMetamodel();
final EntityPersister entityDescriptor = mappingMetamodel.findEntityDescriptor(Product.class);
final BasicAttributeMapping attributeMapping = (BasicAttributeMapping) entityDescriptor.findAttributeMapping("bitSet");
assertThat( attributeMapping.getJavaType().getJavaTypeClass(), equalTo( BitSet.class));
assertThat(attributeMapping.getJdbcMapping().getValueConverter(), nullValue());
assertThat(
attributeMapping.getJdbcMapping().getJdbcType().getJdbcTypeCode(),
is(Types.VARBINARY)
);
assertThat(attributeMapping.getJdbcMapping().getJavaTypeDescriptor().getJavaTypeClass(), equalTo(BitSet.class));
scope.inTransaction(
(session) -> {
session.persist(new Product(1, BitSet.valueOf(BitSetHelper.BYTES)));
}
);
scope.inSession(
(session) -> {
final Product product = session.get(Product.class, 1);
assertThat(product.getBitSet(), equalTo(BitSet.valueOf(BitSetHelper.BYTES)));
}
);
}
@AfterEach
public void dropData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Table(name = "Product")
//tag::basic-bitset-example-jdbc-type-global[]
@Entity(name = "Product")
@JdbcTypeRegistration(CustomBinaryJdbcType.class)
public static | BitSetJdbcTypeRegistrationTests |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/cluster/project/ProjectStateRegistryTests.java | {
"start": 866,
"end": 5071
} | class ____ extends ESTestCase {
public void testBuilder() {
final var projects = randomSet(1, 5, ESTestCase::randomUniqueProjectId);
final var projectsUnderDeletion = randomSet(0, 5, ESTestCase::randomUniqueProjectId);
var builder = ProjectStateRegistry.builder();
projects.forEach(projectId -> builder.putProjectSettings(projectId, randomSettings()));
projectsUnderDeletion.forEach(
projectId -> builder.putProjectSettings(projectId, randomSettings()).markProjectForDeletion(projectId)
);
var projectStateRegistry = builder.build();
var gen1 = projectStateRegistry.getProjectsMarkedForDeletionGeneration();
assertThat(gen1, equalTo(projectsUnderDeletion.isEmpty() ? 0L : 1L));
projectStateRegistry = ProjectStateRegistry.builder(projectStateRegistry).markProjectForDeletion(randomFrom(projects)).build();
var gen2 = projectStateRegistry.getProjectsMarkedForDeletionGeneration();
assertThat(gen2, equalTo(gen1 + 1));
if (projectsUnderDeletion.isEmpty() == false) {
// re-adding the same projectId should not change the generation
projectStateRegistry = ProjectStateRegistry.builder(projectStateRegistry)
.markProjectForDeletion(randomFrom(projectsUnderDeletion))
.build();
assertThat(projectStateRegistry.getProjectsMarkedForDeletionGeneration(), equalTo(gen2));
}
var unknownProjectId = randomUniqueProjectId();
var throwingBuilder = ProjectStateRegistry.builder(projectStateRegistry).markProjectForDeletion(unknownProjectId);
assertThrows(IllegalArgumentException.class, throwingBuilder::build);
var projectToRemove = randomFrom(projectStateRegistry.knownProjects());
projectStateRegistry = ProjectStateRegistry.builder(projectStateRegistry).removeProject(projectToRemove).build();
assertFalse(projectStateRegistry.hasProject(projectToRemove));
assertFalse(projectStateRegistry.isProjectMarkedForDeletion(projectToRemove));
}
public void testDiff() {
ProjectStateRegistry originalRegistry = ProjectStateRegistry.builder()
.putProjectSettings(randomUniqueProjectId(), randomSettings())
.putProjectSettings(randomUniqueProjectId(), randomSettings())
.putProjectSettings(randomUniqueProjectId(), randomSettings())
.build();
ProjectId newProjectId = randomUniqueProjectId();
Settings newSettings = randomSettings();
ProjectId projectToMarkForDeletion = randomFrom(originalRegistry.knownProjects());
ProjectId projectToModifyId = randomFrom(originalRegistry.knownProjects());
Settings modifiedSettings = randomSettings();
ProjectStateRegistry modifiedRegistry = ProjectStateRegistry.builder(originalRegistry)
.putProjectSettings(newProjectId, newSettings)
.markProjectForDeletion(projectToMarkForDeletion)
.putProjectSettings(projectToModifyId, modifiedSettings)
.build();
var diff = modifiedRegistry.diff(originalRegistry);
var appliedRegistry = (ProjectStateRegistry) diff.apply(originalRegistry);
assertThat(appliedRegistry, equalTo(modifiedRegistry));
assertThat(appliedRegistry.size(), equalTo(originalRegistry.size() + 1));
assertTrue(appliedRegistry.knownProjects().contains(newProjectId));
assertTrue(appliedRegistry.isProjectMarkedForDeletion(projectToMarkForDeletion));
assertThat(appliedRegistry.getProjectSettings(newProjectId), equalTo(newSettings));
assertThat(appliedRegistry.getProjectSettings(projectToModifyId), equalTo(modifiedSettings));
}
public void testDiffNoChanges() {
ProjectStateRegistry originalRegistry = ProjectStateRegistry.builder()
.putProjectSettings(randomUniqueProjectId(), randomSettings())
.build();
var diff = originalRegistry.diff(originalRegistry);
var appliedRegistry = (ProjectStateRegistry) diff.apply(originalRegistry);
assertThat(appliedRegistry, sameInstance(originalRegistry));
}
}
| ProjectStateRegistryTests |
java | apache__camel | components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/rest/RestNettyHttpVerbTest.java | {
"start": 1154,
"end": 4181
} | class ____ extends BaseNettyTest {
@Test
public void testGetAll() {
String out = template.requestBodyAndHeader("http://localhost:" + getPort() + "/users", null, Exchange.HTTP_METHOD,
"GET", String.class);
assertEquals("[{ \"id\":\"1\", \"name\":\"Scott\" },{ \"id\":\"2\", \"name\":\"Claus\" }]", out);
}
@Test
public void testGetOne() {
String out = template.requestBodyAndHeader("http://localhost:" + getPort() + "/users/1", null, Exchange.HTTP_METHOD,
"GET", String.class);
assertEquals("{ \"id\":\"1\", \"name\":\"Scott\" }", out);
}
@Test
public void testPost() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:create");
mock.expectedBodiesReceived("{ \"id\":\"1\", \"name\":\"Scott\" }");
mock.expectedHeaderReceived(Exchange.HTTP_METHOD, "POST");
template.requestBodyAndHeader("http://localhost:" + getPort() + "/users", "{ \"id\":\"1\", \"name\":\"Scott\" }",
Exchange.HTTP_METHOD, "POST", String.class);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testPut() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:update");
mock.expectedBodiesReceived("{ \"id\":\"1\", \"name\":\"Scott\" }");
mock.expectedHeaderReceived("id", "1");
mock.expectedHeaderReceived(Exchange.HTTP_METHOD, "PUT");
template.requestBodyAndHeader("http://localhost:" + getPort() + "/users/1", "{ \"id\":\"1\", \"name\":\"Scott\" }",
Exchange.HTTP_METHOD, "PUT", String.class);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testDelete() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:delete");
mock.expectedHeaderReceived("id", "1");
mock.expectedHeaderReceived(Exchange.HTTP_METHOD, "DELETE");
template.requestBodyAndHeader("http://localhost:" + getPort() + "/users/1", null, Exchange.HTTP_METHOD, "DELETE",
String.class);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
restConfiguration().component("netty-http").host("localhost").port(getPort());
rest()
.get("/users").to("direct:users")
.get("/users/{id}").to("direct:id")
.post("/users").to("mock:create")
.put("/users/{id}").to("mock:update")
.delete("/users/{id}").to("mock:delete");
from("direct:users").transform()
.constant("[{ \"id\":\"1\", \"name\":\"Scott\" },{ \"id\":\"2\", \"name\":\"Claus\" }]");
from("direct:id").transform().simple("{ \"id\":\"${header.id}\", \"name\":\"Scott\" }");
}
};
}
}
| RestNettyHttpVerbTest |
java | apache__camel | components/camel-azure/camel-azure-files/src/main/java/org/apache/camel/component/file/azure/FilesConfiguration.java | {
"start": 1203,
"end": 4621
} | class ____ extends RemoteFileConfiguration {
public static final int DEFAULT_HTTPS_PORT = 443;
public static final String DEFAULT_INTERNET_DOMAIN = "file.core.windows.net";
@UriParam(label = "common", description = "Shared key (storage account key)", secret = true)
private String sharedKey;
@UriPath(name = "account", description = "The account to use")
@Metadata(required = true)
private String account;
@UriPath(name = "share", description = "The share to use")
@Metadata(required = true)
private String share;
@UriParam(label = "common", enums = "SHARED_ACCOUNT_KEY,SHARED_KEY_CREDENTIAL,AZURE_IDENTITY,AZURE_SAS",
defaultValue = "SHARED_ACCOUNT_KEY", description = "Determines the credential strategy to adopt")
private CredentialType credentialType = SHARED_ACCOUNT_KEY;
public FilesConfiguration() {
setProtocol(FilesComponent.SCHEME);
}
public FilesConfiguration(URI uri) {
super(uri);
setSendNoop(false);
setBinary(true);
setPassiveMode(true);
if (account == null) {
// URI host maps to the account option
String host = uri.getHost();
if (host != null) {
// reset host as it requires to know the account name also
setAccount(host);
setHost(host);
}
}
}
@Override
protected void setDefaultPort() {
setPort(DEFAULT_HTTPS_PORT);
}
@Override
public void setDirectory(String path) {
// split URI path to share and starting directory
if (path == null || path.isBlank() || path.contains(FilesPath.PATH_SEPARATOR + "" + FilesPath.PATH_SEPARATOR)
|| path.equals(FilesPath.SHARE_ROOT)) {
throw new IllegalArgumentException("Illegal endpoint URI path (expected share[/dir]): " + path);
}
var dir = FilesPath.trimTrailingSeparator(path);
dir = FilesPath.trimLeadingSeparator(dir);
var separator = dir.indexOf(FilesPath.PATH_SEPARATOR);
if (separator == -1) {
share = dir;
dir = FilesPath.SHARE_ROOT;
} else {
share = dir.substring(0, separator);
dir = dir.substring(separator);
}
super.setDirectory(dir);
}
public String getShare() {
return share;
}
public void setShare(String share) {
this.share = share;
}
@Override
public String remoteServerInformation() {
return getProtocol() + "://" + getAccount();
}
/**
* Files service account or <account>.file.core.windows.net hostname.
*/
@Override
public void setHost(String host) {
var dot = host.indexOf('.');
var hasDot = dot >= 0;
super.setHost(hasDot ? host : account + '.' + DEFAULT_INTERNET_DOMAIN);
}
public String getAccount() {
return account;
}
public void setAccount(String account) {
this.account = account;
}
public String getSharedKey() {
return sharedKey;
}
public void setSharedKey(String sharedKey) {
this.sharedKey = sharedKey;
}
public CredentialType getCredentialType() {
return credentialType;
}
public void setCredentialType(CredentialType credentialType) {
this.credentialType = credentialType;
}
}
| FilesConfiguration |
java | apache__camel | components/camel-openapi-java/src/test/java/org/apache/camel/openapi/model/OneOfForm.java | {
"start": 1257,
"end": 1517
} | interface ____ {
// The discriminator explicitly declares which property you can inspect to determine the object type.
// The discriminator must apply to the same level of the schema it is declared in (common mistake when using nested objects).
}
| OneOfForm |
java | apache__spark | common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ErrorHandlerSuite.java | {
"start": 1163,
"end": 3139
} | class ____ {
@Test
public void testErrorRetry() {
ErrorHandler.BlockPushErrorHandler pushHandler = new ErrorHandler.BlockPushErrorHandler();
assertFalse(pushHandler.shouldRetryError(new BlockPushNonFatalFailure(
ReturnCode.TOO_LATE_BLOCK_PUSH, "")));
assertFalse(pushHandler.shouldRetryError(new BlockPushNonFatalFailure(
ReturnCode.TOO_OLD_ATTEMPT_PUSH, "")));
assertFalse(pushHandler.shouldRetryError(new BlockPushNonFatalFailure(
ReturnCode.STALE_BLOCK_PUSH, "")));
assertFalse(pushHandler.shouldRetryError(new RuntimeException(new ConnectException())));
assertTrue(pushHandler.shouldRetryError(new BlockPushNonFatalFailure(
ReturnCode.BLOCK_APPEND_COLLISION_DETECTED, "")));
assertTrue(pushHandler.shouldRetryError(new Throwable()));
ErrorHandler.BlockFetchErrorHandler fetchHandler = new ErrorHandler.BlockFetchErrorHandler();
assertFalse(fetchHandler.shouldRetryError(new RuntimeException(
ErrorHandler.BlockFetchErrorHandler.STALE_SHUFFLE_BLOCK_FETCH)));
}
@Test
public void testErrorLogging() {
ErrorHandler.BlockPushErrorHandler pushHandler = new ErrorHandler.BlockPushErrorHandler();
assertFalse(pushHandler.shouldLogError(new BlockPushNonFatalFailure(
ReturnCode.TOO_LATE_BLOCK_PUSH, "")));
assertFalse(pushHandler.shouldLogError(new BlockPushNonFatalFailure(
ReturnCode.TOO_OLD_ATTEMPT_PUSH, "")));
assertFalse(pushHandler.shouldLogError(new BlockPushNonFatalFailure(
ReturnCode.STALE_BLOCK_PUSH, "")));
assertFalse(pushHandler.shouldLogError(new BlockPushNonFatalFailure(
ReturnCode.BLOCK_APPEND_COLLISION_DETECTED, "")));
assertTrue(pushHandler.shouldLogError(new Throwable()));
ErrorHandler.BlockFetchErrorHandler fetchHandler = new ErrorHandler.BlockFetchErrorHandler();
assertFalse(fetchHandler.shouldLogError(new RuntimeException(
ErrorHandler.BlockFetchErrorHandler.STALE_SHUFFLE_BLOCK_FETCH)));
}
}
| ErrorHandlerSuite |
java | mapstruct__mapstruct | processor/src/test/resources/fixtures/org/mapstruct/ap/test/nestedbeans/UserDtoMapperClassicImpl.java | {
"start": 3168,
"end": 3251
} | enum ____: " + roofType );
}
return externalRoofType;
}
}
| constant |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/function/FailableTest.java | {
"start": 111759,
"end": 112287
} | interface ____ properly defined to throw any exception using the top level generic types
* Object and Throwable.
*/
@Test
void testThrows_FailableToIntFunction_Object_Throwable() {
assertThrows(IOException.class, () -> new FailableToIntFunction<Object, Throwable>() {
@Override
public int applyAsInt(final Object t) throws Throwable {
throw new IOException("test");
}
}.applyAsInt(new Object()));
}
/**
* Tests that our failable | is |
java | quarkusio__quarkus | extensions/reactive-mysql-client/runtime/src/main/java/io/quarkus/reactive/mysql/client/runtime/MySQLPoolRecorder.java | {
"start": 2384,
"end": 15789
} | class ____ {
private static final boolean SUPPORTS_CACHE_PREPARED_STATEMENTS = true;
private static final TypeLiteral<Instance<MySQLPoolCreator>> POOL_CREATOR_TYPE_LITERAL = new TypeLiteral<>() {
};
private final RuntimeValue<DataSourcesRuntimeConfig> runtimeConfig;
private final RuntimeValue<DataSourcesReactiveRuntimeConfig> reactiveRuntimeConfig;
private final RuntimeValue<DataSourcesReactiveMySQLConfig> reactiveMySQLRuntimeConfig;
public MySQLPoolRecorder(
final RuntimeValue<DataSourcesRuntimeConfig> runtimeConfig,
final RuntimeValue<DataSourcesReactiveRuntimeConfig> reactiveRuntimeConfig,
final RuntimeValue<DataSourcesReactiveMySQLConfig> reactiveMySQLRuntimeConfig) {
this.runtimeConfig = runtimeConfig;
this.reactiveRuntimeConfig = reactiveRuntimeConfig;
this.reactiveMySQLRuntimeConfig = reactiveMySQLRuntimeConfig;
}
public Supplier<ActiveResult> poolCheckActiveSupplier(String dataSourceName) {
return new Supplier<>() {
@Override
public ActiveResult get() {
Optional<Boolean> active = runtimeConfig.getValue().dataSources().get(dataSourceName).active();
if (active.isPresent() && !active.get()) {
return ActiveResult.inactive(DataSourceUtil.dataSourceInactiveReasonDeactivated(dataSourceName));
}
if (reactiveRuntimeConfig.getValue().dataSources().get(dataSourceName).reactive().url().isEmpty()) {
return ActiveResult.inactive(DataSourceUtil.dataSourceInactiveReasonUrlMissing(dataSourceName,
"reactive.url"));
}
return ActiveResult.active();
}
};
}
public Function<SyntheticCreationalContext<MySQLPool>, MySQLPool> configureMySQLPool(RuntimeValue<Vertx> vertx,
Supplier<Integer> eventLoopCount, String dataSourceName, ShutdownContext shutdown) {
return new Function<>() {
@Override
public MySQLPool apply(SyntheticCreationalContext<MySQLPool> context) {
MySQLPool pool = initialize((VertxInternal) vertx.getValue(),
eventLoopCount.get(),
dataSourceName,
runtimeConfig.getValue().dataSources().get(dataSourceName),
reactiveRuntimeConfig.getValue().dataSources().get(dataSourceName).reactive(),
reactiveMySQLRuntimeConfig.getValue().dataSources().get(dataSourceName).reactive().mysql(),
context);
shutdown.addShutdownTask(pool::close);
return pool;
}
};
}
public Function<SyntheticCreationalContext<io.vertx.mutiny.mysqlclient.MySQLPool>, io.vertx.mutiny.mysqlclient.MySQLPool> mutinyMySQLPool(
String dataSourceName) {
return new Function<>() {
@Override
@SuppressWarnings("unchecked")
public io.vertx.mutiny.mysqlclient.MySQLPool apply(SyntheticCreationalContext context) {
return io.vertx.mutiny.mysqlclient.MySQLPool.newInstance(
(MySQLPool) context.getInjectedReference(MySQLPool.class, qualifier(dataSourceName)));
}
};
}
private MySQLPool initialize(VertxInternal vertx,
Integer eventLoopCount,
String dataSourceName,
DataSourceRuntimeConfig dataSourceRuntimeConfig,
DataSourceReactiveRuntimeConfig dataSourceReactiveRuntimeConfig,
DataSourceReactiveMySQLConfig dataSourceReactiveMySQLConfig,
SyntheticCreationalContext<MySQLPool> context) {
PoolOptions poolOptions = toPoolOptions(eventLoopCount, dataSourceReactiveRuntimeConfig,
dataSourceReactiveMySQLConfig);
List<MySQLConnectOptions> mySQLConnectOptions = toMySQLConnectOptions(dataSourceName, dataSourceRuntimeConfig,
dataSourceReactiveRuntimeConfig, dataSourceReactiveMySQLConfig);
Supplier<Future<MySQLConnectOptions>> databasesSupplier = toDatabasesSupplier(mySQLConnectOptions,
dataSourceRuntimeConfig);
return createPool(vertx, poolOptions, mySQLConnectOptions, dataSourceName, databasesSupplier, context);
}
private Supplier<Future<MySQLConnectOptions>> toDatabasesSupplier(List<MySQLConnectOptions> mySQLConnectOptions,
DataSourceRuntimeConfig dataSourceRuntimeConfig) {
Supplier<Future<MySQLConnectOptions>> supplier;
if (dataSourceRuntimeConfig.credentialsProvider().isPresent()) {
String beanName = dataSourceRuntimeConfig.credentialsProviderName().orElse(null);
CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(beanName);
String name = dataSourceRuntimeConfig.credentialsProvider().get();
supplier = new ConnectOptionsSupplier<>(credentialsProvider, name, mySQLConnectOptions,
MySQLConnectOptions::new);
} else {
supplier = Utils.roundRobinSupplier(mySQLConnectOptions);
}
return supplier;
}
private PoolOptions toPoolOptions(Integer eventLoopCount,
DataSourceReactiveRuntimeConfig dataSourceReactiveRuntimeConfig,
DataSourceReactiveMySQLConfig dataSourceReactiveMySQLConfig) {
PoolOptions poolOptions;
poolOptions = new PoolOptions();
poolOptions.setMaxSize(dataSourceReactiveRuntimeConfig.maxSize());
if (dataSourceReactiveRuntimeConfig.idleTimeout().isPresent()) {
var idleTimeout = unitised(dataSourceReactiveRuntimeConfig.idleTimeout().get());
poolOptions.setIdleTimeout(idleTimeout.value).setIdleTimeoutUnit(idleTimeout.unit);
}
if (dataSourceReactiveRuntimeConfig.maxLifetime().isPresent()) {
var maxLifetime = unitised(dataSourceReactiveRuntimeConfig.maxLifetime().get());
poolOptions.setMaxLifetime(maxLifetime.value).setMaxLifetimeUnit(maxLifetime.unit);
}
if (dataSourceReactiveRuntimeConfig.shared()) {
poolOptions.setShared(true);
if (dataSourceReactiveRuntimeConfig.name().isPresent()) {
poolOptions.setName(dataSourceReactiveRuntimeConfig.name().get());
}
}
if (dataSourceReactiveRuntimeConfig.eventLoopSize().isPresent()) {
poolOptions.setEventLoopSize(Math.max(0, dataSourceReactiveRuntimeConfig.eventLoopSize().getAsInt()));
} else if (eventLoopCount != null) {
poolOptions.setEventLoopSize(Math.max(0, eventLoopCount));
}
if (dataSourceReactiveMySQLConfig.connectionTimeout().isPresent()) {
poolOptions.setConnectionTimeout(dataSourceReactiveMySQLConfig.connectionTimeout().getAsInt());
poolOptions.setConnectionTimeoutUnit(TimeUnit.SECONDS);
}
return poolOptions;
}
private List<MySQLConnectOptions> toMySQLConnectOptions(String dataSourceName,
DataSourceRuntimeConfig dataSourceRuntimeConfig,
DataSourceReactiveRuntimeConfig dataSourceReactiveRuntimeConfig,
DataSourceReactiveMySQLConfig dataSourceReactiveMySQLConfig) {
List<MySQLConnectOptions> mysqlConnectOptionsList = new ArrayList<>();
if (dataSourceReactiveRuntimeConfig.url().isPresent()) {
List<String> urls = dataSourceReactiveRuntimeConfig.url().get();
urls.forEach(url -> {
// clean up the URL to make migrations easier
if (url.startsWith("vertx-reactive:mysql://")) {
url = url.substring("vertx-reactive:".length());
}
mysqlConnectOptionsList.add(MySQLConnectOptions.fromUri(url));
});
} else {
mysqlConnectOptionsList.add(new MySQLConnectOptions());
}
mysqlConnectOptionsList.forEach(mysqlConnectOptions -> {
dataSourceRuntimeConfig.username().ifPresent(mysqlConnectOptions::setUser);
dataSourceRuntimeConfig.password().ifPresent(mysqlConnectOptions::setPassword);
// credentials provider
if (dataSourceRuntimeConfig.credentialsProvider().isPresent()) {
String beanName = dataSourceRuntimeConfig.credentialsProviderName().orElse(null);
CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(beanName);
String name = dataSourceRuntimeConfig.credentialsProvider().get();
Map<String, String> credentials = credentialsProvider.getCredentialsAsync(name).await().indefinitely();
String user = credentials.get(USER_PROPERTY_NAME);
String password = credentials.get(PASSWORD_PROPERTY_NAME);
if (user != null) {
mysqlConnectOptions.setUser(user);
}
if (password != null) {
mysqlConnectOptions.setPassword(password);
}
}
mysqlConnectOptions
.setCachePreparedStatements(dataSourceReactiveRuntimeConfig.cachePreparedStatements()
.orElse(SUPPORTS_CACHE_PREPARED_STATEMENTS));
dataSourceReactiveMySQLConfig.charset().ifPresent(mysqlConnectOptions::setCharset);
dataSourceReactiveMySQLConfig.collation().ifPresent(mysqlConnectOptions::setCollation);
if (dataSourceReactiveMySQLConfig.pipeliningLimit().isPresent()) {
mysqlConnectOptions.setPipeliningLimit(dataSourceReactiveMySQLConfig.pipeliningLimit().getAsInt());
}
dataSourceReactiveMySQLConfig.useAffectedRows().ifPresent(mysqlConnectOptions::setUseAffectedRows);
if (dataSourceReactiveMySQLConfig.sslMode().isPresent()) {
final SslMode sslMode = dataSourceReactiveMySQLConfig.sslMode().get();
mysqlConnectOptions.setSslMode(sslMode);
// If sslMode is verify-identity, we also need a hostname verification algorithm
var algo = dataSourceReactiveRuntimeConfig.hostnameVerificationAlgorithm();
if ("NONE".equalsIgnoreCase(algo) && sslMode == SslMode.VERIFY_IDENTITY) {
throw new IllegalArgumentException(
"quarkus.datasource.reactive.hostname-verification-algorithm must be specified under verify-identity sslmode");
}
}
mysqlConnectOptions.setTrustAll(dataSourceReactiveRuntimeConfig.trustAll());
configurePemTrustOptions(mysqlConnectOptions, dataSourceReactiveRuntimeConfig.trustCertificatePem());
configureJksTrustOptions(mysqlConnectOptions, dataSourceReactiveRuntimeConfig.trustCertificateJks());
configurePfxTrustOptions(mysqlConnectOptions, dataSourceReactiveRuntimeConfig.trustCertificatePfx());
configurePemKeyCertOptions(mysqlConnectOptions, dataSourceReactiveRuntimeConfig.keyCertificatePem());
configureJksKeyCertOptions(mysqlConnectOptions, dataSourceReactiveRuntimeConfig.keyCertificateJks());
configurePfxKeyCertOptions(mysqlConnectOptions, dataSourceReactiveRuntimeConfig.keyCertificatePfx());
mysqlConnectOptions.setReconnectAttempts(dataSourceReactiveRuntimeConfig.reconnectAttempts());
mysqlConnectOptions.setReconnectInterval(dataSourceReactiveRuntimeConfig.reconnectInterval().toMillis());
var algo = dataSourceReactiveRuntimeConfig.hostnameVerificationAlgorithm();
if ("NONE".equalsIgnoreCase(algo)) {
mysqlConnectOptions.setHostnameVerificationAlgorithm("");
} else {
mysqlConnectOptions.setHostnameVerificationAlgorithm(algo);
}
dataSourceReactiveMySQLConfig.authenticationPlugin().ifPresent(mysqlConnectOptions::setAuthenticationPlugin);
dataSourceReactiveRuntimeConfig.additionalProperties().forEach(mysqlConnectOptions::addProperty);
// Use the convention defined by Quarkus Micrometer Vert.x metrics to create metrics prefixed with mysql.
// and the client_name as tag.
// See io.quarkus.micrometer.runtime.binder.vertx.VertxMeterBinderAdapter.extractPrefix and
// io.quarkus.micrometer.runtime.binder.vertx.VertxMeterBinderAdapter.extractClientName
mysqlConnectOptions.setMetricsName("mysql|" + dataSourceName);
});
return mysqlConnectOptionsList;
}
private MySQLPool createPool(Vertx vertx, PoolOptions poolOptions, List<MySQLConnectOptions> mySQLConnectOptionsList,
String dataSourceName, Supplier<Future<MySQLConnectOptions>> databases,
SyntheticCreationalContext<MySQLPool> context) {
Instance<MySQLPoolCreator> instance = context.getInjectedReference(POOL_CREATOR_TYPE_LITERAL,
qualifier(dataSourceName));
if (instance.isResolvable()) {
MySQLPoolCreator.Input input = new DefaultInput(vertx, poolOptions, mySQLConnectOptionsList);
return (MySQLPool) instance.get().create(input);
}
return (MySQLPool) MySQLDriver.INSTANCE.createPool(vertx, databases, poolOptions);
}
private static | MySQLPoolRecorder |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/injectionstrategy/jakarta/setter/JakartaSetterMapperTest.java | {
"start": 1195,
"end": 2192
} | class ____ {
@RegisterExtension
final GeneratedSource generatedSource = new GeneratedSource();
@ProcessorTest
public void shouldHaveSetterInjection() {
String method = "@Inject" + lineSeparator() +
" public void setGenderJakartaSetterMapper(GenderJakartaSetterMapper genderJakartaSetterMapper) {" +
lineSeparator() + " this.genderJakartaSetterMapper = genderJakartaSetterMapper;" +
lineSeparator() + " }";
generatedSource.forMapper( CustomerJakartaSetterMapper.class )
.content()
.contains( "import jakarta.inject.Inject;" )
.contains( "import jakarta.inject.Named;" )
.contains( "import jakarta.inject.Singleton;" )
.contains( "private GenderJakartaSetterMapper genderJakartaSetterMapper;" )
.doesNotContain( "@Inject" + lineSeparator() + " private GenderJakartaSetterMapper" )
.contains( method );
}
}
| JakartaSetterMapperTest |
java | apache__flink | flink-core/src/main/java/org/apache/flink/core/fs/ICloseableRegistry.java | {
"start": 1348,
"end": 3416
} | interface ____ extends Closeable {
/**
* Registers a {@link Closeable} with the registry. In case the registry is already closed, this
* method throws an {@link IllegalStateException} and closes the passed {@link Closeable}.
*
* @param closeable Closeable to register.
* @throws IOException exception when the registry was closed before.
*/
void registerCloseable(Closeable closeable) throws IOException;
/**
* Same as {@link #registerCloseable(Closeable)} but allows to {@link
* #unregisterCloseable(Closeable) unregister} the passed closeable by closing the returned
* closeable.
*
* @param closeable Closeable to register.
* @return another Closeable that unregisters the passed closeable.
* @throws IOException exception when the registry was closed before.
*/
default Closeable registerCloseableTemporarily(Closeable closeable) throws IOException {
registerCloseable(closeable);
return () -> unregisterCloseable(closeable);
}
/**
* Removes a {@link Closeable} from the registry.
*
* @param closeable instance to remove from the registry.
* @return true if the closeable was previously registered and became unregistered through this
* call.
*/
boolean unregisterCloseable(Closeable closeable);
/** No-op implementation of {@link org.apache.flink.core.fs.ICloseableRegistry}. */
ICloseableRegistry NO_OP =
new ICloseableRegistry() {
@Override
public void registerCloseable(Closeable closeable) {}
@Override
public boolean unregisterCloseable(Closeable closeable) {
return false;
}
@Override
public boolean isClosed() {
return false;
}
@Override
public void close() {}
};
/**
* @return true if this registry was closed.
*/
boolean isClosed();
}
| ICloseableRegistry |
java | apache__logging-log4j2 | log4j-jakarta-web/src/test/java/org/apache/logging/log4j/web/TestAsyncServlet.java | {
"start": 1200,
"end": 2191
} | class ____ extends HttpServlet {
private static final long serialVersionUID = 1L;
@Override
protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) {
final AsyncContext asyncContext = req.startAsync();
asyncContext.start(WebLoggerContextUtils.wrapExecutionContext(this.getServletContext(), () -> {
final Logger logger = LogManager.getLogger(TestAsyncServlet.class);
logger.info("Hello, servlet!");
}));
}
@Override
protected void doPost(final HttpServletRequest req, final HttpServletResponse resp) {
final AsyncContext asyncContext = req.startAsync();
asyncContext.start(() -> {
final Log4jWebSupport webSupport =
WebLoggerContextUtils.getWebLifeCycle(TestAsyncServlet.this.getServletContext());
webSupport.setLoggerContext();
// do stuff
webSupport.clearLoggerContext();
});
}
}
| TestAsyncServlet |
java | jhy__jsoup | src/main/java/org/jsoup/Jsoup.java | {
"start": 586,
"end": 18530
} | class ____ {
private Jsoup() {}
/**
Parse HTML into a Document. The parser will make a sensible, balanced document tree out of any HTML.
@param html HTML to parse
@param baseUri The URL where the HTML was retrieved from. Used to resolve relative URLs to absolute URLs, that occur
before the HTML declares a {@code <base href>} tag.
@return sane HTML
*/
public static Document parse(String html, String baseUri) {
return Parser.parse(html, baseUri);
}
/**
Parse HTML into a Document, using the provided Parser. You can provide an alternate parser, such as a simple XML
(non-HTML) parser.
@param html HTML to parse
@param baseUri The URL where the HTML was retrieved from. Used to resolve relative URLs to absolute URLs, that occur
before the HTML declares a {@code <base href>} tag.
@param parser alternate {@link Parser#xmlParser() parser} to use.
@return sane HTML
*/
public static Document parse(String html, String baseUri, Parser parser) {
return parser.parseInput(html, baseUri);
}
/**
Parse HTML into a Document, using the provided Parser. You can provide an alternate parser, such as a simple XML
(non-HTML) parser. As no base URI is specified, absolute URL resolution, if required, relies on the HTML including
a {@code <base href>} tag.
@param html HTML to parse
before the HTML declares a {@code <base href>} tag.
@param parser alternate {@link Parser#xmlParser() parser} to use.
@return sane HTML
*/
public static Document parse(String html, Parser parser) {
return parser.parseInput(html, "");
}
/**
Parse HTML into a Document. As no base URI is specified, absolute URL resolution, if required, relies on the HTML
including a {@code <base href>} tag.
@param html HTML to parse
@return sane HTML
@see #parse(String, String)
*/
public static Document parse(String html) {
return Parser.parse(html, "");
}
/**
* Creates a new {@link Connection} (session), with the defined request URL. Use to fetch and parse a HTML page.
* <p>
* Use examples:
* <ul>
* <li><code>Document doc = Jsoup.connect("http://example.com").userAgent("Mozilla").data("name", "jsoup").get();</code></li>
* <li><code>Document doc = Jsoup.connect("http://example.com").cookie("auth", "token").post();</code></li>
* </ul>
* @param url URL to connect to. The protocol must be {@code http} or {@code https}.
* @return the connection. You can add data, cookies, and headers; set the user-agent, referrer, method; and then execute.
* @see #newSession()
* @see Connection#newRequest()
*/
public static Connection connect(String url) {
return HttpConnection.connect(url);
}
/**
Creates a new {@link Connection} to use as a session. Connection settings (user-agent, timeouts, URL, etc), and
cookies will be maintained for the session. Use examples:
<pre><code>
Connection session = Jsoup.newSession()
.timeout(20 * 1000)
.userAgent("FooBar 2000");
Document doc1 = session.newRequest()
.url("https://jsoup.org/").data("ref", "example")
.get();
Document doc2 = session.newRequest()
.url("https://en.wikipedia.org/wiki/Main_Page")
.get();
Connection con3 = session.newRequest();
</code></pre>
<p>For multi-threaded requests, it is safe to use this session between threads, but take care to call {@link
Connection#newRequest()} per request and not share that instance between threads when executing or parsing.</p>
@return a connection
@since 1.14.1
*/
public static Connection newSession() {
return new HttpConnection();
}
/**
Parse the contents of a file as HTML.
@param file file to load HTML from. Supports gzipped files (ending in .z or .gz).
@param charsetName (optional) character set of file contents. Set to {@code null} to determine from {@code http-equiv} meta tag, if
present, or fall back to {@code UTF-8} (which is often safe to do).
@param baseUri The URL where the HTML was retrieved from, to resolve relative links against.
@return sane HTML
@throws IOException if the file could not be found, or read, or if the charsetName is invalid.
*/
public static Document parse(File file, @Nullable String charsetName, String baseUri) throws IOException {
return DataUtil.load(file, charsetName, baseUri);
}
/**
Parse the contents of a file as HTML. The location of the file is used as the base URI to qualify relative URLs.
@param file file to load HTML from. Supports gzipped files (ending in .z or .gz).
@param charsetName (optional) character set of file contents. Set to {@code null} to determine from {@code http-equiv} meta tag, if
present, or fall back to {@code UTF-8} (which is often safe to do).
@return sane HTML
@throws IOException if the file could not be found, or read, or if the charsetName is invalid.
@see #parse(File, String, String) parse(file, charset, baseUri)
*/
public static Document parse(File file, @Nullable String charsetName) throws IOException {
return DataUtil.load(file, charsetName, file.getAbsolutePath());
}
/**
Parse the contents of a file as HTML. The location of the file is used as the base URI to qualify relative URLs.
The charset used to read the file will be determined by the byte-order-mark (BOM), or a {@code <meta charset>} tag,
or if neither is present, will be {@code UTF-8}.
<p>This is the equivalent of calling {@link #parse(File, String) parse(file, null)}</p>
@param file the file to load HTML from. Supports gzipped files (ending in .z or .gz).
@return sane HTML
@throws IOException if the file could not be found or read.
@see #parse(File, String, String) parse(file, charset, baseUri)
@since 1.15.1
*/
public static Document parse(File file) throws IOException {
return DataUtil.load(file, null, file.getAbsolutePath());
}
/**
Parse the contents of a file as HTML.
@param file file to load HTML from. Supports gzipped files (ending in .z or .gz).
@param charsetName (optional) character set of file contents. Set to {@code null} to determine from {@code http-equiv} meta tag, if
present, or fall back to {@code UTF-8} (which is often safe to do).
@param baseUri The URL where the HTML was retrieved from, to resolve relative links against.
@param parser alternate {@link Parser#xmlParser() parser} to use.
@return sane HTML
@throws IOException if the file could not be found, or read, or if the charsetName is invalid.
@since 1.14.2
*/
public static Document parse(File file, @Nullable String charsetName, String baseUri, Parser parser) throws IOException {
return DataUtil.load(file, charsetName, baseUri, parser);
}
/**
Parse the contents of a file as HTML.
@param path file to load HTML from. Supports gzipped files (ending in .z or .gz).
@param charsetName (optional) character set of file contents. Set to {@code null} to determine from {@code http-equiv} meta tag, if
present, or fall back to {@code UTF-8} (which is often safe to do).
@param baseUri The URL where the HTML was retrieved from, to resolve relative links against.
@return sane HTML
@throws IOException if the file could not be found, or read, or if the charsetName is invalid.
@since 1.18.1
*/
public static Document parse(Path path, @Nullable String charsetName, String baseUri) throws IOException {
return DataUtil.load(path, charsetName, baseUri);
}
/**
Parse the contents of a file as HTML. The location of the file is used as the base URI to qualify relative URLs.
@param path file to load HTML from. Supports gzipped files (ending in .z or .gz).
@param charsetName (optional) character set of file contents. Set to {@code null} to determine from {@code http-equiv} meta tag, if
present, or fall back to {@code UTF-8} (which is often safe to do).
@return sane HTML
@throws IOException if the file could not be found, or read, or if the charsetName is invalid.
@see #parse(File, String, String) parse(file, charset, baseUri)
@since 1.18.1
*/
public static Document parse(Path path, @Nullable String charsetName) throws IOException {
return DataUtil.load(path, charsetName, path.toAbsolutePath().toString());
}
/**
Parse the contents of a file as HTML. The location of the file is used as the base URI to qualify relative URLs.
The charset used to read the file will be determined by the byte-order-mark (BOM), or a {@code <meta charset>} tag,
or if neither is present, will be {@code UTF-8}.
<p>This is the equivalent of calling {@link #parse(File, String) parse(file, null)}</p>
@param path the file to load HTML from. Supports gzipped files (ending in .z or .gz).
@return sane HTML
@throws IOException if the file could not be found or read.
@see #parse(Path, String, String) parse(file, charset, baseUri)
@since 1.18.1
*/
public static Document parse(Path path) throws IOException {
return DataUtil.load(path, null, path.toAbsolutePath().toString());
}
/**
Parse the contents of a file as HTML.
@param path file to load HTML from. Supports gzipped files (ending in .z or .gz).
@param charsetName (optional) character set of file contents. Set to {@code null} to determine from {@code http-equiv} meta tag, if
present, or fall back to {@code UTF-8} (which is often safe to do).
@param baseUri The URL where the HTML was retrieved from, to resolve relative links against.
@param parser alternate {@link Parser#xmlParser() parser} to use.
@return sane HTML
@throws IOException if the file could not be found, or read, or if the charsetName is invalid.
@since 1.18.1
*/
public static Document parse(Path path, @Nullable String charsetName, String baseUri, Parser parser) throws IOException {
return DataUtil.load(path, charsetName, baseUri, parser);
}
/**
Read an input stream, and parse it to a Document.
@param in input stream to read. The stream will be closed after reading.
@param charsetName (optional) character set of file contents. Set to {@code null} to determine from {@code http-equiv} meta tag, if
present, or fall back to {@code UTF-8} (which is often safe to do).
@param baseUri The URL where the HTML was retrieved from, to resolve relative links against.
@return sane HTML
@throws IOException if the stream could not be read, or if the charsetName is invalid.
*/
public static Document parse(InputStream in, @Nullable String charsetName, String baseUri) throws IOException {
return DataUtil.load(in, charsetName, baseUri);
}
/**
Read an input stream, and parse it to a Document. You can provide an alternate parser, such as a simple XML
(non-HTML) parser.
@param in input stream to read. Make sure to close it after parsing.
@param charsetName (optional) character set of file contents. Set to {@code null} to determine from {@code http-equiv} meta tag, if
present, or fall back to {@code UTF-8} (which is often safe to do).
@param baseUri The URL where the HTML was retrieved from, to resolve relative links against.
@param parser alternate {@link Parser#xmlParser() parser} to use.
@return sane HTML
@throws IOException if the stream could not be read, or if the charsetName is invalid.
*/
public static Document parse(InputStream in, @Nullable String charsetName, String baseUri, Parser parser) throws IOException {
return DataUtil.load(in, charsetName, baseUri, parser);
}
/**
Parse a fragment of HTML, with the assumption that it forms the {@code body} of the HTML.
@param bodyHtml body HTML fragment
@param baseUri URL to resolve relative URLs against.
@return sane HTML document
@see Document#body()
*/
public static Document parseBodyFragment(String bodyHtml, String baseUri) {
return Parser.parseBodyFragment(bodyHtml, baseUri);
}
/**
Parse a fragment of HTML, with the assumption that it forms the {@code body} of the HTML.
@param bodyHtml body HTML fragment
@return sane HTML document
@see Document#body()
*/
public static Document parseBodyFragment(String bodyHtml) {
return Parser.parseBodyFragment(bodyHtml, "");
}
/**
Fetch a URL, and parse it as HTML. Provided for compatibility; in most cases use {@link #connect(String)} instead.
<p>
The encoding character set is determined by the content-type header or http-equiv meta tag, or falls back to {@code UTF-8}.
@param url URL to fetch (with a GET). The protocol must be {@code http} or {@code https}.
@param timeoutMillis Connection and read timeout, in milliseconds. If exceeded, IOException is thrown.
@return The parsed HTML.
@throws java.net.MalformedURLException if the request URL is not a HTTP or HTTPS URL, or is otherwise malformed
@throws HttpStatusException if the response is not OK and HTTP response errors are not ignored
@throws UnsupportedMimeTypeException if the response mime type is not supported and those errors are not ignored
@throws java.net.SocketTimeoutException if the connection times out
@throws IOException if a connection or read error occurs
@see #connect(String)
*/
public static Document parse(URL url, int timeoutMillis) throws IOException {
Connection con = HttpConnection.connect(url);
con.timeout(timeoutMillis);
return con.get();
}
/**
Get safe HTML from untrusted input HTML, by parsing input HTML and filtering it through an allow-list of safe
tags and attributes.
@param bodyHtml input untrusted HTML (body fragment)
@param baseUri URL to resolve relative URLs against
@param safelist list of permitted HTML elements
@return safe HTML (body fragment)
@see Cleaner#clean(Document)
*/
public static String clean(String bodyHtml, String baseUri, Safelist safelist) {
if (baseUri.isEmpty() && safelist.preserveRelativeLinks()) {
baseUri = DummyUri; // set a placeholder URI to allow relative links to pass abs resolution for protocol tests; won't leak to output
}
Document dirty = parseBodyFragment(bodyHtml, baseUri);
Cleaner cleaner = new Cleaner(safelist);
Document clean = cleaner.clean(dirty);
return clean.body().html();
}
/**
Get safe HTML from untrusted input HTML, by parsing input HTML and filtering it through a safe-list of permitted
tags and attributes.
<p>Note that as this method does not take a base href URL to resolve attributes with relative URLs against, those
URLs will be removed, unless the input HTML contains a {@code <base href> tag}. If you wish to preserve those, use
the {@link Jsoup#clean(String html, String baseHref, Safelist)} method instead, and enable
{@link Safelist#preserveRelativeLinks(boolean)}.</p>
<p>Note that the output of this method is still <b>HTML</b> even when using the TextNode only
{@link Safelist#none()}, and so any HTML entities in the output will be appropriately escaped.
If you want plain text, not HTML, you should use a text method such as {@link Element#text()} instead, after
cleaning the document.</p>
<p>Example:</p>
<pre>{@code
String sourceBodyHtml = "<p>5 is < 6.</p>";
String html = Jsoup.clean(sourceBodyHtml, Safelist.none());
Cleaner cleaner = new Cleaner(Safelist.none());
String text = cleaner.clean(Jsoup.parse(sourceBodyHtml)).text();
// html is: 5 is < 6.
// text is: 5 is < 6.
}</pre>
@param bodyHtml input untrusted HTML (body fragment)
@param safelist list of permitted HTML elements
@return safe HTML (body fragment)
@see Cleaner#clean(Document)
*/
public static String clean(String bodyHtml, Safelist safelist) {
return clean(bodyHtml, "", safelist);
}
/**
* Get safe HTML from untrusted input HTML, by parsing input HTML and filtering it through a safe-list of
* permitted tags and attributes.
* <p>The HTML is treated as a body fragment; it's expected the cleaned HTML will be used within the body of an
* existing document. If you want to clean full documents, use {@link Cleaner#clean(Document)} instead, and add
* structural tags (<code>html, head, body</code> etc) to the safelist.
*
* @param bodyHtml input untrusted HTML (body fragment)
* @param baseUri URL to resolve relative URLs against
* @param safelist list of permitted HTML elements
* @param outputSettings document output settings; use to control pretty-printing and entity escape modes
* @return safe HTML (body fragment)
* @see Cleaner#clean(Document)
*/
public static String clean(String bodyHtml, String baseUri, Safelist safelist, Document.OutputSettings outputSettings) {
Document dirty = parseBodyFragment(bodyHtml, baseUri);
Cleaner cleaner = new Cleaner(safelist);
Document clean = cleaner.clean(dirty);
clean.outputSettings(outputSettings);
return clean.body().html();
}
/**
Test if the input body HTML has only tags and attributes allowed by the Safelist. Useful for form validation.
<p>
This method is intended to be used in a user | Jsoup |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/common/util/concurrent/AbstractRunnableTests.java | {
"start": 804,
"end": 4739
} | class ____ extends ESTestCase {
public void testRunSuccess() throws Exception {
Callable<?> runCallable = mock(Callable.class);
AbstractRunnable runnable = new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
fail(e.toString());
}
@Override
protected void doRun() throws Exception {
runCallable.call();
}
};
runnable.run();
verify(runCallable).call();
}
public void testRunFailure() throws Exception {
RuntimeException exception = new RuntimeException();
AbstractRunnable runnable = new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
assertSame(exception, e);
}
@Override
protected void doRun() throws Exception {
throw exception;
}
};
runnable.run();
}
public void testOnAfterSuccess() throws Exception {
Callable<?> runCallable = mock(Callable.class);
Callable<?> afterCallable = mock(Callable.class);
AbstractRunnable runnable = new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
fail(e.toString());
}
@Override
protected void doRun() throws Exception {
runCallable.call();
}
@Override
public void onAfter() {
try {
afterCallable.call();
} catch (Exception e) {
fail(e.toString());
}
}
};
runnable.run();
InOrder inOrder = inOrder(runCallable, afterCallable);
inOrder.verify(runCallable).call();
inOrder.verify(afterCallable).call();
}
public void testOnAfterFailure() throws Exception {
RuntimeException exception = new RuntimeException();
Callable<?> afterCallable = mock(Callable.class);
AbstractRunnable runnable = new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
assertSame(exception, e);
}
@Override
protected void doRun() throws Exception {
throw exception;
}
@Override
public void onAfter() {
try {
afterCallable.call();
} catch (Exception e) {
fail(e.toString());
}
}
};
runnable.run();
verify(afterCallable).call();
}
public void testOnRejection() throws Exception {
RuntimeException exception = new RuntimeException();
Callable<?> failureCallable = mock(Callable.class);
AbstractRunnable runnable = new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
assertSame(exception, e);
try {
failureCallable.call();
} catch (Exception inner) {
inner.addSuppressed(e);
fail(inner.toString());
}
}
@Override
protected void doRun() throws Exception {
fail("Not tested");
}
};
runnable.onRejection(exception);
}
public void testIsForceExecutuonDefaultsFalse() {
AbstractRunnable runnable = new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
fail(e.toString());
}
@Override
protected void doRun() throws Exception {
fail("Not tested");
}
};
assertFalse(runnable.isForceExecution());
}
}
| AbstractRunnableTests |
java | mapstruct__mapstruct | integrationtest/src/test/resources/sealedSubclassTest/src/main/java/org/mapstruct/itest/sealedsubclass/Bike.java | {
"start": 206,
"end": 446
} | class ____ extends Vehicle {
private int numberOfGears;
public int getNumberOfGears() {
return numberOfGears;
}
public void setNumberOfGears(int numberOfGears) {
this.numberOfGears = numberOfGears;
}
}
| Bike |
java | processing__processing4 | app/src/processing/app/ui/Editor.java | {
"start": 43854,
"end": 87180
} | interface ____ the get/set functions
* found in this class. This will maintain compatibility with future releases,
* which will not use JEditTextArea.
*/
public JEditTextArea getTextArea() {
return textarea;
}
public PdeTextArea getPdeTextArea() {
return (textarea instanceof PdeTextArea) ? (PdeTextArea) textarea : null;
}
/**
* Get the contents of the current buffer. Used by the Sketch class.
*/
public String getText() {
return textarea.getText();
}
/**
* Get a range of text from the current buffer.
*/
public String getText(int start, int stop) {
return textarea.getText(start, stop - start);
}
/**
* Replace the entire contents of the front-most tab. Note that this does
* a compound edit, so internal callers may want to use textarea.setText()
* if this is part of a larger compound edit.
*/
public void setText(String what) {
startCompoundEdit();
textarea.setText(what);
stopCompoundEdit();
}
@SuppressWarnings("unused")
public void insertText(String what) {
startCompoundEdit();
int caret = getCaretOffset();
setSelection(caret, caret);
textarea.setSelectedText(what);
stopCompoundEdit();
}
public String getSelectedText() {
return textarea.getSelectedText();
}
@SuppressWarnings("unused")
public void setSelectedText(String what) {
textarea.setSelectedText(what);
}
public void setSelectedText(String what, boolean ever) {
textarea.setSelectedText(what, ever);
}
public void setSelection(int start, int stop) {
// make sure that a tool isn't asking for a bad location
start = PApplet.constrain(start, 0, textarea.getDocumentLength());
stop = PApplet.constrain(stop, 0, textarea.getDocumentLength());
textarea.select(start, stop);
}
/**
* Get the position (character offset) of the caret. With text selected,
* this will be the last character actually selected, no matter the direction
* of the selection. That is, if the user clicks and drags to select lines
* 7 up to 4, then the caret position will be somewhere on line four.
*/
public int getCaretOffset() {
return textarea.getCaretPosition();
}
/**
* True if some text is currently selected.
*/
public boolean isSelectionActive() {
return textarea.isSelectionActive();
}
/**
* Get the beginning point of the current selection.
*/
public int getSelectionStart() {
return textarea.getSelectionStart();
}
/**
* Get the end point of the current selection.
*/
public int getSelectionStop() {
return textarea.getSelectionStop();
}
/**
* Get text for a specified line.
*/
public String getLineText(int line) {
return textarea.getLineText(line);
}
/**
* Replace the text on a specified line.
*/
@SuppressWarnings("unused")
public void setLineText(int line, String what) {
startCompoundEdit();
textarea.select(getLineStartOffset(line), getLineStopOffset(line));
textarea.setSelectedText(what);
stopCompoundEdit();
}
/**
* Get character offset for the start of a given line of text.
*/
public int getLineStartOffset(int line) {
return textarea.getLineStartOffset(line);
}
/**
* Get character offset for end of a given line of text.
*/
public int getLineStopOffset(int line) {
return textarea.getLineStopOffset(line);
}
/**
* Get the number of lines in the currently displayed buffer.
*/
public int getLineCount() {
return textarea.getLineCount();
}
/**
* Use before a manipulating text to group editing operations together
* as a single undo. Use stopCompoundEdit() once finished.
*/
public void startCompoundEdit() {
// Call endTextEditHistory() before starting a new CompoundEdit,
// because there's a timer that's possibly set off for 3 seconds after
// which endTextEditHistory() is called, which means that things get
// messed up. Hence, manually call this method so that auto-format gets
// undone in one fell swoop if the user calls auto-formats within 3
// seconds of typing in the last character. Then start a new compound
// edit so that the auto-format can be undone in one go.
// https://github.com/processing/processing/issues/3003
endTextEditHistory(); // also calls stopCompoundEdit()
//stopCompoundEdit();
compoundEdit = new CompoundEdit();
caretUndoStack.push(textarea.getCaretPosition());
caretRedoStack.clear();
}
/**
* Use with startCompoundEdit() to group edit operations in a single undo.
*/
public void stopCompoundEdit() {
if (compoundEdit != null) {
compoundEdit.end();
undo.addEdit(compoundEdit);
undoAction.updateUndoState();
redoAction.updateRedoState();
compoundEdit = null;
}
}
public int getScrollPosition() {
return textarea.getVerticalScrollPosition();
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/**
* Switch between tabs, this swaps out the Document object
* that's currently being manipulated.
*/
public void setCode(SketchCode code) {
SyntaxDocument document = (SyntaxDocument) code.getDocument();
if (document == null) { // this document not yet inited
document = new SyntaxDocument() {
@Override
public void beginCompoundEdit() {
if (compoundEdit == null)
startCompoundEdit();
super.beginCompoundEdit();
}
@Override
public void endCompoundEdit() {
stopCompoundEdit();
super.endCompoundEdit();
}
};
code.setDocument(document);
// turn on syntax highlighting
document.setTokenMarker(mode.getTokenMarker(code));
// insert the program text into the document object
try {
document.insertString(0, code.getProgram(), null);
} catch (BadLocationException bl) {
bl.printStackTrace();
}
// set up this guy's own undo manager
// code.undo = new UndoManager();
document.addDocumentListener(new DocumentListener() {
public void removeUpdate(DocumentEvent e) {
if (isInserting && isDirectEdit() && !textarea.isOverwriteEnabled()) {
endTextEditHistory();
}
isInserting = false;
}
public void insertUpdate(DocumentEvent e) {
if (!isInserting && !textarea.isOverwriteEnabled() && isDirectEdit()) {
endTextEditHistory();
}
if (!textarea.isOverwriteEnabled()) {
isInserting = true;
}
}
public void changedUpdate(DocumentEvent e) {
endTextEditHistory();
}
});
// connect the undo listener to the editor
document.addUndoableEditListener(e -> {
// if an edit is in progress, reset the timer
if (endUndoEvent != null) {
endUndoEvent.cancel();
endUndoEvent = null;
startTimerEvent();
}
// if this edit is just getting started, create a compound edit
if (compoundEdit == null) {
startCompoundEdit();
startTimerEvent();
}
compoundEdit.addEdit(e.getEdit());
undoAction.updateUndoState();
redoAction.updateRedoState();
});
}
// update the document object that's in use
textarea.setDocument(document,
code.getSelectionStart(), code.getSelectionStop(),
code.getScrollPosition());
// textarea.requestFocus(); // get the caret blinking
textarea.requestFocusInWindow(); // required for caret blinking
// end edits in the previous tab
endTextEditHistory();
// update the UndoManager and caret positions to the selected tab
this.undo = code.getUndo();
caretUndoStack = code.getCaretUndoStack();
caretRedoStack = code.getCaretRedoStack();
undoAction.updateUndoState();
redoAction.updateRedoState();
}
/**
* @return true if the text is being edited from direct input from typing and
* not shortcuts that manipulate text
*/
boolean isDirectEdit() {
return endUndoEvent != null;
}
void startTimerEvent() {
endUndoEvent = new TimerTask() {
public void run() {
EventQueue.invokeLater(Editor.this::endTextEditHistory);
}
};
timer.schedule(endUndoEvent, 3000);
// let the gc eat the cancelled events
timer.purge();
}
void endTextEditHistory() {
if (endUndoEvent != null) {
endUndoEvent.cancel();
endUndoEvent = null;
}
stopCompoundEdit();
}
public void removeNotify() {
timer.cancel();
super.removeNotify();
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/**
* Implements Edit → Cut.
*/
public void handleCut() {
textarea.cut();
sketch.setModified(true);
}
/**
* Implements Edit → Copy.
*/
public void handleCopy() {
textarea.copy();
}
/**
* Implements Edit → Copy as HTML.
*/
public void handleCopyAsHTML() {
textarea.copyAsHTML();
statusNotice(Language.text("editor.status.copy_as_html"));
}
/**
* Implements Edit → Paste.
*/
public void handlePaste() {
textarea.paste();
sketch.setModified(true);
}
/**
* Implements Edit → Select All.
*/
public void handleSelectAll() {
textarea.selectAll();
}
public void handleAutoFormat() {
final String source = getText();
try {
final String formattedText = createFormatter().format(source);
// save current (rough) selection point
int selectionEnd = getSelectionStop();
// boolean wasVisible =
// textarea.getSelectionStopLine() >= textarea.getFirstLine() &&
// textarea.getSelectionStopLine() < textarea.getLastLine();
// make sure the caret would be past the end of the text
if (formattedText.length() < selectionEnd - 1) {
selectionEnd = formattedText.length() - 1;
}
if (formattedText.equals(source)) {
statusNotice(Language.text("editor.status.autoformat.no_changes"));
} else { // replace with new bootiful text
startCompoundEdit();
// selectionEnd hopefully at least in the neighborhood
int scrollPos = textarea.getVerticalScrollPosition();
textarea.setText(formattedText);
setSelection(selectionEnd, selectionEnd);
// Put the scrollbar position back, otherwise it jumps on each format.
// Since we're not doing a good job of maintaining position anyway,
// a more complicated workaround here is fairly pointless.
// https://github.com/processing/processing/issues/1571
if (scrollPos != textarea.getVerticalScrollPosition()) {
textarea.setVerticalScrollPosition(scrollPos);
}
stopCompoundEdit();
sketch.setModified(true);
statusNotice(Language.text("editor.status.autoformat.finished"));
}
} catch (final Exception e) {
statusError(e);
}
}
abstract public String getCommentPrefix();
/**
 * Toggle line comments on the selected lines (or the caret line).
 * If every non-blank selected line already starts with the Mode's comment
 * prefix, the prefix is removed from each; otherwise the prefix is added,
 * aligned to the shallowest indentation among the selected lines.
 * Wrapped in a compound edit so a single undo reverts the whole toggle.
 */
protected void handleCommentUncomment() {
// log("Entering handleCommentUncomment()");
startCompoundEdit();
String prefix = getCommentPrefix();
int prefixLen = prefix.length();
int startLine = textarea.getSelectionStartLine();
int stopLine = textarea.getSelectionStopLine();
int lastLineStart = textarea.getLineStartOffset(stopLine);
int selectionStop = textarea.getSelectionStop();
// If the selection ends at the beginning of the last line,
// then don't (un)comment that line.
if (selectionStop == lastLineStart) {
// Though if there's no selection, don't do that
if (textarea.isSelectionActive()) {
stopLine--;
}
}
// If the text is empty, ignore the user.
// Also ensure that all lines are commented (not just the first)
// when determining whether to comment or uncomment.
boolean commented = true;
for (int i = startLine; commented && (i <= stopLine); i++) {
String lineText = textarea.getLineText(i).trim();
if (lineText.length() == 0) {
continue; //ignore blank lines
}
commented = lineText.startsWith(prefix);
}
// log("Commented: " + commented);
// This is the min line start offset of the selection, which is added to
// all lines while adding a comment. Required when commenting
// lines which have uneven whitespaces in the beginning. Makes the
// commented lines look more uniform.
int lso = Math.abs(textarea.getLineStartNonWhiteSpaceOffset(startLine)
- textarea.getLineStartOffset(startLine));
if (!commented) {
// get min line start offset of all selected lines
for (int line = startLine+1; line <= stopLine; line++) {
String lineText = textarea.getLineText(line);
if (lineText.trim().length() == 0) {
continue; //ignore blank lines
}
int so = Math.abs(textarea.getLineStartNonWhiteSpaceOffset(line)
- textarea.getLineStartOffset(line));
lso = Math.min(lso, so);
}
}
// Second pass: actually insert or remove the prefix on each line.
for (int line = startLine; line <= stopLine; line++) {
int location = textarea.getLineStartNonWhiteSpaceOffset(line);
String lineText = textarea.getLineText(line);
if (lineText.trim().length() == 0)
continue; //ignore blank lines
if (commented) {
// remove a comment
textarea.select(location, location + prefixLen);
textarea.setSelectedText("");
} else {
// add a comment
location = textarea.getLineStartOffset(line) + lso;
textarea.select(location, location);
textarea.setSelectedText(prefix);
}
}
// Subtract one from the end, otherwise selects past the current line.
// (Which causes subsequent calls to keep expanding the selection)
textarea.select(textarea.getLineStartOffset(startLine),
textarea.getLineStopOffset(stopLine) - 1);
stopCompoundEdit();
sketch.setModified(true);
}
/** Indent the selected lines by one tab stop. */
public void handleIndent() {
handleIndentOutdent(true);
}
/** Outdent the selected lines by one tab stop. */
public void handleOutdent() {
handleIndentOutdent(false);
}
/**
 * Indent or outdent the selected lines by the configured tab size.
 * Outdenting only removes leading spaces when the line actually begins
 * with a full tab's worth of spaces, so unindented code is never eaten.
 * Wrapped in a compound edit so a single undo reverts the whole change.
 *
 * @param indent true to indent, false to outdent
 */
public void handleIndentOutdent(boolean indent) {
int tabSize = Preferences.getInteger("editor.tabs.size");
String tabString = Editor.EMPTY.substring(0, tabSize);
startCompoundEdit();
int startLine = textarea.getSelectionStartLine();
int stopLine = textarea.getSelectionStopLine();
// If the selection ends at the beginning of the last line,
// then don't (un)comment that line.
int lastLineStart = textarea.getLineStartOffset(stopLine);
int selectionStop = textarea.getSelectionStop();
if (selectionStop == lastLineStart) {
// Though if there's no selection, don't do that
if (textarea.isSelectionActive()) {
stopLine--;
}
}
for (int line = startLine; line <= stopLine; line++) {
int location = textarea.getLineStartOffset(line);
if (indent) {
textarea.select(location, location);
textarea.setSelectedText(tabString);
} else { // outdent
// Clamp to document length in case this is the final, short line.
int last = Math.min(location + tabSize, textarea.getDocumentLength());
textarea.select(location, last);
// Don't eat code if it's not indented
if (tabString.equals(textarea.getSelectedText())) {
textarea.setSelectedText("");
}
}
}
// Subtract one from the end, otherwise selects past the current line.
// (Which causes subsequent calls to keep expanding the selection)
textarea.select(textarea.getLineStartOffset(startLine),
textarea.getLineStopOffset(stopLine) - 1);
stopCompoundEdit();
sketch.setModified(true);
}
/**
 * Moves the selected lines up or down in the text editor.
 *
 * <p>If {@code moveUp} is true, the selected lines are moved up. If false, they move down.</p>
 * <p>This method ensures proper selection updates and handles edge cases like moving
 * the first or last line.</p>
 * <p>This operation is undo/redoable, allowing the user to revert the action using
 * {@code Ctrl/Cmd + Z} (Undo). Redo functionality is available through the
 * keybinding {@code Ctrl/Cmd + Z} on Windows/Linux and {@code Shift + Cmd + Z} on macOS.</p>
 * <p>NOTE(review): the redo binding above duplicates the undo binding for
 * Windows/Linux — likely a doc typo; confirm the actual keymap.</p>
 *
 * @param moveUp {@code true} to move the selection up, {@code false} to move it down.
 */
public void handleMoveLines(boolean moveUp) {
startCompoundEdit();
// Remember whether a selection was active so we can decide later
// whether to restore a selection or just reposition the caret.
boolean isSelected = false;
if (textarea.isSelectionActive())
isSelected = true;
int caretPos = textarea.getCaretPosition();
int currentLine = textarea.getCaretLine();
int lineStart = textarea.getLineStartOffset(currentLine);
int column = caretPos - lineStart;  // caret column, preserved across the move
int startLine = textarea.getSelectionStartLine();
int stopLine = textarea.getSelectionStopLine();
// Adjust selection if the last line isn't fully selected
if (startLine != stopLine &&
textarea.getSelectionStop() == textarea.getLineStartOffset(stopLine)) {
stopLine--;
}
// The line that will swap places with the selected block.
int replacedLine = moveUp ? startLine - 1 : stopLine + 1;
if (replacedLine < 0 || replacedLine >= textarea.getLineCount()) {
// Nothing above/below to swap with; abandon the edit.
stopCompoundEdit();
return;
}
final String source = textarea.getText(); // Get full text from textarea
int replaceStart = textarea.getLineStartOffset(replacedLine);
int replaceEnd = textarea.getLineStopOffset(replacedLine);
if (replaceEnd > source.length()) {
replaceEnd = source.length();
}
int selectionStart = textarea.getLineStartOffset(startLine);
int selectionEnd = textarea.getLineStopOffset(stopLine);
if (selectionEnd > source.length()) {
selectionEnd = source.length();
}
String replacedText = source.substring(replaceStart, replaceEnd);
String selectedText = source.substring(selectionStart, selectionEnd);
// The final line of the document has no trailing newline, so shuffle
// the "\n" between the two chunks to keep line structure intact.
if (replacedLine == textarea.getLineCount() - 1) {
replacedText += "\n";
selectedText = selectedText.substring(0, Math.max(0, selectedText.length() - 1));
} else if (stopLine == textarea.getLineCount() - 1) {
selectedText += "\n";
replacedText = replacedText.substring(0, Math.max(0, replacedText.length() - 1));
}
int newSelectionStart, newSelectionEnd;
if (moveUp) {
// Replace the selection first, then the line above it; order matters
// because the second edit's offsets assume the first already happened.
textarea.select(selectionStart, selectionEnd);
textarea.setSelectedText(replacedText); // Use setSelectedText()
textarea.select(replaceStart, replaceEnd);
textarea.setSelectedText(selectedText);
newSelectionStart = textarea.getLineStartOffset(startLine - 1);
newSelectionEnd = textarea.getLineStopOffset(stopLine - 1);
} else {
textarea.select(replaceStart, replaceEnd);
textarea.setSelectedText(selectedText);
textarea.select(selectionStart, selectionEnd);
textarea.setSelectedText(replacedText);
newSelectionStart = textarea.getLineStartOffset(startLine + 1);
newSelectionEnd = stopLine + 1 < textarea.getLineCount()
? Math.min(textarea.getLineStopOffset(stopLine + 1), source.length())
: textarea.getLineStopOffset(stopLine); // Prevent out-of-bounds
}
stopCompoundEdit();
// Defer selection/caret updates so they run after pending UI events.
if (isSelected)
SwingUtilities.invokeLater(() -> {
textarea.select(newSelectionStart, newSelectionEnd-1);
});
else if (replacedLine >= 0 && replacedLine < textarea.getLineCount()) {
int replacedLineStart = textarea.getLineStartOffset(replacedLine);
int replacedLineEnd = textarea.getLineStopOffset(replacedLine);
// Ensure caret stays within bounds of the new line
int newCaretPos = Math.min(replacedLineStart + column, replacedLineEnd - 1);
SwingUtilities.invokeLater(() -> textarea.setCaretPosition(newCaretPos));
}
}
/**
 * Scans forward from {@code index} (exclusive of {@code stop}) and reports
 * whether the next non-whitespace character is an open paren. Used to decide
 * if an identifier under the caret looks like a function call.
 *
 * @param array characters to scan
 * @param index first position to examine
 * @param stop scan limit (exclusive)
 * @return true if '(' is found before any other non-whitespace character
 */
static public boolean checkParen(char[] array, int index, int stop) {
for (int i = index; i < stop; i++) {
char ch = array[i];
if (ch == '(') {
return true;
}
boolean whitespace = (ch == ' ') || (ch == '\t') || (ch == '\n') || (ch == '\r');
if (!whitespace) {
return false;
}
}
return false;
}
/**
 * Returns true if {@code c} can be part of a function-name identifier:
 * an ASCII letter or underscore. Deliberately ASCII-only; digits are
 * excluded as well.
 */
protected boolean functionable(char c) {
if (c == '_') {
return true;
}
boolean lower = ('a' <= c) && (c <= 'z');
boolean upper = ('A' <= c) && (c <= 'Z');
return lower || upper;
}
/**
 * Look up the reference page for the current selection. With no active
 * selection, the word under the caret is expanded over identifier
 * characters first. Identifiers followed by an open paren are looked up
 * with a trailing underscore (the reference's function-page convention).
 *
 * @param selectIfFound if true, select the examined span in the text area
 * @return the reference entry from the Mode, or null if none matches
 */
protected String referenceCheck(boolean selectIfFound) {
int selA = textarea.getSelectionStart();
int selB = textarea.getSelectionStop();
// Normalize so start <= stop regardless of drag direction.
int start = Math.min(selA, selB);
int stop = Math.max(selA, selB);
char[] chars = textarea.getText().toCharArray();
if (start == stop) {
// No selection: grow outward over identifier characters.
while (start > 0 && functionable(chars[start - 1])) {
start--;
}
while (stop < chars.length && functionable(chars[stop])) {
stop++;
}
}
String token = new String(chars, start, stop - start).trim();
if (checkParen(chars, stop, chars.length)) {
token += "_";  // function call; use the function variant of the page
}
String ref = mode.lookupReference(token);
if (selectIfFound) {
textarea.select(start, stop);
}
return ref;
}
/**
 * Open the reference page for the word under the caret or selection,
 * or explain in the status bar why none could be shown.
 */
protected void handleFindReference() {
String ref = referenceCheck(true);
if (ref == null) {
String selection = textarea.getSelectedText();
if (selection == null) {
// Nothing under the caret expanded to a word.
statusNotice(Language.text("editor.status.find_reference.select_word_first"));
} else {
statusNotice(Language.interpolate("editor.status.find_reference.not_available", selection.trim()));
}
} else {
showReference(ref + ".html");
}
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/**
 * Set the location of the sketch run window. Used by Runner to update the
 * Editor about window drag events while the sketch is running.
 *
 * @param p top-left corner of the run window in screen coordinates
 */
public void setSketchLocation(Point p) {
sketchWindowLocation = p;
}
/**
 * Get the last location of the sketch's run window. Used by Runner to make
 * the window show up in the same location as when it was last closed.
 *
 * @return last recorded location, or null if never set
 */
public Point getSketchLocation() {
return sketchWindowLocation;
}
// public void internalCloseRunner() {
// mode.internalCloseRunner(this);
// }
/**
 * Whether this editor has an active debugger. Base implementation has
 * none; Modes with debugging support override this.
 */
public boolean isDebuggerEnabled() {
return false;
}
/** Toggle a debugger breakpoint on the given line. No-op here; debugger-capable Modes override. */
public void toggleBreakpoint(int lineIndex) { }
/**
 * Check if the sketch is modified and ask user to save changes.
 * Shows a platform-appropriate dialog: a standard confirm dialog on
 * Windows/Linux, and an Apple-HIG-style save sheet layout on macOS.
 * @return false if canceling the close/quit operation
 */
@SuppressWarnings({"BooleanMethodIsAlwaysInverted", "RedundantIfStatement"})
public boolean checkModified() {
if (!sketch.isModified()) return true;
// As of Processing 1.0.10, this always happens immediately.
// https://download.processing.org/bugzilla/1456.html
// With Java 7u40 on OS X, need to bring the window forward.
toFront();
if (!Platform.isMacOS()) {
String prompt =
Language.interpolate("close.unsaved_changes", sketch.getName());
int result =
JOptionPane.showConfirmDialog(this, prompt,
Language.text("menu.file.close"),
JOptionPane.YES_NO_CANCEL_OPTION,
JOptionPane.QUESTION_MESSAGE);
if (result == JOptionPane.YES_OPTION) {
// Save, then proceed only if the save succeeded.
return handleSave(true);
} else if (result == JOptionPane.NO_OPTION) {
return true; // ok to continue
} else if (result == JOptionPane.CANCEL_OPTION ||
result == JOptionPane.CLOSED_OPTION) {
// Closing the dialog is treated the same as Cancel.
return false;
} else {
throw new IllegalStateException();
}
} else {
String tier1 = Language.interpolate("save.title", sketch.getName());
String tier2 = Language.text("save.hint");
JOptionPane pane =
new JOptionPane(Toolkit.formatMessage(tier1, tier2), JOptionPane.QUESTION_MESSAGE);
String[] options = new String[] {
Language.text("save.btn.save"),
Language.text("prompt.cancel"),
Language.text("save.btn.dont_save")
};
pane.setOptions(options);
// highlight the safest option ala apple hig
pane.setInitialValue(options[0]);
// On macOS, setting the destructive property places
// this option away from the others at the left-hand side.
pane.putClientProperty("Quaqua.OptionPane.destructiveOption", 2);
JDialog dialog = pane.createDialog(this, null);
dialog.setVisible(true);
Object result = pane.getValue();
if (result == options[0]) { // save (and close/quit)
return handleSave(true);
} else if (result == options[2]) { // don't save (still close/quit)
return true;
} else { // cancel?
return false;
}
}
}
/**
 * Second stage of open, occurs after having checked to see if the
 * modifications (if any) to the previous sketch need to be saved.
 * Because this method is called in Editor's constructor, a subclass
 * shouldn't rely on any of its variables being initialized already.
 *
 * @param path path to the main sketch file to load
 * @throws EditorException if the sketch could not be created (I/O failure)
 */
protected void handleOpenInternal(String path) throws EditorException {
// Prior to 4.0 beta 6, a lot of logic happened here that was
// instead moved into Base. Probably was here so that other Modes
// could override the behavior, but that was too messy. [fry 220206]
try {
sketch = new Sketch(path, this);
} catch (IOException e) {
throw new EditorException("Could not create the sketch.", e);
}
// Refresh the tab header and window title for the newly loaded sketch.
header.rebuild();
updateTitle();
}
/**
 * Set the title of the PDE window based on the current sketch, and keep
 * the macOS "document file" window property in sync so that cmd-clicking
 * the title bar works. Per platform convention, the property stays null
 * until the sketch has actually been saved.
 */
public void updateTitle() {
setTitle(sketch.getName());
// For 4.0 beta 6 this points at the sketch folder, rather than the
// .pde for the main tab, so it doesn't need updating on tab changes.
Object documentFile = sketch.isUntitled() ? null : sketch.getFolder();
getRootPane().putClientProperty("Window.documentFile", documentFile);
}
/**
 * Actually handle the save command. If 'immediately' is false, the save
 * runs later on the event queue so the message area updates and the save
 * button stays highlighted while the save is happening. If true, the save
 * happens synchronously — required during a quit, because invokeLater()
 * won't run properly while a quit is in progress. This fixes
 * <A HREF="https://download.processing.org/bugzilla/276.html">Bug 276</A>.
 *
 * @param immediately true to save on this thread, false to defer to the EDT
 * @return false only when an untitled sketch's Save As was canceled
 */
public boolean handleSave(boolean immediately) {
// Note: deliberately no handleStop() here; stopping long-running
// sketches on save caused much sadness (rectified in 0136).
if (sketch.isUntitled()) {
// Need to get a name first; the user might also cancel here.
return handleSaveAs();
}
if (immediately) {
handleSaveImpl();
} else {
EventQueue.invokeLater(this::handleSaveImpl);
}
return true;
}
/**
 * Perform the actual save, reporting progress and outcome in the status
 * bar. Errors are shown in the status area rather than thrown, since this
 * may run deferred on the event queue.
 */
protected void handleSaveImpl() {
statusNotice(Language.text("editor.status.saving"));
try {
boolean saved = sketch.save();
if (saved) {
statusNotice(Language.text("editor.status.saving.done"));
} else {
statusEmpty();
}
} catch (Exception e) {
// Show the error as a message in the window; other operations that
// trigger a save should keep working afterwards.
statusError(e);
}
}
/**
 * Run the Save As flow for the current sketch.
 * No "Done" message is shown on success except where a progress bar is
 * necessary; any such message comes from Sketch itself.
 *
 * @return true if the sketch was saved; false on cancel or error
 */
public boolean handleSaveAs() {
statusNotice(Language.text("editor.status.saving"));
boolean saved = false;
try {
saved = sketch.saveAs();
if (!saved) {
statusNotice(Language.text("editor.status.saving.canceled"));
}
} catch (Exception e) {
// Show the error as a message in the window.
statusError(e);
}
return saved;
}
/**
 * Handler for File &rarr; Page Setup. Lazily creates the shared
 * PrinterJob/PageFormat and lets the user edit the page format, which is
 * then reused by handlePrint().
 */
public void handlePageSetup() {
//printerJob = null;
if (printerJob == null) {
printerJob = PrinterJob.getPrinterJob();
}
if (pageFormat == null) {
pageFormat = printerJob.defaultPage();
}
// Shows the native page-setup dialog; returns the (possibly edited) format.
pageFormat = printerJob.pageDialog(pageFormat);
//System.out.println("page format is " + pageFormat);
}
/**
 * Handler for File &rarr; Print. Renders every tab of the sketch as
 * syntax-highlighted HTML into a JTextPane, then prints that pane via
 * the shared PrinterJob, honoring any format chosen in Page Setup.
 */
public void handlePrint() {
statusNotice(Language.text("editor.status.printing"));
// Build one HTML document containing every tab, each preceded by its name.
StringBuilder html = new StringBuilder("<html><body>");
for (SketchCode tab : sketch.getCode()) {
html.append("<b>");
html.append(tab.getPrettyName());
html.append("</b><br>");
html.append(textarea.getTextAsHtml((SyntaxDocument)tab.getDocument()));
html.append("<br>");
}
html.setLength(html.length() - 4); // Don't want last <br>.
html.append("</body></html>");
JTextPane jtp = new JTextPane();
// Needed for good line wrapping; otherwise one very long word breaks
// wrapping for the whole document.
jtp.setEditorKit(new HTMLEditorKit() {
public ViewFactory getViewFactory() {
return new HTMLFactory() {
public View create(Element e) {
View v = super.create(e);
if (!(v instanceof javax.swing.text.html.ParagraphView))
return v;
else
// Override the minimum width so long words can break anywhere.
return new javax.swing.text.html.ParagraphView(e) {
protected SizeRequirements calculateMinorAxisRequirements(
int axis, SizeRequirements r) {
r = super.calculateMinorAxisRequirements(axis, r);
r.minimum = 1;
return r;
}
};
}
};
}
});
jtp.setFont(new Font(Preferences.get("editor.font.family"), Font.PLAIN, 10));
jtp.setText(html.toString().replace("\n", "<br>") // Not in a <pre>.
.replaceAll("(?<! ) ", "&nbsp;")); // Allow line wrap.
//printerJob = null;
if (printerJob == null) {
printerJob = PrinterJob.getPrinterJob();
}
if (pageFormat != null) {
//System.out.println("setting page format " + pageFormat);
printerJob.setPrintable(jtp.getPrintable(null, null), pageFormat);
} else {
printerJob.setPrintable(jtp.getPrintable(null, null));
}
// set the name of the job to the code name
printerJob.setJobName(sketch.getCurrentCode().getPrettyName());
if (printerJob.printDialog()) {
try {
printerJob.print();
statusNotice(Language.text("editor.status.printing.done"));
} catch (PrinterException pe) {
statusError(Language.text("editor.status.printing.error"));
pe.printStackTrace();
}
} else {
statusNotice(Language.text("editor.status.printing.canceled"));
}
//printerJob = null; // clear this out?
}
/**
 * Grab current contents of the sketch window, advance the console,
 * stop any other running sketches... not in that order.
 * It's essential that this function be called by any Mode subclass,
 * otherwise current edits may not be stored for getProgram().
 */
public void prepareRun() {
internalCloseRunner();
statusEmpty();
// Do this to advance/clear the terminal window / dos prompt / etc.
// This may be useful especially when 'console.auto_clear' is false.
int headPadding = Preferences.getInteger("console.head_padding");
for (int i = 0; i < headPadding; i++) console.message("\n", false);
// clear the console on each run, unless the user doesn't want to
if (Preferences.getBoolean("console.auto_clear")) {
console.clear();
}
// make sure the user didn't hide the sketch folder
sketch.ensureExistence();
// make sure any edits have been stored
//current.setProgram(editor.getText());
// Go through all tabs; Replace All, Rename or Undo could have changed them
for (SketchCode sc : sketch.getCode()) {
if (sc.getDocument() != null) {
try {
sc.setProgram(sc.getDocumentText());
} catch (BadLocationException ignored) { }
}
}
// // if an external editor is being used, need to grab the
// // latest version of the code from the file.
// if (Preferences.getBoolean("editor.external")) {
// sketch.reload();
// }
}
/**
 * Halt the current runner for whatever reason. Might be the VM dying,
 * the window closing, an error...
 */
abstract public void internalCloseRunner();
/** Return run-related UI (e.g. toolbar buttons) to their inactive state. */
abstract public void deactivateRun();
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/** Returns the error table shown in the editor footer, or null if this Mode has none. */
public ErrorTable getErrorTable() {
return errorTable;
}
/**
 * Called by ErrorTable when a row is selected. Action taken is specific
 * to each Mode, based on the object passed in. Default behavior is to
 * highlight the clicked Problem in the text area.
 */
public void errorTableClick(Object item) {
highlight((Problem) item);
}
/** Called by ErrorTable on a double-click. No-op by default; Modes may override. */
public void errorTableDoubleClick(Object item) { }
/**
 * Handle whether the tiny red error indicator is shown near
 * the error button at the bottom of the PDE
 *
 * @param hasErrors true to show the notification dot on the error tab
 */
public void updateErrorToggle(boolean hasErrors) {
if (errorTable != null) {
footer.setNotification(errorTable.getParent(), hasErrors);
}
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/**
 * Show an error in the status bar.
 *
 * @param what message text to display
 */
public void statusError(String what) {
status.error(what);
//new Exception("deactivating RUN").printStackTrace();
// toolbar.deactivate(EditorToolbar.RUN);
}
/**
 * Show an exception in the editor status bar. For a SketchException that
 * carries location info, also switches to the offending tab and selects
 * the offending line. The message shown to the user is stripped of
 * implementation-detail prefixes that aren't useful to most PDE users.
 *
 * @param e the exception to report; its message (if any) becomes the status text
 */
public void statusError(Exception e) {
if (e instanceof SketchException re) {
// Make sure something is printed into the console,
// since the status bar is volatile.
System.err.println(re.getMessage());
// Move the cursor to the line before updating the status bar, otherwise
// the status message might get hidden by a potential message caused by
// moving the cursor to a line with a warning in it.
if (re.hasCodeIndex()) {
sketch.setCurrentCode(re.getCodeIndex());
}
if (re.hasCodeLine()) {
selectErrorLine(re.getCodeLine());
}
} else {
e.printStackTrace();
}
// Since this will catch all Exception types, spend some time figuring
// out which kind and try to give a better error message to the user.
String mess = e.getMessage();
if (mess != null) {
statusError(stripNoisePrefixes(mess));
}
}
/**
 * Select 'line' in the text area, backing up from past-the-end or
 * zero-length trailing lines; reports bad line numbers to stderr.
 */
private void selectErrorLine(int line) {
if (line >= textarea.getLineCount()) {
// The error is at the end of this current chunk of code,
// so the last line needs to be selected.
line = textarea.getLineCount() - 1;
if (textarea.getLineText(line).length() == 0) {
// The last line may be zero length, meaning nothing to select.
// If so, back up one more line.
line--;
}
}
if (line < 0 || line >= textarea.getLineCount()) {
System.err.println("Bad error line: " + line);
} else {
// Subtract one from the end so that the \n isn't included.
textarea.select(textarea.getLineStartOffset(line),
textarea.getLineStopOffset(line) - 1);
}
}
/**
 * Strip message prefixes, in order, that are confusing or useless for
 * most PDE users ("java.lang.", common exception class names). Applied
 * sequentially, matching the original cascade of checks.
 */
private static String stripNoisePrefixes(String mess) {
final String[] prefixes = {
"java.lang.",
// "RuntimeException" isn't useful for most users
"RuntimeException: ",
// Just confusing for most PDE users (save it for Eclipse users)
"IllegalArgumentException: ",
// Confusing and common with the size() and fullScreen() changes
"IllegalStateException: "
};
for (String prefix : prefixes) {
if (mess.startsWith(prefix)) {
mess = mess.substring(prefix.length());
}
}
return mess;
}
/**
 * Show a notice message in the editor status bar. A null message is a
 * programming error: it is reported loudly to stderr but the status bar
 * is still updated (with an empty string) so the UI stays consistent.
 */
public void statusNotice(String msg) {
String text = msg;
if (text == null) {
new IllegalArgumentException("This code called statusNotice(null)").printStackTrace();
text = "";
}
status.notice(text);
}
/** Clear the status bar, but only if it currently shows exactly {@code msg}. */
public void clearNotice(String msg) {
if (status.message.equals(msg)) {
statusEmpty();
}
}
/**
 * Returns the current notice message in the editor status bar.
 */
public String getStatusMessage() {
return status.message;
}
/**
 * Returns the current mode of the editor status bar (e.g. notice, error,
 * cursor-line warning). Note: the previous javadoc said "notice message",
 * which was a copy-paste error; this returns the mode constant.
 */
public int getStatusMode() {
return status.mode;
}
// /**
// * Returns the current mode of the editor status bar: NOTICE, ERR or EDIT.
// */
// public int getStatusMode() {
// return status.mode;
// }
/**
 * Clear the status area.
 */
public void statusEmpty() {
status.empty();
}
/**
 * Show a message of the given type in the status bar.
 * Safe to call from any thread: off the EDT, the call is re-dispatched
 * onto the event queue.
 */
public void statusMessage(String message, int type) {
if (EventQueue.isDispatchThread()) {
status.message(message, type);
} else {
// Hop onto the EDT; Swing components must only be touched there.
EventQueue.invokeLater(() -> statusMessage(message, type));
}
}
/** Show the indeterminate (busy) progress indicator in the status bar. */
public void startIndeterminate() {
status.startIndeterminate();
}
/** Hide the indeterminate (busy) progress indicator in the status bar. */
public void stopIndeterminate() {
status.stopIndeterminate();
}
/** Notification that a stop was requested by someone else. No-op here; Modes may override. */
public void statusHalt() {
// stop called by someone else
}
/** Whether the runner has been halted. Always false here; Modes may override. */
public boolean isHalted() {
return false;
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/**
 * Replace the current list of problems (errors/warnings) and refresh all
 * dependent UI: the error table, the error-stripe column, the text area,
 * the error-toggle indicator, and the status bar.
 */
public void setProblemList(List<Problem> problems) {
this.problems = problems;
boolean hasErrors = problems.stream().anyMatch(Problem::isError);
updateErrorTable(problems);
errorColumn.updateErrorPoints(problems);
textarea.repaint();
updateErrorToggle(hasErrors);
updateEditorStatus();
}
/**
 * Updates the error table in the Error Window, rebuilding all rows from
 * the given problem list. No-op when this Mode has no error table.
 */
public void updateErrorTable(List<Problem> problems) {
if (errorTable != null) {
errorTable.clearRows();
for (Problem p : problems) {
String message = p.getMessage();
errorTable.addRow(p, message,
sketch.getCode(p.getTabIndex()).getPrettyName(),
Integer.toString(p.getLineNumber() + 1));
// Added +1 because lineNumbers internally are 0-indexed
}
}
}
/**
 * Highlight the span of the given problem in the editor: switches to its
 * tab and selects from the problem's start offset to its stop offset
 * (both relative to the start of the problem's line).
 */
public void highlight(Problem p) {
if (p == null) {
return;
}
int tabIndex = p.getTabIndex();
sketch.setCurrentCode(tabIndex); // so we are looking at the right offsets below
int lineNumber = p.getLineNumber();
int lineStart = textarea.getLineStartOffset(lineNumber);
int lineEnd = textarea.getLineStopOffset(lineNumber);
int tabToStartOffset = lineStart + p.getStartOffset();
int lineStopOffset = getProblemEditorLineStop(p, lineStart, lineEnd);
int tabToStopOffset = lineStart + lineStopOffset;
highlight(tabIndex, tabToStartOffset, tabToStopOffset);
}
/**
 * Bring the editor to the front, switch to the given tab, and select the
 * given offset range (clamped to the document length), scrolling so the
 * selection is visible.
 */
public void highlight(int tabIndex, int startOffset, int stopOffset) {
// Switch to tab
toFront();
sketch.setCurrentCode(tabIndex);
// Make sure offsets are in bounds
int length = textarea.getDocumentLength();
startOffset = PApplet.constrain(startOffset, 0, length);
stopOffset = PApplet.constrain(stopOffset, 0, length);
// Highlight the code
textarea.select(startOffset, stopOffset);
// Scroll to error line
textarea.scrollToCaret();
repaint();
}
/** Returns the current list of problems (errors and warnings) for this sketch. */
public List<Problem> getProblems() {
return problems;
}
/**
 * Updates editor status bar, depending on whether the caret is on an error
 * line or not. When the caret leaves a problem line, a previously shown
 * cursor-line error/warning is cleared (other status types are left alone).
 */
public void updateEditorStatus() {
Problem problem = findProblem(textarea.getCaretLine());
if (problem != null) {
int type = problem.isError() ?
EditorStatus.CURSOR_LINE_ERROR : EditorStatus.CURSOR_LINE_WARNING;
statusMessage(problem.getMessage(), type);
} else {
switch (getStatusMode()) {
case EditorStatus.CURSOR_LINE_ERROR, EditorStatus.CURSOR_LINE_WARNING -> statusEmpty();
}
}
}
/**
 * Returns the most relevant Problem on {@code line}: the first error if
 * any exists, otherwise the first warning; null when the line has no
 * problems at all.
 */
protected Problem findProblem(int line) {
List<Problem> onLine = findProblems(line);
return onLine.stream()
.filter(Problem::isError)
.findFirst()
.orElse(onLine.isEmpty() ? null : onLine.get(0));
}
/**
 * Returns all problems in the current tab whose span covers the given
 * line. A problem's span runs from its reported line through the line
 * containing its stop offset (a stop offset of -1 means the start line only).
 */
public List<Problem> findProblems(int line) {
int currentTab = getSketch().getCurrentCodeIndex();
return problems.stream()
.filter(p -> p.getTabIndex() == currentTab)
.filter(p -> {
int pStartLine = p.getLineNumber();
int lineOffset = textarea.getLineStartOffset(pStartLine);
int stopOffset = p.getStopOffset();
// -1 means "no explicit stop": the problem covers only its start line.
int pEndOffset = lineOffset + (stopOffset == -1 ? 0 : stopOffset);
int pEndLine = textarea.getLineOfOffset(pEndOffset);
return line >= pStartLine && line <= pEndLine;
})
.collect(Collectors.toList());
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
// Tooltip styling shared by all editor windows, consumed by statusToolTip().
// NOTE(review): initialization isn't visible in this chunk — presumably set
// from the theme/LAF elsewhere; confirm before relying on non-null values.
static Font toolTipFont;
static String toolTipTextColor;
static String toolTipWarningColor;
static String toolTipErrorColor;
/**
 * Install a styled HTML tooltip on the given component, colored for
 * either an error or a warning, using the shared theme fonts/colors.
 *
 * @param comp component to receive the tooltip
 * @param message tooltip body (may contain HTML)
 * @param error true for error styling, false for warning styling
 */
public void statusToolTip(JComponent comp, String message, boolean error) {
// Adjust margin if the UI zoom has been manually set larger/smaller
int m = Toolkit.zoom(5);
String css =
// https://github.com/AdoptOpenJDK/openjdk-jdk8u/blob/master/jdk/src/share/classes/javax/swing/plaf/basic/BasicToolTipUI.java
"margin: 0 -3 0 -3;" + // Basic LAF adds 3px to either side; yay!
String.format("padding: %d %d %d %d; ", m, m*2, m, m*2) +
"background: " + (error ? toolTipErrorColor : toolTipWarningColor) + ";" +
"color: " + toolTipTextColor + ";" +
"font-family: " + toolTipFont.getFontName() + ", sans-serif;" +
"font-size: " + toolTipFont.getSize() + "px;";
String content =
"<html> <div style='" + css + "'>" + message + "</div> </html>";
comp.setToolTipText(content);
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
/**
* Returns the edit popup menu.
*/
| via |
java | apache__camel | core/camel-management-api/src/main/java/org/apache/camel/api/management/JmxSystemPropertyKeys.java | {
"start": 933,
"end": 3708
} | class ____ {
// disable jmx
public static final String DISABLED = "org.apache.camel.jmx.disabled";
// jmx domain name
public static final String DOMAIN = "org.apache.camel.jmx.mbeanServerDefaultDomain";
// the domain name for the camel mbeans
public static final String MBEAN_DOMAIN = "org.apache.camel.jmx.mbeanObjectDomainName";
// use jvm platform mbean server flag
public static final String USE_PLATFORM_MBS = "org.apache.camel.jmx.usePlatformMBeanServer";
// whether all processors or only processors with a custom id given should be registered
public static final String ONLY_REGISTER_PROCESSOR_WITH_CUSTOM_ID
= "org.apache.camel.jmx.onlyRegisterProcessorWithCustomId";
// whether to enable gathering load statistics in the background
public static final String LOAD_STATISTICS_ENABLED = "org.apache.camel.jmx.loadStatisticsEnabled";
// whether to enable gathering endpoint runtime statistics
public static final String ENDPOINT_RUNTIME_STATISTICS_ENABLED = "org.apache.camel.jmx.endpointRuntimeStatisticsEnabled";
// the level of statistics enabled
public static final String STATISTICS_LEVEL = "org.apache.camel.jmx.statisticsLevel";
// whether to register always
public static final String REGISTER_ALWAYS = "org.apache.camel.jmx.registerAlways";
// whether to register when starting new routes
public static final String REGISTER_NEW_ROUTES = "org.apache.camel.jmx.registerNewRoutes";
// whether to register routes created by route templates (not kamelets)
public static final String REGISTER_ROUTES_CREATED_BY_TEMPLATE = "org.apache.camel.jmx.registerRoutesCreateByTemplate";
// whether to register routes created by Kamelets
public static final String REGISTER_ROUTES_CREATED_BY_KAMELET = "org.apache.camel.jmx.registerRoutesCreateByKamelet";
// Whether to remove detected sensitive information (such as passwords) from MBean names and attributes.
public static final String MASK = "org.apache.camel.jmx.mask";
// Whether to include host name in MBean names
public static final String INCLUDE_HOST_NAME = "org.apache.camel.jmx.includeHostName";
// To configure the default management name pattern using a JVM system property
public static final String MANAGEMENT_NAME_PATTERN = "org.apache.camel.jmx.managementNamePattern";
// flag to enable host ip address instead of host name
public static final String USE_HOST_IP_ADDRESS = "org.apache.camel.jmx.useHostIPAddress";
// flag to enable updating routes via XML
public static final String UPDATE_ROUTE_ENABLED = "org.apache.camel.jmx.updateRouteEnabled";
private JmxSystemPropertyKeys() {
// not instantiated
}
}
| JmxSystemPropertyKeys |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DefaultCharsetTest.java | {
"start": 4510,
"end": 5168
} | class ____ {
void f(String s) throws Exception {
// BUG: Diagnostic contains: try (BufferedReader reader = Files.newBufferedReader(Paths.get(s),
// UTF_8)) {}'
try (BufferedReader reader = new BufferedReader(new FileReader(s))) {}
// BUG: Diagnostic contains: try (BufferedWriter writer = Files.newBufferedWriter(Paths.get(s),
// UTF_8)) {}
try (BufferedWriter writer = new BufferedWriter(new FileWriter(s))) {}
}
}
""")
.doTest();
}
@Test
public void negative() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import static java.nio.charset.StandardCharsets.UTF_8;
import java.io.*;
| Test |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inlineme/SuggesterTest.java | {
"start": 20794,
"end": 21216
} | class ____ {
@Deprecated
public void foo(String input) {
if (input.equals("hi")) {
return;
}
}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void nestedBlock() {
refactoringTestHelper
.addInputLines(
"Client.java",
"""
public | Client |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/utils/PlannerMocks.java | {
"start": 2139,
"end": 6004
} | class ____ {
private final FlinkPlannerImpl planner;
private final ParserImpl parser;
private final CatalogManager catalogManager;
private final FunctionCatalog functionCatalog;
private final TableConfig tableConfig;
private final PlannerContext plannerContext;
@SuppressWarnings("rawtypes")
private PlannerMocks(
boolean isBatchMode,
TableConfig tableConfig,
ResourceManager resourceManager,
CatalogManager catalogManager,
List<RelTraitDef> traitDefs,
CalciteSchema rootSchema) {
this.catalogManager = catalogManager;
this.tableConfig = tableConfig;
final ModuleManager moduleManager = new ModuleManager();
this.functionCatalog =
new FunctionCatalog(tableConfig, resourceManager, catalogManager, moduleManager);
this.plannerContext =
new PlannerContext(
isBatchMode,
tableConfig,
moduleManager,
functionCatalog,
catalogManager,
rootSchema != null
? rootSchema
: asRootSchema(
new CatalogManagerCalciteSchema(
catalogManager, !isBatchMode)),
traitDefs,
PlannerMocks.class.getClassLoader());
this.planner = plannerContext.createFlinkPlanner();
this.parser =
new ParserImpl(
catalogManager,
() -> planner,
planner::parser,
plannerContext.getRexFactory());
catalogManager.initSchemaResolver(
true,
ExpressionResolver.resolverFor(
tableConfig,
PlannerMocks.class.getClassLoader(),
name -> {
throw new UnsupportedOperationException();
},
functionCatalog.asLookup(parser::parseIdentifier),
catalogManager.getDataTypeFactory(),
parser::parseSqlExpression),
parser);
}
public FlinkPlannerImpl getPlanner() {
return planner;
}
public ParserImpl getParser() {
return parser;
}
public CatalogManager getCatalogManager() {
return catalogManager;
}
public FunctionCatalog getFunctionCatalog() {
return functionCatalog;
}
public TableConfig getTableConfig() {
return tableConfig;
}
public PlannerContext getPlannerContext() {
return plannerContext;
}
public PlannerMocks registerTemporaryTable(String tableName, Schema tableSchema) {
final CatalogTable table =
CatalogTable.newBuilder()
.schema(tableSchema)
.options(
Map.of(
"connector",
TestSimpleDynamicTableSourceFactory.IDENTIFIER()))
.build();
this.getCatalogManager()
.createTemporaryTable(
table,
ObjectIdentifier.of(
this.getCatalogManager().getCurrentCatalog(),
this.getCatalogManager().getCurrentDatabase(),
tableName),
false);
return this;
}
/** Builder for {@link PlannerMocks} to facilitate various test use cases. */
@SuppressWarnings("rawtypes")
public static | PlannerMocks |
java | apache__camel | core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedFailoverLoadBalancer.java | {
"start": 1834,
"end": 5741
} | class ____ extends ManagedProcessor implements ManagedFailoverLoadBalancerMBean {
private String exceptions;
public ManagedFailoverLoadBalancer(CamelContext context, FailOverLoadBalancer processor, LoadBalanceDefinition definition) {
super(context, processor, definition);
}
@Override
public FailOverLoadBalancer getProcessor() {
return (FailOverLoadBalancer) super.getProcessor();
}
@Override
public LoadBalanceDefinition getDefinition() {
return (LoadBalanceDefinition) super.getDefinition();
}
@Override
public void reset() {
super.reset();
getProcessor().reset();
}
@Override
public Boolean getSupportExtendedInformation() {
return true;
}
@Override
public Integer getSize() {
return getProcessor().getProcessors().size();
}
@Override
public Boolean isRoundRobin() {
return getProcessor().isRoundRobin();
}
@Override
public Boolean isSticky() {
return getProcessor().isSticky();
}
@Override
public Integer getMaximumFailoverAttempts() {
return getProcessor().getMaximumFailoverAttempts();
}
@Override
public String getExceptions() {
if (exceptions != null) {
return exceptions;
}
List<Class<?>> classes = getProcessor().getExceptions();
if (classes == null || classes.isEmpty()) {
exceptions = "";
} else {
StringJoiner exceptionsBuilder = new StringJoiner(",");
for (Class<?> clazz : classes) {
exceptionsBuilder.add(clazz.getCanonicalName());
}
exceptions = exceptionsBuilder.toString();
}
return exceptions;
}
@Override
public String getLastGoodProcessorId() {
int idx = getProcessor().getLastGoodIndex();
if (idx != -1) {
LoadBalanceDefinition def = getDefinition();
ProcessorDefinition<?> output = def.getOutputs().get(idx);
if (output != null) {
return output.getId();
}
}
return null;
}
@Override
public TabularData extendedInformation() {
try {
TabularData answer = new TabularDataSupport(CamelOpenMBeanTypes.loadbalancerExceptionsTabularType());
ExceptionFailureStatistics statistics = getProcessor().getExceptionFailureStatistics();
Iterator<Class<?>> it = statistics.getExceptions();
boolean empty = true;
while (it.hasNext()) {
empty = false;
Class<?> exception = it.next();
String name = ObjectHelper.name(exception);
long counter = statistics.getFailureCounter(exception);
CompositeType ct = CamelOpenMBeanTypes.loadbalancerExceptionsCompositeType();
CompositeData data = new CompositeDataSupport(
ct,
new String[] { "exception", "failures" },
new Object[] { name, counter });
answer.put(data);
}
if (empty) {
// use Exception as a single general
String name = ObjectHelper.name(Exception.class);
long counter = statistics.getFailureCounter(Exception.class);
CompositeType ct = CamelOpenMBeanTypes.loadbalancerExceptionsCompositeType();
CompositeData data = new CompositeDataSupport(
ct,
new String[] { "exception", "failures" },
new Object[] { name, counter });
answer.put(data);
}
return answer;
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
}
}
| ManagedFailoverLoadBalancer |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java | {
"start": 14967,
"end": 16038
} | class ____ extends FilterInitializer {
public DummyFilterInitializer() {
}
@Override
public void initFilter(FilterContainer container, Configuration conf) {
container.addFilter("DummyFilter", DummyServletFilter.class.getName(), null);
}
}
/**
* Access a URL and get the corresponding return Http status code. The URL
* will be accessed as the passed user, by sending user.name request
* parameter.
*
* @param urlstring web url.
* @param userName userName.
* @return http status code.
* @throws IOException an I/O exception of some sort has occurred.
*/
static int getHttpStatusCode(String urlstring, String userName)
throws IOException {
URL url = new URL(urlstring + "?user.name=" + userName);
System.out.println("Accessing " + url + " as user " + userName);
HttpURLConnection connection = (HttpURLConnection)url.openConnection();
connection.connect();
return connection.getResponseCode();
}
/**
* Custom user->group mapping service.
*/
public static | DummyFilterInitializer |
java | apache__camel | components/camel-avro/src/generated/java/org/apache/camel/dataformat/avro/AvroDataFormatConfigurer.java | {
"start": 726,
"end": 2344
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("InstanceClassName", java.lang.String.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
AvroDataFormat target = (AvroDataFormat) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "instanceclassname":
case "instanceClassName": target.setInstanceClassName(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "instanceclassname":
case "instanceClassName": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
AvroDataFormat target = (AvroDataFormat) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "instanceclassname":
case "instanceClassName": return target.getInstanceClassName();
default: return null;
}
}
}
| AvroDataFormatConfigurer |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/PreemptionContainer.java | {
"start": 1344,
"end": 1792
} | class ____ {
@Private
@Unstable
public static PreemptionContainer newInstance(ContainerId id) {
PreemptionContainer container = Records.newRecord(PreemptionContainer.class);
container.setId(id);
return container;
}
/**
* @return Container referenced by this handle.
*/
@Public
@Evolving
public abstract ContainerId getId();
@Private
@Unstable
public abstract void setId(ContainerId id);
}
| PreemptionContainer |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/Resolver.java | {
"start": 11202,
"end": 11583
} | class ____ extends Action {
public final Action elementAction;
public Container(Schema w, Schema r, GenericData d, Action e) {
super(w, r, d, Action.Type.CONTAINER);
this.elementAction = e;
}
}
/**
* Contains information needed to resolve enumerations. When resolving enums,
* adjustments need to be made in two scenarios: the index for an | Container |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/execution/ParameterResolutionUtilsTests.java | {
"start": 16025,
"end": 16427
} | class ____ implements ParameterResolver {
@Override
public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) {
return parameterContext.getParameter().getType() == Number.class;
}
@Override
public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) {
return 42;
}
}
static | NumberParameterResolver |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/TestExecutionListener.java | {
"start": 8571,
"end": 8720
} | class ____ the
* current {@linkplain TestContext#getTestMethod() test method}, which may be
* a {@link org.junit.jupiter.api.Nested @Nested} test | for |
java | netty__netty | transport-native-io_uring/src/test/java/io/netty/channel/uring/IoUringBufferRingTest.java | {
"start": 1804,
"end": 13619
} | class ____ {
@BeforeAll
public static void loadJNI() {
assumeTrue(IoUring.isAvailable());
assumeTrue(IoUring.isRegisterBufferRingSupported());
}
@Test
public void testRegister() {
// using cqeSize on purpose NOT a power of 2
RingBuffer ringBuffer = Native.createRingBuffer(8, 15, 0);
try {
int ringFd = ringBuffer.fd();
long ioUringBufRingAddr = Native.ioUringRegisterBufRing(ringFd, 4, (short) 1, 0);
assumeTrue(
ioUringBufRingAddr > 0,
"ioUringSetupBufRing result must great than 0, but now result is " + ioUringBufRingAddr);
int freeRes = Native.ioUringUnRegisterBufRing(ringFd, ioUringBufRingAddr, 4, (short) 1);
assertEquals(
0,
freeRes,
"ioUringFreeBufRing result must be 0, but now result is " + freeRes
);
// let io_uring to "fix" it
assertEquals(16, ringBuffer.ioUringCompletionQueue().ringCapacity);
} finally {
ringBuffer.close();
}
}
private static ByteBuf unwrapLeakAware(ByteBuf buf) {
// If its a sub-type of WrappedByteBuf we know its because it was wrapped for leak-detection.
if (buf instanceof WrappedByteBuf) {
return buf.unwrap();
}
return buf;
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testProviderBufferRead(boolean incremental) throws InterruptedException {
if (incremental) {
assumeTrue(IoUring.isRegisterBufferRingIncSupported());
}
final BlockingQueue<ByteBuf> bufferSyncer = new LinkedBlockingQueue<>();
IoUringIoHandlerConfig ioUringIoHandlerConfiguration = new IoUringIoHandlerConfig();
IoUringBufferRingConfig bufferRingConfig =
IoUringBufferRingConfig.builder()
.bufferGroupId((short) 1)
.bufferRingSize((short) 2)
.batchSize(2).incremental(incremental)
.allocator(new IoUringFixedBufferRingAllocator(1024))
.batchAllocation(false)
.build();
IoUringBufferRingConfig bufferRingConfig1 =
IoUringBufferRingConfig.builder()
.bufferGroupId((short) 2)
.bufferRingSize((short) 16)
.batchSize(8)
.incremental(incremental)
.allocator(new IoUringFixedBufferRingAllocator(1024))
.batchAllocation(true)
.build();
ioUringIoHandlerConfiguration.setBufferRingConfig(bufferRingConfig, bufferRingConfig1);
MultiThreadIoEventLoopGroup group = new MultiThreadIoEventLoopGroup(1,
IoUringIoHandler.newFactory(ioUringIoHandlerConfiguration)
);
ServerBootstrap serverBootstrap = new ServerBootstrap();
serverBootstrap.channel(IoUringServerSocketChannel.class);
String randomString = UUID.randomUUID().toString();
int randomStringLength = randomString.length();
ArrayBlockingQueue<IoUringBufferRingExhaustedEvent> eventSyncer = new ArrayBlockingQueue<>(1);
Channel serverChannel = serverBootstrap.group(group)
.childHandler(new ChannelInboundHandlerAdapter() {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
bufferSyncer.offer((ByteBuf) msg);
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) {
if (evt instanceof IoUringBufferRingExhaustedEvent) {
eventSyncer.add((IoUringBufferRingExhaustedEvent) evt);
}
}
})
.childOption(IoUringChannelOption.IO_URING_BUFFER_GROUP_ID, bufferRingConfig.bufferGroupId())
.bind(NetUtil.LOCALHOST, 0)
.syncUninterruptibly().channel();
Bootstrap clientBoostrap = new Bootstrap();
clientBoostrap.group(group)
.channel(IoUringSocketChannel.class)
.handler(new ChannelInboundHandlerAdapter());
ChannelFuture channelFuture = clientBoostrap.connect(serverChannel.localAddress()).syncUninterruptibly();
assumeTrue(channelFuture.isSuccess());
Channel clientChannel = channelFuture.channel();
//is provider buffer read?
ByteBuf writeBuffer = Unpooled.directBuffer(randomStringLength);
ByteBufUtil.writeAscii(writeBuffer, randomString);
ByteBuf userspaceIoUringBufferElement1 = sendAndRecvMessage(clientChannel, writeBuffer, bufferSyncer);
ByteBuf userspaceIoUringBufferElement2 = sendAndRecvMessage(clientChannel, writeBuffer, bufferSyncer);
ByteBuf readBuffer = sendAndRecvMessage(clientChannel, writeBuffer, bufferSyncer);
readBuffer.release();
// Now we release the buffer and so put it back into the buffer ring.
userspaceIoUringBufferElement1.release();
userspaceIoUringBufferElement2.release();
readBuffer = sendAndRecvMessage(clientChannel, writeBuffer, bufferSyncer);
readBuffer.release();
// The next buffer is expected to be provided out of the ring again.
readBuffer = sendAndRecvMessage(clientChannel, writeBuffer, bufferSyncer);
readBuffer.release();
writeBuffer.release();
serverChannel.close().syncUninterruptibly();
clientChannel.close().syncUninterruptibly();
group.shutdownGracefully();
}
static boolean recvsendBundleEnabled() {
return IoUring.isRecvsendBundleEnabled();
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
@EnabledIf("recvsendBundleEnabled")
public void testProviderBufferReadWithRecvsendBundle(boolean incremental) throws InterruptedException {
// See https://lore.kernel.org/io-uring/184f9f92-a682-4205-a15d-89e18f664502@kernel.dk/T/#u
assumeTrue(IoUring.isRecvMultishotEnabled(),
"Only yields expected test results when using multishot atm");
if (incremental) {
assumeTrue(IoUring.isRegisterBufferRingIncSupported());
}
int bufferRingChunkSize = 8;
IoUringIoHandlerConfig ioUringIoHandlerConfiguration = new IoUringIoHandlerConfig();
IoUringBufferRingConfig bufferRingConfig = new IoUringBufferRingConfig(
// let's use a small chunkSize so we are sure a recv will span multiple buffers.
(short) 1, (short) 16, 8, 16 * 16,
incremental, new IoUringFixedBufferRingAllocator(bufferRingChunkSize));
ioUringIoHandlerConfiguration.setBufferRingConfig(bufferRingConfig);
MultiThreadIoEventLoopGroup group = new MultiThreadIoEventLoopGroup(1,
IoUringIoHandler.newFactory(ioUringIoHandlerConfiguration)
);
ServerBootstrap serverBootstrap = new ServerBootstrap();
serverBootstrap.channel(IoUringServerSocketChannel.class);
final BlockingQueue<ByteBuf> buffers = new LinkedBlockingQueue<>();
Channel serverChannel = serverBootstrap.group(group)
.childHandler(new ChannelInboundHandlerAdapter() {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
buffers.offer((ByteBuf) msg);
}
})
.childOption(IoUringChannelOption.IO_URING_BUFFER_GROUP_ID, (short) 1)
.bind(new InetSocketAddress(0))
.syncUninterruptibly().channel();
Bootstrap clientBoostrap = new Bootstrap();
clientBoostrap.group(group)
.channel(IoUringSocketChannel.class)
.handler(new ChannelInboundHandlerAdapter());
ChannelFuture channelFuture = clientBoostrap.connect(serverChannel.localAddress()).syncUninterruptibly();
assumeTrue(channelFuture.isSuccess());
Channel clientChannel = channelFuture.channel();
// Create a buffer that will span multiple buffers that are used out of the buffer ring.
ByteBuf writeBuffer = Unpooled.directBuffer(bufferRingChunkSize * 16);
CompositeByteBuf received = Unpooled.compositeBuffer();
try {
// Fill the buffer with something so we can assert if the received bytes are the same.
for (int i = 0; i < writeBuffer.capacity(); i++) {
writeBuffer.writeByte((byte) i);
}
clientChannel.writeAndFlush(writeBuffer.retainedDuplicate()).syncUninterruptibly();
// Aggregate all received buffers until we received everything.
do {
ByteBuf buffer = buffers.take();
received.addComponent(true, buffer);
} while (received.readableBytes() != writeBuffer.readableBytes());
assertEquals(writeBuffer, received);
serverChannel.close().syncUninterruptibly();
clientChannel.close().syncUninterruptibly();
group.shutdownGracefully();
assertTrue(buffers.isEmpty());
} finally {
writeBuffer.release();
received.release();
}
}
private ByteBuf sendAndRecvMessage(Channel clientChannel, ByteBuf writeBuffer, BlockingQueue<ByteBuf> bufferSyncer)
throws InterruptedException {
//retain the buffer to assert
clientChannel.writeAndFlush(writeBuffer.retainedDuplicate()).sync();
ByteBuf readBuffer = bufferSyncer.take();
assertEquals(writeBuffer.readableBytes(), readBuffer.readableBytes());
assertTrue(ByteBufUtil.equals(writeBuffer, readBuffer));
return readBuffer;
}
@Test
public void testCloseEventLoopGroupWhileConnected() throws Exception {
MultiThreadIoEventLoopGroup group = new MultiThreadIoEventLoopGroup(1,
IoUringIoHandler.newFactory()
);
try {
final BlockingQueue<Channel> acceptedChannels = new LinkedBlockingQueue<>();
ServerBootstrap serverBootstrap = new ServerBootstrap();
serverBootstrap.channel(IoUringServerSocketChannel.class);
Channel serverChannel = serverBootstrap.group(group)
.childHandler(new ChannelInboundHandlerAdapter() {
@Override
public void channelActive(ChannelHandlerContext ctx) {
acceptedChannels.add(ctx.channel());
}
})
.bind(new InetSocketAddress(0))
.syncUninterruptibly().channel();
Bootstrap clientBoostrap = new Bootstrap();
clientBoostrap.group(group)
.channel(IoUringSocketChannel.class)
.handler(new ChannelInboundHandlerAdapter());
ChannelFuture channelFuture = clientBoostrap.connect(serverChannel.localAddress());
Channel clientChannel = channelFuture.sync().channel();
group.shutdownGracefully().syncUninterruptibly();
clientChannel.closeFuture().sync();
serverChannel.closeFuture().sync();
acceptedChannels.take().closeFuture().sync();
assertTrue(acceptedChannels.isEmpty());
} catch (Throwable t) {
if (!group.isShutdown()) {
group.shutdownGracefully().syncUninterruptibly();
}
throw t;
}
}
}
| IoUringBufferRingTest |
java | netty__netty | codec-compression/src/main/java/io/netty/handler/codec/compression/SnappyFramedEncoder.java | {
"start": 764,
"end": 887
} | class ____ extends SnappyFrameEncoder {
// Nothing new. Just staying here for backward compatibility.
}
| SnappyFramedEncoder |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/wall/sqlserver/SQLServerWallTest_3.java | {
"start": 901,
"end": 1359
} | class ____ extends TestCase {
public void test_true() throws Exception {
WallProvider provider = new SQLServerWallProvider();
provider.getConfig().setSelectHavingAlwayTrueCheck(true);
assertFalse(provider.checkValid(//
"select * from t where LEN(HOST_NAME()) > 0"));
assertEquals(1, provider.getTableStats().size());
assertTrue(provider.getTableStats().containsKey("t"));
}
}
| SQLServerWallTest_3 |
java | apache__camel | components/camel-cxf/camel-cxf-spring-rest/src/main/java/org/apache/camel/component/cxf/spring/jaxrs/CxfRsClientFactoryBeanDefinitionParser.java | {
"start": 1247,
"end": 3035
} | class ____ extends AbstractCxfBeanDefinitionParser {
public CxfRsClientFactoryBeanDefinitionParser() {
setBeanClass(SpringJAXRSClientFactoryBean.class);
}
@Override
protected void doParse(Element element, ParserContext ctx, BeanDefinitionBuilder bean) {
super.doParse(element, ctx, bean);
bean.addPropertyValue("beanId", resolveId(element, bean.getBeanDefinition(), ctx));
}
@Override
protected void mapElement(ParserContext ctx, BeanDefinitionBuilder bean, Element el, String name) {
if ("properties".equals(name) || "headers".equals(name)) {
Map<?, ?> map = ctx.getDelegate().parseMapElement(el, bean.getBeanDefinition());
bean.addPropertyValue(name, map);
} else if ("binding".equals(name)) {
setFirstChildAsProperty(el, ctx, bean, "bindingConfig");
} else if ("inInterceptors".equals(name) || "inFaultInterceptors".equals(name)
|| "outInterceptors".equals(name) || "outFaultInterceptors".equals(name)) {
List<?> list = ctx.getDelegate().parseListElement(el, bean.getBeanDefinition());
bean.addPropertyValue(name, list);
} else if ("features".equals(name) || "providers".equals(name)
|| "schemaLocations".equals(name) || "modelBeans".equals(name)) {
List<?> list = ctx.getDelegate().parseListElement(el, bean.getBeanDefinition());
bean.addPropertyValue(name, list);
} else if ("model".equals(name)) {
List<UserResource> resources = ResourceUtils.getResourcesFromElement(el);
bean.addPropertyValue("modelBeans", resources);
} else {
setFirstChildAsProperty(el, ctx, bean, name);
}
}
}
| CxfRsClientFactoryBeanDefinitionParser |
java | square__javapoet | src/main/java/com/squareup/javapoet/ClassName.java | {
"start": 6823,
"end": 6968
} | class ____ string. This
* method assumes that the input is ASCII and follows typical Java style (lowercase package
* names, UpperCamelCase | name |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/generics/GenericOneToManyTreatTest.java | {
"start": 5139,
"end": 5237
} | class ____ extends Configuration<AItem> {
}
@Entity( name = "AItem" )
public static | AConfiguration |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/condition/EnabledOnOsCondition.java | {
"start": 676,
"end": 1983
} | class ____ extends AbstractOsBasedExecutionCondition<EnabledOnOs> {
EnabledOnOsCondition() {
super(EnabledOnOs.class);
}
@Override
ConditionEvaluationResult evaluateExecutionCondition(EnabledOnOs annotation) {
boolean osSpecified = annotation.value().length > 0;
boolean archSpecified = annotation.architectures().length > 0;
Preconditions.condition(osSpecified || archSpecified,
"You must declare at least one OS or architecture in @EnabledOnOs");
boolean enabled = isEnabledBasedOnOs(annotation) && isEnabledBasedOnArchitecture(annotation);
String reason = createReason(enabled, osSpecified, archSpecified);
return enabled ? ConditionEvaluationResult.enabled(reason)
: ConditionEvaluationResult.disabled(reason, annotation.disabledReason());
}
private boolean isEnabledBasedOnOs(EnabledOnOs annotation) {
OS[] operatingSystems = annotation.value();
if (operatingSystems.length == 0) {
return true;
}
return Arrays.stream(operatingSystems).anyMatch(OS::isCurrentOs);
}
private boolean isEnabledBasedOnArchitecture(EnabledOnOs annotation) {
String[] architectures = annotation.architectures();
if (architectures.length == 0) {
return true;
}
return Arrays.stream(architectures).anyMatch(CURRENT_ARCHITECTURE::equalsIgnoreCase);
}
}
| EnabledOnOsCondition |
java | google__guava | android/guava-tests/test/com/google/common/collect/ListsTest.java | {
"start": 3563,
"end": 4190
} | class ____ implements Iterable<Integer>, Serializable {
@Override
public Iterator<Integer> iterator() {
return SOME_COLLECTION.iterator();
}
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
}
private static final List<Integer> SOME_LIST = Lists.newArrayList(1, 2, 3, 4);
private static final List<Integer> SOME_SEQUENTIAL_LIST = new LinkedList<>(asList(1, 2, 3, 4));
private static final List<String> SOME_STRING_LIST = asList("1", "2", "3", "4");
private static final Function<Number, String> SOME_FUNCTION = new SomeFunction();
private static | SomeIterable |
java | apache__camel | components/camel-aws/camel-aws-bedrock/src/main/java/org/apache/camel/component/aws2/bedrock/runtime/stream/ConverseStreamHandler.java | {
"start": 5111,
"end": 6647
} | class ____ {
private String fullText;
private List<String> chunks;
private String stopReason;
private TokenUsage usage;
private int chunkCount;
private software.amazon.awssdk.services.bedrockruntime.model.GuardrailTrace guardrailTrace;
public String getFullText() {
return fullText;
}
public void setFullText(String fullText) {
this.fullText = fullText;
}
public List<String> getChunks() {
return chunks;
}
public void setChunks(List<String> chunks) {
this.chunks = chunks;
}
public String getStopReason() {
return stopReason;
}
public void setStopReason(String stopReason) {
this.stopReason = stopReason;
}
public TokenUsage getUsage() {
return usage;
}
public void setUsage(TokenUsage usage) {
this.usage = usage;
}
public int getChunkCount() {
return chunkCount;
}
public void setChunkCount(int chunkCount) {
this.chunkCount = chunkCount;
}
public software.amazon.awssdk.services.bedrockruntime.model.GuardrailTrace getGuardrailTrace() {
return guardrailTrace;
}
public void setGuardrailTrace(software.amazon.awssdk.services.bedrockruntime.model.GuardrailTrace guardrailTrace) {
this.guardrailTrace = guardrailTrace;
}
}
}
| StreamMetadata |
java | apache__camel | components/camel-kudu/src/test/java/org/apache/camel/component/kudu/KuduComponentConfigurationTest.java | {
"start": 1065,
"end": 2183
} | class ____ extends CamelTestSupport {
@Test
public void createEndpoint() throws Exception {
String host = "localhost";
String port = "7051";
String tableName = "TableName";
KuduOperations operation = KuduOperations.SCAN;
KuduComponent component = new KuduComponent(this.context());
KuduEndpoint endpoint = (KuduEndpoint) component
.createEndpoint("kudu:" + host + ":" + port + "/" + tableName + "?operation=" + operation);
assertEquals(host, endpoint.getHost(), "Host was not correctly detected. ");
assertEquals(port, endpoint.getPort(), "Port was not correctly detected. ");
assertEquals(tableName, endpoint.getTableName(), "Table name was not correctly detected. ");
assertEquals(operation, endpoint.getOperation(), "Operation was not correctly detected. ");
}
@Test
public void wrongUrl() {
KuduComponent component = new KuduComponent(this.context());
assertThrows(Exception.class,
() -> component.createEndpoint("wrong url"));
}
}
| KuduComponentConfigurationTest |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/functions/FilterFunction.java | {
"start": 1556,
"end": 2253
} | interface ____<T> extends Function, Serializable {
/**
* The filter function that evaluates the predicate.
*
* <p><strong>IMPORTANT:</strong> The system assumes that the function does not modify the
* elements on which the predicate is applied. Violating this assumption can lead to incorrect
* results.
*
* @param value The value to be filtered.
* @return True for values that should be retained, false for values to be filtered out.
* @throws Exception This method may throw exceptions. Throwing an exception will cause the
* operation to fail and may trigger recovery.
*/
boolean filter(T value) throws Exception;
}
| FilterFunction |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/objects/Objects_assertDoesNotHaveSameHashCodeAs_Test.java | {
"start": 1640,
"end": 2816
} | class ____ is computed with the Jedi's name only
Jedi luke = new Jedi("Luke", "Red");
// THEN
objects.assertDoesNotHaveSameHashCodeAs(someInfo(), greenYoda, luke);
objects.assertDoesNotHaveSameHashCodeAs(someInfo(), luke, greenYoda);
objects.assertDoesNotHaveSameHashCodeAs(someInfo(), greenYoda, new Jedi("Luke", "green"));
objects.assertDoesNotHaveSameHashCodeAs(someInfo(), new Jedi("Luke", "green"), greenYoda);
}
@Test
void should_throw_error_if_other_is_null() {
assertThatNullPointerException().isThrownBy(() -> objects.assertDoesNotHaveSameHashCodeAs(someInfo(), greenYoda, null))
.withMessage("The object used to compare actual's hash code with should not be null");
}
@Test
void should_fail_if_actual_is_null() {
// GIVEN
Object actual = null;
// WHEN
var error = expectAssertionError(() -> objects.assertDoesNotHaveSameHashCodeAs(someInfo(), actual, "foo"));
// THEN
then(error).hasMessage(actualIsNull());
}
@Test
void should_fail_if_actual_has_the_same_hash_code_as_other() {
// GIVEN
AssertionInfo info = someInfo();
// WHEN
// Jedi | hashCode |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/pool/ha/selector/RandomDataSourceRecoverThread.java | {
"start": 1024,
"end": 4158
} | class ____ implements Runnable {
public static final int DEFAULT_RECOVER_INTERVAL_SECONDS = 120;
private static final Log LOG = LogFactory.getLog(RandomDataSourceRecoverThread.class);
private RandomDataSourceSelector selector;
private int recoverIntervalSeconds = DEFAULT_RECOVER_INTERVAL_SECONDS;
private int validationSleepSeconds;
public RandomDataSourceRecoverThread(RandomDataSourceSelector selector) {
this.selector = selector;
}
@Override
public void run() {
while (true) {
if (selector != null && selector.getBlacklist() != null
&& !selector.getBlacklist().isEmpty()) {
LOG.info(selector.getBlacklist().size() + " DataSource in blacklist.");
for (DataSource dataSource : selector.getBlacklist()) {
if (!(dataSource instanceof DruidDataSource)) {
continue;
}
tryOneDataSource((DruidDataSource) dataSource);
}
} else if (selector == null) {
break;
}
sleep();
}
}
private void tryOneDataSource(DruidDataSource dataSource) {
if (dataSource == null) {
return;
}
Connection connection = null;
try {
connection = dataSource.getConnection();
sleepBeforeValidation();
dataSource.validateConnection(connection);
LOG.info(dataSource.getName() + " is available now.");
selector.removeBlacklist(dataSource);
} catch (Exception e) {
LOG.warn("DataSource[" + dataSource.getName() + "] is still unavailable. Exception: "
+ e.getMessage());
} finally {
JdbcUtils.close(connection);
}
}
private void sleepBeforeValidation() {
if (validationSleepSeconds > 0) {
try {
if (LOG.isDebugEnabled()) {
LOG.debug("Sleep " + validationSleepSeconds + " second(s) before validation.");
}
Thread.sleep(validationSleepSeconds * 1000L);
} catch (InterruptedException e) {
// ignore
}
}
}
private void sleep() {
try {
Thread.sleep(recoverIntervalSeconds * 1000L);
} catch (InterruptedException e) {
// ignore
}
}
public int getRecoverIntervalSeconds() {
return recoverIntervalSeconds;
}
public void setRecoverIntervalSeconds(int recoverIntervalSeconds) {
this.recoverIntervalSeconds = recoverIntervalSeconds;
}
public int getValidationSleepSeconds() {
return validationSleepSeconds;
}
public void setValidationSleepSeconds(int validationSleepSeconds) {
this.validationSleepSeconds = validationSleepSeconds;
}
public RandomDataSourceSelector getSelector() {
return selector;
}
public void setSelector(RandomDataSourceSelector selector) {
this.selector = selector;
}
}
| RandomDataSourceRecoverThread |
java | apache__maven | impl/maven-core/src/test/java/org/apache/maven/plugin/PluginParameterExceptionTest.java | {
"start": 1168,
"end": 6915
} | class ____ {
private static final String LS = System.lineSeparator();
@Test
void testMissingRequiredStringArrayTypeParameter() {
MojoDescriptor mojoDescriptor = new MojoDescriptor();
mojoDescriptor.setGoal("goal");
PluginDescriptor pluginDescriptor = new PluginDescriptor();
pluginDescriptor.setGoalPrefix("goalPrefix");
pluginDescriptor.setArtifactId("artifactId");
mojoDescriptor.setPluginDescriptor(pluginDescriptor);
Parameter parameter = new Parameter();
parameter.setType("java.lang.String[]");
parameter.setName("toAddresses");
parameter.setRequired(true);
PluginParameterException exception =
new PluginParameterException(mojoDescriptor, Collections.singletonList(parameter));
assertEquals(
"One or more required plugin parameters are invalid/missing for 'goalPrefix:goal'" + LS
+ LS + "[0] Inside the definition for plugin 'artifactId', specify the following:"
+ LS
+ LS + "<configuration>"
+ LS + " ..."
+ LS + " <toAddresses>"
+ LS + " <item>VALUE</item>"
+ LS + " </toAddresses>"
+ LS + "</configuration>."
+ LS,
exception.buildDiagnosticMessage());
}
@Test
void testMissingRequiredCollectionTypeParameter() {
MojoDescriptor mojoDescriptor = new MojoDescriptor();
mojoDescriptor.setGoal("goal");
PluginDescriptor pluginDescriptor = new PluginDescriptor();
pluginDescriptor.setGoalPrefix("goalPrefix");
pluginDescriptor.setArtifactId("artifactId");
mojoDescriptor.setPluginDescriptor(pluginDescriptor);
Parameter parameter = new Parameter();
parameter.setType("java.util.List");
parameter.setName("toAddresses");
parameter.setRequired(true);
PluginParameterException exception =
new PluginParameterException(mojoDescriptor, Collections.singletonList(parameter));
assertEquals(
"One or more required plugin parameters are invalid/missing for 'goalPrefix:goal'" + LS
+ LS + "[0] Inside the definition for plugin 'artifactId', specify the following:"
+ LS
+ LS + "<configuration>"
+ LS + " ..."
+ LS + " <toAddresses>"
+ LS + " <item>VALUE</item>"
+ LS + " </toAddresses>"
+ LS + "</configuration>."
+ LS,
exception.buildDiagnosticMessage());
}
@Test
void testMissingRequiredMapTypeParameter() {
MojoDescriptor mojoDescriptor = new MojoDescriptor();
mojoDescriptor.setGoal("goal");
PluginDescriptor pluginDescriptor = new PluginDescriptor();
pluginDescriptor.setGoalPrefix("goalPrefix");
pluginDescriptor.setArtifactId("artifactId");
mojoDescriptor.setPluginDescriptor(pluginDescriptor);
Parameter parameter = new Parameter();
parameter.setType("java.util.Map");
parameter.setName("toAddresses");
parameter.setRequired(true);
PluginParameterException exception =
new PluginParameterException(mojoDescriptor, Collections.singletonList(parameter));
assertEquals(
"One or more required plugin parameters are invalid/missing for 'goalPrefix:goal'" + LS
+ LS + "[0] Inside the definition for plugin 'artifactId', specify the following:"
+ LS
+ LS + "<configuration>"
+ LS + " ..."
+ LS + " <toAddresses>"
+ LS + " <KEY>VALUE</KEY>"
+ LS + " </toAddresses>"
+ LS + "</configuration>."
+ LS,
exception.buildDiagnosticMessage());
}
@Test
void testMissingRequiredPropertiesTypeParameter() {
MojoDescriptor mojoDescriptor = new MojoDescriptor();
mojoDescriptor.setGoal("goal");
PluginDescriptor pluginDescriptor = new PluginDescriptor();
pluginDescriptor.setGoalPrefix("goalPrefix");
pluginDescriptor.setArtifactId("artifactId");
mojoDescriptor.setPluginDescriptor(pluginDescriptor);
Parameter parameter = new Parameter();
parameter.setType("java.util.Properties");
parameter.setName("toAddresses");
parameter.setRequired(true);
PluginParameterException exception =
new PluginParameterException(mojoDescriptor, Collections.singletonList(parameter));
assertEquals(
"One or more required plugin parameters are invalid/missing for 'goalPrefix:goal'" + LS
+ LS + "[0] Inside the definition for plugin 'artifactId', specify the following:"
+ LS
+ LS + "<configuration>"
+ LS + " ..."
+ LS + " <toAddresses>"
+ LS + " <property>"
+ LS + " <name>KEY</name>"
+ LS + " <value>VALUE</value>"
+ LS + " </property>"
+ LS + " </toAddresses>"
+ LS + "</configuration>."
+ LS,
exception.buildDiagnosticMessage());
}
}
| PluginParameterExceptionTest |
java | quarkusio__quarkus | extensions/hibernate-validator/runtime/src/main/java/io/quarkus/hibernate/validator/runtime/HibernateBeanValidationConfigValidator.java | {
"start": 1691,
"end": 2247
} | class ____ {
private static ValidatorFactory validatorFactory;
private static Validator validator;
static void initialize(ValidatorFactory validatorFactory) {
ConfigValidatorHolder.validatorFactory = validatorFactory;
ConfigValidatorHolder.validator = validatorFactory.getValidator();
}
static ValidatorFactory getValidatorFactory() {
return validatorFactory;
}
static Validator getValidator() {
return validator;
}
}
}
| ConfigValidatorHolder |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/ArangodbComponentBuilderFactory.java | {
"start": 1422,
"end": 1937
} | interface ____ {
/**
* ArangoDb (camel-arangodb)
* Perform operations on ArangoDb when used as a Document Database, or as a
* Graph Database
*
* Category: database
* Since: 3.5
* Maven coordinates: org.apache.camel:camel-arangodb
*
* @return the dsl builder
*/
static ArangodbComponentBuilder arangodb() {
return new ArangodbComponentBuilderImpl();
}
/**
* Builder for the ArangoDb component.
*/
| ArangodbComponentBuilderFactory |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng4321CliUsesPluginMgmtConfigTest.java | {
"start": 1040,
"end": 1981
} | class ____ extends AbstractMavenIntegrationTestCase {
/**
* Test that configuration from plugin management also applies to goals that are invoked directly from the
* CLI even when the invoked plugin is neither explicitly present in the build/plugins section nor part of
* the lifecycle mappings for the project's packaging.
*
* @throws Exception in case of failure
*/
@Test
public void testit() throws Exception {
File testDir = extractResources("/mng-4321");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.addCliArgument("org.apache.maven.its.plugins:maven-it-plugin-log-file:2.1-SNAPSHOT:reset");
verifier.execute();
verifier.verifyErrorFreeLog();
verifier.verifyFilePresent("target/passed.log");
}
}
| MavenITmng4321CliUsesPluginMgmtConfigTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/ChoiceWithSimpleExpressionTest.java | {
"start": 894,
"end": 1378
} | class ____ extends ChoiceTest {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:start").choice().when(simple("${header.foo} == 'bar'")).to("mock:x")
.when(simple("${in.header.foo} == 'cheese'")).to("mock:y").otherwise().to("mock:z")
.end().to("mock:end");
}
};
}
}
| ChoiceWithSimpleExpressionTest |
java | quarkusio__quarkus | extensions/hibernate-envers/deployment/src/test/java/io/quarkus/hibernate/orm/envers/config/EnversTestValidityStrategyFieldNameOverridesResource.java | {
"start": 303,
"end": 1334
} | class ____ extends AbstractEnversResource {
@GET
public String getValidityStrategyFieldNameOverrides() {
boolean isRevEndTimestampIncluded = getConfiguration().isRevisionEndTimestampEnabled();
if (!isRevEndTimestampIncluded) {
return "Expected audit_strategy_validity_store_revend_timestamp to be true but was false";
}
String revEndFieldName = getConfiguration().getRevisionEndFieldName();
if (!revEndFieldName.equals("REV_END")) {
return "Expected audit_strategy_validity_end_rev_field_name to be REV_END but was: " + revEndFieldName;
}
String revEndTimestampFieldName = getConfiguration().getRevisionEndTimestampFieldName();
if (!revEndTimestampFieldName.equals("REV_END_TSTMP")) {
return "Expected audit_strategy_validity_revend_timestamp_field_name to be REV_END_TSTMP but was: "
+ revEndTimestampFieldName;
}
return "OK";
}
}
| EnversTestValidityStrategyFieldNameOverridesResource |
java | google__dagger | javatests/dagger/internal/codegen/OptionalBindingTest.java | {
"start": 5164,
"end": 5525
} | interface ____ {",
" Optional<String> optionalString();",
"}");
Source childModule =
CompilerTests.javaSource(
"test.ChildModule",
"package test;",
"",
"import dagger.BindsOptionalOf;",
"import dagger.Module;",
"",
"@Module",
" | Child |
java | quarkusio__quarkus | integration-tests/maven/src/test/java/io/quarkus/maven/it/DevMojoIT.java | {
"start": 20060,
"end": 30940
} | class
____.write(Paths.get(testDir.toString(), "src/main/java/org/acme/AnotherClass.java"),
"package org.acme;\nclass ItDoesntMatter{}".getBytes());
// change back to hello
source = new File(testDir, "src/main/java/org/acme/HelloResource.java");
filter(source, Collections.singletonMap("return \"" + uuid + "\";", "return \"hello\";"));
// Wait until we get "hello"
await()
.pollDelay(100, TimeUnit.MILLISECONDS)
.atMost(TestUtils.getDefaultTimeout(), TimeUnit.MINUTES)
.until(() -> devModeClient.getHttpResponse("/app/hello").contains("hello"));
//verify that this was not instrumentation based reload
Assertions.assertNotEquals(secondUUid, devModeClient.getHttpResponse("/app/uuid"));
secondUUid = devModeClient.getHttpResponse("/app/uuid");
}
@Test
public void testThatSourceChangesAreDetectedOnPomChange() throws Exception {
testDir = initProject("projects/classic", "projects/project-classic-run-src-and-pom-change");
runAndCheck(false);
// Edit a Java file too
final File javaSource = new File(testDir, "src/main/java/org/acme/HelloResource.java");
final String uuid = UUID.randomUUID().toString();
filter(javaSource, Collections.singletonMap("return \"hello\";", "return \"hello " + uuid + "\";"));
// edit the application.properties too
final File applicationProps = new File(testDir, "src/main/resources/application.properties");
filter(applicationProps, Collections.singletonMap("greeting=bonjour", "greeting=" + uuid + ""));
// Now edit the pom.xml to trigger the dev mode restart
final File pomSource = new File(testDir, "pom.xml");
filter(pomSource, Collections.singletonMap("<!-- insert test dependencies here -->",
" <dependency>\n" +
" <groupId>io.quarkus</groupId>\n" +
" <artifactId>quarkus-smallrye-openapi</artifactId>\n" +
" </dependency>"));
// Wait until we get the updated responses
await()
.pollDelay(100, TimeUnit.MILLISECONDS)
.atMost(TestUtils.getDefaultTimeout(), TimeUnit.MINUTES)
.until(() -> {
System.out.println(devModeClient.getHttpResponse("/app/hello"));
return devModeClient.getHttpResponse("/app/hello").contains("hello " + uuid);
});
await()
.pollDelay(100, TimeUnit.MILLISECONDS)
.atMost(TestUtils.getDefaultTimeout(), TimeUnit.MINUTES)
.until(() -> devModeClient.getHttpResponse("/app/hello/greeting").contains(uuid));
}
@Test
public void testAlternatePom() throws Exception {
testDir = initProject("projects/classic", "projects/project-classic-alternate-pom");
File pom = new File(testDir, "pom.xml");
if (!pom.exists()) {
throw new IllegalStateException("Failed to locate project's pom.xml at " + pom);
}
final String alternatePomName = "alternate-pom.xml";
File alternatePom = new File(testDir, alternatePomName);
if (alternatePom.exists()) {
alternatePom.delete();
}
Files.copy(pom.toPath(), alternatePom.toPath());
// Now edit the pom.xml to trigger the dev mode restart
filter(alternatePom, Map.of("<!-- insert test dependencies here -->",
" <dependency>\n" +
" <groupId>io.quarkus</groupId>\n" +
" <artifactId>quarkus-smallrye-openapi</artifactId>\n" +
" </dependency>"));
runAndCheck();
assertThat(devModeClient.getHttpResponse("/q/openapi", true)).contains("Resource Not Found");
shutdownTheApp();
runAndCheck("-f", alternatePomName);
devModeClient.getHttpResponse("/q/openapi").contains("hello");
}
@Test
public void testThatTheApplicationIsReloadedOnPomChange() throws MavenInvocationException, IOException {
testDir = initProject("projects/classic", "projects/project-classic-run-pom-change");
runAndCheck();
// Edit the pom.xml.
File source = new File(testDir, "pom.xml");
filter(source, Collections.singletonMap("<!-- insert test dependencies here -->",
" <dependency>\n" +
" <groupId>io.quarkus</groupId>\n" +
" <artifactId>quarkus-smallrye-openapi</artifactId>\n" +
" </dependency>"));
// Wait until we get "uuid"
await()
.pollDelay(100, TimeUnit.MILLISECONDS)
.atMost(TestUtils.getDefaultTimeout(), TimeUnit.MINUTES)
.until(() -> devModeClient.getHttpResponse("/q/openapi").contains("hello"));
}
@Test
public void testProjectWithExtension() throws MavenInvocationException, IOException {
testDir = getTargetDir("projects/project-with-extension");
runAndCheck();
final List<String> artifacts = getNonReloadableArtifacts(
Files.readAllLines(testDir.toPath().resolve("build-project-with-extension.log")));
assertTrue(artifacts.contains("- org.acme:acme-quarkus-ext:1.0-SNAPSHOT"));
assertTrue(artifacts.contains("- org.acme:acme-quarkus-ext-deployment:1.0-SNAPSHOT"));
assertTrue(artifacts.contains("- org.acme:acme-common:1.0-SNAPSHOT"));
assertTrue(artifacts.contains("- org.acme:acme-common-transitive:1.0-SNAPSHOT"));
assertEquals(4, artifacts.size());
}
protected List<String> getNonReloadableArtifacts(final List<String> log) {
final List<String> artifacts = new ArrayList<>();
boolean inWarn = false;
for (String line : log) {
if (inWarn) {
if (line.equals(
"The artifacts above appear to be either dependencies of non-reloadable application dependencies or Quarkus extensions")) {
break;
}
artifacts.add(line);
} else if (line.equals(
"[WARNING] [io.quarkus.bootstrap.devmode.DependenciesFilter] Live reload was disabled for the following project artifacts:")) {
inWarn = true;
}
}
return artifacts;
}
@Test
public void testPomReload() throws MavenInvocationException, IOException {
testDir = initProject("projects/project-with-extension", "projects/pom-reload");
// add the extra dependency to the application module
filter(new File(testDir, "runner/pom.xml"), Map.of(
"<!-- begin comment", "<!-- begin comment -->",
"end comment -->", "<!-- end comment -->"));
// launch the application
run(false);
var localDeps = parseArtifactCoords(devModeClient.getHttpResponse("/app/hello/local-modules"));
assertThat(localDeps).containsExactlyInAnyOrder(
ArtifactCoords.jar("org.acme.extra", "acme-extra", "1.0-SNAPSHOT"),
ArtifactCoords.jar("org.acme", "acme-common-transitive", "1.0-SNAPSHOT"),
ArtifactCoords.jar("org.acme", "acme-common", "1.0-SNAPSHOT"),
ArtifactCoords.jar("org.acme", "acme-library", "1.0-SNAPSHOT"),
ArtifactCoords.jar("org.acme", "acme-quarkus-ext-deployment", "1.0-SNAPSHOT"),
ArtifactCoords.jar("org.acme", "acme-quarkus-ext", "1.0-SNAPSHOT"));
// remove the extra dependency from the application module
filter(new File(testDir, "runner/pom.xml"), Map.of(
"<!-- begin comment -->", "<!-- begin comment",
"<!-- end comment -->", "end comment -->"));
await()
.pollDelay(100, TimeUnit.MILLISECONDS)
.atMost(TestUtils.getDefaultTimeout(), TimeUnit.MINUTES)
.until(() -> {
final String response = devModeClient.getHttpResponse("/app/hello/local-modules");
System.out.println("local-modules: " + response);
return !response.contains("acme-extra");
});
// add the extra dependency to a dependency module
filter(new File(testDir, "library/pom.xml"), Map.of(
"<!-- begin comment", "<!-- begin comment -->",
"end comment -->", "<!-- end comment -->"));
await()
.pollDelay(100, TimeUnit.MILLISECONDS)
.atMost(TestUtils.getDefaultTimeout(), TimeUnit.MINUTES)
.until(() -> {
final String response = devModeClient.getHttpResponse("/app/hello/local-modules");
System.out.println("local-modules: " + response);
return response.contains("acme-extra");
});
}
private static Set<ArtifactCoords> parseArtifactCoords(String s) {
if (s.charAt(0) == '[' && s.charAt(s.length() - 1) == ']') {
s = s.substring(1, s.length() - 1);
}
var arr = s.split(",");
final Set<ArtifactCoords> result = new HashSet<>(arr.length);
for (var i : arr) {
result.add(ArtifactCoords.fromString(i.trim()));
}
return result;
}
@Test
public void testRestClientCustomHeadersExtension() throws MavenInvocationException, IOException {
testDir = getTargetDir("projects/rest-client-custom-headers-extension");
runAndCheck();
final List<String> artifacts = getNonReloadableArtifacts(
Files.readAllLines(testDir.toPath().resolve("build-rest-client-custom-headers-extension.log")));
assertTrue(artifacts.contains("- org.acme:rest-client-custom-headers:1.0-SNAPSHOT"));
assertTrue(artifacts.contains("- org.acme:rest-client-custom-headers-deployment:1.0-SNAPSHOT"));
assertEquals(2, artifacts.size());
assertThat(devModeClient.getHttpResponse("/app/frontend")).isEqualTo("CustomValue1 CustomValue2");
}
@Test
void testTestProfilesAreHandled()
throws MavenInvocationException, IOException {
// This project is a somewhat complex project with a mix of profile-d tests and plain tests, and some tests which exercise that the settings in the profile is honoured
testDir = initProject("projects/test-test-profile");
runAndCheck();
ContinuousTestingMavenTestUtils testingTestUtils = new ContinuousTestingMavenTestUtils();
ContinuousTestingMavenTestUtils.TestStatus results = testingTestUtils.waitForNextCompletion();
//check that the tests in both | Files |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/writing/StaticFactoryInstanceSupplier.java | {
"start": 1176,
"end": 2507
} | class ____ implements FrameworkInstanceSupplier {
private final FrameworkInstanceSupplier frameworkInstanceSupplier;
@AssistedInject
StaticFactoryInstanceSupplier(
@Assisted ContributionBinding binding,
FrameworkInstanceBindingRepresentation.Factory
frameworkInstanceBindingRepresentationFactory) {
this.frameworkInstanceSupplier = () -> staticFactoryCreation(binding);
}
@Override
public MemberSelect memberSelect() {
return frameworkInstanceSupplier.memberSelect();
}
// TODO(bcorso): no-op members injector is currently handled in
// `MembersInjectorProviderCreationExpression`, we should inline the logic here so we won't create
// an extra field for it.
private MemberSelect staticFactoryCreation(ContributionBinding binding) {
switch (binding.kind()) {
case MULTIBOUND_MAP:
return StaticMemberSelects.emptyMapFactory((MultiboundMapBinding) binding);
case MULTIBOUND_SET:
return StaticMemberSelects.emptySetFactory((MultiboundSetBinding) binding);
case PROVISION:
case INJECTION:
return StaticMemberSelects.factoryCreateNoArgumentMethod(binding);
default:
throw new AssertionError(String.format("Invalid binding kind: %s", binding.kind()));
}
}
@AssistedFactory
static | StaticFactoryInstanceSupplier |
java | elastic__elasticsearch | x-pack/plugin/wildcard/src/yamlRestTest/java/org/elasticsearch/xpack/wildcard/WildcardClientYamlTestSuiteIT.java | {
"start": 687,
"end": 1284
} | class ____ extends ESClientYamlSuiteTestCase {
public WildcardClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
@ClassRule
public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("wildcard").build();
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
}
| WildcardClientYamlTestSuiteIT |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-jsonb/deployment/src/test/java/io/quarkus/resteasy/reactive/jsonb/deployment/test/Person.java | {
"start": 117,
"end": 502
} | class ____ {
private String first;
@NotBlank(message = "Title cannot be blank")
private String last;
public String getFirst() {
return first;
}
public void setFirst(String first) {
this.first = first;
}
public String getLast() {
return last;
}
public void setLast(String last) {
this.last = last;
}
}
| Person |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java | {
"start": 23443,
"end": 24855
} | class ____ extends Numeric {
private final Numeric delegate;
private final AggregationScript.LeafFactory script;
public WithScript(Numeric delegate, AggregationScript.LeafFactory script) {
this.delegate = delegate;
this.script = script;
}
@Override
public boolean isFloatingPoint() {
return true; // even if the underlying source produces longs, scripts can change them to doubles
}
@Override
public boolean needsScores() {
return script.needs_score();
}
@Override
public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException {
return new Bytes.WithScript.BytesValues(delegate.bytesValues(context), script.newInstance(context));
}
@Override
public SortedNumericLongValues longValues(LeafReaderContext context) throws IOException {
return new LongValues(delegate.longValues(context), script.newInstance(context));
}
@Override
public SortedNumericDoubleValues doubleValues(LeafReaderContext context) throws IOException {
return new DoubleValues(delegate.doubleValues(context), script.newInstance(context));
}
static final | WithScript |
java | apache__camel | components/camel-test/camel-test-spring-junit5/src/main/java/org/apache/camel/test/spring/junit5/CamelSpringBootExecutionListener.java | {
"start": 1164,
"end": 6157
} | class ____ extends AbstractTestExecutionListener {
protected static final ThreadLocal<ConfigurableApplicationContext> threadApplicationContext = new ThreadLocal<>();
private static final Logger LOG = LoggerFactory.getLogger(CamelSpringBootExecutionListener.class);
private static final String PROPERTY_SKIP_STARTING_CAMEL_CONTEXT = "skipStartingCamelContext";
/**
* Returns the precedence used by Spring to choose the appropriate execution order of test listeners.
*
* See {@link SpringTestExecutionListenerSorter#getPrecedence(Class)} for more.
*/
@Override
public int getOrder() {
return SpringTestExecutionListenerSorter.getPrecedence(getClass());
}
@Override
public void beforeTestClass(TestContext testContext) throws Exception {
// prevent other extensions to start the Camel context
preventContextStart();
}
@Override
public void prepareTestInstance(TestContext testContext) throws Exception {
LOG.info("CamelSpringBootExecutionListener preparing: {}", testContext.getTestClass());
Class<?> testClass = testContext.getTestClass();
// need to prepare this before we load spring application context
CamelAnnotationsHandler.handleDisableJmx(testClass);
CamelAnnotationsHandler.handleExcludeRoutes(testClass);
// prevent the Camel context to be started to be able to extend it.
preventContextStart();
ConfigurableApplicationContext context = (ConfigurableApplicationContext) testContext.getApplicationContext();
CamelAnnotationsHandler.handleUseOverridePropertiesWithPropertiesComponent(context, testClass);
// Post CamelContext(s) instantiation but pre CamelContext(s) start setup
CamelAnnotationsHandler.handleProvidesBreakpoint(context, testClass);
CamelAnnotationsHandler.handleShutdownTimeout(context, testClass);
CamelAnnotationsHandler.handleMockEndpoints(context, testClass);
CamelAnnotationsHandler.handleMockEndpointsAndSkip(context, testClass);
CamelAnnotationsHandler.handleStubEndpoints(context, testClass);
CamelAnnotationsHandler.handleAutoStartupExclude(context, testClass);
System.clearProperty(PROPERTY_SKIP_STARTING_CAMEL_CONTEXT);
SpringCamelContext.setNoStart(false);
}
/**
* Sets the {@link SpringCamelContext#setNoStart(boolean)} and the system property
* <code>skipStartingCamelContext</code>to <code>true</code> to let us customizing the Camel context with
* {@link CamelAnnotationsHandler} before it has been started. It's needed as early as possible to prevent other
* extensions to start it <b>and</b> before every test run.
*/
private void preventContextStart() {
SpringCamelContext.setNoStart(true);
System.setProperty(PROPERTY_SKIP_STARTING_CAMEL_CONTEXT, "true");
}
@Override
public void beforeTestMethod(TestContext testContext) throws Exception {
LOG.info("CamelSpringBootExecutionListener before: {}.{}", testContext.getTestClass(),
testContext.getTestMethod().getName());
Class<?> testClass = testContext.getTestClass();
String testName = testContext.getTestMethod().getName();
ConfigurableApplicationContext context = (ConfigurableApplicationContext) testContext.getApplicationContext();
threadApplicationContext.set(context);
// mark Camel to be startable again and start Camel
System.clearProperty(PROPERTY_SKIP_STARTING_CAMEL_CONTEXT);
// route coverage/dump need to know the test method
CamelAnnotationsHandler.handleRouteCoverageEnable(context, testClass, s -> testName);
CamelAnnotationsHandler.handleRouteDumpEnable(context, testClass, s -> testName);
LOG.info("Initialized CamelSpringBootExecutionListener now ready to start CamelContext");
CamelAnnotationsHandler.handleCamelContextStartup(context, testClass);
}
@Override
public void afterTestMethod(TestContext testContext) throws Exception {
LOG.info("CamelSpringBootExecutionListener after: {}.{}", testContext.getTestClass(),
testContext.getTestMethod().getName());
Class<?> testClass = testContext.getTestClass();
String testName = testContext.getTestMethod().getName();
ConfigurableApplicationContext context = threadApplicationContext.get();
if (context != null && context.isRunning()) {
// dump route coverage for each test method so its accurate
// statistics
// even if spring application context is running (i.e. its not
// dirtied per test method)
CamelAnnotationsHandler.handleRouteCoverageDump(context, testClass, s -> testName);
// also dump route as either xml or yaml
CamelAnnotationsHandler.handleRouteDump(context, testClass, s -> testName);
}
}
}
| CamelSpringBootExecutionListener |
java | quarkusio__quarkus | extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/staticmethods/InterceptedStaticMethodsProcessor.java | {
"start": 11833,
"end": 22687
} | class
____ initAllMethodName = "init_static_intercepted_methods";
for (Map.Entry<DotName, List<InterceptedStaticMethodBuildItem>> entry : interceptedStaticMethodsMap.entrySet()) {
String initializerName = entry.getKey() + INITIALIZER_CLASS_SUFFIX;
baseToGeneratedInitializer.put(entry.getKey(), initializerName);
gizmo.class_(initializerName, cc -> {
cc.final_();
List<MethodDesc> initMethods = new ArrayList<>();
for (InterceptedStaticMethodBuildItem interceptedStaticMethod : entry.getValue()) {
initMethods.add(generateInit(beanArchiveIndex.getIndex(), cc, interceptedStaticMethod,
reflectiveMethods, phase.getBeanProcessor()));
generateForward(cc, interceptedStaticMethod);
}
cc.staticMethod(initAllMethodName, mc -> {
mc.body(bc -> {
for (MethodDesc initMethod : initMethods) {
bc.invokeStatic(initMethod);
}
bc.return_();
});
});
});
}
// Transform all declaring classes
// For each intercepted static methods create a copy and modify the original method to delegate to the relevant initializer
for (Map.Entry<DotName, List<InterceptedStaticMethodBuildItem>> entry : interceptedStaticMethodsMap.entrySet()) {
transformers.produce(new BytecodeTransformerBuildItem(entry.getKey().toString(),
new InterceptedStaticMethodsEnhancer(baseToGeneratedInitializer.get(entry.getKey()), entry.getValue())));
}
// Generate a global initializer that calls all other initializers; this initializer must be loaded by the runtime ClassLoader
gizmo.class_(InterceptedStaticMethodsRecorder.INITIALIZER_CLASS_NAME, cc -> {
cc.final_();
cc.staticInitializer(bc -> {
for (String initializerClass : baseToGeneratedInitializer.values()) {
bc.invokeStatic(ClassMethodDesc.of(ClassDesc.of(initializerClass), initAllMethodName, void.class));
}
bc.return_();
});
});
}
private MethodDesc generateInit(IndexView index, io.quarkus.gizmo2.creator.ClassCreator cc,
InterceptedStaticMethodBuildItem interceptedStaticMethod,
BuildProducer<ReflectiveMethodBuildItem> reflectiveMethods, BeanProcessor beanProcessor) {
MethodInfo btMethod = interceptedStaticMethod.getMethod();
List<InterceptorInfo> interceptors = interceptedStaticMethod.getInterceptors();
Set<AnnotationInstance> btBindings = interceptedStaticMethod.getBindings();
// init_interceptMe_hash()
return cc.staticMethod("init_" + btMethod.name() + "_" + interceptedStaticMethod.getHash(), mc -> {
mc.private_();
mc.body(b0 -> {
LocalVar creationalContext = b0.localVar("creationalContext",
b0.new_(ConstructorDesc.of(CreationalContextImpl.class, Contextual.class),
Const.ofNull(Contextual.class)));
// 1. Interceptor chain
LocalVar chain = b0.localVar("chain", b0.blockExpr(List.class, b1 -> {
if (interceptors.size() == 1) {
// List<InvocationContextImpl.InterceptorInvocation> chain = List.of(...);
b1.yield(b1.listOf(createInterceptorInvocation(interceptors.get(0), b1, creationalContext)));
} else {
// List<InvocationContextImpl.InterceptorInvocation> chain = new ArrayList<>();
LocalVar list = b1.localVar("list", b1.new_(ArrayList.class));
for (InterceptorInfo interceptor : interceptors) {
// m1Chain.add(InvocationContextImpl.InterceptorInvocation.aroundInvoke(p3,interceptorInstanceMap.get(InjectableInterceptor.getIdentifier())))
b1.withList(list).add(createInterceptorInvocation(interceptor, b1, creationalContext));
}
b1.yield(list);
}
}));
// 2. Method method = Reflections.findMethod(...)
LocalVar rtMethod = b0.localVar("method", b0.blockExpr(Reflection2Gizmo.classDescOf(Method.class), b1 -> {
// TODO load from TCCL
Expr className = Const.of(classDescOf(btMethod.declaringClass()));
Expr name = Const.of(btMethod.name());
// TODO load from TCCL
Expr params = b1.newArray(Class.class, btMethod.parameterTypes(), type -> Const.of(classDescOf(type)));
b1.yield(b1.invokeStatic(REFLECTIONS_FIND_METHOD, className, name, params));
}));
// 3. Interceptor bindings
LocalVar rtBindings = b0.localVar("bindings", b0.blockExpr(Set.class, b1 -> {
if (btBindings.size() == 1) {
b1.yield(b1.setOf(createBindingLiteral(index, b1, btBindings.iterator().next(),
beanProcessor.getAnnotationLiteralProcessor())));
} else {
LocalVar set = b1.localVar("set", b1.new_(HashSet.class));
for (AnnotationInstance btBinding : btBindings) {
b1.withSet(set).add(createBindingLiteral(index, b1, btBinding,
beanProcessor.getAnnotationLiteralProcessor()));
}
b1.yield(set);
}
}));
// 4. Forwarding function
// BiFunction<Object, InvocationContext, Object> forward = (ignored, ctx) -> Foo.interceptMe_original((java.lang.String)ctx.getParameters()[0])
Expr forwardingFunc = b0.lambda(BiFunction.class, lc -> {
lc.parameter("ignored", 0);
ParamVar ctx = lc.parameter("ctx", 1);
lc.body(lbc -> {
ClassDesc[] params;
Expr[] args;
if (btMethod.parametersCount() == 0) {
params = new ClassDesc[0];
args = new Expr[0];
} else {
params = new ClassDesc[btMethod.parametersCount()];
args = new Expr[btMethod.parametersCount()];
LocalVar ctxParams = lbc.localVar("params", lbc.invokeInterface(
MethodDesc.of(InvocationContext.class, "getParameters", Object[].class), ctx));
for (int i = 0; i < args.length; i++) {
params[i] = classDescOf(btMethod.parameterType(i));
args[i] = ctxParams.elem(i);
}
}
ClassDesc declaringClass = classDescOf(interceptedStaticMethod.getTarget());
String methodName = btMethod.name() + ORIGINAL_METHOD_COPY_SUFFIX;
MethodTypeDesc methodType = MethodTypeDesc.of(classDescOf(btMethod.returnType()), params);
MethodDesc targetMethod = interceptedStaticMethod.getTarget().isInterface()
? InterfaceMethodDesc.of(declaringClass, methodName, methodType)
: ClassMethodDesc.of(declaringClass, methodName, methodType);
Expr ret = lbc.invokeStatic(targetMethod, args);
lbc.return_(ret.isVoid() ? Const.ofNull(Object.class) : ret);
});
});
// Now create metadata for the given intercepted method
Expr metadata = b0.new_(INTERCEPTED_METHOD_METADATA_CONSTRUCTOR,
chain, rtMethod, rtBindings, forwardingFunc);
// Call InterceptedStaticMethods.register()
b0.invokeStatic(INTERCEPTED_STATIC_METHODS_REGISTER, Const.of(interceptedStaticMethod.getHash()),
metadata);
b0.return_();
// Needed when running on native image
reflectiveMethods.produce(new ReflectiveMethodBuildItem(getClass().getName(), btMethod));
});
});
}
private Expr createInterceptorInvocation(InterceptorInfo interceptor, BlockCreator bc,
LocalVar parentCreationalContext) {
Expr arc = bc.invokeStatic(ARC_REQUIRE_CONTAINER);
Expr bean = bc.invokeInterface(ARC_CONTAINER_BEAN, arc, Const.of(interceptor.getIdentifier()));
LocalVar interceptorBean = bc.localVar("interceptor", bc.cast(bean, InjectableInterceptor.class));
Expr creationalContext = bc.invokeStatic(CREATIONAL_CTX_CHILD, parentCreationalContext);
Expr interceptorInstance = bc.invokeInterface(INJECTABLE_REF_PROVIDER_GET, interceptorBean,
creationalContext);
return bc.invokeStatic(INTERCEPTOR_INVOCATION_AROUND_INVOKE, interceptorBean, interceptorInstance);
}
private Expr createBindingLiteral(IndexView index, BlockCreator bc,
AnnotationInstance binding, AnnotationLiteralProcessor annotationLiterals) {
ClassInfo bindingClass = index.getClassByName(binding.name());
return annotationLiterals.create(bc, bindingClass, binding);
}
private void generateForward(io.quarkus.gizmo2.creator.ClassCreator cc,
InterceptedStaticMethodBuildItem interceptedStaticMethod) {
MethodInfo method = interceptedStaticMethod.getMethod();
cc.staticMethod(interceptedStaticMethod.getForwardingMethodName(), mc -> {
mc.returning(classDescOf(method.returnType()));
List<ParamVar> params = new ArrayList<>();
for (MethodParameterInfo param : method.parameters()) {
String name = param.name();
if (name == null) {
name = "p" + param.position();
}
params.add(mc.parameter(name, classDescOf(param.type())));
}
mc.body(bc -> {
Expr args = bc.newArray(Object.class, params);
Expr result = bc.invokeStatic(INTERCEPTED_STATIC_METHODS_AROUND_INVOKE,
Const.of(interceptedStaticMethod.getHash()), args);
bc.return_(method.returnType().kind() == Type.Kind.VOID ? Const.ofVoid() : result);
});
});
}
static | String |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/enums/EnumDefaultReadTest.java | {
"start": 421,
"end": 477
} | enum ____ {
ZERO,
ONE;
}
| SimpleEnum |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/data/Lobs.java | {
"start": 1102,
"end": 3705
} | class ____ {
private Integer id1;
private Integer id2;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
id1 = scope.fromTransaction( em -> {
LobTestEntity lte = new LobTestEntity( "abc", new byte[] {0, 1, 2}, new char[] {'x', 'y', 'z'} );
em.persist( lte );
return lte.getId();
} );
scope.inTransaction( em -> {
LobTestEntity lte = em.find( LobTestEntity.class, id1 );
lte.setStringLob( "def" );
lte.setByteLob( new byte[] {3, 4, 5} );
lte.setCharLob( new char[] {'h', 'i', 'j'} );
} );
// this creates a revision history for a Lob-capable entity but the change is on a non-audited
// field and so it should only generate 1 revision, the initial persist.
id2 = scope.fromTransaction( em -> {
LobTestEntity lte2 = new LobTestEntity( "abc", new byte[] {0, 1, 2}, new char[] {'x', 'y', 'z'} );
lte2.setData( "Hi" );
em.persist( lte2 );
return lte2.getId();
} );
scope.inTransaction( em -> {
LobTestEntity lte2 = em.find( LobTestEntity.class, id2 );
lte2.setData( "Hello World" );
} );
}
@Test
public void testRevisionsCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( Arrays.asList( 1, 2 ), auditReader.getRevisions( LobTestEntity.class, id1 ) );
} );
}
@Test
public void testHistoryOfId1(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
LobTestEntity ver1 = new LobTestEntity( id1, "abc", new byte[] {0, 1, 2}, new char[] {'x', 'y', 'z'} );
LobTestEntity ver2 = new LobTestEntity( id1, "def", new byte[] {3, 4, 5}, new char[] {'h', 'i', 'j'} );
assertEquals( ver1, auditReader.find( LobTestEntity.class, id1, 1 ) );
assertEquals( ver2, auditReader.find( LobTestEntity.class, id1, 2 ) );
} );
}
@Test
@JiraKey(value = "HHH-10734")
public void testRevisionsCountsForAuditedArraysWithNoChanges(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( Arrays.asList( 3 ), auditReader.getRevisions( LobTestEntity.class, id2 ) );
} );
}
@Test
@JiraKey(value = "HHH-10734")
public void testHistoryOfId2(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
LobTestEntity ver1 = new LobTestEntity( id2, "abc", new byte[] {0, 1, 2}, new char[] {'x', 'y', 'z'} );
assertEquals( ver1, auditReader.find( LobTestEntity.class, id2, 3 ) );
} );
}
}
| Lobs |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/YearMonthConverterTest.java | {
"start": 2497,
"end": 2938
} | class ____ implements Serializable {
@Id
private Long id;
@Convert( converter = YearMonthConverter.class )
private YearMonth yearMonth;
public DemoEntity() {
}
public DemoEntity(Long id, YearMonth yearMonth) {
this.id = id;
this.yearMonth = yearMonth;
}
public Long getId() {
return id;
}
public YearMonth getYearMonth() {
return yearMonth;
}
}
@Converter( autoApply = true )
public static | DemoEntity |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/MisplacedScopeAnnotationsTest.java | {
"start": 5256,
"end": 6029
} | class ____ {
@Provides
@Singleton
@Named("bar")
int something(@Named("bar") Integer bar) {
return 42;
}
}
""")
.doTest();
}
@Test
public void negativeCase_scopeAnnotationIsAlsoQualifier() {
compilationHelper
.addSourceLines(
"Foo.java",
"""
import dagger.Provides;
import dagger.Module;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Qualifier;
import javax.inject.Scope;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@Module
| Foo |
java | spring-projects__spring-security | oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/registration/ClientRegistrations.java | {
"start": 2182,
"end": 14073
} | class ____ {
private static final String OIDC_METADATA_PATH = "/.well-known/openid-configuration";
private static final String OAUTH_METADATA_PATH = "/.well-known/oauth-authorization-server";
private static final RestTemplate rest = new RestTemplate();
static {
SimpleClientHttpRequestFactory requestFactory = new SimpleClientHttpRequestFactory();
requestFactory.setConnectTimeout(30_000);
requestFactory.setReadTimeout(30_000);
rest.setRequestFactory(requestFactory);
}
private static final ParameterizedTypeReference<Map<String, Object>> typeReference = new ParameterizedTypeReference<>() {
};
private ClientRegistrations() {
}
/**
* Creates a {@link ClientRegistration.Builder} using the provided map representation
* of an <a href=
* "https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationResponse">OpenID
* Provider Configuration Response</a> to initialize the
* {@link ClientRegistration.Builder}.
*
* <p>
* This is useful when the OpenID Provider Configuration is not available at a
* well-known location, or if custom validation is needed for the issuer location
* (e.g. if the issuer is only accessible from a back-channel URI that is different
* from the issuer value in the configuration).
* </p>
*
* <p>
* Example usage:
* </p>
* <pre>
* RequestEntity<Void> request = RequestEntity.get(metadataEndpoint).build();
* ParameterizedTypeReference<Map<String, Object>> typeReference = new ParameterizedTypeReference<>() {};
* Map<String, Object> configuration = rest.exchange(request, typeReference).getBody();
* // Validate configuration.get("issuer") as per in the OIDC specification
* ClientRegistration registration = ClientRegistrations.fromOidcConfiguration(configuration)
* .clientId("client-id")
* .clientSecret("client-secret")
* .build();
* </pre>
* @param the OpenID Provider configuration map
* @return the {@link ClientRegistration} built from the configuration
*/
public static ClientRegistration.Builder fromOidcConfiguration(Map<String, Object> configuration) {
OIDCProviderMetadata metadata = parse(configuration, OIDCProviderMetadata::parse);
ClientRegistration.Builder builder = withProviderConfiguration(metadata, metadata.getIssuer().getValue());
builder.jwkSetUri(metadata.getJWKSetURI().toASCIIString());
if (metadata.getUserInfoEndpointURI() != null) {
builder.userInfoUri(metadata.getUserInfoEndpointURI().toASCIIString());
}
return builder;
}
/**
* Creates a {@link ClientRegistration.Builder} using the provided <a href=
* "https://openid.net/specs/openid-connect-core-1_0.html#IssuerIdentifier">Issuer</a>
* by making an <a href=
* "https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationRequest">OpenID
* Provider Configuration Request</a> and using the values in the <a href=
* "https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationResponse">OpenID
* Provider Configuration Response</a> to initialize the
* {@link ClientRegistration.Builder}.
*
* <p>
* For example, if the issuer provided is "https://example.com", then an "OpenID
* Provider Configuration Request" will be made to
* "https://example.com/.well-known/openid-configuration". The result is expected to
* be an "OpenID Provider Configuration Response".
* </p>
*
* <p>
* Example usage:
* </p>
* <pre>
* ClientRegistration registration = ClientRegistrations.fromOidcIssuerLocation("https://example.com")
* .clientId("client-id")
* .clientSecret("client-secret")
* .build();
* </pre>
* @param issuer the <a href=
* "https://openid.net/specs/openid-connect-core-1_0.html#IssuerIdentifier">Issuer</a>
* @return a {@link ClientRegistration.Builder} that was initialized by the OpenID
* Provider Configuration.
*/
public static ClientRegistration.Builder fromOidcIssuerLocation(String issuer) {
Assert.hasText(issuer, "issuer cannot be empty");
return getBuilder(issuer, oidc(issuer));
}
/**
* Creates a {@link ClientRegistration.Builder} using the provided <a href=
* "https://openid.net/specs/openid-connect-core-1_0.html#IssuerIdentifier">Issuer</a>
* by querying three different discovery endpoints serially, using the values in the
* first successful response to initialize. If an endpoint returns anything other than
* a 200 or a 4xx, the method will exit without attempting subsequent endpoints.
*
* The three endpoints are computed as follows, given that the {@code issuer} is
* composed of a {@code host} and a {@code path}:
*
* <ol>
* <li>{@code host/.well-known/openid-configuration/path}, as defined in
* <a href="https://tools.ietf.org/html/rfc8414#section-5">RFC 8414's Compatibility
* Notes</a>.</li>
* <li>{@code issuer/.well-known/openid-configuration}, as defined in <a href=
* "https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationRequest">
* OpenID Provider Configuration</a>.</li>
* <li>{@code host/.well-known/oauth-authorization-server/path}, as defined in
* <a href="https://tools.ietf.org/html/rfc8414#section-3.1">Authorization Server
* Metadata Request</a>.</li>
* </ol>
*
* Note that the second endpoint is the equivalent of calling
* {@link ClientRegistrations#fromOidcIssuerLocation(String)}.
*
* <p>
* Example usage:
* </p>
* <pre>
* ClientRegistration registration = ClientRegistrations.fromIssuerLocation("https://example.com")
* .clientId("client-id")
* .clientSecret("client-secret")
* .build();
* </pre>
* @param issuer
* @return a {@link ClientRegistration.Builder} that was initialized by one of the
* described endpoints
*/
public static ClientRegistration.Builder fromIssuerLocation(String issuer) {
Assert.hasText(issuer, "issuer cannot be empty");
return getBuilder(issuer, oidc(issuer), oidcRfc8414(issuer), oauth(issuer));
}
static Supplier<ClientRegistration.Builder> oidc(String issuer) {
UriComponents uri = oidcUri(issuer);
// @formatter:on
return () -> {
RequestEntity<Void> request = RequestEntity.get(uri.toUriString()).build();
Map<String, Object> configuration = rest.exchange(request, typeReference).getBody();
OIDCProviderMetadata metadata = parse(configuration, OIDCProviderMetadata::parse);
ClientRegistration.Builder builder = withProviderConfiguration(metadata, issuer)
.jwkSetUri(metadata.getJWKSetURI().toASCIIString());
if (metadata.getUserInfoEndpointURI() != null) {
builder.userInfoUri(metadata.getUserInfoEndpointURI().toASCIIString());
}
return builder;
};
}
static UriComponents oidcUri(String issuer) {
UriComponents uri = UriComponentsBuilder.fromUriString(issuer).build();
// @formatter:off
return UriComponentsBuilder.newInstance().uriComponents(uri)
.replacePath(uri.getPath() + OIDC_METADATA_PATH)
.build();
}
static Supplier<ClientRegistration.Builder> oidcRfc8414(String issuer) {
UriComponents uri = oidcRfc8414Uri(issuer);
// @formatter:on
return getRfc8414Builder(issuer, uri);
}
static UriComponents oidcRfc8414Uri(String issuer) {
UriComponents uri = UriComponentsBuilder.fromUriString(issuer).build();
// @formatter:off
return UriComponentsBuilder.newInstance().uriComponents(uri)
.replacePath(OIDC_METADATA_PATH + uri.getPath())
.build();
}
static Supplier<ClientRegistration.Builder> oauth(String issuer) {
UriComponents uri = oauthUri(issuer);
return getRfc8414Builder(issuer, uri);
}
static UriComponents oauthUri(String issuer) {
UriComponents uri = UriComponentsBuilder.fromUriString(issuer).build();
// @formatter:off
return UriComponentsBuilder.newInstance().uriComponents(uri)
.replacePath(OAUTH_METADATA_PATH + uri.getPath())
.build();
// @formatter:on
}
private static Supplier<ClientRegistration.Builder> getRfc8414Builder(String issuer, UriComponents uri) {
return () -> {
RequestEntity<Void> request = RequestEntity.get(uri.toUriString()).build();
Map<String, Object> configuration = rest.exchange(request, typeReference).getBody();
AuthorizationServerMetadata metadata = parse(configuration, AuthorizationServerMetadata::parse);
ClientRegistration.Builder builder = withProviderConfiguration(metadata, issuer);
URI jwkSetUri = metadata.getJWKSetURI();
if (jwkSetUri != null) {
builder.jwkSetUri(jwkSetUri.toASCIIString());
}
String userinfoEndpoint = (String) configuration.get("userinfo_endpoint");
if (userinfoEndpoint != null) {
builder.userInfoUri(userinfoEndpoint);
}
return builder;
};
}
@SafeVarargs
private static ClientRegistration.Builder getBuilder(String issuer,
Supplier<ClientRegistration.Builder>... suppliers) {
String errorMessage = "Unable to resolve Configuration with the provided Issuer of \"" + issuer + "\"";
List<String> errors = new ArrayList<>();
for (Supplier<ClientRegistration.Builder> supplier : suppliers) {
try {
return supplier.get();
}
catch (HttpClientErrorException ex) {
if (!ex.getStatusCode().is4xxClientError()) {
throw ex;
}
errors.add(ex.getMessage());
// else try another endpoint
}
catch (IllegalArgumentException | IllegalStateException ex) {
throw ex;
}
catch (RuntimeException ex) {
throw new IllegalArgumentException(errorMessage, ex);
}
}
if (!errors.isEmpty()) {
throw new IllegalArgumentException(errorMessage + ", errors: " + errors);
}
throw new IllegalArgumentException(errorMessage);
}
private static <T> T parse(Map<String, Object> body, ThrowingFunction<JSONObject, T, ParseException> parser) {
try {
return parser.apply(new JSONObject(body));
}
catch (ParseException ex) {
throw new RuntimeException(ex);
}
}
private static ClientRegistration.Builder withProviderConfiguration(AuthorizationServerMetadata metadata,
String issuer) {
String metadataIssuer = metadata.getIssuer().getValue();
Assert.state(issuer.equals(metadataIssuer),
() -> "The Issuer \"" + metadataIssuer + "\" provided in the configuration metadata did "
+ "not match the requested issuer \"" + issuer + "\"");
String name = URI.create(issuer).getHost();
ClientAuthenticationMethod method = getClientAuthenticationMethod(metadata.getTokenEndpointAuthMethods());
Map<String, Object> configurationMetadata = new LinkedHashMap<>(metadata.toJSONObject());
// @formatter:off
return ClientRegistration.withRegistrationId(name)
.userNameAttributeName(IdTokenClaimNames.SUB)
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.clientAuthenticationMethod(method)
.redirectUri("{baseUrl}/{action}/oauth2/code/{registrationId}")
.authorizationUri((metadata.getAuthorizationEndpointURI() != null) ? metadata.getAuthorizationEndpointURI().toASCIIString() : null)
.providerConfigurationMetadata(configurationMetadata)
.tokenUri(metadata.getTokenEndpointURI().toASCIIString())
.issuerUri(issuer)
.clientName(issuer);
// @formatter:on
}
private static ClientAuthenticationMethod getClientAuthenticationMethod(
List<com.nimbusds.oauth2.sdk.auth.ClientAuthenticationMethod> metadataAuthMethods) {
if (metadataAuthMethods == null || metadataAuthMethods
.contains(com.nimbusds.oauth2.sdk.auth.ClientAuthenticationMethod.CLIENT_SECRET_BASIC)) {
// If null, the default includes client_secret_basic
return ClientAuthenticationMethod.CLIENT_SECRET_BASIC;
}
if (metadataAuthMethods.contains(com.nimbusds.oauth2.sdk.auth.ClientAuthenticationMethod.CLIENT_SECRET_POST)) {
return ClientAuthenticationMethod.CLIENT_SECRET_POST;
}
if (metadataAuthMethods.contains(com.nimbusds.oauth2.sdk.auth.ClientAuthenticationMethod.NONE)) {
return ClientAuthenticationMethod.NONE;
}
return null;
}
private | ClientRegistrations |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesCancellationTests.java | {
"start": 4333,
"end": 5114
} | class ____ extends BucketCollector {
public AtomicInteger count = new AtomicInteger();
@Override
public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx) throws IOException {
return new LeafBucketCollector() {
@Override
public void collect(int doc, long owningBucketOrd) throws IOException {
count.incrementAndGet();
}
};
}
@Override
public void preCollection() throws IOException {
}
@Override
public void postCollection() throws IOException {
}
@Override
public ScoreMode scoreMode() {
return ScoreMode.COMPLETE;
}
}
}
| CountingBucketCollector |
java | apache__camel | components/camel-kafka/src/main/java/org/apache/camel/component/kafka/consumer/support/batching/KafkaRecordBatchingProcessor.java | {
"start": 2354,
"end": 14462
} | class ____ implements Synchronization {
private final ExceptionHandler exceptionHandler;
private final int size;
public CommitSynchronization(ExceptionHandler exceptionHandler, int size) {
this.exceptionHandler = exceptionHandler;
this.size = size;
}
@Override
public void onComplete(Exchange exchange) {
LOG.debug("Calling commit on {} exchanges using {}", size, commitManager.getClass().getSimpleName());
commitManager.commit();
}
@Override
public void onFailure(Exchange exchange) {
Exception cause = exchange.getException();
if (cause != null) {
// Check if breakOnFirstError should be applied
if (configuration.isBreakOnFirstError()) {
if (LOG.isWarnEnabled()) {
LOG.warn("Error during processing {} from batch due to {}", exchange, cause.getMessage());
LOG.warn("Will break on first error in batch processing mode as configured.");
}
// Commit the batch where the error occurred (similar to streaming mode behavior)
// This allows the consumer to move forward and potentially retry the problematic batch
LOG.debug("Calling commit on {} exchanges using {} due to breakOnFirstError", size,
commitManager.getClass().getSimpleName());
commitManager.commit();
} else {
// Standard error handling - just log and continue (original behavior)
exceptionHandler.handleException(
"Error during processing exchange. Will attempt to process the message on next poll.", exchange,
cause);
}
} else {
LOG.warn(
"Skipping auto-commit on the batch because processing the exchanged has failed and the error was not correctly handled");
}
}
}
public KafkaRecordBatchingProcessor(KafkaConfiguration configuration, Processor processor, CommitManager commitManager) {
this.configuration = configuration;
this.processor = processor;
this.commitManager = commitManager;
this.exchangeList = new ArrayBlockingQueue<>(configuration.getMaxPollRecords());
}
public Exchange toExchange(
KafkaConsumer camelKafkaConsumer, TopicPartition topicPartition, ConsumerRecord<Object, Object> consumerRecord) {
final Exchange exchange = camelKafkaConsumer.createExchange(false);
Message message = exchange.getMessage();
setupExchangeMessage(message, consumerRecord);
propagateHeaders(configuration, consumerRecord, exchange);
if (configuration.isAllowManualCommit()) {
KafkaManualCommit manual = commitManager.getManualCommit(exchange, topicPartition, consumerRecord);
message.setHeader(KafkaConstants.MANUAL_COMMIT, manual);
}
return exchange;
}
public ProcessingResult processExchange(KafkaConsumer camelKafkaConsumer, ConsumerRecords<Object, Object> consumerRecords) {
LOG.debug("There's {} records to process ... max poll is set to {}", consumerRecords.count(),
configuration.getMaxPollRecords());
// Aggregate all consumer records in a single exchange
if (exchangeList.isEmpty()) {
timeoutWatch.takenAndRestart();
}
// If timeout has expired, process current batch but continue to handle new records
if (hasExpiredRecords(consumerRecords)) {
LOG.debug(
"The polling timeout has expired with {} records in cache. Dispatching the incomplete batch for processing",
exchangeList.size());
// poll timeout has elapsed, so check for expired records
ProcessingResult result = processBatch(camelKafkaConsumer);
exchangeList.clear();
timeoutWatch.takenAndRestart(); // restart timer after processing expired batch
// if breakOnFirstError was hit, return immediately
if (result.isBreakOnErrorHit()) {
return result;
}
}
// Always add new records after handling any expiration
for (ConsumerRecord<Object, Object> consumerRecord : consumerRecords) {
TopicPartition tp = new TopicPartition(consumerRecord.topic(), consumerRecord.partition());
Exchange childExchange = toExchange(camelKafkaConsumer, tp, consumerRecord);
exchangeList.add(childExchange);
if (exchangeList.size() >= configuration.getMaxPollRecords()) {
ProcessingResult result = processBatch(camelKafkaConsumer);
exchangeList.clear();
timeoutWatch.takenAndRestart(); // restart timer after batch processed
// if breakOnFirstError was hit, return immediately
if (result.isBreakOnErrorHit()) {
return result;
}
}
}
// None of the states provided by the processing result are relevant for batch processing. We can simply return the
// default state
return ProcessingResult.newUnprocessed();
}
private boolean hasExpiredRecords(ConsumerRecords<Object, Object> consumerRecords) {
// no records in batch
if (exchangeList.isEmpty()) {
return false;
}
// timeout is only triggered if we no new records
boolean timeout = consumerRecords.isEmpty() && timeoutWatch.taken() >= configuration.getPollTimeoutMs();
// interval is triggered if enabled, and it has been X time since last batch completion
boolean interval = configuration.getBatchingIntervalMs() != null
&& intervalWatch.taken() >= configuration.getBatchingIntervalMs();
return timeout || interval;
}
private ProcessingResult processBatch(KafkaConsumer camelKafkaConsumer) {
intervalWatch.restart();
// Create the bundle exchange
Exchange exchange = camelKafkaConsumer.createExchange(false);
Message message = exchange.getMessage();
var exchanges = exchangeList.stream().toList();
message.setBody(exchanges);
ProcessingResult result = ProcessingResult.newUnprocessed();
try {
if (configuration.isAllowManualCommit()) {
Exchange last = exchanges.isEmpty() ? null : exchanges.get(exchanges.size() - 1);
if (last != null) {
message.setHeader(KafkaConstants.MANUAL_COMMIT, last.getMessage().getHeader(KafkaConstants.MANUAL_COMMIT));
}
result = manualCommitResultProcessing(camelKafkaConsumer, exchange, exchanges);
} else {
result = autoCommitResultProcessing(camelKafkaConsumer, exchange, exchanges);
}
} finally {
// Release the exchange
camelKafkaConsumer.releaseExchange(exchange, false);
}
return result;
}
/*
* The flow to execute when using auto-commit
*/
private ProcessingResult autoCommitResultProcessing(
KafkaConsumer camelKafkaConsumer, Exchange exchange, java.util.List<Exchange> exchanges) {
ExceptionHandler exceptionHandler = camelKafkaConsumer.getExceptionHandler();
CommitSynchronization commitSynchronization = new CommitSynchronization(exceptionHandler, exchanges.size());
exchange.getExchangeExtension().addOnCompletion(commitSynchronization);
try {
processor.process(exchange);
} catch (Exception e) {
exchange.setException(e);
}
if (exchange.getException() != null) {
// For auto-commit mode with breakOnFirstError, we let the CommitSynchronization handle the commit
// but we need to return the appropriate ProcessingResult to trigger reconnection
if (configuration.isBreakOnFirstError()) {
// Find the first exchange with data to provide context for the ProcessingResult
Exchange firstExchange = exchanges.isEmpty() ? null : exchanges.get(0);
if (firstExchange != null) {
Message message = firstExchange.getMessage();
String topic = (String) message.getHeader(KafkaConstants.TOPIC);
Integer partition = (Integer) message.getHeader(KafkaConstants.PARTITION);
Long offset = (Long) message.getHeader(KafkaConstants.OFFSET);
if (topic != null && partition != null && offset != null) {
return new ProcessingResult(true, true, topic, partition, offset);
}
}
// Fallback if no record info available
return new ProcessingResult(true, true);
}
// If breakOnFirstError is not enabled, let the CommitSynchronization.onFailure() handle it normally
}
return ProcessingResult.newUnprocessed();
}
/*
* The flow to execute when the integrations perform manual commit on their own
*/
private ProcessingResult manualCommitResultProcessing(
KafkaConsumer camelKafkaConsumer, Exchange exchange, java.util.List<Exchange> exchanges) {
try {
processor.process(exchange);
} catch (Exception e) {
exchange.setException(e);
}
if (exchange.getException() != null) {
ExceptionHandler exceptionHandler = camelKafkaConsumer.getExceptionHandler();
boolean breakOnErrorHit = processException(exchange, exceptionHandler);
if (breakOnErrorHit) {
// Find the first exchange with data to provide context
Exchange firstExchange = exchanges.isEmpty() ? null : exchanges.get(0);
if (firstExchange != null) {
Message message = firstExchange.getMessage();
String topic = (String) message.getHeader(KafkaConstants.TOPIC);
Integer partition = (Integer) message.getHeader(KafkaConstants.PARTITION);
Long offset = (Long) message.getHeader(KafkaConstants.OFFSET);
if (topic != null && partition != null && offset != null) {
return new ProcessingResult(true, true, topic, partition, offset);
}
}
// Fallback if no record info available
return new ProcessingResult(true, true);
}
}
return ProcessingResult.newUnprocessed();
}
private boolean processException(Exchange exchange, ExceptionHandler exceptionHandler) {
// processing failed due to an unhandled exception, what should we do
if (configuration.isBreakOnFirstError()) {
// we are failing and we should break out
if (LOG.isWarnEnabled()) {
Exception exc = exchange.getException();
LOG.warn("Error during processing {} from batch due to {}", exchange, exc.getMessage());
LOG.warn("Will break on first error in batch processing mode as configured.");
}
// when route uses NOOP Commit Manager it will rely
// on the route implementation to explicitly commit offset
// when route uses Synch/Asynch Commit Manager it will
// ALWAYS commit the offset for the failing record
// and will ALWAYS retry it
commitManager.commit();
// break out of processing
return true;
} else {
// will handle/log the exception and then continue to next
exceptionHandler.handleException("Error during processing", exchange, exchange.getException());
}
return false;
}
}
| CommitSynchronization |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java | {
"start": 78701,
"end": 79002
} | class ____<T> {",
" public Test(GenericWithImmutableParam<T> param) { }",
"}")
.doTest();
}
@Test
public void typecast_noViolation() {
withImmutableTypeParameterGeneric()
.addSourceLines(
"MutableClass.java",
"""
| Test |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/array/BeanToArrayTest_date.java | {
"start": 2877,
"end": 2947
} | class ____ {
public Date v1;
public Date v2;
}
}
| Model |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/samples/standalone/ReactiveReturnTypeTests.java | {
"start": 1674,
"end": 2171
} | class ____ {
@Test // SPR-16869
public void sseWithFlux() throws Exception {
MockMvc mockMvc = MockMvcBuilders.standaloneSetup(ReactiveController.class).build();
MvcResult mvcResult = mockMvc.perform(get("/spr16869"))
.andExpect(request().asyncStarted())
.andExpect(status().isOk())
.andReturn();
mockMvc.perform(asyncDispatch(mvcResult))
.andExpect(content().string("data:event0\n\ndata:event1\n\ndata:event2\n\n"));
}
@RestController
static | ReactiveReturnTypeTests |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RSet.java | {
"start": 959,
"end": 10688
} | interface ____<V> extends Set<V>, RExpirable, RSetAsync<V>, RSortable<Set<V>> {
/**
* Adds all elements contained in the specified collection.
* Returns number of added elements.
*
* @param c - collection of elements to add
* @return number of added elements
*/
int addAllCounted(Collection<? extends V> c);
/**
* Removes all elements contained in the specified collection.
* Returns number of removed elements.
*
* @param c - collection of elements to add
* @return number of removed elements
*/
int removeAllCounted(Collection<? extends V> c);
/**
* Returns <code>RCountDownLatch</code> instance associated with <code>value</code>
*
* @param value - set value
* @return RCountDownLatch object
*/
RCountDownLatch getCountDownLatch(V value);
/**
* Returns <code>RPermitExpirableSemaphore</code> instance associated with <code>value</code>
*
* @param value - set value
* @return RPermitExpirableSemaphore object
*/
RPermitExpirableSemaphore getPermitExpirableSemaphore(V value);
/**
* Returns <code>RSemaphore</code> instance associated with <code>value</code>
*
* @param value - set value
* @return RSemaphore object
*/
RSemaphore getSemaphore(V value);
/**
* Returns <code>RLock</code> instance associated with <code>value</code>
*
* @param value - set value
* @return RLock object
*/
RLock getFairLock(V value);
/**
* Returns <code>RReadWriteLock</code> instance associated with <code>value</code>
*
* @param value - set value
* @return RReadWriteLock object
*/
RReadWriteLock getReadWriteLock(V value);
/**
* Returns lock instance associated with <code>value</code>
*
* @param value - set value
* @return RLock object
*/
RLock getLock(V value);
/**
* Returns stream of elements fetches elements in a batch.
* Batch size is defined by <code>count</code> param.
*
* @param count - size of elements batch
* @return stream of elements
*/
Stream<V> stream(int count);
/**
* Returns stream of elements fetches elements in a batch.
* Batch size is defined by <code>count</code> param.
* If pattern is not null then only elements match this pattern are loaded.
*
* @param pattern - search pattern
* @param count - size of elements batch
* @return stream of elements
*/
Stream<V> stream(String pattern, int count);
/**
* Returns stream of elements.
* If pattern is not null then only elements match this pattern are loaded.
*
* @param pattern - search pattern
* @return stream of elements
*/
Stream<V> stream(String pattern);
/**
* Returns elements iterator fetches elements in a batch.
* Batch size is defined by <code>count</code> param.
*
* @param count - size of elements batch
* @return iterator
*/
Iterator<V> iterator(int count);
/**
* Returns elements iterator fetches elements in a batch.
* Batch size is defined by <code>count</code> param.
* If pattern is not null then only elements match this pattern are loaded.
*
* @param pattern - search pattern
* @param count - size of elements batch
* @return iterator
*/
Iterator<V> iterator(String pattern, int count);
/**
* Returns elements iterator.
* If <code>pattern</code> is not null then only elements match this pattern are loaded.
*
* @param pattern - search pattern
* @return iterator
*/
Iterator<V> iterator(String pattern);
/**
* Returns element iterator that can be shared across multiple applications.
* Creating multiple iterators on the same object with this method will result in a single shared iterator.
* See {@linkplain RSet#distributedIterator(String, String, int)} for creating different iterators.
* @param count batch size
* @return shared elements iterator
*/
Iterator<V> distributedIterator(int count);
/**
* Returns iterator over elements that match specified pattern. Iterator can be shared across multiple applications.
* Creating multiple iterators on the same object with this method will result in a single shared iterator.
* See {@linkplain RSet#distributedIterator(String, String, int)} for creating different iterators.
* @param pattern element pattern
* @return shared elements iterator
*/
Iterator<V> distributedIterator(String pattern);
/**
* Returns iterator over elements that match specified pattern. Iterator can be shared across multiple applications.
* Creating multiple iterators on the same object with this method will result in a single shared iterator.
* Iterator name must be resolved to the same hash slot as set name.
* @param pattern element pattern
* @param count batch size
* @param iteratorName redis object name to which cursor will be saved
* @return shared elements iterator
*/
Iterator<V> distributedIterator(String iteratorName, String pattern, int count);
/**
* Returns <code>RMapReduce</code> object associated with this object
*
* @param <KOut> output key
* @param <VOut> output value
* @return MapReduce instance
*/
<KOut, VOut> RCollectionMapReduce<V, KOut, VOut> mapReduce();
/**
* Removes and returns random elements limited by <code>amount</code>
*
* @param amount of random elements
* @return random elements
*/
Set<V> removeRandom(int amount);
/**
* Removes and returns random element
*
* @return random element
*/
V removeRandom();
/**
* Returns random element
*
* @return random element
*/
V random();
/**
* Returns random elements from set limited by <code>count</code>
*
* @param count - values amount to return
* @return random elements
*/
Set<V> random(int count);
/**
* Move a member from this set to the given destination set in.
*
* @param destination the destination set
* @param member the member to move
* @return true if the element is moved, false if the element is not a
* member of this set or no operation was performed
*/
boolean move(String destination, V member);
/**
* Read all elements at once
*
* @return values
*/
Set<V> readAll();
/**
* Union sets specified by name and write to current set.
* If current set already exists, it is overwritten.
*
* @param names - name of sets
* @return size of union
*/
int union(String... names);
/**
* Union sets specified by name with current set
* without current set state change.
*
* @param names - name of sets
* @return values
*/
Set<V> readUnion(String... names);
/**
* Diff sets specified by name and write to current set.
* If current set already exists, it is overwritten.
*
* @param names - name of sets
* @return values
*/
int diff(String... names);
/**
* Diff sets specified by name with current set.
* Without current set state change.
*
* @param names - name of sets
* @return values
*/
Set<V> readDiff(String... names);
/**
* Intersection sets specified by name and write to current set.
* If current set already exists, it is overwritten.
*
* @param names - name of sets
* @return size of intersection
*/
int intersection(String... names);
/**
* Intersection sets specified by name with current set
* without current set state change.
*
* @param names - name of sets
* @return values
*/
Set<V> readIntersection(String... names);
/**
* Counts elements of set as a result of sets intersection with current set.
* <p>
* Requires <b>Redis 7.0.0 and higher.</b>
*
* @param names - name of sets
* @return amount of elements
*/
Integer countIntersection(String... names);
/**
* Counts elements of set as a result of sets intersection with current set.
* <p>
* Requires <b>Redis 7.0.0 and higher.</b>
*
* @param names - name of sets
* @param limit - sets intersection limit
* @return amount of elements
*/
Integer countIntersection(int limit, String... names);
/**
* Tries to add elements only if none of them in set.
*
* @param values - values to add
* @return <code>true</code> if elements successfully added,
* otherwise <code>false</code>.
*/
boolean tryAdd(V... values);
/**
* Check if each element is contained in the specified collection.
* Returns contained elements.
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param c - collection to check
* @return contained elements
*/
List<V> containsEach(Collection<V> c);
/**
* Adds object event listener
*
* @see org.redisson.api.listener.TrackingListener
* @see org.redisson.api.listener.SetAddListener
* @see org.redisson.api.listener.SetRemoveListener
* @see org.redisson.api.listener.SetRemoveRandomListener
* @see org.redisson.api.ExpiredObjectListener
* @see org.redisson.api.DeletedObjectListener
*
* @param listener - object event listener
* @return listener id
*/
int addListener(ObjectListener listener);
}
| RSet |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetention.java | {
"start": 804,
"end": 3610
} | class ____ the {@link DataStreamGlobalRetentionSettings}.
*/
public record DataStreamGlobalRetention(@Nullable TimeValue defaultRetention, @Nullable TimeValue maxRetention) implements Writeable {
public static final TimeValue MIN_RETENTION_VALUE = TimeValue.timeValueSeconds(10);
/**
* @param defaultRetention the default retention or null if it's undefined
* @param maxRetention the max retention or null if it's undefined
* @throws IllegalArgumentException when the default retention is greater than the max retention.
*/
public DataStreamGlobalRetention(TimeValue defaultRetention, TimeValue maxRetention) {
if (defaultRetention != null && maxRetention != null && defaultRetention.getMillis() > maxRetention.getMillis()) {
throw new IllegalArgumentException(
"Default global retention ["
+ defaultRetention.getStringRep()
+ "] cannot be greater than the max global retention ["
+ maxRetention.getStringRep()
+ "]."
);
}
if (validateRetentionValue(defaultRetention) == false || validateRetentionValue(maxRetention) == false) {
throw new IllegalArgumentException("Global retention values should be greater than " + MIN_RETENTION_VALUE.getStringRep());
}
this.defaultRetention = defaultRetention;
this.maxRetention = maxRetention;
}
/**
* Helper method that creates a global retention object or returns null in case both retentions are null
*/
@Nullable
public static DataStreamGlobalRetention create(@Nullable TimeValue defaultRetention, @Nullable TimeValue maxRetention) {
if (defaultRetention == null && maxRetention == null) {
return null;
}
return new DataStreamGlobalRetention(defaultRetention, maxRetention);
}
private boolean validateRetentionValue(@Nullable TimeValue retention) {
return retention == null || retention.getMillis() >= MIN_RETENTION_VALUE.getMillis();
}
public static DataStreamGlobalRetention read(StreamInput in) throws IOException {
return new DataStreamGlobalRetention(in.readOptionalTimeValue(), in.readOptionalTimeValue());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalTimeValue(defaultRetention);
out.writeOptionalTimeValue(maxRetention);
}
@Override
public String toString() {
return "DataStreamGlobalRetention{"
+ "defaultRetention="
+ (defaultRetention == null ? "null" : defaultRetention.getStringRep())
+ ", maxRetention="
+ (maxRetention == null ? "null" : maxRetention.getStringRep())
+ '}';
}
}
| for |
java | apache__maven | compat/maven-compat/src/test/java/org/apache/maven/artifact/resolver/TestMavenWorkspaceReader.java | {
"start": 1184,
"end": 2655
} | class ____ implements MavenWorkspaceReader {
static final String REPO_LAYOUT = "test";
static final String REPO_URL = "https://test/me";
static final String REPO_ID = "custom";
static final String GROUP_ID = "org.apache.maven";
static final String ARTIFACT_ID = "this.is.a.test";
static final String VERSION = "99.99";
private static final WorkspaceRepository WORKSPACE_REPOSITORY = new WorkspaceRepository(REPO_LAYOUT);
@Override
public WorkspaceRepository getRepository() {
return WORKSPACE_REPOSITORY;
}
@Override
public File findArtifact(Artifact artifact) {
return null;
}
@Override
public List<String> findVersions(Artifact artifact) {
return Collections.emptyList();
}
@Override
public Model findModel(Artifact artifact) {
if (GROUP_ID.equals(artifact.getGroupId())
&& ARTIFACT_ID.equals(artifact.getArtifactId())
&& VERSION.equals(artifact.getVersion())) {
Model m = new Model();
m.setArtifactId(ARTIFACT_ID);
m.setGroupId(GROUP_ID);
m.setVersion(VERSION);
Repository repository = new Repository();
repository.setId(REPO_ID);
repository.setUrl(REPO_URL);
repository.setLayout(REPO_LAYOUT);
m.getRepositories().add(repository);
return m;
}
return null;
}
}
| TestMavenWorkspaceReader |
java | grpc__grpc-java | api/src/main/java/io/grpc/LoadBalancer.java | {
"start": 37300,
"end": 45743
} | class ____ {
/**
* Creates a Subchannel, which is a logical connection to the given group of addresses which are
* considered equivalent. The {@code attrs} are custom attributes associated with this
* Subchannel, and can be accessed later through {@link Subchannel#getAttributes
* Subchannel.getAttributes()}.
*
* <p>The LoadBalancer is responsible for closing unused Subchannels, and closing all
* Subchannels within {@link #shutdown}.
*
* <p>It must be called from {@link #getSynchronizationContext the Synchronization Context}
*
* @return Must return a valid Subchannel object, may not return null.
*
* @since 1.22.0
*/
public Subchannel createSubchannel(CreateSubchannelArgs args) {
throw new UnsupportedOperationException();
}
/**
* Create an out-of-band channel for the LoadBalancer’s own RPC needs, e.g., talking to an
* external load-balancer service.
*
* <p>The LoadBalancer is responsible for closing unused OOB channels, and closing all OOB
* channels within {@link #shutdown}.
*
* @since 1.4.0
*/
public abstract ManagedChannel createOobChannel(EquivalentAddressGroup eag, String authority);
/**
* Create an out-of-band channel for the LoadBalancer's own RPC needs, e.g., talking to an
* external load-balancer service. This version of the method allows multiple EAGs, so different
* addresses can have different authorities.
*
* <p>The LoadBalancer is responsible for closing unused OOB channels, and closing all OOB
* channels within {@link #shutdown}.
* */
public ManagedChannel createOobChannel(List<EquivalentAddressGroup> eag,
String authority) {
throw new UnsupportedOperationException();
}
/**
* Updates the addresses used for connections in the {@code Channel} that was created by {@link
* #createOobChannel(EquivalentAddressGroup, String)}. This is superior to {@link
* #createOobChannel(EquivalentAddressGroup, String)} when the old and new addresses overlap,
* since the channel can continue using an existing connection.
*
* @throws IllegalArgumentException if {@code channel} was not returned from {@link
* #createOobChannel}
* @since 1.4.0
*/
public void updateOobChannelAddresses(ManagedChannel channel, EquivalentAddressGroup eag) {
throw new UnsupportedOperationException();
}
/**
* Updates the addresses with a new EAG list. Connection is continued when old and new addresses
* overlap.
* */
public void updateOobChannelAddresses(ManagedChannel channel,
List<EquivalentAddressGroup> eag) {
throw new UnsupportedOperationException();
}
/**
* Creates an out-of-band channel for LoadBalancer's own RPC needs, e.g., talking to an external
* load-balancer service, that is specified by a target string. See the documentation on
* {@link ManagedChannelBuilder#forTarget} for the format of a target string.
*
* <p>The target string will be resolved by a {@link NameResolver} created according to the
* target string.
*
* <p>The LoadBalancer is responsible for closing unused OOB channels, and closing all OOB
* channels within {@link #shutdown}.
*
* @since 1.20.0
*/
public ManagedChannel createResolvingOobChannel(String target) {
return createResolvingOobChannelBuilder(target).build();
}
/**
* Creates an out-of-band channel builder for LoadBalancer's own RPC needs, e.g., talking to an
* external load-balancer service, that is specified by a target string. See the documentation
* on {@link ManagedChannelBuilder#forTarget} for the format of a target string.
*
* <p>The target string will be resolved by a {@link NameResolver} created according to the
* target string.
*
* <p>The returned oob-channel builder defaults to use the same authority and ChannelCredentials
* (without bearer tokens) as the parent channel's for authentication. This is different from
* {@link #createResolvingOobChannelBuilder(String, ChannelCredentials)}.
*
* <p>The LoadBalancer is responsible for closing unused OOB channels, and closing all OOB
* channels within {@link #shutdown}.
*
* @deprecated Use {@link #createResolvingOobChannelBuilder(String, ChannelCredentials)}
* instead.
* @since 1.31.0
*/
@Deprecated
public ManagedChannelBuilder<?> createResolvingOobChannelBuilder(String target) {
throw new UnsupportedOperationException("Not implemented");
}
/**
* Creates an out-of-band channel builder for LoadBalancer's own RPC needs, e.g., talking to an
* external load-balancer service, that is specified by a target string and credentials. See
* the documentation on {@link Grpc#newChannelBuilder} for the format of a target string.
*
* <p>The target string will be resolved by a {@link NameResolver} created according to the
* target string.
*
* <p>The LoadBalancer is responsible for closing unused OOB channels, and closing all OOB
* channels within {@link #shutdown}.
*
* @since 1.35.0
*/
public ManagedChannelBuilder<?> createResolvingOobChannelBuilder(
String target, ChannelCredentials creds) {
throw new UnsupportedOperationException();
}
/**
* Set a new state with a new picker to the channel.
*
* <p>When a new picker is provided via {@code updateBalancingState()}, the channel will apply
* the picker on all buffered RPCs, by calling {@link SubchannelPicker#pickSubchannel(
* LoadBalancer.PickSubchannelArgs)}.
*
* <p>The channel will hold the picker and use it for all RPCs, until {@code
* updateBalancingState()} is called again and a new picker replaces the old one. If {@code
* updateBalancingState()} has never been called, the channel will buffer all RPCs until a
* picker is provided.
*
* <p>It should be called from the Synchronization Context. Currently will log a warning if
* violated. It will become an exception eventually. See <a
* href="https://github.com/grpc/grpc-java/issues/5015">#5015</a> for the background.
*
* <p>The passed state will be the channel's new state. The SHUTDOWN state should not be passed
* and its behavior is undefined.
*
* @since 1.6.0
*/
public abstract void updateBalancingState(
@Nonnull ConnectivityState newState, @Nonnull SubchannelPicker newPicker);
/**
* Call {@link NameResolver#refresh} on the channel's resolver.
*
* <p>It should be called from the Synchronization Context. Currently will log a warning if
* violated. It will become an exception eventually. See <a
* href="https://github.com/grpc/grpc-java/issues/5015">#5015</a> for the background.
*
* @since 1.18.0
*/
public void refreshNameResolution() {
throw new UnsupportedOperationException();
}
/**
* Historically the channel automatically refreshes name resolution if any subchannel
* connection is broken. It's transitioning to let load balancers make the decision. To
* avoid silent breakages, the channel checks if {@link #refreshNameResolution} is called
* by the load balancer. If not, it will do it and log a warning. This will be removed in
* the future and load balancers are completely responsible for triggering the refresh.
* See <a href="https://github.com/grpc/grpc-java/issues/8088">#8088</a> for the background.
*
* <p>This should rarely be used, but sometimes the address for the subchannel wasn't
* provided by the name resolver and a refresh needs to be directed somewhere else instead.
* Then you can call this method to disable the short-tem check for detecting LoadBalancers
* that need to be updated for the new expected behavior.
*
* @since 1.38.0
* @deprecated Warning has been removed
*/
@ExperimentalApi("https://github.com/grpc/grpc-java/issues/8088")
@Deprecated
public void ignoreRefreshNameResolutionCheck() {
// no-op
}
/**
* Returns a {@link SynchronizationContext} that runs tasks in the same Synchronization Context
* as that the callback methods on the {@link LoadBalancer} | Helper |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/pool/oracle/OracleDeprecated.java | {
"start": 173,
"end": 389
} | class ____ extends TestCase {
public void test_deprecated() throws Exception {
DruidDataSource ds = new DruidDataSource();
ds.setDriverClassName(JdbcConstants.ORACLE_DRIVER2);
}
}
| OracleDeprecated |
java | spring-projects__spring-framework | spring-jms/src/main/java/org/springframework/jms/connection/TransactionAwareConnectionFactoryProxy.java | {
"start": 3512,
"end": 3923
} | class ____ JMS 2.0 {@code JMSContext}
* calls and therefore requires the JMS 2.0 API to be present at runtime.
* It may nevertheless run against a JMS 1.1 driver (bound to the JMS 2.0 API)
* as long as no actual JMS 2.0 calls are triggered by the application's setup.
*
* @author Juergen Hoeller
* @since 2.0
* @see UserCredentialsConnectionFactoryAdapter
* @see SingleConnectionFactory
*/
public | delegates |
java | quarkusio__quarkus | integration-tests/narayana-jta/src/test/java/io/quarkus/narayana/jta/TransactionConfPropTest.java | {
"start": 506,
"end": 3930
} | class ____ {
@Inject
TransactionManager tm;
/*
* verify that the objectStore directory path for JTA can be configured
*/
@Test
void testObjectStoreDirPath() {
// verify that the quarkus configuration took effect
Assertions.assertEquals("target/tx-object-store", // this value is set via application.properties
arjPropertyManager.getObjectStoreEnvironmentBean().getObjectStoreDir());
}
@Test
public void testObjectStoreExist() throws Exception {
tm.begin();
assertTrue(tm.getTransaction().enlistResource(new XAResource() {
@Override
public void start(Xid arg0, int arg1) throws XAException {
}
@Override
public boolean setTransactionTimeout(int arg0) throws XAException {
return false;
}
@Override
public void rollback(Xid arg0) throws XAException {
}
@Override
public Xid[] recover(int arg0) throws XAException {
return null;
}
@Override
public int prepare(Xid arg0) throws XAException {
return 0;
}
@Override
public boolean isSameRM(XAResource arg0) throws XAException {
return false;
}
@Override
public int getTransactionTimeout() throws XAException {
return 0;
}
@Override
public void forget(Xid arg0) throws XAException {
}
@Override
public void end(Xid arg0, int arg1) throws XAException {
}
@Override
public void commit(Xid arg0, boolean arg1) throws XAException {
}
}));
assertTrue(tm.getTransaction().enlistResource(new XAResource() {
@Override
public void start(Xid xid, int flags) throws XAException {
}
@Override
public boolean setTransactionTimeout(int seconds) throws XAException {
return false;
}
@Override
public void rollback(Xid xid) throws XAException {
}
@Override
public Xid[] recover(int flag) throws XAException {
return null;
}
@Override
public int prepare(Xid xid) throws XAException {
return 0;
}
@Override
public boolean isSameRM(XAResource xares) throws XAException {
return false;
}
@Override
public int getTransactionTimeout() throws XAException {
return 0;
}
@Override
public void forget(Xid xid) throws XAException {
}
@Override
public void end(Xid xid, int flags) throws XAException {
}
@Override
public void commit(Xid xid, boolean onePhase) throws XAException {
}
}));
try {
tm.commit();
} catch (Exception e) {
tm.rollback();
}
// checking if the object-store is present in expected location
File f = new File("target/tx-object-store");
assertTrue(f.exists());
}
}
| TransactionConfPropTest |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/rules/ResolveCallByArgumentsRule.java | {
"start": 5699,
"end": 18168
} | class ____
extends RuleExpressionVisitor<List<ResolvedExpression>> {
private final @Nullable SurroundingInfo surroundingInfo;
ResolvingCallVisitor(ResolutionContext context, @Nullable SurroundingInfo surroundingInfo) {
super(context);
this.surroundingInfo = surroundingInfo;
}
@Override
public List<ResolvedExpression> visit(UnresolvedCallExpression unresolvedCall) {
final FunctionDefinition definition;
// clean functions that were not registered in a catalog
if (unresolvedCall.getFunctionIdentifier().isEmpty()) {
definition =
prepareInlineUserDefinedFunction(unresolvedCall.getFunctionDefinition());
} else {
definition = unresolvedCall.getFunctionDefinition();
}
final String functionName =
unresolvedCall
.getFunctionIdentifier()
.map(FunctionIdentifier::toString)
.orElseGet(definition::toString);
final TypeInference typeInference = getTypeInferenceOrNull(definition);
// Reorder named arguments and add replacements for optional ones
final UnresolvedCallExpression adaptedCall =
executeAssignment(functionName, definition, typeInference, unresolvedCall);
// resolve the children with information from the current call
final List<ResolvedExpression> resolvedArgs = new ArrayList<>();
final int argCount = adaptedCall.getChildren().size();
for (int i = 0; i < argCount; i++) {
final SurroundingInfo surroundingInfo;
if (typeInference == null) {
surroundingInfo = null;
} else {
surroundingInfo =
SurroundingInfo.of(
functionName,
definition,
typeInference,
argCount,
i,
resolutionContext.isGroupedAggregation());
}
final ResolvingCallVisitor childResolver =
new ResolvingCallVisitor(resolutionContext, surroundingInfo);
resolvedArgs.addAll(adaptedCall.getChildren().get(i).accept(childResolver));
}
if (definition == BuiltInFunctionDefinitions.FLATTEN) {
return executeFlatten(resolvedArgs);
}
return Collections.singletonList(
runTypeInference(
functionName,
adaptedCall,
typeInference,
resolvedArgs,
surroundingInfo));
}
@Override
protected List<ResolvedExpression> defaultMethod(Expression expression) {
if (expression instanceof ResolvedExpression) {
return Collections.singletonList((ResolvedExpression) expression);
}
throw new TableException("Unexpected unresolved expression: " + expression);
}
private List<ResolvedExpression> executeFlatten(List<ResolvedExpression> args) {
if (args.size() != 1) {
throw new ValidationException("Invalid number of arguments for flattening.");
}
final ResolvedExpression composite = args.get(0);
final LogicalType compositeType = composite.getOutputDataType().getLogicalType();
if (hasLegacyTypes(compositeType)) {
return flattenLegacyCompositeType(composite);
}
return flattenCompositeType(composite);
}
private List<ResolvedExpression> flattenCompositeType(ResolvedExpression composite) {
final DataType dataType = composite.getOutputDataType();
final LogicalType type = dataType.getLogicalType();
if (!isCompositeType(type)) {
return singletonList(composite);
}
final List<DataType> fieldDataTypes = DataTypeUtils.flattenToDataTypes(dataType);
final List<String> fieldNames = DataTypeUtils.flattenToNames(dataType);
return IntStream.range(0, fieldDataTypes.size())
.mapToObj(
idx -> {
final DataType fieldDataType = fieldDataTypes.get(idx);
final DataType nullableFieldDataType;
if (type.isNullable()) {
nullableFieldDataType = fieldDataType.nullable();
} else {
nullableFieldDataType = fieldDataType;
}
return resolutionContext
.postResolutionFactory()
.get(
composite,
valueLiteral(fieldNames.get(idx)),
nullableFieldDataType);
})
.collect(Collectors.toList());
}
private List<ResolvedExpression> flattenLegacyCompositeType(ResolvedExpression composite) {
final TypeInformation<?> resultType =
fromDataTypeToLegacyInfo(composite.getOutputDataType());
if (!(resultType instanceof CompositeType)) {
return singletonList(composite);
}
final CompositeType<?> compositeType = (CompositeType<?>) resultType;
return IntStream.range(0, resultType.getArity())
.mapToObj(
idx ->
resolutionContext
.postResolutionFactory()
.get(
composite,
valueLiteral(
compositeType.getFieldNames()[idx]),
fromLegacyInfoToDataType(
compositeType.getTypeAt(idx))))
.collect(Collectors.toList());
}
/** Temporary method until all calls define a type inference. */
private @Nullable TypeInference getTypeInferenceOrNull(FunctionDefinition definition) {
final TypeInference inference =
definition.getTypeInference(resolutionContext.typeFactory());
if (inference.getOutputTypeStrategy() != TypeStrategies.MISSING) {
return SystemTypeInference.of(definition.getKind(), inference);
} else {
return null;
}
}
private UnresolvedCallExpression executeAssignment(
String functionName,
FunctionDefinition definition,
@Nullable TypeInference inference,
UnresolvedCallExpression unresolvedCall) {
// Assignment cannot be a top-level expression,
// it must be located within a function call
if (definition == BuiltInFunctionDefinitions.ASSIGNMENT) {
throw new ValidationException(
"Named arguments via asArgument() can only be used within function calls.");
}
// Skip assignment for special calls
if (inference == null) {
return unresolvedCall;
}
final List<Expression> actualArgs = unresolvedCall.getChildren();
final List<StaticArgument> declaredArgs = inference.getStaticArguments().orElse(null);
final Map<String, Expression> namedArgs = collectAssignments(functionName, actualArgs);
if (namedArgs.isEmpty()) {
// Use position-based call but append defaults for
// optional arguments at the end if necessary.
final List<Expression> reorderedArgs =
appendDefaultPositionedArguments(declaredArgs, actualArgs);
fillInPtfSpecificPositionedArguments(
functionName, definition, declaredArgs, reorderedArgs);
return unresolvedCall.replaceArgs(reorderedArgs);
}
if (declaredArgs == null) {
throw new ValidationException(
String.format(
"Invalid call to function '%s'. "
+ "The function does not support named arguments. "
+ "Please pass the arguments based on positions (i.e. without asArgument()).",
functionName));
}
fillInDefaultNamedArguments(declaredArgs, namedArgs);
fillInPtfSpecificNamedArguments(
functionName, definition, declaredArgs, namedArgs, actualArgs);
try {
validateAssignments(declaredArgs, namedArgs);
} catch (ValidationException e) {
throw new ValidationException(
String.format(
"Invalid call to function '%s'. If the call uses named arguments, "
+ "a valid name has to be provided for all passed arguments. %s",
functionName, e.getMessage()));
}
final List<Expression> reorderedArgs =
declaredArgs.stream()
.map(arg -> namedArgs.get(arg.getName()))
.collect(Collectors.toList());
return unresolvedCall.replaceArgs(reorderedArgs);
}
private Map<String, Expression> collectAssignments(
String functionName, List<Expression> actualArgs) {
final Map<String, Expression> namedArgs = new HashMap<>();
actualArgs.stream()
.map(this::extractAssignment)
.filter(Objects::nonNull)
.forEach(
assignment -> {
if (namedArgs.containsKey(assignment.getKey())) {
throw new ValidationException(
String.format(
"Invalid call to function '%s'. "
+ "Duplicate named argument found: %s",
functionName, assignment.getKey()));
}
namedArgs.put(assignment.getKey(), assignment.getValue());
});
return namedArgs;
}
private Map.Entry<String, Expression> extractAssignment(Expression e) {
final List<Expression> children = e.getChildren();
if (!isFunction(e, BuiltInFunctionDefinitions.ASSIGNMENT) || children.size() != 2) {
return null;
}
final String name = ExpressionUtils.stringValue(children.get(0));
if (name == null) {
return null;
}
return Map.entry(name, children.get(1));
}
private void fillInPtfSpecificNamedArguments(
String functionName,
FunctionDefinition definition,
List<StaticArgument> declaredArgs,
Map<String, Expression> namedArgs,
List<Expression> actualArgs) {
// Since functions can be unregistered (i.e. inline in Table API), the API helps PTFs in
// finding arguments.
if (definition.getKind() != FunctionKind.PROCESS_TABLE) {
return;
}
// The 'uid' argument will be derived from the toString of FunctionDefinition.
// For UDFs, this is the simple | ResolvingCallVisitor |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/format/MapFormatShapeTest.java | {
"start": 1027,
"end": 1646
} | class ____
{
public Map476AsPOJO a;
public Map476Base b;
@JsonFormat(shape=JsonFormat.Shape.POJO)
public Map476Base c;
public Bean476Container(int forA, int forB, int forC) {
if (forA != 0) {
a = new Map476AsPOJO();
a.put("value", forA);
}
if (forB != 0) {
b = new Map476Base();
b.put("value", forB);
}
if (forC != 0) {
c = new Map476Base();
c.put("value", forC);
}
}
}
static | Bean476Container |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/procedures/BoundProcedure.java | {
"start": 1178,
"end": 2104
} | interface ____ extends Procedure {
/**
* Returns parameters of this procedure.
*/
ProcedureParameter[] parameters();
/**
* Indicates whether this procedure is deterministic.
*/
boolean isDeterministic();
/**
* Executes this procedure with the given input.
* <p>
* Spark validates and rearranges arguments provided in the CALL statement to ensure that
* the order and data types of the fields in {@code input} matches the expected order and
* types defined by {@link #parameters() parameters}.
* <p>
* Each procedure can return any number of result sets. Each result set is represented by
* a {@link Scan scan} that reports the type of records it produces and can be used to
* collect the output, if needed. If a result set is local and does not a distributed job,
* implementations should use {@link LocalScan}.
*/
Iterator<Scan> call(InternalRow input);
}
| BoundProcedure |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_withRepresentation_Test.java | {
"start": 2119,
"end": 2150
} | class ____ {
}
private | Example |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/condition/DisabledIf.java | {
"start": 3605,
"end": 3750
} | class ____ use
* as a condition for the test's or container's execution.
*
* <p>Condition methods must be static if located outside the test | to |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/type/TypeHandlerRegistryTest.java | {
"start": 5969,
"end": 6838
} | class ____<E extends Enum<E> & SomeInterface> extends BaseTypeHandler<E> {
public SomeInterfaceTypeHandler(Type type) {
super();
}
@Override
public void setNonNullParameter(PreparedStatement ps, int i, E parameter, JdbcType jdbcType) throws SQLException {
}
@Override
public E getNullableResult(ResultSet rs, String columnName) throws SQLException {
return null;
}
@Override
public E getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
return null;
}
@Override
public E getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
return null;
}
}
@Test
void demoTypeHandlerForSuperInterface() {
typeHandlerRegistry.register(SomeInterfaceTypeHandler.class);
// Since 3.6.x, registering type handler against super | SomeInterfaceTypeHandler |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.