language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoBuilderCompilationTest.java | {
"start": 36980,
"end": 37177
} | interface ____ {",
" boolean broken();",
" }",
"",
" @AutoBuilder(callMethod = \"annotationType\", ofClass = MyAnnot.class)",
" | MyAnnot |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/component/ApiMethodParser.java | {
"start": 15852,
"end": 18968
} | class ____ {
private final String name;
private final Class<?> resultType;
private final List<ApiMethodArg> arguments;
private final List<ApiMethodArg> properties;
private final Method method;
private final String description;
private final String signature;
private String uniqueName;
ApiMethodModel(String name, Class<?> resultType, List<ApiMethodArg> arguments, List<ApiMethodArg> properties,
Method method,
String description, String signature) {
this.name = name;
this.resultType = resultType;
this.arguments = arguments;
this.properties = properties;
this.method = method;
this.description = description;
this.signature = signature;
}
ApiMethodModel(String uniqueName, String name, Class<?> resultType, List<ApiMethodArg> arguments,
List<ApiMethodArg> properties, Method method, String description, String signature) {
this.name = name;
this.uniqueName = uniqueName;
this.resultType = resultType;
this.arguments = arguments;
this.properties = properties;
this.method = method;
this.description = description;
this.signature = signature;
}
public String getUniqueName() {
return uniqueName;
}
public String getName() {
return name;
}
public Class<?> getResultType() {
return resultType;
}
public Method getMethod() {
return method;
}
public List<ApiMethodArg> getArguments() {
return arguments;
}
public List<ApiMethodArg> getProperties() {
return properties;
}
public List<ApiMethodArg> getArgumentsAndProperties() {
List<ApiMethodArg> answer = new ArrayList<>();
if (arguments != null && !arguments.isEmpty()) {
answer.addAll(arguments);
}
if (properties != null && !properties.isEmpty()) {
answer.addAll(properties);
}
return answer;
}
public String getDescription() {
return description;
}
public String getSignature() {
return signature;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder(256);
builder.append(resultType.getName()).append(" ");
builder.append(name).append("(");
for (ApiMethodArg argument : arguments) {
builder.append(argument.getType().getCanonicalName()).append(" ");
builder.append(argument.getName()).append(", ");
}
if (!arguments.isEmpty()) {
builder.delete(builder.length() - 2, builder.length());
}
builder.append(");");
return builder.toString();
}
}
}
| ApiMethodModel |
java | google__guava | guava/src/com/google/common/graph/ForwardingValueGraph.java | {
"start": 981,
"end": 3167
} | class ____<N, V> extends AbstractValueGraph<N, V> {
abstract ValueGraph<N, V> delegate();
@Override
public Set<N> nodes() {
return delegate().nodes();
}
/**
* Defer to {@link AbstractValueGraph#edges()} (based on {@link #successors(Object)}) for full
* edges() implementation.
*/
@Override
protected long edgeCount() {
return delegate().edges().size();
}
@Override
public boolean isDirected() {
return delegate().isDirected();
}
@Override
public boolean allowsSelfLoops() {
return delegate().allowsSelfLoops();
}
@Override
public ElementOrder<N> nodeOrder() {
return delegate().nodeOrder();
}
@Override
public ElementOrder<N> incidentEdgeOrder() {
return delegate().incidentEdgeOrder();
}
@Override
public Set<N> adjacentNodes(N node) {
return delegate().adjacentNodes(node);
}
@Override
public Set<N> predecessors(N node) {
return delegate().predecessors(node);
}
@Override
public Set<N> successors(N node) {
return delegate().successors(node);
}
@Override
public int degree(N node) {
return delegate().degree(node);
}
@Override
public int inDegree(N node) {
return delegate().inDegree(node);
}
@Override
public int outDegree(N node) {
return delegate().outDegree(node);
}
@Override
public boolean hasEdgeConnecting(N nodeU, N nodeV) {
return delegate().hasEdgeConnecting(nodeU, nodeV);
}
@Override
public boolean hasEdgeConnecting(EndpointPair<N> endpoints) {
return delegate().hasEdgeConnecting(endpoints);
}
@Override
public Optional<V> edgeValue(N nodeU, N nodeV) {
return delegate().edgeValue(nodeU, nodeV);
}
@Override
public Optional<V> edgeValue(EndpointPair<N> endpoints) {
return delegate().edgeValue(endpoints);
}
@Override
public @Nullable V edgeValueOrDefault(N nodeU, N nodeV, @Nullable V defaultValue) {
return delegate().edgeValueOrDefault(nodeU, nodeV, defaultValue);
}
@Override
public @Nullable V edgeValueOrDefault(EndpointPair<N> endpoints, @Nullable V defaultValue) {
return delegate().edgeValueOrDefault(endpoints, defaultValue);
}
}
| ForwardingValueGraph |
java | google__guava | android/guava/src/com/google/common/collect/MultimapBuilder.java | {
"start": 17265,
"end": 17886
} | class ____<
K0 extends @Nullable Object, V0 extends @Nullable Object>
extends MultimapBuilder<K0, V0> {
SetMultimapBuilder() {}
@Override
public abstract <K extends K0, V extends V0> SetMultimap<K, V> build();
@Override
public <K extends K0, V extends V0> SetMultimap<K, V> build(
Multimap<? extends K, ? extends V> multimap) {
return (SetMultimap<K, V>) super.<K, V>build(multimap);
}
}
/**
* A specialization of {@link MultimapBuilder} that generates {@link SortedSetMultimap} instances.
*
* @since 16.0
*/
public abstract static | SetMultimapBuilder |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/test/MockUtils.java | {
"start": 1559,
"end": 2853
} | class ____ extends Clock {
private Instant instant;
private final ZoneId zone;
public VirtualClock(Instant initialInstant, ZoneId zone) {
this.instant = initialInstant;
this.zone = zone;
}
public VirtualClock() {
this(Instant.EPOCH, ZoneId.systemDefault());
}
public void setInstant(Instant newFixedInstant) {
this.instant = newFixedInstant;
}
public void advanceTimeBy(Duration duration) {
this.instant = this.instant.plus(duration);
}
@Override
public ZoneId getZone() {
return zone;
}
@Override
public Clock withZone(ZoneId zone) {
if (zone.equals(this.zone)) { // intentional NPE
return this;
}
return new VirtualClock(instant, zone);
}
@Override
public long millis() {
return instant.toEpochMilli();
}
@Override
public Instant instant() {
return instant;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof VirtualClock) {
VirtualClock other = (VirtualClock) obj;
return instant.equals(other.instant) && zone.equals(other.zone);
}
return false;
}
@Override
public int hashCode() {
return instant.hashCode() ^ zone.hashCode();
}
@Override
public String toString() {
return "VirtualClock[" + instant + "," + zone + "]";
}
}
}
| VirtualClock |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/http/websocketx/extensions/compression/WebSocketClientCompressionHandler.java | {
"start": 1148,
"end": 2072
} | class ____ extends WebSocketClientExtensionHandler {
/**
* @deprecated Use {@link WebSocketClientCompressionHandler#WebSocketClientCompressionHandler(int)}
*/
@Deprecated
public static final WebSocketClientCompressionHandler INSTANCE = new WebSocketClientCompressionHandler();
private WebSocketClientCompressionHandler() {
this(0);
}
/**
* Constructor with default configuration.
* @param maxAllocation
* Maximum size of the decompression buffer. Must be >= 0. If zero, maximum size is not limited.
*/
public WebSocketClientCompressionHandler(int maxAllocation) {
super(new PerMessageDeflateClientExtensionHandshaker(maxAllocation),
new DeflateFrameClientExtensionHandshaker(false, maxAllocation),
new DeflateFrameClientExtensionHandshaker(true, maxAllocation));
}
}
| WebSocketClientCompressionHandler |
java | elastic__elasticsearch | modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java | {
"start": 2654,
"end": 3444
} | class ____.
*/
public WhitelistMethod(
String origin,
String augmentedCanonicalClassName,
String methodName,
String returnCanonicalTypeName,
List<String> canonicalTypeNameParameters,
List<Object> painlessAnnotations
) {
this.origin = Objects.requireNonNull(origin);
this.augmentedCanonicalClassName = augmentedCanonicalClassName;
this.methodName = methodName;
this.returnCanonicalTypeName = Objects.requireNonNull(returnCanonicalTypeName);
this.canonicalTypeNameParameters = List.copyOf(canonicalTypeNameParameters);
this.painlessAnnotations = painlessAnnotations.stream()
.collect(Collectors.toUnmodifiableMap(Object::getClass, Function.identity()));
}
}
| documentation |
java | quarkusio__quarkus | extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsPropertiesUtil.java | {
"start": 353,
"end": 2758
} | class ____ {
private static final String STREAMS_OPTION_PREFIX = "kafka-streams.";
private static final String QUARKUS_STREAMS_OPTION_PREFIX = "quarkus." + STREAMS_OPTION_PREFIX;
private static boolean isKafkaStreamsProperty(String prefix, String property) {
return property.startsWith(prefix);
}
private static void includeKafkaStreamsProperty(Config config, Properties kafkaStreamsProperties, String prefix,
String property) {
Optional<String> value = config.getOptionalValue(property, String.class);
if (value.isPresent()) {
kafkaStreamsProperties.setProperty(property.substring(prefix.length()), value.get());
}
}
private static void addHotReplacementInterceptor(Properties kafkaStreamsProperties) {
String interceptorConfig = HotReplacementInterceptor.class.getName();
Object originalInterceptorConfig = kafkaStreamsProperties
.get(StreamsConfig.consumerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG));
if (originalInterceptorConfig != null) {
interceptorConfig = interceptorConfig + "," + originalInterceptorConfig;
}
kafkaStreamsProperties.put(StreamsConfig.consumerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG), interceptorConfig);
}
private static Properties kafkaStreamsProperties(String prefix) {
Properties kafkaStreamsProperties = new Properties();
Config config = ConfigProvider.getConfig();
for (String property : config.getPropertyNames()) {
if (isKafkaStreamsProperty(prefix, property)) {
includeKafkaStreamsProperty(config, kafkaStreamsProperties, prefix, property);
}
}
return kafkaStreamsProperties;
}
public static Properties appKafkaStreamsProperties() {
return kafkaStreamsProperties(STREAMS_OPTION_PREFIX);
}
public static Properties quarkusKafkaStreamsProperties() {
return kafkaStreamsProperties(QUARKUS_STREAMS_OPTION_PREFIX);
}
public static Properties buildKafkaStreamsProperties(LaunchMode launchMode) {
Properties kafkaStreamsProperties = appKafkaStreamsProperties();
if (launchMode == LaunchMode.DEVELOPMENT) {
addHotReplacementInterceptor(kafkaStreamsProperties);
}
return kafkaStreamsProperties;
}
}
| KafkaStreamsPropertiesUtil |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/HibernateJpaParametersParameterAccessor.java | {
"start": 1625,
"end": 3169
} | class ____ extends JpaParametersParameterAccessor {
private final BasicTypeRegistry typeHelper;
/**
* Creates a new {@link ParametersParameterAccessor}.
*
* @param parameters must not be {@literal null}.
* @param values must not be {@literal null}.
* @param em must not be {@literal null}.
*/
HibernateJpaParametersParameterAccessor(JpaParameters parameters, Object[] values, EntityManager em) {
super(parameters, values);
this.typeHelper = em.getEntityManagerFactory() //
.unwrap(SessionFactoryImplementor.class) //
.getTypeConfiguration() //
.getBasicTypeRegistry();
}
@Override
@SuppressWarnings("unchecked")
public @Nullable Object getValue(Parameter parameter) {
Object value = super.getValue(parameter.getIndex());
if (value != null) {
return value;
}
BasicType<?> type = typeHelper.getRegisteredType(parameter.getType());
if (type == null) {
return null;
}
return new TypedParameterValue<>(type, null);
}
/**
* For Hibernate, check if the incoming parameterValue can be wrapped inside a {@link TypedParameterValue} before
* extracting.
*
* @param parameterValue a parameterValue that is either a plain value or a {@link TypedParameterValue} containing a
* {@literal Date}.
* @since 3.0.4
*/
@Override
protected Object potentiallyUnwrap(Object parameterValue) {
return (parameterValue instanceof TypedParameterValue<?> typedParameterValue) //
? typedParameterValue.value() //
: parameterValue;
}
}
| HibernateJpaParametersParameterAccessor |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedSortedSetDocValuesSyntheticFieldLoader.java | {
"start": 4953,
"end": 5614
} | class ____ implements DocValuesLoader, DocValuesFieldValues {
private final SortedSetDocValues dv;
private boolean hasValue;
FlattenedFieldDocValuesLoader(final SortedSetDocValues dv) {
this.dv = dv;
}
@Override
public boolean advanceToDoc(int docId) throws IOException {
return hasValue = dv.advanceExact(docId);
}
@Override
public int count() {
return hasValue ? dv.docValueCount() : 0;
}
@Override
public SortedSetDocValues getValues() {
return dv;
}
}
private static | FlattenedFieldDocValuesLoader |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/synonyms/DeleteSynonymRuleActionRequestSerializingTests.java | {
"start": 666,
"end": 1924
} | class ____ extends AbstractWireSerializingTestCase<DeleteSynonymRuleAction.Request> {
@Override
protected Writeable.Reader<DeleteSynonymRuleAction.Request> instanceReader() {
return DeleteSynonymRuleAction.Request::new;
}
@Override
protected DeleteSynonymRuleAction.Request createTestInstance() {
return new DeleteSynonymRuleAction.Request(randomIdentifier(), randomIdentifier(), randomBoolean());
}
@Override
protected DeleteSynonymRuleAction.Request mutateInstance(DeleteSynonymRuleAction.Request instance) throws IOException {
String synonymsSetId = instance.synonymsSetId();
String synonymRuleId = instance.synonymRuleId();
boolean refresh = instance.refresh();
switch (between(0, 2)) {
case 0 -> synonymsSetId = randomValueOtherThan(synonymsSetId, () -> randomIdentifier());
case 1 -> synonymRuleId = randomValueOtherThan(synonymRuleId, () -> randomIdentifier());
case 2 -> refresh = refresh == false;
default -> throw new AssertionError("Illegal randomisation branch");
}
return new DeleteSynonymRuleAction.Request(synonymsSetId, synonymRuleId, refresh);
}
}
| DeleteSynonymRuleActionRequestSerializingTests |
java | apache__flink | flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActor.java | {
"start": 22262,
"end": 23331
} | interface ____ {
default State start(PekkoRpcActor<?> pekkoRpcActor, ClassLoader flinkClassLoader) {
throw new RpcInvalidStateException(invalidStateTransitionMessage(StartedState.STARTED));
}
default State stop() {
throw new RpcInvalidStateException(invalidStateTransitionMessage(StoppedState.STOPPED));
}
default State terminate(PekkoRpcActor<?> pekkoRpcActor, ClassLoader flinkClassLoader) {
throw new RpcInvalidStateException(
invalidStateTransitionMessage(TerminatingState.TERMINATING));
}
default State finishTermination() {
return TerminatedState.TERMINATED;
}
default boolean isRunning() {
return false;
}
default String invalidStateTransitionMessage(State targetState) {
return String.format(
"RpcActor is currently in state %s and cannot go into state %s.",
this, targetState);
}
}
@SuppressWarnings("Singleton")
| State |
java | apache__logging-log4j2 | log4j-api-test/src/test/java/org/apache/logging/log4j/LoggerTest.java | {
"start": 27167,
"end": 27727
} | class ____ {
int status;
String message;
public Response(final int status, final String message) {
this.status = status;
this.message = message;
}
public int getStatus() {
return status;
}
public void setStatus(final int status) {
this.status = status;
}
public String getMessage() {
return message;
}
public void setMessage(final String message) {
this.message = message;
}
}
}
| Response |
java | apache__flink | flink-metrics/flink-metrics-core/src/main/java/org/apache/flink/events/Events.java | {
"start": 899,
"end": 1129
} | enum ____ {
CheckpointEvent,
JobStatusChangeEvent,
JobFailureEvent,
AllSubtasksStatusChangeEvent;
public EventBuilder builder(Class<?> classScope) {
return Event.builder(classScope, name());
}
}
| Events |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/selection/generics/ErroneousSource2.java | {
"start": 206,
"end": 640
} | class ____ {
private WildCardExtendsWrapper<TypeA> fooWildCardExtendsTypeAFailure;
public WildCardExtendsWrapper<TypeA> getFooWildCardExtendsTypeAFailure() {
return fooWildCardExtendsTypeAFailure;
}
public void setFooWildCardExtendsTypeAFailure(WildCardExtendsWrapper<TypeA> fooWildCardExtendsTypeAFailure) {
this.fooWildCardExtendsTypeAFailure = fooWildCardExtendsTypeAFailure;
}
}
| ErroneousSource2 |
java | apache__kafka | storage/src/main/java/org/apache/kafka/server/log/remote/metadata/storage/RemoteLogMetadataCache.java | {
"start": 1652,
"end": 1919
} | class ____ an in-memory cache of remote log segment metadata. This maintains the lineage of segments
* with respect to leader epochs.
* <p>
* Remote log segment can go through the state transitions as mentioned in {@link RemoteLogSegmentState}.
* <p>
* This | provides |
java | apache__flink | flink-connectors/flink-connector-base/src/main/java/org/apache/flink/connector/base/source/hybrid/SwitchSourceEvent.java | {
"start": 1113,
"end": 1935
} | class ____ implements SourceEvent {
private static final long serialVersionUID = 1L;
private final int sourceIndex;
private final Source source;
private final boolean finalSource;
/**
* Constructor.
*
* @param sourceIndex
*/
public SwitchSourceEvent(int sourceIndex, Source source, boolean finalSource) {
this.sourceIndex = sourceIndex;
this.source = source;
this.finalSource = finalSource;
}
public int sourceIndex() {
return sourceIndex;
}
public Source source() {
return source;
}
public boolean isFinalSource() {
return finalSource;
}
@Override
public String toString() {
return this.getClass().getSimpleName() + '{' + "sourceIndex=" + sourceIndex + '}';
}
}
| SwitchSourceEvent |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/mvdedupe/BatchEncoder.java | {
"start": 11784,
"end": 12435
} | class ____ implements Decoder {
@Override
public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int count) {
IntBlock.Builder b = (IntBlock.Builder) builder;
for (int i = 0; i < count; i++) {
if (isNull.isNull(i)) {
b.appendNull();
} else {
BytesRef e = encoded[i];
b.appendInt((int) intHandle.get(e.bytes, e.offset));
e.offset += Integer.BYTES;
e.length -= Integer.BYTES;
}
}
}
}
protected static final | IntsDecoder |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java | {
"start": 14468,
"end": 14823
} | class ____",
canonicalClassName
);
}
existingClass = canonicalClassNamesToClasses.get(canonicalClassName);
if (existingClass != null && existingClass != clazz) {
throw lookupException(
"class [%s] cannot represent multiple java classes with the same name from different | loaders |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/SnapshotDirectory.java | {
"start": 6366,
"end": 7022
} | class ____ extends SnapshotDirectory {
PermanentSnapshotDirectory(@Nonnull Path directory) {
super(directory);
}
@Override
public DirectoryStateHandle completeSnapshotAndGetHandle() throws IOException {
if (State.COMPLETED == state.get()
|| state.compareAndSet(State.ONGOING, State.COMPLETED)) {
return DirectoryStateHandle.forPathWithSize(directory);
} else {
throw new IOException(
"Expected state " + State.ONGOING + " but found state " + state.get());
}
}
}
}
| PermanentSnapshotDirectory |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/FlatpackEndpointBuilderFactory.java | {
"start": 40500,
"end": 43080
} | interface ____ extends EndpointProducerBuilder {
default FlatpackEndpointProducerBuilder basic() {
return (FlatpackEndpointProducerBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedFlatpackEndpointProducerBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedFlatpackEndpointProducerBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
}
/**
* Builder for endpoint for the Flatpack component.
*/
public | AdvancedFlatpackEndpointProducerBuilder |
java | apache__rocketmq | store/src/main/java/org/apache/rocketmq/store/ha/DefaultHAConnection.java | {
"start": 1410,
"end": 5264
} | class ____ implements HAConnection {
/**
* Transfer Header buffer size. Schema: physic offset and body size. Format:
*
* <pre>
* βββββββββββββββββββββββββββββββββββββββββββββββββ¬ββββββββββββββββββββββββ
* β physicOffset β bodySize β
* β (8bytes) β (4bytes) β
* βββββββββββββββββββββββββββββββββββββββββββββββββ΄ββββββββββββββββββββββββ€
* β β
* β Transfer Header β
* </pre>
* <p>
*/
public static final int TRANSFER_HEADER_SIZE = 8 + 4;
private static final Logger log = LoggerFactory.getLogger(LoggerName.STORE_LOGGER_NAME);
private final DefaultHAService haService;
private final SocketChannel socketChannel;
private final String clientAddress;
private WriteSocketService writeSocketService;
private ReadSocketService readSocketService;
private volatile HAConnectionState currentState = HAConnectionState.TRANSFER;
private volatile long slaveRequestOffset = -1;
private volatile long slaveAckOffset = -1;
private FlowMonitor flowMonitor;
public DefaultHAConnection(final DefaultHAService haService, final SocketChannel socketChannel) throws IOException {
this.haService = haService;
this.socketChannel = socketChannel;
this.clientAddress = this.socketChannel.socket().getRemoteSocketAddress().toString();
this.socketChannel.configureBlocking(false);
this.socketChannel.socket().setSoLinger(false, -1);
this.socketChannel.socket().setTcpNoDelay(true);
if (NettySystemConfig.socketSndbufSize > 0) {
this.socketChannel.socket().setReceiveBufferSize(NettySystemConfig.socketSndbufSize);
}
if (NettySystemConfig.socketRcvbufSize > 0) {
this.socketChannel.socket().setSendBufferSize(NettySystemConfig.socketRcvbufSize);
}
this.writeSocketService = new WriteSocketService(this.socketChannel);
this.readSocketService = new ReadSocketService(this.socketChannel);
this.haService.getConnectionCount().incrementAndGet();
this.flowMonitor = new FlowMonitor(haService.getDefaultMessageStore().getMessageStoreConfig());
}
public void start() {
changeCurrentState(HAConnectionState.TRANSFER);
this.flowMonitor.start();
this.readSocketService.start();
this.writeSocketService.start();
}
public void shutdown() {
changeCurrentState(HAConnectionState.SHUTDOWN);
this.writeSocketService.shutdown(true);
this.readSocketService.shutdown(true);
this.flowMonitor.shutdown(true);
this.close();
}
public void close() {
if (this.socketChannel != null) {
try {
this.socketChannel.close();
} catch (IOException e) {
log.error("", e);
}
}
}
public SocketChannel getSocketChannel() {
return socketChannel;
}
public void changeCurrentState(HAConnectionState currentState) {
log.info("change state to {}", currentState);
this.currentState = currentState;
}
@Override
public HAConnectionState getCurrentState() {
return currentState;
}
@Override
public String getClientAddress() {
return this.clientAddress;
}
@Override
public long getSlaveAckOffset() {
return slaveAckOffset;
}
public long getTransferredByteInSecond() {
return this.flowMonitor.getTransferredByteInSecond();
}
public long getTransferFromWhere() {
return writeSocketService.getNextTransferFromWhere();
}
| DefaultHAConnection |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/requests/DescribeTopicPartitionsResponse.java | {
"start": 1247,
"end": 3487
} | class ____ extends AbstractResponse {
private final DescribeTopicPartitionsResponseData data;
public DescribeTopicPartitionsResponse(DescribeTopicPartitionsResponseData data) {
super(ApiKeys.DESCRIBE_TOPIC_PARTITIONS);
this.data = data;
}
@Override
public DescribeTopicPartitionsResponseData data() {
return data;
}
@Override
public int throttleTimeMs() {
return data.throttleTimeMs();
}
@Override
public void maybeSetThrottleTimeMs(int throttleTimeMs) {
data.setThrottleTimeMs(throttleTimeMs);
}
@Override
public boolean shouldClientThrottle(short version) {
return true;
}
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
data.topics().forEach(topicResponse -> {
topicResponse.partitions().forEach(p -> updateErrorCounts(errorCounts, Errors.forCode(p.errorCode())));
updateErrorCounts(errorCounts, Errors.forCode(topicResponse.errorCode()));
});
return errorCounts;
}
public static DescribeTopicPartitionsResponse parse(Readable readable, short version) {
return new DescribeTopicPartitionsResponse(
new DescribeTopicPartitionsResponseData(readable, version));
}
public static TopicPartitionInfo partitionToTopicPartitionInfo(
DescribeTopicPartitionsResponseData.DescribeTopicPartitionsResponsePartition partition,
Map<Integer, Node> nodes) {
return new TopicPartitionInfo(
partition.partitionIndex(),
nodes.get(partition.leaderId()),
partition.replicaNodes().stream().map(id -> nodes.getOrDefault(id, new Node(id, "", -1))).collect(Collectors.toList()),
partition.isrNodes().stream().map(id -> nodes.getOrDefault(id, new Node(id, "", -1))).collect(Collectors.toList()),
partition.eligibleLeaderReplicas().stream().map(id -> nodes.getOrDefault(id, new Node(id, "", -1))).collect(Collectors.toList()),
partition.lastKnownElr().stream().map(id -> nodes.getOrDefault(id, new Node(id, "", -1))).collect(Collectors.toList()));
}
}
| DescribeTopicPartitionsResponse |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/RedundantNullCheckTest.java | {
"start": 21736,
"end": 22389
} | class ____ {
String getString() {
return "foo";
}
void foo(boolean b) {
String s = getString();
if (b) {
s = null; // s is not effectively final
}
if (s == null) {
/* This is fine */
}
}
}
""")
.doTest();
}
@Test
public void positive_localVariable_annotatedNullable() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import org.jspecify.annotations.Nullable;
| Test |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/FilterChainImplTest3.java | {
"start": 568,
"end": 2744
} | class ____ extends TestCase {
private DruidDataSource dataSource;
protected void setUp() throws Exception {
dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:xxx");
dataSource.setFilters("stat,log4j,wall,encoding");
dataSource.getProxyFilters().add(new FilterAdapter() {
});
dataSource.setDbType("mysql");
dataSource.setDriver(new MockDriver() {
public ResultSet executeQuery(MockStatementBase stmt, String sql) throws SQLException {
return null;
}
public MockStatement createMockStatement(MockConnection conn) {
return new MockStatement(conn) {
public ResultSet getResultSet() throws SQLException {
return null;
}
};
}
});
dataSource.init();
}
protected void tearDown() throws Exception {
JdbcUtils.close(dataSource);
}
public void test_executeQuery() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement("select ?");
stmt.setNull(1, Types.VARCHAR);
assertNull(stmt.executeQuery());
stmt.close();
conn.close();
}
public void test_executeQuery_2() throws Exception {
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareCall("select ?");
stmt.setNull(1, Types.VARCHAR);
assertNull(stmt.executeQuery());
stmt.close();
conn.close();
}
public void test_executeQuery_3() throws Exception {
Connection conn = dataSource.getConnection();
Statement stmt = conn.createStatement();
assertNull(stmt.executeQuery("select 1"));
stmt.close();
conn.close();
}
public void test_execute() throws Exception {
Connection conn = dataSource.getConnection();
Statement stmt = conn.createStatement();
stmt.execute("select 1");
assertNull(stmt.getResultSet());
stmt.close();
conn.close();
}
}
| FilterChainImplTest3 |
java | apache__camel | components/camel-telegram/src/main/java/org/apache/camel/component/telegram/model/SendLocationMessage.java | {
"start": 1013,
"end": 2577
} | class ____ extends OutgoingMessage {
@JsonProperty("longitude")
private double longitude;
@JsonProperty("latitude")
private double latitude;
@JsonProperty("live_period")
private Integer livePeriod;
@JsonProperty("reply_markup")
private ReplyMarkup replyMarkup;
public SendLocationMessage() {
}
public SendLocationMessage(double latitude, double longitude) {
this.setLatitude(latitude);
this.setLongitude(longitude);
}
public void setLatitude(double latitude) {
this.latitude = latitude;
}
public void setLongitude(double longitude) {
this.longitude = longitude;
}
public void setLivePeriod(Integer livePeriod) {
this.livePeriod = livePeriod;
}
public ReplyMarkup getReplyMarkup() {
return replyMarkup;
}
public void setReplyMarkup(ReplyMarkup replyMarkup) {
this.replyMarkup = replyMarkup;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("SendLocationMessage{");
sb.append("latitude=").append(latitude).append('\'');
sb.append(", longitude=").append(longitude).append('\'');
sb.append(", livePeriod=").append(livePeriod).append('\'');
sb.append(", disableNotification=").append(disableNotification).append('\'');
sb.append(", replyToMessageId=").append(replyToMessageId).append('\'');
sb.append(", replyMarkup=").append(replyMarkup);
sb.append('}');
return sb.toString();
}
}
| SendLocationMessage |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng3372DirectInvocationOfPluginsTest.java | {
"start": 1014,
"end": 2578
} | class ____ extends AbstractMavenIntegrationTestCase {
@Test
public void testitMNG3372() throws Exception {
// The testdir is computed from the location of this
// file.
File testBaseDir = extractResources("/mng-3372/direct-using-prefix");
File plugin = new File(testBaseDir, "plugin");
File project = new File(testBaseDir, "project");
File settingsFile = new File(testBaseDir, "settings.xml");
Verifier verifier = newVerifier(plugin.getAbsolutePath());
verifier.deleteArtifacts("org.apache.maven.its.mng3372");
verifier.getSystemProperties().setProperty("updateReleaseInfo", "true");
verifier.addCliArguments("clean", "install");
verifier.execute();
verifier = newVerifier(project.getAbsolutePath());
verifier.addCliArgument("-s");
verifier.addCliArgument("\"" + settingsFile.getAbsolutePath() + "\"");
verifier.addCliArgument("mng3372:test");
verifier.execute();
verifier.verifyErrorFreeLog();
}
@Test
public void testDependencyTreeInvocation() throws Exception {
// The testdir is computed from the location of this
// file.
File testBaseDir = extractResources("/mng-3372/dependency-tree");
Verifier verifier = newVerifier(testBaseDir.getAbsolutePath());
verifier.addCliArgument("-U");
verifier.addCliArgument("dependency:tree");
verifier.execute();
verifier.verifyErrorFreeLog();
}
}
| MavenITmng3372DirectInvocationOfPluginsTest |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/spi/MappingExclusionProvider.java | {
"start": 292,
"end": 2121
} | interface ____ is used to control if MapStruct is allowed to generate automatic sub-mapping for
* a given {@link TypeElement}.
* <p>
* When generating the implementation of a mapping method, MapStruct will apply the following routine for each
* attribute pair in the source and target object:
* <p>
* <ul>
* <li>If source and target attribute have the same type, the value will be simply copied from source to target.
* If the attribute is a collection (e.g. a `List`) a copy of the collection will be set into the target
* attribute.</li>
* <li>If source and target attribute type differ, check whether there is a another mapping method which has the
* type of the source attribute as parameter type and the type of the target attribute as return type. If such a
* method exists it will be invoked in the generated mapping implementation.</li>
* <li>If no such method exists MapStruct will look whether a built-in conversion for the source and target type
* of the attribute exists. If this is the case, the generated mapping code will apply this conversion.</li>
* <li>If no such method was found MapStruct will try to generate an automatic sub-mapping method that will do
* the mapping between the source and target attributes</li>
* <li>If MapStruct could not create a name based mapping method an error will be raised at build time,
* indicating the non-mappable attribute and its path.</li>
* </ul>
* <p>
* With this SPI the last step before raising an error can be controlled. i.e. A user can control whether MapStruct
* is allowed to generate such automatic sub-mapping method (for the source or target type) or not.
*
* @author Filip Hrisafov
* @since 1.2
*/
@Experimental("This SPI can have its signature changed in subsequent releases")
public | that |
java | google__guice | core/test/com/google/inject/errors/MissingImplementationErrorTest.java | {
"start": 6614,
"end": 7323
} | class ____ extends AbstractModule {
@Override
protected void configure() {}
@Provides
Optional<String> provideString() {
return Optional.of("ignored");
}
@Provides
Optional<Integer> provideInteger(com.google.common.base.Optional<String> dep) {
return Optional.of(123);
}
}
@Test
public void testMismatchedOptionals() {
CreationException exception =
assertThrows(
CreationException.class, () -> Guice.createInjector(new MismatchedOptionalsModule()));
assertGuiceErrorEqualsIgnoreLineNumber(
exception.getMessage(), "missing_implementation_with_mismatched_optionals.txt");
}
private static final | MismatchedOptionalsModule |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/aot/BeanRegistrationCodeFragmentsDecorator.java | {
"start": 1573,
"end": 3594
} | class ____ implements BeanRegistrationCodeFragments {
private final BeanRegistrationCodeFragments delegate;
protected BeanRegistrationCodeFragmentsDecorator(BeanRegistrationCodeFragments delegate) {
Assert.notNull(delegate, "Delegate must not be null");
this.delegate = delegate;
}
@Override
public ClassName getTarget(RegisteredBean registeredBean) {
return this.delegate.getTarget(registeredBean);
}
@Override
public CodeBlock generateNewBeanDefinitionCode(GenerationContext generationContext,
ResolvableType beanType, BeanRegistrationCode beanRegistrationCode) {
return this.delegate.generateNewBeanDefinitionCode(generationContext, beanType, beanRegistrationCode);
}
@Override
public CodeBlock generateSetBeanDefinitionPropertiesCode(
GenerationContext generationContext, BeanRegistrationCode beanRegistrationCode,
RootBeanDefinition beanDefinition, Predicate<String> attributeFilter) {
return this.delegate.generateSetBeanDefinitionPropertiesCode(
generationContext, beanRegistrationCode, beanDefinition, attributeFilter);
}
@Override
public CodeBlock generateSetBeanInstanceSupplierCode(
GenerationContext generationContext, BeanRegistrationCode beanRegistrationCode,
CodeBlock instanceSupplierCode, List<MethodReference> postProcessors) {
return this.delegate.generateSetBeanInstanceSupplierCode(generationContext,
beanRegistrationCode, instanceSupplierCode, postProcessors);
}
@Override
public CodeBlock generateInstanceSupplierCode(
GenerationContext generationContext, BeanRegistrationCode beanRegistrationCode,
boolean allowDirectSupplierShortcut) {
return this.delegate.generateInstanceSupplierCode(generationContext,
beanRegistrationCode, allowDirectSupplierShortcut);
}
@Override
public CodeBlock generateReturnCode(
GenerationContext generationContext, BeanRegistrationCode beanRegistrationCode) {
return this.delegate.generateReturnCode(generationContext, beanRegistrationCode);
}
}
| BeanRegistrationCodeFragmentsDecorator |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/builder/BuilderDeleteTest.java | {
"start": 241,
"end": 661
} | class ____ extends TestCase {
public void test_0() throws Exception {
SQLDeleteBuilder builder = SQLBuilderFactory.createDeleteBuilder(JdbcConstants.MYSQL);
builder.from("mytable")
.whereAnd("f1 > 0");
String sql = builder.toString();
System.out.println(sql);
assertEquals("DELETE FROM mytable"
+ "\nWHERE f1 > 0", sql);
}
}
| BuilderDeleteTest |
java | apache__maven | impl/maven-cli/src/test/java/org/apache/maven/cling/invoker/mvnup/goals/GAVUtilsTest.java | {
"start": 2054,
"end": 7122
} | class ____ {
@Test
@DisplayName("should extract Artifact from complete POM")
void shouldExtractGAVFromCompletePOM() throws Exception {
String pomXml = PomBuilder.create()
.groupId("com.example")
.artifactId("test-project")
.version("1.0.0")
.build();
Document document = Document.of(pomXml);
UpgradeContext context = createMockContext();
Coordinates gav = AbstractUpgradeStrategy.extractArtifactCoordinatesWithParentResolution(context, document);
assertNotNull(gav);
assertEquals("com.example", gav.groupId());
assertEquals("test-project", gav.artifactId());
assertEquals("1.0.0", gav.version());
}
@Test
@DisplayName("should extract Artifact with parent inheritance")
void shouldExtractGAVWithParentInheritance() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.example</groupId>
<artifactId>parent-project</artifactId>
<version>1.0.0</version>
</parent>
<artifactId>child-project</artifactId>
<!-- groupId and version inherited from parent -->
</project>
""";
Document document = Document.of(pomXml);
UpgradeContext context = createMockContext();
Coordinates gav = AbstractUpgradeStrategy.extractArtifactCoordinatesWithParentResolution(context, document);
assertNotNull(gav);
assertEquals("com.example", gav.groupId());
assertEquals("child-project", gav.artifactId());
assertEquals("1.0.0", gav.version());
}
@Test
@DisplayName("should handle partial parent inheritance")
void shouldHandlePartialParentInheritance() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.example</groupId>
<artifactId>parent-project</artifactId>
<version>1.0.0</version>
</parent>
<groupId>com.example.child</groupId>
<artifactId>child-project</artifactId>
<version>2.0.0</version>
</project>
""";
Document document = Document.of(pomXml);
UpgradeContext context = createMockContext();
Coordinates gav = AbstractUpgradeStrategy.extractArtifactCoordinatesWithParentResolution(context, document);
assertNotNull(gav);
assertEquals("com.example.child", gav.groupId());
assertEquals("child-project", gav.artifactId());
assertEquals("2.0.0", gav.version());
}
@ParameterizedTest
@MethodSource("provideInvalidGAVScenarios")
@DisplayName("should return null for invalid Artifact scenarios")
void shouldReturnNullForInvalidGAVScenarios(
String groupId, String artifactId, String version, String description) throws Exception {
String pomXml = PomBuilder.create()
.groupId(groupId)
.artifactId(artifactId)
.version(version)
.build();
Document document = Document.of(pomXml);
UpgradeContext context = createMockContext();
Coordinates gav = AbstractUpgradeStrategy.extractArtifactCoordinatesWithParentResolution(context, document);
assertNull(gav, description);
}
private static Stream<Arguments> provideInvalidGAVScenarios() {
return Stream.of(
Arguments.of(
null, "incomplete-project", null, "Should return null for missing groupId and version"),
Arguments.of("com.example", null, "1.0.0", "Should return null for missing artifactId"),
Arguments.of(null, null, "1.0.0", "Should return null for missing groupId and artifactId"),
Arguments.of("com.example", "test-project", null, "Should return null for missing version"),
Arguments.of("", "test-project", "1.0.0", "Should return null for empty groupId"),
Arguments.of("com.example", "", "1.0.0", "Should return null for empty artifactId"),
Arguments.of("com.example", "test-project", "", "Should return null for empty version"));
}
}
@Nested
@DisplayName("Artifact Computation")
| GAVExtractionTests |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ProcessTableFunctionTestUtils.java | {
"start": 19583,
"end": 20182
} | class ____ extends AppendProcessTableFunctionBase {
public void eval(
@StateHint(type = @DataTypeHint("ROW<i INT>")) Row s1,
@StateHint(type = @DataTypeHint("ROW<s STRING>")) Row s2,
@ArgumentHint(SET_SEMANTIC_TABLE) Row r) {
collectObjects(s1, s2, r);
Integer i = s1.<Integer>getFieldAs("i");
if (i == null) {
i = 0;
}
s1.setField("i", i + 1);
s2.setField("s", i.toString());
}
}
/** Testing function. */
public static | MultiStateFunction |
java | apache__kafka | server/src/main/java/org/apache/kafka/network/metrics/RequestChannelMetrics.java | {
"start": 1089,
"end": 2346
} | class ____ {
private final Map<String, RequestMetrics> metricsMap;
public RequestChannelMetrics(Set<ApiKeys> enabledApis) {
metricsMap = new HashMap<>();
for (ApiKeys apiKey : enabledApis) {
metricsMap.put(apiKey.name, new RequestMetrics(apiKey.name));
}
for (String name : List.of(
RequestMetrics.CONSUMER_FETCH_METRIC_NAME,
RequestMetrics.FOLLOW_FETCH_METRIC_NAME,
RequestMetrics.VERIFY_PARTITIONS_IN_TXN_METRIC_NAME,
RequestMetrics.LIST_CLIENT_METRICS_RESOURCES_METRIC_NAME
)) {
metricsMap.put(name, new RequestMetrics(name));
}
}
public RequestChannelMetrics(ApiMessageType.ListenerType scope) {
this(ApiKeys.apisForListener(scope));
}
public RequestMetrics apply(String metricName) {
RequestMetrics requestMetrics = metricsMap.get(metricName);
if (requestMetrics == null) {
throw new NoSuchElementException("No RequestMetrics for " + metricName);
}
return requestMetrics;
}
public void close() {
for (RequestMetrics requestMetrics : metricsMap.values()) {
requestMetrics.removeMetrics();
}
}
}
| RequestChannelMetrics |
java | google__gson | gson/src/main/java/com/google/gson/ReflectionAccessFilter.java | {
"start": 7099,
"end": 8307
} | class ____ returns {@link
* FilterResult#INDECISIVE}.
*
* <p>This filter is mainly intended to prevent depending on implementation details of the Android
* platform.
*
* <p>Note that this filter might not cover all standard Android classes. Currently only classes
* in an {@code android.*} or {@code androidx.*} package, and standard Java classes in a {@code
* java.*} or {@code javax.*} package are considered. The set of detected classes might be
* expanded in the future without prior notice.
*
* @see FilterResult#BLOCK_ALL
*/
ReflectionAccessFilter BLOCK_ALL_ANDROID =
new ReflectionAccessFilter() {
@Override
public FilterResult check(Class<?> rawClass) {
return ReflectionAccessFilterHelper.isAndroidType(rawClass)
? FilterResult.BLOCK_ALL
: FilterResult.INDECISIVE;
}
@Override
public String toString() {
return "ReflectionAccessFilter#BLOCK_ALL_ANDROID";
}
};
/**
* Blocks all reflection access to members of classes belonging to programming language platforms,
* such as Java, Android, Kotlin or Scala.
*
* <p>If this filter encounters a | it |
java | quarkusio__quarkus | extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/TemplateProducer.java | {
"start": 15950,
"end": 16399
} | class ____ extends InjectableTemplateInstanceImpl {
private final String identifier;
private InjectableFragmentTemplateInstanceImpl(String identifier) {
this.identifier = identifier;
}
@Override
protected Template template() {
return super.template().getFragment(identifier);
}
}
}
static | InjectableFragmentTemplateInstanceImpl |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/runtime/src/main/java/io/quarkus/rest/client/reactive/QuarkusRestClientBuilder.java | {
"start": 1403,
"end": 1925
} | interface ____ extends Configurable<QuarkusRestClientBuilder> {
static QuarkusRestClientBuilder newBuilder() {
RestClientBuilderImpl delegate = new RestClientBuilderImpl();
for (RestClientBuilderListener listener : ServiceLoader.load(RestClientBuilderListener.class)) {
listener.onNewBuilder(delegate);
}
return new QuarkusRestClientBuilderImpl(delegate);
}
/**
* Specifies the base URL to be used when making requests. Assuming that the | QuarkusRestClientBuilder |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java | {
"start": 8826,
"end": 9210
} | enum ____ {
NAMENODE ("NameNode"),
BACKUP ("Backup Node"),
CHECKPOINT("Checkpoint Node");
private String description = null;
NamenodeRole(String arg) {this.description = arg;}
@Override
public String toString() {
return description;
}
}
/**
* Block replica states, which it can go through while being constructed.
*/
| NamenodeRole |
java | apache__kafka | raft/src/test/java/org/apache/kafka/raft/internals/RecordsIteratorTest.java | {
"start": 3708,
"end": 17332
} | class ____ {
private static final RecordSerde<String> STRING_SERDE = new StringSerde();
private static Stream<Arguments> emptyRecords() throws IOException {
return Stream.of(
FileRecords.open(TestUtils.tempFile()),
MemoryRecords.EMPTY
).map(Arguments::of);
}
@ParameterizedTest
@MethodSource("emptyRecords")
void testEmptyRecords(Records records) {
testIterator(List.of(), records, true);
}
@Property(tries = 50)
public void testMemoryRecords(
@ForAll CompressionType compressionType,
@ForAll long seed
) {
List<TestBatch<String>> batches = createBatches(seed);
MemoryRecords memRecords = buildRecords(compressionType, batches);
testIterator(batches, memRecords, true);
}
@Property(tries = 50)
public void testFileRecords(
@ForAll CompressionType compressionType,
@ForAll long seed
) throws IOException {
List<TestBatch<String>> batches = createBatches(seed);
MemoryRecords memRecords = buildRecords(compressionType, batches);
FileRecords fileRecords = FileRecords.open(TestUtils.tempFile());
fileRecords.append(memRecords);
testIterator(batches, fileRecords, true);
fileRecords.close();
}
@Property(tries = 50)
public void testCrcValidation(
@ForAll CompressionType compressionType,
@ForAll long seed
) throws IOException {
List<TestBatch<String>> batches = createBatches(seed);
MemoryRecords memRecords = buildRecords(compressionType, batches);
// Read the Batch CRC for the first batch from the buffer
ByteBuffer readBuf = memRecords.buffer();
readBuf.position(DefaultRecordBatch.CRC_OFFSET);
int actualCrc = readBuf.getInt();
// Corrupt the CRC on the first batch
memRecords.buffer().putInt(DefaultRecordBatch.CRC_OFFSET, actualCrc + 1);
assertThrows(CorruptRecordException.class, () -> testIterator(batches, memRecords, true));
FileRecords fileRecords = FileRecords.open(TestUtils.tempFile());
fileRecords.append(memRecords);
assertThrows(CorruptRecordException.class, () -> testIterator(batches, fileRecords, true));
// Verify check does not trigger when doCrcValidation is false
assertDoesNotThrow(() -> testIterator(batches, memRecords, false));
assertDoesNotThrow(() -> testIterator(batches, fileRecords, false));
// Fix the corruption
memRecords.buffer().putInt(DefaultRecordBatch.CRC_OFFSET, actualCrc);
// Verify check does not trigger when the corruption is fixed
assertDoesNotThrow(() -> testIterator(batches, memRecords, true));
FileRecords moreFileRecords = FileRecords.open(TestUtils.tempFile());
moreFileRecords.append(memRecords);
assertDoesNotThrow(() -> testIterator(batches, moreFileRecords, true));
fileRecords.close();
moreFileRecords.close();
}
@Test
public void testControlRecordIterationWithKraftVersion0() {
AtomicReference<ByteBuffer> buffer = new AtomicReference<>(null);
RecordsSnapshotWriter.Builder builder = new RecordsSnapshotWriter.Builder()
.setTime(new MockTime())
.setKraftVersion(KRaftVersion.KRAFT_VERSION_0)
.setVoterSet(Optional.empty())
.setRawSnapshotWriter(
new MockRawSnapshotWriter(new OffsetAndEpoch(100, 10), buffer::set)
);
try (RecordsSnapshotWriter<String> snapshot = builder.build(STRING_SERDE)) {
snapshot.append(List.of("a", "b", "c"));
snapshot.append(List.of("d", "e", "f"));
snapshot.append(List.of("g", "h", "i"));
snapshot.freeze();
}
try (RecordsIterator<String> iterator = createIterator(
MemoryRecords.readableRecords(buffer.get()),
BufferSupplier.NO_CACHING,
true
)
) {
// Consume the control record batch
Batch<String> batch = iterator.next();
assertEquals(1, batch.controlRecords().size());
// Check snapshot header control record
assertEquals(ControlRecordType.SNAPSHOT_HEADER, batch.controlRecords().get(0).type());
assertEquals(new SnapshotHeaderRecord(), batch.controlRecords().get(0).message());
// Consume the iterator until we find a control record
do {
batch = iterator.next();
}
while (batch.controlRecords().isEmpty());
// Check snapshot footer control record
assertEquals(1, batch.controlRecords().size());
assertEquals(ControlRecordType.SNAPSHOT_FOOTER, batch.controlRecords().get(0).type());
assertEquals(new SnapshotFooterRecord(), batch.controlRecords().get(0).message());
// Snapshot footer must be last record
assertFalse(iterator.hasNext());
}
}
@Test
public void testControlRecordIterationWithKraftVersion1() {
AtomicReference<ByteBuffer> buffer = new AtomicReference<>(null);
VoterSet voterSet = VoterSet.fromMap(
VoterSetTest.voterMap(IntStream.of(1, 2, 3), true)
);
RecordsSnapshotWriter.Builder builder = new RecordsSnapshotWriter.Builder()
.setTime(new MockTime())
.setKraftVersion(KRaftVersion.KRAFT_VERSION_1)
.setVoterSet(Optional.of(voterSet))
.setRawSnapshotWriter(
new MockRawSnapshotWriter(new OffsetAndEpoch(100, 10), buffer::set)
);
try (RecordsSnapshotWriter<String> snapshot = builder.build(STRING_SERDE)) {
snapshot.append(List.of("a", "b", "c"));
snapshot.append(List.of("d", "e", "f"));
snapshot.append(List.of("g", "h", "i"));
snapshot.freeze();
}
try (RecordsIterator<String> iterator = createIterator(
MemoryRecords.readableRecords(buffer.get()),
BufferSupplier.NO_CACHING,
true
)
) {
// Consume the control record batch
Batch<String> batch = iterator.next();
assertEquals(3, batch.controlRecords().size());
// Check snapshot header control record
assertEquals(ControlRecordType.SNAPSHOT_HEADER, batch.controlRecords().get(0).type());
assertEquals(new SnapshotHeaderRecord(), batch.controlRecords().get(0).message());
// Check kraft version control record
assertEquals(ControlRecordType.KRAFT_VERSION, batch.controlRecords().get(1).type());
assertEquals(new KRaftVersionRecord().setKRaftVersion((short) 1), batch.controlRecords().get(1).message());
// Check the voters control record
assertEquals(ControlRecordType.KRAFT_VOTERS, batch.controlRecords().get(2).type());
assertEquals(voterSet.toVotersRecord((short) 0), batch.controlRecords().get(2).message());
// Consume the iterator until we find a control record
do {
batch = iterator.next();
}
while (batch.controlRecords().isEmpty());
// Check snapshot footer control record
assertEquals(1, batch.controlRecords().size());
assertEquals(ControlRecordType.SNAPSHOT_FOOTER, batch.controlRecords().get(0).type());
assertEquals(new SnapshotFooterRecord(), batch.controlRecords().get(0).message());
// Snapshot footer must be last record
assertFalse(iterator.hasNext());
}
}
@ParameterizedTest
@EnumSource(
value = ControlRecordType.class,
names = {"LEADER_CHANGE", "SNAPSHOT_HEADER", "SNAPSHOT_FOOTER", "KRAFT_VERSION", "KRAFT_VOTERS"}
)
void testWithAllSupportedControlRecords(ControlRecordType type) {
MemoryRecords records = buildControlRecords(type);
ApiMessage expectedMessage = defaultControlRecord(type);
try (RecordsIterator<String> iterator = createIterator(records, BufferSupplier.NO_CACHING, true)) {
assertTrue(iterator.hasNext());
assertEquals(
List.of(ControlRecord.of(expectedMessage)),
iterator.next().controlRecords()
);
assertFalse(iterator.hasNext());
}
}
@Test
void testControlRecordTypeValues() {
// If this test fails then it means that ControlRecordType was changed. Please review the
// implementation for RecordsIterator to see if it needs to be updated based on the changes
// to ControlRecordType.
assertEquals(8, ControlRecordType.values().length);
}
private void testIterator(
List<TestBatch<String>> expectedBatches,
Records records,
boolean validateCrc
) {
Set<ByteBuffer> allocatedBuffers = Collections.newSetFromMap(new IdentityHashMap<>());
try (RecordsIterator<String> iterator = createIterator(
records,
mockBufferSupplier(allocatedBuffers),
validateCrc
)
) {
for (TestBatch<String> batch : expectedBatches) {
assertTrue(iterator.hasNext());
assertEquals(batch, TestBatch.from(iterator.next()));
}
assertFalse(iterator.hasNext());
assertThrows(NoSuchElementException.class, iterator::next);
}
assertEquals(Set.of(), allocatedBuffers);
}
static RecordsIterator<String> createIterator(
Records records,
BufferSupplier bufferSupplier,
boolean validateCrc
) {
return new RecordsIterator<>(
records,
STRING_SERDE,
bufferSupplier,
Records.HEADER_SIZE_UP_TO_MAGIC,
validateCrc,
new LogContext()
);
}
static BufferSupplier mockBufferSupplier(Set<ByteBuffer> buffers) {
BufferSupplier bufferSupplier = Mockito.mock(BufferSupplier.class);
Mockito.when(bufferSupplier.get(Mockito.anyInt())).thenAnswer(invocation -> {
int size = invocation.getArgument(0);
ByteBuffer buffer = ByteBuffer.allocate(size);
buffers.add(buffer);
return buffer;
});
Mockito.doAnswer(invocation -> {
ByteBuffer released = invocation.getArgument(0);
buffers.remove(released);
return null;
}).when(bufferSupplier).release(Mockito.any(ByteBuffer.class));
return bufferSupplier;
}
public static List<TestBatch<String>> createBatches(long seed) {
Random random = new Random(seed);
long baseOffset = random.nextInt(100);
int epoch = random.nextInt(3) + 1;
long appendTimestamp = random.nextInt(1000);
int numberOfBatches = random.nextInt(100) + 1;
List<TestBatch<String>> batches = new ArrayList<>(numberOfBatches);
for (int i = 0; i < numberOfBatches; i++) {
int numberOfRecords = random.nextInt(100) + 1;
List<String> records = random
.ints(numberOfRecords, 0, 10)
.mapToObj(String::valueOf)
.collect(Collectors.toList());
batches.add(new TestBatch<>(baseOffset, epoch, appendTimestamp, records));
baseOffset += records.size();
if (i % 5 == 0) {
epoch += random.nextInt(3);
}
appendTimestamp += random.nextInt(1000);
}
return batches;
}
public static MemoryRecords buildControlRecords(ControlRecordType type) {
ByteBuffer buffer = ByteBuffer.allocate(128);
try (MemoryRecordsBuilder builder = new MemoryRecordsBuilder(
buffer,
RecordBatch.CURRENT_MAGIC_VALUE,
Compression.NONE,
TimestampType.CREATE_TIME,
0, // initialOffset
0, // timestamp
RecordBatch.NO_PRODUCER_ID,
RecordBatch.NO_PRODUCER_EPOCH,
RecordBatch.NO_SEQUENCE,
false,
true,
1, // leaderEpoch
buffer.capacity()
)
) {
final Message message = defaultControlRecord(type);
builder.appendControlRecord(
0,
type,
MessageUtil.toByteBufferAccessor(message, defaultControlRecordVersion(type)).buffer()
);
}
buffer.flip();
return MemoryRecords.readableRecords(buffer);
}
public static MemoryRecords buildRecords(
CompressionType compressionType,
List<TestBatch<String>> batches
) {
Compression compression = Compression.of(compressionType).build();
ByteBuffer buffer = ByteBuffer.allocate(102400);
for (TestBatch<String> batch : batches) {
BatchBuilder<String> builder = new BatchBuilder<>(
buffer,
STRING_SERDE,
compression,
batch.baseOffset,
batch.appendTimestamp,
batch.epoch,
1024
);
for (String record : batch.records) {
builder.appendRecord(record, null);
}
builder.build();
}
buffer.flip();
return MemoryRecords.readableRecords(buffer);
}
public static final | RecordsIteratorTest |
java | apache__camel | components/camel-ai/camel-neo4j/src/main/java/org/apache/camel/component/neo4j/Neo4jConstants.java | {
"start": 1236,
"end": 3785
} | class ____ {
@Metadata(description = "The operation to be performed.", javaType = "String",
enums = "CREATE_NODE,DELETE_NODE,RETRIEVE_NODES,RETRIEVE_NODES_AND_UPDATE_WITH_CYPHER_QUERY,ADD_OR_DELETE_NODE_WITH_CYPHER_QUERY,CREATE_VECTOR_INDEX,DROP_VECTOR_INDEX,CREATE_VECTOR,VECTOR_SIMILARITY_SEARCH")
public static final String OPERATION = "CamelNeo4jOperation";
@Metadata(description = "MATCH properties for the generated MATCH query. Needed only if we are matching properties and values. Example: {name: 'Alice'} ",
javaType = "String")
public static final String MATCH_PROPERTIES = "CamelNeo4jMatchProperties";
@Metadata(description = "Query Result", javaType = "String")
public static final String QUERY_RESULT = "CamelNeo4jQueryResult";
@Metadata(description = "Query Number of nodes created", javaType = "Long")
public static final String QUERY_RESULT_NODES_CREATED = "CamelNeo4jQueryResultNodesCreated";
@Metadata(description = "Query Number of nodes deleted", javaType = "Long")
public static final String QUERY_RESULT_NODES_DELETED = "CamelNeo4jQueryResultNodesDeleted";
@Metadata(description = "Query executed contains update", javaType = "Boolean")
public static final String QUERY_RESULT_CONTAINS_UPDATES = "CamelNeo4jQueryResultContainsUpdates";
@Metadata(description = "Query executed number of relationships created", javaType = "Long")
public static final String QUERY_RESULT_RELATIONSHIPS_CREATED = "CamelNeo4jQueryResultRelationshipsCreated";
@Metadata(description = "Query executed number of relationships deleted", javaType = "Long")
public static final String QUERY_RESULT_RELATIONSHIPS_DELETED = "CamelNeo4jQueryResultRelationshipsDeleted";
@Metadata(description = "Number of nodes retrieved", javaType = "Long")
public static final String QUERY_RETRIEVE_SIZE = "CamelNeo4jQueryResultRetrieveSize";
@Metadata(description = "Query execution time in Milliseconds", javaType = "Long")
public static final String QUERY_RETRIEVE_LIST_NEO4J_NODES = "CamelNeo4jQueryResultListNeo4jNodes";
@Metadata(description = "Vector Id for the embedding", javaType = "String")
public static final String VECTOR_ID = "CamelNeo4jVectorEmbeddingId";
@Metadata(description = "Label for the Node - used when inserting from Embeddings", javaType = "String")
public static final String LABEL = "CamelNeo4jLabel";
}
}
| Headers |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/model/StringLoader.java | {
"start": 3017,
"end": 3422
} | class ____
implements ModelLoaderFactory<String, AssetFileDescriptor> {
@Override
public ModelLoader<String, AssetFileDescriptor> build(
@NonNull MultiModelLoaderFactory multiFactory) {
return new StringLoader<>(multiFactory.build(Uri.class, AssetFileDescriptor.class));
}
@Override
public void teardown() {
// Do nothing.
}
}
}
| AssetFileDescriptorFactory |
java | spring-projects__spring-framework | spring-webflux/src/main/java/org/springframework/web/reactive/result/method/annotation/CookieValueMethodArgumentResolver.java | {
"start": 1491,
"end": 3148
} | class ____ extends AbstractNamedValueSyncArgumentResolver {
/**
* Create a new {@link CookieValueMethodArgumentResolver} instance.
* @param factory a bean factory to use for resolving {@code ${...}}
* placeholder and {@code #{...}} SpEL expressions in default values;
* or {@code null} if default values are not expected to contain expressions
* @param registry for checking reactive type wrappers
*/
public CookieValueMethodArgumentResolver(@Nullable ConfigurableBeanFactory factory,
ReactiveAdapterRegistry registry) {
super(factory, registry);
}
@Override
public boolean supportsParameter(MethodParameter param) {
return checkAnnotatedParamNoReactiveWrapper(param, CookieValue.class, (annot, type) -> true);
}
@Override
protected NamedValueInfo createNamedValueInfo(MethodParameter parameter) {
CookieValue ann = parameter.getParameterAnnotation(CookieValue.class);
Assert.state(ann != null, "No CookieValue annotation");
return new CookieValueNamedValueInfo(ann);
}
@Override
protected @Nullable Object resolveNamedValue(String name, MethodParameter parameter, ServerWebExchange exchange) {
HttpCookie cookie = exchange.getRequest().getCookies().getFirst(name);
Class<?> paramType = parameter.getNestedParameterType();
if (HttpCookie.class.isAssignableFrom(paramType)) {
return cookie;
}
return (cookie != null ? cookie.getValue() : null);
}
@Override
protected void handleMissingValue(String name, MethodParameter parameter) {
throw new MissingRequestValueException(
name, parameter.getNestedParameterType(), "cookie", parameter);
}
private static final | CookieValueMethodArgumentResolver |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/PercentRankAggFunction.java | {
"start": 1739,
"end": 2390
} | class ____ extends RankAggFunction implements SizeBasedWindowFunction {
private final ValueLiteralExpression one = valueLiteral(1);
public PercentRankAggFunction(LogicalType[] orderKeyTypes) {
super(orderKeyTypes);
}
@Override
public Expression getValueExpression() {
return ifThenElse(
greaterThan(windowSizeAttribute(), one),
div(
cast(minus(sequence, one), typeLiteral(DataTypes.DOUBLE())),
cast(minus(windowSizeAttribute(), one), typeLiteral(DataTypes.DOUBLE()))),
valueLiteral(0.0d));
}
}
| PercentRankAggFunction |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/build/BuilderMetadata.java | {
"start": 5669,
"end": 5834
} | interface ____ {
/**
* Return run image metadata.
* @return the run image metadata
*/
RunImage getRunImage();
/**
* Run image metadata.
*/
| Stack |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Length.java | {
"start": 784,
"end": 1358
} | class ____ extends UnaryStringFunction {
public Length(Source source, Expression field) {
super(source, field);
}
@Override
protected NodeInfo<Length> info() {
return NodeInfo.create(this, Length::new, field());
}
@Override
protected Length replaceChild(Expression newChild) {
return new Length(source(), newChild);
}
@Override
protected StringOperation operation() {
return StringOperation.LENGTH;
}
@Override
public DataType dataType() {
return DataTypes.INTEGER;
}
}
| Length |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/ObjectWriter.java | {
"start": 47518,
"end": 47874
} | class ____
implements java.io.Serializable
{
private static final long serialVersionUID = 1L;
public final static Prefetch empty = new Prefetch(null, null, null);
/**
* Specified root serialization type to use; can be same
* as runtime type, but usually one of its super types
* (parent | Prefetch |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLShowOutlinesStatement.java | {
"start": 903,
"end": 1816
} | class ____ extends SQLStatementImpl implements SQLShowStatement {
private SQLExpr where;
private SQLOrderBy orderBy;
private SQLLimit limit;
@Override
protected void accept0(SQLASTVisitor visitor) {
visitor.visit(this);
visitor.endVisit(this);
}
public SQLExpr getWhere() {
return where;
}
public void setWhere(SQLExpr x) {
if (x != null) {
x.setParent(this);
}
this.where = x;
}
public SQLOrderBy getOrderBy() {
return orderBy;
}
public void setOrderBy(SQLOrderBy x) {
if (x != null) {
x.setParent(this);
}
this.orderBy = x;
}
public SQLLimit getLimit() {
return limit;
}
public void setLimit(SQLLimit x) {
if (x != null) {
x.setParent(this);
}
this.limit = x;
}
}
| SQLShowOutlinesStatement |
java | quarkusio__quarkus | extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/AgroalOpenTelemetryWrapper.java | {
"start": 120,
"end": 377
} | class ____ implements Function<AgroalDataSource, AgroalDataSource> {
@Override
public AgroalDataSource apply(AgroalDataSource originalDataSource) {
return new OpenTelemetryAgroalDataSource(originalDataSource);
}
}
| AgroalOpenTelemetryWrapper |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/notfound/LazyNotFoundManyToOneNonUpdatableNonInsertableTest.java | {
"start": 1389,
"end": 2265
} | class ____ {
private static int ID = 1;
@Test
public void test(SessionFactoryScope scope) {
scope.inTransaction( session -> {
Lazy p = new Lazy();
p.id = ID;
User u = new User();
u.id = ID;
u.setLazy( p );
session.persist( u );
}
);
scope.inTransaction( session -> session.remove( session.get( Lazy.class, ID ) ) );
scope.inTransaction( session -> {
User user = session.find( User.class, ID );
// per UserGuide (and simply correct behavior), `@NotFound` forces EAGER fetching
assertThat( Hibernate.isPropertyInitialized( user, "lazy" ) )
.describedAs( "`User#lazy` is not eagerly initialized due to presence of `@NotFound`" )
.isTrue();
assertNull( user.getLazy() );
}
);
}
@Entity(name="User")
@Table(name = "USER_TABLE")
public static | LazyNotFoundManyToOneNonUpdatableNonInsertableTest |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/metrics/SampleMetrics.java | {
"start": 976,
"end": 1499
} | class ____ {
public static final MetricNameTemplate METRIC1 = new MetricNameTemplate("name", "group", "The first metric used in testMetricName()", "key1", "key2");
public static final MetricNameTemplate METRIC2 = new MetricNameTemplate("name", "group", "The second metric used in testMetricName()", "key1", "key2");
public static final MetricNameTemplate METRIC_WITH_INHERITED_TAGS = new MetricNameTemplate("inherited.tags", "group", "inherited.tags in testMetricName", "parent-tag", "child-tag");
}
| SampleMetrics |
java | apache__camel | components/camel-ai/camel-langchain4j-agent/src/generated/java/org/apache/camel/component/langchain4j/agent/LangChain4jAgentConfigurationConfigurer.java | {
"start": 753,
"end": 2817
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.component.langchain4j.agent.LangChain4jAgentConfiguration target = (org.apache.camel.component.langchain4j.agent.LangChain4jAgentConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "agent": target.setAgent(property(camelContext, org.apache.camel.component.langchain4j.agent.api.Agent.class, value)); return true;
case "agentfactory":
case "agentFactory": target.setAgentFactory(property(camelContext, org.apache.camel.component.langchain4j.agent.api.AgentFactory.class, value)); return true;
case "tags": target.setTags(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "agent": return org.apache.camel.component.langchain4j.agent.api.Agent.class;
case "agentfactory":
case "agentFactory": return org.apache.camel.component.langchain4j.agent.api.AgentFactory.class;
case "tags": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.component.langchain4j.agent.LangChain4jAgentConfiguration target = (org.apache.camel.component.langchain4j.agent.LangChain4jAgentConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "agent": return target.getAgent();
case "agentfactory":
case "agentFactory": return target.getAgentFactory();
case "tags": return target.getTags();
default: return null;
}
}
}
| LangChain4jAgentConfigurationConfigurer |
java | elastic__elasticsearch | x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/cache/TransportSearchableSnapshotsNodeCachesStatsAction.java | {
"start": 9741,
"end": 10859
} | class ____ extends BaseNodesResponse<NodeCachesStatsResponse> implements ToXContentObject {
public NodesCachesStatsResponse(ClusterName clusterName, List<NodeCachesStatsResponse> nodes, List<FailedNodeException> failures) {
super(clusterName, nodes, failures);
}
@Override
protected List<NodeCachesStatsResponse> readNodesFrom(StreamInput in) {
return TransportAction.localOnly();
}
@Override
protected void writeNodesTo(StreamOutput out, List<NodeCachesStatsResponse> nodes) {
TransportAction.localOnly();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
{
builder.startObject("nodes");
for (NodeCachesStatsResponse node : getNodes()) {
node.toXContent(builder, params);
}
builder.endObject();
}
builder.endObject();
return builder;
}
}
}
| NodesCachesStatsResponse |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/AllNestedConditionsTests.java | {
"start": 2719,
"end": 2800
} | class ____ {
}
@Conditional(NonSpringBootCondition.class)
static | HasPropertyB |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/basic/BasicEnhancementTest.java | {
"start": 1290,
"end": 4898
} | class ____ {
@Test
public void basicManagedTest() {
SimpleEntity entity = new SimpleEntity();
// Call the new ManagedEntity methods
assertTyping( ManagedEntity.class, entity );
ManagedEntity managedEntity = (ManagedEntity) entity;
assertSame( entity, managedEntity.$$_hibernate_getEntityInstance() );
assertNull( managedEntity.$$_hibernate_getEntityEntry() );
managedEntity.$$_hibernate_setEntityEntry( EnhancerTestUtils.makeEntityEntry() );
assertNotNull( managedEntity.$$_hibernate_getEntityEntry() );
managedEntity.$$_hibernate_setEntityEntry( null );
assertNull( managedEntity.$$_hibernate_getEntityEntry() );
managedEntity.$$_hibernate_setNextManagedEntity( managedEntity );
managedEntity.$$_hibernate_setPreviousManagedEntity( managedEntity );
assertSame( managedEntity, managedEntity.$$_hibernate_getNextManagedEntity() );
assertSame( managedEntity, managedEntity.$$_hibernate_getPreviousManagedEntity() );
}
@Test
@Jira("HHH-13439")
public void enhancementInfoTest() {
EnhancementInfo info = SimpleEntity.class.getAnnotation( EnhancementInfo.class );
assertNotNull( info, "EnhancementInfo was not applied" );
assertEquals( Version.getVersionString(), info.version() );
}
@Test
public void basicInterceptableTest() {
SimpleEntity entity = new SimpleEntity();
assertTyping( PersistentAttributeInterceptable.class, entity );
PersistentAttributeInterceptable interceptableEntity = (PersistentAttributeInterceptable) entity;
assertNull( interceptableEntity.$$_hibernate_getInterceptor() );
interceptableEntity.$$_hibernate_setInterceptor( new ObjectAttributeMarkerInterceptor() );
assertNotNull( interceptableEntity.$$_hibernate_getInterceptor() );
assertNull( EnhancerTestUtils.getFieldByReflection( entity, "anUnspecifiedObject" ) );
entity.setAnObject( new Object() );
assertSame( ObjectAttributeMarkerInterceptor.WRITE_MARKER, EnhancerTestUtils.getFieldByReflection( entity, "anUnspecifiedObject" ) );
assertSame( ObjectAttributeMarkerInterceptor.READ_MARKER, entity.getAnObject() );
entity.setAnObject( null );
assertSame( ObjectAttributeMarkerInterceptor.WRITE_MARKER, EnhancerTestUtils.getFieldByReflection( entity, "anUnspecifiedObject" ) );
}
@Test
public void basicExtendedEnhancementTest() {
// test uses ObjectAttributeMarkerInterceptor to ensure that field access is routed through enhanced methods
SimpleEntity entity = new SimpleEntity();
( (PersistentAttributeInterceptable) entity ).$$_hibernate_setInterceptor( new ObjectAttributeMarkerInterceptor() );
Object decoy = new Object();
entity.anUnspecifiedObject = decoy;
Object gotByReflection = EnhancerTestUtils.getFieldByReflection( entity, "anUnspecifiedObject" );
assertNotSame( decoy, gotByReflection );
assertSame( ObjectAttributeMarkerInterceptor.WRITE_MARKER, gotByReflection );
Object entityObject = entity.anUnspecifiedObject;
assertNotSame( decoy, entityObject );
assertSame( ObjectAttributeMarkerInterceptor.READ_MARKER, entityObject );
// do some more calls on the various types, without the interceptor
( (PersistentAttributeInterceptable) entity ).$$_hibernate_setInterceptor( null );
entity.id = 1234567890L;
assertEquals( 1234567890L, (long) entity.getId() );
entity.name = "Entity Name";
assertSame( "Entity Name", entity.name );
entity.active = true;
assertTrue( entity.getActive() );
entity.someStrings = Arrays.asList( "A", "B", "C", "D" );
assertArrayEquals( new String[]{"A", "B", "C", "D"}, entity.someStrings.toArray() );
}
// --- //
@Entity
private static | BasicEnhancementTest |
java | netty__netty | transport/src/main/java/io/netty/channel/group/ChannelMatchers.java | {
"start": 4540,
"end": 4859
} | class ____ implements ChannelMatcher {
private final Channel channel;
InstanceMatcher(Channel channel) {
this.channel = channel;
}
@Override
public boolean matches(Channel ch) {
return channel == ch;
}
}
private static final | InstanceMatcher |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/concurrent/MemoizerComputableTest.java | {
"start": 1218,
"end": 4468
} | class ____ extends AbstractLangTest {
private Computable<Integer, Integer> computable;
@BeforeEach
public void setUpComputableMock() {
computable = EasyMock.mock(Computable.class);
}
@Test
void testDefaultBehaviourNotToRecalculateExecutionExceptions() throws Exception {
final Integer input = 1;
final Memoizer<Integer, Integer> memoizer = new Memoizer<>(computable);
final InterruptedException interruptedException = new InterruptedException();
expect(computable.compute(input)).andThrow(interruptedException);
replay(computable);
assertThrows(Throwable.class, () -> memoizer.compute(input));
assertThrows(IllegalStateException.class, () -> memoizer.compute(input));
}
@Test
void testDoesNotRecalculateWhenSetToFalse() throws Exception {
final Integer input = 1;
final Memoizer<Integer, Integer> memoizer = new Memoizer<>(computable, false);
final InterruptedException interruptedException = new InterruptedException();
expect(computable.compute(input)).andThrow(interruptedException);
replay(computable);
assertThrows(Throwable.class, () -> memoizer.compute(input));
assertThrows(IllegalStateException.class, () -> memoizer.compute(input));
}
@Test
void testDoesRecalculateWhenSetToTrue() throws Exception {
final Integer input = 1;
final Integer answer = 3;
final Memoizer<Integer, Integer> memoizer = new Memoizer<>(computable, true);
final InterruptedException interruptedException = new InterruptedException();
expect(computable.compute(input)).andThrow(interruptedException).andReturn(answer);
replay(computable);
assertThrows(Throwable.class, () -> memoizer.compute(input));
assertEquals(answer, memoizer.compute(input));
}
@Test
void testOnlyCallComputableOnceIfDoesNotThrowException() throws Exception {
final Integer input = 1;
final Memoizer<Integer, Integer> memoizer = new Memoizer<>(computable);
expect(computable.compute(input)).andReturn(input);
replay(computable);
assertEquals(input, memoizer.compute(input), "Should call computable first time");
assertEquals(input, memoizer.compute(input), "Should not call the computable the second time");
}
@Test
void testWhenComputableThrowsError() throws Exception {
final Integer input = 1;
final Memoizer<Integer, Integer> memoizer = new Memoizer<>(computable);
final Error error = new Error();
expect(computable.compute(input)).andThrow(error);
replay(computable);
assertThrows(Error.class, () -> memoizer.compute(input));
}
@Test
void testWhenComputableThrowsRuntimeException() throws Exception {
final Integer input = 1;
final Memoizer<Integer, Integer> memoizer = new Memoizer<>(computable);
final RuntimeException runtimeException = new RuntimeException("Some runtime exception");
expect(computable.compute(input)).andThrow(runtimeException);
replay(computable);
assertThrows(RuntimeException.class, () -> memoizer.compute(input));
}
}
| MemoizerComputableTest |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/TestContextAnnotationUtilsTests.java | {
"start": 24167,
"end": 24263
} | interface ____ {
}
@Meta2
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@ | Meta2 |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/dependent/BeanC.java | {
"start": 138,
"end": 336
} | class ____ {
public boolean destroyed = false;
@PreDestroy
void destroy() {
TestData.DESTRUCTION_ORDER.add(BeanC.class.getSimpleName());
this.destroyed = true;
}
}
| BeanC |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/ListSerializerSnapshot.java | {
"start": 1154,
"end": 2391
} | class ____<T>
extends CompositeTypeSerializerSnapshot<List<T>, ListSerializer<T>> {
private static final int CURRENT_VERSION = 1;
/** Constructor for read instantiation. */
public ListSerializerSnapshot() {}
/** Constructor to create the snapshot for writing. */
public ListSerializerSnapshot(ListSerializer<T> listSerializer) {
super(listSerializer);
}
@Override
public int getCurrentOuterSnapshotVersion() {
return CURRENT_VERSION;
}
@Override
protected ListSerializer<T> createOuterSerializerWithNestedSerializers(
TypeSerializer<?>[] nestedSerializers) {
@SuppressWarnings("unchecked")
TypeSerializer<T> elementSerializer = (TypeSerializer<T>) nestedSerializers[0];
return new ListSerializer<>(elementSerializer);
}
@Override
protected TypeSerializer<?>[] getNestedSerializers(ListSerializer<T> outerSerializer) {
return new TypeSerializer<?>[] {outerSerializer.getElementSerializer()};
}
@SuppressWarnings("unchecked")
public TypeSerializerSnapshot<T> getElementSerializerSnapshot() {
return (TypeSerializerSnapshot<T>) getNestedSerializerSnapshots()[0];
}
}
| ListSerializerSnapshot |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1100/Issue1151.java | {
"start": 1013,
"end": 1163
} | class ____ implements B {
public int id;
public C() {
}
public C(int id) {
this.id = id;
}
}
}
| C |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/metadata/StreamsMetadata.java | {
"start": 1415,
"end": 3999
} | class ____ extends AbstractNamedDiffable<Metadata.ProjectCustom> implements Metadata.ProjectCustom {
public static final String TYPE = "streams";
public static final StreamsMetadata EMPTY = new StreamsMetadata(false);
private static final ParseField LOGS_ENABLED = new ParseField("logs_enabled");
private static final ConstructingObjectParser<StreamsMetadata, Void> PARSER = new ConstructingObjectParser<>(TYPE, false, args -> {
boolean logsEnabled = (boolean) args[0];
return new StreamsMetadata(logsEnabled);
});
static {
PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), LOGS_ENABLED);
}
private static final TransportVersion STREAMS_LOGS_SUPPORT = TransportVersion.fromName("streams_logs_support");
public boolean logsEnabled;
public StreamsMetadata(StreamInput in) throws IOException {
logsEnabled = in.readBoolean();
}
public StreamsMetadata(boolean logsEnabled) {
this.logsEnabled = logsEnabled;
}
public boolean isLogsEnabled() {
return logsEnabled;
}
@Override
public EnumSet<Metadata.XContentContext> context() {
return Metadata.ALL_CONTEXTS;
}
@Override
public String getWriteableName() {
return TYPE;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return STREAMS_LOGS_SUPPORT;
}
@Override
public boolean supportsVersion(TransportVersion version) {
return version.supports(STREAMS_LOGS_SUPPORT);
}
public static NamedDiff<Metadata.ProjectCustom> readDiffFrom(StreamInput in) throws IOException {
return readDiffFrom(Metadata.ProjectCustom.class, TYPE, in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(logsEnabled);
}
@Override
public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
return Iterators.concat(
ChunkedToXContentHelper.chunk((builder, bParams) -> builder.field(LOGS_ENABLED.getPreferredName(), logsEnabled))
);
}
@Override
public boolean equals(Object o) {
if ((o instanceof StreamsMetadata that)) {
return logsEnabled == that.logsEnabled;
} else {
return false;
}
}
@Override
public int hashCode() {
return Objects.hashCode(logsEnabled);
}
public static StreamsMetadata fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
}
| StreamsMetadata |
java | apache__flink | flink-core/src/main/java/org/apache/flink/util/WrappingProxyUtil.java | {
"start": 1090,
"end": 2502
} | class ____ {
@VisibleForTesting static final int SAFETY_NET_MAX_ITERATIONS = 128;
private WrappingProxyUtil() {
throw new AssertionError();
}
/**
* Expects a proxy, and returns the unproxied delegate.
*
* @param wrappingProxy The initial proxy.
* @param <T> The type of the delegate. Note that all proxies in the chain must be assignable to
* T.
* @return The unproxied delegate.
*/
@SuppressWarnings("unchecked")
public static <T> T stripProxy(@Nullable final WrappingProxy<T> wrappingProxy) {
if (wrappingProxy == null) {
return null;
}
T delegate = wrappingProxy.getWrappedDelegate();
int numProxiesStripped = 0;
while (delegate instanceof WrappingProxy) {
throwIfSafetyNetExceeded(++numProxiesStripped);
delegate = ((WrappingProxy<T>) delegate).getWrappedDelegate();
}
return delegate;
}
private static void throwIfSafetyNetExceeded(final int numProxiesStripped) {
if (numProxiesStripped >= SAFETY_NET_MAX_ITERATIONS) {
throw new IllegalArgumentException(
format(
"Already stripped %d proxies. "
+ "Are there loops in the object graph?",
SAFETY_NET_MAX_ITERATIONS));
}
}
}
| WrappingProxyUtil |
java | redisson__redisson | redisson/src/test/java/org/redisson/SimpleDnsServer.java | {
"start": 379,
"end": 1731
} | class ____ {
private final EventLoopGroup group = new NioEventLoopGroup();
private final Channel channel;
private String ip = "127.0.0.1";
private final int port;
public SimpleDnsServer() throws InterruptedException {
this(ThreadLocalRandom.current().nextInt(49152, 65535));
}
public SimpleDnsServer(int port) throws InterruptedException {
Bootstrap bootstrap = new Bootstrap();
bootstrap.group(group)
.channel(NioDatagramChannel.class)
.handler(new ChannelInitializer<>() {
@Override
protected void initChannel(Channel ch) throws Exception {
ch.pipeline().addLast(new DatagramDnsQueryDecoder());
ch.pipeline().addLast(new DatagramDnsResponseEncoder());
ch.pipeline().addLast(new DnsMessageHandler());
}
});
this.port = port;
ChannelFuture future = bootstrap.bind(port).sync();
channel = future.channel();
}
public InetSocketAddress getAddr() {
return new InetSocketAddress(ip, port);
}
public void stop() {
channel.close();
group.shutdownGracefully();
}
public void updateIP(String ip) {
this.ip = ip;
}
private | SimpleDnsServer |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java | {
"start": 32155,
"end": 33634
} | class ____ "
+ YarnClient.class.getName() + " is expected to implement this !");
}
/**
* <p>
* Get the resource profiles available in the RM.
* </p>
* @return a Map of the resource profile names to their capabilities
* @throws YARNFeatureNotEnabledException if resource-profile is disabled
* @throws YarnException if any error happens inside YARN
* @throws IOException in case of other errors
*/
@Public
@Unstable
public abstract Map<String, Resource> getResourceProfiles()
throws YarnException, IOException;
/**
* <p>
* Get the details of a specific resource profile from the RM.
* </p>
* @param profile the profile name
* @return resource profile name with its capabilities
* @throws YARNFeatureNotEnabledException if resource-profile is disabled
* @throws YarnException if any error happens inside YARN
* @throws IOException in case of other others
*/
@Public
@Unstable
public abstract Resource getResourceProfile(String profile)
throws YarnException, IOException;
/**
* <p>
* Get available resource types supported by RM.
* </p>
* @return list of supported resource types with detailed information
* @throws YarnException if any issue happens inside YARN
* @throws IOException in case of other others
*/
@Public
@Unstable
public abstract List<ResourceTypeInfo> getResourceTypeInfo()
throws YarnException, IOException;
/**
* <p>
* The | extending |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-dubbo/src/test/java/org/apache/dubbo/rpc/protocol/dubbo/decode/LocalEmbeddedChannel.java | {
"start": 1028,
"end": 1311
} | class ____ extends EmbeddedChannel {
public SocketAddress localAddress() {
return new InetSocketAddress(20883);
}
@Override
protected SocketAddress remoteAddress0() {
return new InetSocketAddress(NetUtils.getAvailablePort());
}
}
| LocalEmbeddedChannel |
java | spring-projects__spring-framework | spring-r2dbc/src/main/java/org/springframework/r2dbc/connection/init/ScriptUtils.java | {
"start": 2126,
"end": 23499
} | class ____ {
/**
* Default statement separator within SQL scripts: {@code ";"}.
*/
public static final String DEFAULT_STATEMENT_SEPARATOR = ";";
/**
* Fallback statement separator within SQL scripts: {@code "\n"}.
* <p>Used if neither a custom separator nor the
* {@link #DEFAULT_STATEMENT_SEPARATOR} is present in a given script.
*/
public static final String FALLBACK_STATEMENT_SEPARATOR = "\n";
/**
* End of file (EOF) SQL statement separator: {@code "^^^ END OF SCRIPT ^^^"}.
* <p>This value may be supplied as the {@code separator} to {@link
* #executeSqlScript(Connection, EncodedResource, DataBufferFactory, boolean, boolean, String[], String, String, String)}
* to denote that an SQL script contains a single statement (potentially
* spanning multiple lines) with no explicit statement separator. Note that
* such a script should not actually contain this value; it is merely a
* <em>virtual</em> statement separator.
*/
public static final String EOF_STATEMENT_SEPARATOR = "^^^ END OF SCRIPT ^^^";
/**
* Default prefixes for single-line comments within SQL scripts: {@code ["--"]}.
*/
public static final String[] DEFAULT_COMMENT_PREFIXES = {"--"};
/**
* Default start delimiter for block comments within SQL scripts: {@code "/*"}.
*/
public static final String DEFAULT_BLOCK_COMMENT_START_DELIMITER = "/*";
/**
* Default end delimiter for block comments within SQL scripts: <code>"*/"</code>.
*/
public static final String DEFAULT_BLOCK_COMMENT_END_DELIMITER = "*/";
private static final Log logger = LogFactory.getLog(ScriptUtils.class);
/**
* Execute the given SQL script using default settings for statement
* separators, comment delimiters, and exception handling flags.
* <p>Statement separators and comments will be removed before executing
* individual statements within the supplied script.
* <p><strong>Warning</strong>: this method does <em>not</em> release the
* provided {@link Connection}.
* @param connection the R2DBC connection to use to execute the script; already
* configured and ready to use
* @param resource the resource to load the SQL script from; encoded with the
* current platform's default encoding
* @throws ScriptException if an error occurred while executing the SQL script
* @see #executeSqlScript(Connection, EncodedResource, DataBufferFactory, boolean, boolean, String[], String, String, String)
* @see #DEFAULT_STATEMENT_SEPARATOR
* @see #DEFAULT_COMMENT_PREFIXES
* @see #DEFAULT_BLOCK_COMMENT_START_DELIMITER
* @see #DEFAULT_BLOCK_COMMENT_END_DELIMITER
* @see org.springframework.r2dbc.connection.ConnectionFactoryUtils#getConnection
* @see org.springframework.r2dbc.connection.ConnectionFactoryUtils#releaseConnection
*/
public static Mono<Void> executeSqlScript(Connection connection, Resource resource) {
return executeSqlScript(connection, new EncodedResource(resource));
}
/**
* Execute the given SQL script using default settings for statement
* separators, comment delimiters, and exception handling flags.
* <p>Statement separators and comments will be removed before executing
* individual statements within the supplied script.
* <p><strong>Warning</strong>: this method does <em>not</em> release the
* provided {@link Connection}.
* @param connection the R2DBC connection to use to execute the script; already
* configured and ready to use
* @param resource the resource (potentially associated with a specific encoding)
* to load the SQL script from
* @throws ScriptException if an error occurred while executing the SQL script
* @see #executeSqlScript(Connection, EncodedResource, DataBufferFactory, boolean, boolean, String[], String, String, String)
* @see #DEFAULT_STATEMENT_SEPARATOR
* @see #DEFAULT_COMMENT_PREFIXES
* @see #DEFAULT_BLOCK_COMMENT_START_DELIMITER
* @see #DEFAULT_BLOCK_COMMENT_END_DELIMITER
* @see org.springframework.r2dbc.connection.ConnectionFactoryUtils#getConnection
* @see org.springframework.r2dbc.connection.ConnectionFactoryUtils#releaseConnection
*/
public static Mono<Void> executeSqlScript(Connection connection, EncodedResource resource) {
return executeSqlScript(connection, resource, DefaultDataBufferFactory.sharedInstance, false, false,
DEFAULT_COMMENT_PREFIXES, DEFAULT_STATEMENT_SEPARATOR, DEFAULT_BLOCK_COMMENT_START_DELIMITER,
DEFAULT_BLOCK_COMMENT_END_DELIMITER);
}
/**
* Execute the given SQL script.
* <p>Statement separators and comments will be removed before executing
* individual statements within the supplied script.
* <p><strong>Warning</strong>: this method does <em>not</em> release the
* provided {@link Connection}.
* @param connection the R2DBC connection to use to execute the script; already
* configured and ready to use
* @param resource the resource (potentially associated with a specific encoding)
* to load the SQL script from
* @param dataBufferFactory the factory to create data buffers with
* @param continueOnError whether to continue without throwing an exception
* in the event of an error
* @param ignoreFailedDrops whether to continue in the event of specifically
* an error on a {@code DROP} statement
* @param commentPrefix the prefix that identifies single-line comments in the
* SQL script (typically "--")
* @param separator the script statement separator; defaults to
* {@value #DEFAULT_STATEMENT_SEPARATOR} if not specified and falls back to
* {@value #FALLBACK_STATEMENT_SEPARATOR} as a last resort; may be set to
* {@value #EOF_STATEMENT_SEPARATOR} to signal that the script contains a
* single statement without a separator
* @param blockCommentStartDelimiter the <em>start</em> block comment delimiter
* @param blockCommentEndDelimiter the <em>end</em> block comment delimiter
* @throws ScriptException if an error occurred while executing the SQL script
* @see #DEFAULT_STATEMENT_SEPARATOR
* @see #FALLBACK_STATEMENT_SEPARATOR
* @see #EOF_STATEMENT_SEPARATOR
* @see org.springframework.r2dbc.connection.ConnectionFactoryUtils#getConnection
* @see org.springframework.r2dbc.connection.ConnectionFactoryUtils#releaseConnection
*/
public static Mono<Void> executeSqlScript(Connection connection, EncodedResource resource,
DataBufferFactory dataBufferFactory, boolean continueOnError, boolean ignoreFailedDrops,
String commentPrefix, @Nullable String separator, String blockCommentStartDelimiter,
String blockCommentEndDelimiter) {
return executeSqlScript(connection, resource, dataBufferFactory, continueOnError,
ignoreFailedDrops, new String[] { commentPrefix }, separator,
blockCommentStartDelimiter, blockCommentEndDelimiter);
}
/**
* Execute the given SQL script.
* <p>Statement separators and comments will be removed before executing
* individual statements within the supplied script.
* <p><strong>Warning</strong>: this method does <em>not</em> release the
* provided {@link Connection}.
* @param connection the R2DBC connection to use to execute the script; already
* configured and ready to use
* @param resource the resource (potentially associated with a specific encoding)
* to load the SQL script from
* @param dataBufferFactory the factory to create data buffers with
* @param continueOnError whether to continue without throwing an exception
* in the event of an error
* @param ignoreFailedDrops whether to continue in the event of specifically
* an error on a {@code DROP} statement
* @param commentPrefixes the prefixes that identify single-line comments in the
* SQL script (typically "--")
* @param separator the script statement separator; defaults to
* {@value #DEFAULT_STATEMENT_SEPARATOR} if not specified and falls back to
* {@value #FALLBACK_STATEMENT_SEPARATOR} as a last resort; may be set to
* {@value #EOF_STATEMENT_SEPARATOR} to signal that the script contains a
* single statement without a separator
* @param blockCommentStartDelimiter the <em>start</em> block comment delimiter
* @param blockCommentEndDelimiter the <em>end</em> block comment delimiter
* @throws ScriptException if an error occurred while executing the SQL script
* @see #DEFAULT_STATEMENT_SEPARATOR
* @see #FALLBACK_STATEMENT_SEPARATOR
* @see #EOF_STATEMENT_SEPARATOR
* @see org.springframework.r2dbc.connection.ConnectionFactoryUtils#getConnection
* @see org.springframework.r2dbc.connection.ConnectionFactoryUtils#releaseConnection
*/
public static Mono<Void> executeSqlScript(Connection connection, EncodedResource resource,
DataBufferFactory dataBufferFactory, boolean continueOnError, boolean ignoreFailedDrops,
String[] commentPrefixes, @Nullable String separator, String blockCommentStartDelimiter,
String blockCommentEndDelimiter) {
if (logger.isDebugEnabled()) {
logger.debug("Executing SQL script from " + resource);
}
long startTime = System.currentTimeMillis();
Mono<String> inputScript = readScript(resource, dataBufferFactory, separator)
.onErrorMap(IOException.class, ex -> new CannotReadScriptException(resource, ex));
AtomicInteger statementNumber = new AtomicInteger();
Flux<Void> executeScript = inputScript.flatMapIterable(script -> {
String separatorToUse = separator;
if (separatorToUse == null) {
separatorToUse = DEFAULT_STATEMENT_SEPARATOR;
}
if (!EOF_STATEMENT_SEPARATOR.equals(separatorToUse) &&
!containsStatementSeparator(resource, script, separatorToUse, commentPrefixes,
blockCommentStartDelimiter, blockCommentEndDelimiter)) {
separatorToUse = FALLBACK_STATEMENT_SEPARATOR;
}
return splitSqlScript(resource, script, separatorToUse, commentPrefixes,
blockCommentStartDelimiter, blockCommentEndDelimiter);
}).concatMap(statement -> {
statementNumber.incrementAndGet();
return runStatement(statement, connection, resource, continueOnError, ignoreFailedDrops, statementNumber);
});
if (logger.isDebugEnabled()) {
executeScript = executeScript.doOnComplete(() -> {
long elapsedTime = System.currentTimeMillis() - startTime;
logger.debug("Executed SQL script from " + resource + " in " + elapsedTime + " ms.");
});
}
return executeScript.onErrorMap(ex -> !(ex instanceof ScriptException),
ex -> new UncategorizedScriptException(
"Failed to execute database script from resource [" + resource + "]", ex))
.then();
}
/**
* Read a script from the provided resource, using the supplied statement
* separator, and build a {@code String} containing the lines.
* @param resource the {@code EncodedResource} containing the script to be
* processed
* @param dataBufferFactory the factory to create data buffers with
* @param separator the statement separator in the SQL script (typically ";")
* @return a {@link Mono} of {@link String} containing the script lines that
* completes once the resource has been loaded
*/
static Mono<String> readScript(EncodedResource resource, DataBufferFactory dataBufferFactory,
@Nullable String separator) {
return DataBufferUtils.join(DataBufferUtils.read(resource.getResource(), dataBufferFactory, 8192))
.handle((it, sink) -> {
try (InputStream is = it.asInputStream()) {
InputStreamReader in = (resource.getCharset() != null ?
new InputStreamReader(is, resource.getCharset()) : new InputStreamReader(is));
LineNumberReader lnr = new LineNumberReader(in);
String script = readScript(lnr, separator);
sink.next(script);
sink.complete();
}
catch (Exception ex) {
sink.error(ex);
}
finally {
DataBufferUtils.release(it);
}
});
}
/**
* Read a script from the provided {@code LineNumberReader} and build a
* {@code String} containing the lines.
* @param lineNumberReader the {@code LineNumberReader} containing the script
* to be processed
* @param separator the statement separator in the SQL script (typically ";")
* @return a {@code String} containing the script lines
* @throws IOException in case of I/O errors
*/
private static String readScript(LineNumberReader lineNumberReader, @Nullable String separator) throws IOException {
StringBuilder scriptBuilder = new StringBuilder();
String currentLine = lineNumberReader.readLine();
while (currentLine != null) {
if (scriptBuilder.length() > 0) {
scriptBuilder.append('\n');
}
scriptBuilder.append(currentLine);
currentLine = lineNumberReader.readLine();
}
appendSeparatorToScriptIfNecessary(scriptBuilder, separator);
return scriptBuilder.toString();
}
private static void appendSeparatorToScriptIfNecessary(StringBuilder scriptBuilder, @Nullable String separator) {
if (separator == null) {
return;
}
String trimmed = separator.trim();
if (trimmed.length() == separator.length()) {
return;
}
// separator ends in whitespace, so we might want to see if the script is trying
// to end the same way
if (scriptBuilder.lastIndexOf(trimmed) == scriptBuilder.length() - trimmed.length()) {
scriptBuilder.append(separator.substring(trimmed.length()));
}
}
	/**
	 * Determine if the provided SQL script contains the specified statement separator.
	 * <p>This method is intended to be used to find the string separating each
	 * SQL statement &mdash; for example, a ';' character.
	 * <p>Any occurrence of the separator within the script will be ignored if it
	 * is within a <em>literal</em> block of text enclosed in single quotes
	 * ({@code '}) or double quotes ({@code "}), if it is escaped with a backslash
	 * ({@code \}), or if it is within a single-line comment or block comment.
	 * @param resource the resource from which the script was read, or {@code null}
	 * if unknown
	 * @param script the SQL script to search within
	 * @param separator the statement separator to search for
	 * @param commentPrefixes the prefixes that identify single-line comments
	 * (typically {@code "--"})
	 * @param blockCommentStartDelimiter the <em>start</em> block comment delimiter
	 * (typically {@code "/*"})
	 * @param blockCommentEndDelimiter the <em>end</em> block comment delimiter
	 * (typically <code>"*&#47;"</code>)
	 * @since 5.3.8
	 */
	static boolean containsStatementSeparator(EncodedResource resource, String script,
			String separator, String[] commentPrefixes, String blockCommentStartDelimiter,
			String blockCommentEndDelimiter) {
		// Single-pass scanner state: whether the current position is inside a
		// single-quoted literal, a double-quoted literal, or right after a backslash.
		boolean inSingleQuote = false;
		boolean inDoubleQuote = false;
		boolean inEscape = false;

		for (int i = 0; i < script.length(); i++) {
			char c = script.charAt(i);
			if (inEscape) {
				// The previous character was a backslash: this character is escaped
				// and must not toggle quote state or match a separator.
				inEscape = false;
				continue;
			}
			// MySQL style escapes
			if (c == '\\') {
				inEscape = true;
				continue;
			}
			// Toggle quote state; a quote character of one kind inside the other
			// kind of literal is plain text and does not change state.
			if (!inDoubleQuote && (c == '\'')) {
				inSingleQuote = !inSingleQuote;
			}
			else if (!inSingleQuote && (c == '"')) {
				inDoubleQuote = !inDoubleQuote;
			}
			// Separators and comments only count outside of string literals.
			if (!inSingleQuote && !inDoubleQuote) {
				if (script.startsWith(separator, i)) {
					return true;
				}
				else if (startsWithAny(script, commentPrefixes, i)) {
					// Skip over any content from the start of the comment to the EOL
					int indexOfNextNewline = script.indexOf('\n', i);
					if (indexOfNextNewline > i) {
						i = indexOfNextNewline;
						continue;
					}
					else {
						// If there's no EOL, we must be at the end of the script, so stop here.
						break;
					}
				}
				else if (script.startsWith(blockCommentStartDelimiter, i)) {
					// Skip over any block comments
					int indexOfCommentEnd = script.indexOf(blockCommentEndDelimiter, i);
					if (indexOfCommentEnd > i) {
						// Resume just after the end delimiter (loop increment adds 1).
						i = indexOfCommentEnd + blockCommentEndDelimiter.length() - 1;
						continue;
					}
					else {
						// An unterminated block comment is a malformed script.
						throw new ScriptParseException(
								"Missing block comment end delimiter: " + blockCommentEndDelimiter, resource);
					}
				}
			}
		}
		return false;
	}
	/**
	 * Split an SQL script into separate statements delimited by the provided
	 * separator string and return a {@code List} containing each individual
	 * statement.
	 * <p>Within the script, the provided {@code commentPrefixes} will be honored:
	 * any text beginning with one of the comment prefixes and extending to the
	 * end of the line will be omitted from the output. Similarly, the provided
	 * {@code blockCommentStartDelimiter} and {@code blockCommentEndDelimiter}
	 * delimiters will be honored: any text enclosed in a block comment will be
	 * omitted from the output. In addition, multiple adjacent whitespace characters
	 * will be collapsed into a single space.
	 * @param resource the resource from which the script was read
	 * @param script the SQL script
	 * @param separator text separating each statement
	 * (typically a ';' or newline character)
	 * @param commentPrefixes the prefixes that identify SQL line comments
	 * (typically "--")
	 * @param blockCommentStartDelimiter the <em>start</em> block comment delimiter;
	 * never {@code null} or empty
	 * @param blockCommentEndDelimiter the <em>end</em> block comment delimiter;
	 * never {@code null} or empty
	 * @return a list of statements
	 * @throws ScriptException if an error occurred while splitting the SQL script
	 */
	static List<String> splitSqlScript(EncodedResource resource, String script,
			String separator, String[] commentPrefixes, String blockCommentStartDelimiter,
			String blockCommentEndDelimiter) {

		Assert.hasText(script, "'script' must not be null or empty");
		Assert.notNull(separator, "'separator' must not be null");
		Assert.notEmpty(commentPrefixes, "'commentPrefixes' must not be null or empty");
		for (String commentPrefix : commentPrefixes) {
			Assert.hasText(commentPrefix, "'commentPrefixes' must not contain null or empty elements");
		}
		Assert.hasText(blockCommentStartDelimiter, "'blockCommentStartDelimiter' must not be null or empty");
		Assert.hasText(blockCommentEndDelimiter, "'blockCommentEndDelimiter' must not be null or empty");

		List<String> statements = new ArrayList<>();
		// Accumulates the current statement's characters (comments and extra
		// whitespace excluded) until a separator is reached.
		StringBuilder sb = new StringBuilder();
		// Single-pass scanner state, mirroring containsStatementSeparator():
		// inside single-/double-quoted literal, or right after a backslash escape.
		boolean inSingleQuote = false;
		boolean inDoubleQuote = false;
		boolean inEscape = false;

		for (int i = 0; i < script.length(); i++) {
			char c = script.charAt(i);
			if (inEscape) {
				// Escaped character: copy it verbatim without interpreting it.
				inEscape = false;
				sb.append(c);
				continue;
			}
			// MySQL style escapes
			if (c == '\\') {
				inEscape = true;
				sb.append(c);
				continue;
			}
			// Toggle quote state; the quote characters themselves are still copied
			// into the statement below.
			if (!inDoubleQuote && (c == '\'')) {
				inSingleQuote = !inSingleQuote;
			}
			else if (!inSingleQuote && (c == '"')) {
				inDoubleQuote = !inDoubleQuote;
			}
			// Separators, comments, and whitespace collapsing only apply outside literals.
			if (!inSingleQuote && !inDoubleQuote) {
				if (script.startsWith(separator, i)) {
					// We've reached the end of the current statement
					if (sb.length() > 0) {
						statements.add(sb.toString());
						sb = new StringBuilder();
					}
					// Skip the rest of the separator (loop increment adds 1).
					i += separator.length() - 1;
					continue;
				}
				else if (startsWithAny(script, commentPrefixes, i)) {
					// Skip over any content from the start of the comment to the EOL
					int indexOfNextNewline = script.indexOf('\n', i);
					if (indexOfNextNewline > i) {
						i = indexOfNextNewline;
						continue;
					}
					else {
						// If there's no EOL, we must be at the end of the script, so stop here.
						break;
					}
				}
				else if (script.startsWith(blockCommentStartDelimiter, i)) {
					// Skip over any block comments
					int indexOfCommentEnd = script.indexOf(blockCommentEndDelimiter, i);
					if (indexOfCommentEnd > i) {
						i = indexOfCommentEnd + blockCommentEndDelimiter.length() - 1;
						continue;
					}
					else {
						// An unterminated block comment is a malformed script.
						throw new ScriptParseException(
								"Missing block comment end delimiter: " + blockCommentEndDelimiter, resource);
					}
				}
				else if (c == ' ' || c == '\r' || c == '\n' || c == '\t') {
					// Avoid multiple adjacent whitespace characters
					if (sb.length() > 0 && sb.charAt(sb.length() - 1) != ' ') {
						// Normalize any whitespace character to a single space.
						c = ' ';
					}
					else {
						// Drop leading or repeated whitespace entirely.
						continue;
					}
				}
			}
			sb.append(c);
		}
		// Add the trailing statement if the script did not end with a separator.
		if (StringUtils.hasText(sb)) {
			statements.add(sb.toString());
		}
		return statements;
	}
private static boolean startsWithAny(String script, String[] prefixes, int offset) {
for (String prefix : prefixes) {
if (script.startsWith(prefix, offset)) {
return true;
}
}
return false;
}
private static Publisher<? extends Void> runStatement(String statement, Connection connection,
EncodedResource resource, boolean continueOnError, boolean ignoreFailedDrops, AtomicInteger statementNumber) {
Mono<Long> execution = Flux.from(connection.createStatement(statement).execute())
.flatMap(Result::getRowsUpdated)
.collect(Collectors.summingLong(count -> count));
if (logger.isDebugEnabled()) {
execution = execution.doOnNext(rowsAffected ->
logger.debug(rowsAffected + " returned as update count for SQL: " + statement));
}
return execution.onErrorResume(ex -> {
boolean dropStatement = StringUtils.startsWithIgnoreCase(statement.trim(), "drop");
if (continueOnError || (dropStatement && ignoreFailedDrops)) {
if (logger.isDebugEnabled()) {
logger.debug(ScriptStatementFailedException.buildErrorMessage(
statement, statementNumber.get(), resource), ex);
}
}
else {
return Mono.error(new ScriptStatementFailedException(statement, statementNumber.get(), resource, ex));
}
return Mono.empty();
}).then();
}
}
| ScriptUtils |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/JUnit4TestNotRunTest.java | {
"start": 12272,
"end": 12730
} | class ____ {
@Test
public void testDoSomething() {}
}
""")
.setFixChooser(FixChoosers.FIRST)
.doTest();
}
@Test
public void ignoreFix() {
refactoringHelper
.addInputLines(
"in/TestStuff.java",
"""
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public | TestStuff |
java | apache__camel | components/camel-hazelcast/src/main/java/org/apache/camel/processor/idempotent/hazelcast/HazelcastIdempotentRepository.java | {
"start": 1534,
"end": 4101
} | class ____ extends ServiceSupport implements IdempotentRepository {
protected boolean useLocalHzInstance;
@Metadata(description = "Name of cache to use", defaultValue = "HazelcastIdempotentRepository")
private String repositoryName;
private IMap<String, Boolean> repo;
@Metadata(description = "To use an existing Hazelcast instance instead of local")
private HazelcastInstance hazelcastInstance;
public HazelcastIdempotentRepository() {
this(null);
}
public HazelcastIdempotentRepository(HazelcastInstance hazelcastInstance) {
this(hazelcastInstance, HazelcastIdempotentRepository.class.getSimpleName());
}
public HazelcastIdempotentRepository(HazelcastInstance hazelcastInstance, String repositoryName) {
this.hazelcastInstance = hazelcastInstance;
this.repositoryName = repositoryName;
}
@Override
protected void doStart() throws Exception {
if (hazelcastInstance == null) {
Config cfg = new XmlConfigBuilder().build();
cfg.setProperty("hazelcast.version.check.enabled", "false");
hazelcastInstance = Hazelcast.newHazelcastInstance(cfg);
useLocalHzInstance = true;
} else {
ObjectHelper.notNull(hazelcastInstance, "hazelcastInstance");
}
repo = hazelcastInstance.getMap(repositoryName);
}
@Override
protected void doStop() throws Exception {
if (useLocalHzInstance) {
hazelcastInstance.getLifecycleService().shutdown();
}
}
@Override
public boolean add(String key) {
repo.lock(key);
try {
return repo.putIfAbsent(key, false) == null;
} finally {
repo.unlock(key);
}
}
@Override
public boolean confirm(String key) {
repo.lock(key);
try {
return repo.replace(key, false, true);
} finally {
repo.unlock(key);
}
}
@Override
public boolean contains(String key) {
repo.lock(key);
try {
return this.repo.containsKey(key);
} finally {
repo.unlock(key);
}
}
@Override
public boolean remove(String key) {
repo.lock(key);
try {
return repo.remove(key) != null;
} finally {
repo.unlock(key);
}
}
@Override
public void clear() {
repo.clear();
}
public String getRepositoryName() {
return repositoryName;
}
}
| HazelcastIdempotentRepository |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/filter/AbstractPatternFilter.java | {
"start": 1289,
"end": 1368
} | class ____ pattern based filters
*/
@InterfaceAudience.Private
public abstract | for |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/context/env/PropertyExpressionResolver.java | {
"start": 990,
"end": 1688
} | interface ____ {
/**
* Resolve the value for the expression of the specified type.
*
* @param propertyResolver The property resolver
* @param conversionService The conversion service
* @param expression The expression
* @param requiredType The required typ
* @param <T> The type
* @return The optional resolved value
*/
@NonNull
<T> Optional<T> resolve(@NonNull PropertyResolver propertyResolver,
@NonNull ConversionService conversionService,
@NonNull String expression,
@NonNull Class<T> requiredType);
}
| PropertyExpressionResolver |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/execution/DynamicTestIntegrationTests.java | {
"start": 667,
"end": 1415
} | class ____ {
private static final int TEN_MB = 10 * 1024 * 1024;
/**
* Without the fix in {@code DynamicTestTestDescriptor}, setting the
* {@code -mx200m} VM argument will cause an {@link OutOfMemoryError} before
* the 200 limit is reached.
*
* @see <a href="https://github.com/junit-team/junit-framework/issues/1865">Issue 1865</a>
*/
@TestFactory
Stream<DynamicTest> generateDynamicTestsThatReferenceLargeAmountsOfMemory() {
return Stream.generate(() -> new byte[TEN_MB])//
// The lambda Executable in the following line *must* reference
// the `bytes` array in order to hold onto the allocated memory.
.map(bytes -> dynamicTest("test", () -> assertNotNull(bytes)))//
.limit(200);
}
}
| DynamicTestIntegrationTests |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ConfigurationWithFactoryBeanAndAutowiringTests.java | {
"start": 5125,
"end": 5399
} | class ____ {
@Autowired
private DummyBean dummyBean;
@Bean
FactoryBean<String> factoryBean() {
Assert.notNull(dummyBean, "DummyBean was not injected.");
return new MyFactoryBean();
}
}
@Configuration
static | NonPublicParameterizedFactoryBeanInterfaceConfig |
java | apache__camel | dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/commands/process/ListEndpoint.java | {
"start": 9191,
"end": 9413
} | class ____ {
String pid;
String name;
long uptime;
String age;
String endpoint;
String direction;
String total;
boolean stub;
boolean remote;
}
}
| Row |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/beans/factory/xml/XmlBeanFactoryTestTypes.java | {
"start": 7142,
"end": 7373
} | class ____ {
private Collection<?> jumble;
public void setJumble(Collection<?> jumble) {
this.jumble = jumble;
}
public Collection<?> getJumble() {
return jumble;
}
}
/**
* @author Juergen Hoeller
*/
| MixedCollectionBean |
java | spring-projects__spring-boot | core/spring-boot-test/src/test/java/org/springframework/boot/test/context/PropertyMappingContextCustomizerFactoryTests.java | {
"start": 5612,
"end": 5723
} | interface ____ {
@PropertyMapping("mapped")
String value() default "Mapped";
}
}
| AttributeMappingAnnotation |
java | junit-team__junit5 | junit-platform-engine/src/main/java/org/junit/platform/engine/discovery/DiscoverySelectors.java | {
"start": 43440,
"end": 43639
} | class ____, method name, and parameter types.
*
* @param enclosingClassNames the names of the enclosing classes; never {@code null}
* or empty
* @param nestedClassName the name of the nested | name |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/request/transition/DrawableCrossFadeTransition.java | {
"start": 509,
"end": 652
} | class ____
* instead fall back to a default animation that doesn't rely on {@link
* android.graphics.drawable.TransitionDrawable}.
*/
public | can |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/typeutils/SymbolUtilTest.java | {
"start": 1694,
"end": 3097
} | class ____ {
@Test
void testCalciteToSerializable() {
final SerializableSymbol trimString = SerializableSymbol.of("TRIM", "LEADING");
assertThat(calciteToSerializable(SqlTrimFunction.Flag.LEADING)).isEqualTo(trimString);
assertThat(SymbolUtil.serializableToCalcite(SqlTrimFunction.Flag.class, "LEADING"))
.isEqualTo(SqlTrimFunction.Flag.LEADING);
final SerializableSymbol emptyOrErrorString =
SerializableSymbol.of("JSON_QUERY_ON_EMPTY_OR_ERROR", "EMPTY_OBJECT");
assertThat(calciteToSerializable(SqlJsonQueryEmptyOrErrorBehavior.EMPTY_OBJECT))
.isEqualTo(emptyOrErrorString);
assertThat(serializableToCalcite(emptyOrErrorString))
.isEqualTo(SqlJsonQueryEmptyOrErrorBehavior.EMPTY_OBJECT);
}
@Test
void testCommonToCalcite() {
// public symbol
assertThat(commonToCalcite(TimeIntervalUnit.QUARTER)).isEqualTo(TimeUnitRange.QUARTER);
assertThat(calciteToCommon(TimeUnitRange.QUARTER, false))
.isEqualTo(TimeIntervalUnit.QUARTER);
// internal symbol
assertThat(commonToCalcite(DateTimeUtils.TimeUnitRange.QUARTER))
.isEqualTo(TimeUnitRange.QUARTER);
assertThat(calciteToCommon(TimeUnitRange.QUARTER, true))
.isEqualTo(DateTimeUtils.TimeUnitRange.QUARTER);
}
}
| SymbolUtilTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java | {
"start": 1160,
"end": 2710
} | class ____ extends AcknowledgedRequest<Request> {
public static final String MODEL_ALIAS = "model_alias";
private final String modelAlias;
private final String modelId;
public Request(String modelAlias, String modelId) {
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT);
this.modelAlias = ExceptionsHelper.requireNonNull(modelAlias, MODEL_ALIAS);
this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID);
}
public Request(StreamInput in) throws IOException {
super(in);
this.modelAlias = in.readString();
this.modelId = in.readString();
}
public String getModelAlias() {
return modelAlias;
}
public String getModelId() {
return modelId;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(modelAlias);
out.writeString(modelId);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Request request = (Request) o;
return Objects.equals(modelAlias, request.modelAlias) && Objects.equals(modelId, request.modelId);
}
@Override
public int hashCode() {
return Objects.hash(modelAlias, modelId);
}
}
}
| Request |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/settings/ConfigurationSettingNames.java | {
"start": 1367,
"end": 3194
} | class ____ {
private static final String CLIENT_SETTINGS_NAMESPACE = SETTINGS_NAMESPACE.concat("client.");
/**
* Set to {@code true} if the client is required to provide a proof key challenge
* and verifier when performing the Authorization Code Grant flow.
*/
public static final String REQUIRE_PROOF_KEY = CLIENT_SETTINGS_NAMESPACE.concat("require-proof-key");
/**
* Set to {@code true} if authorization consent is required when the client
* requests access. This applies to {@code authorization_code} flow.
*/
public static final String REQUIRE_AUTHORIZATION_CONSENT = CLIENT_SETTINGS_NAMESPACE
.concat("require-authorization-consent");
/**
* Set the {@code URL} for the Client's JSON Web Key Set.
*/
public static final String JWK_SET_URL = CLIENT_SETTINGS_NAMESPACE.concat("jwk-set-url");
/**
* Set the {@link JwsAlgorithm JWS} algorithm that must be used for signing the
* {@link Jwt JWT} used to authenticate the Client at the Token Endpoint for the
* {@link ClientAuthenticationMethod#PRIVATE_KEY_JWT private_key_jwt} and
* {@link ClientAuthenticationMethod#CLIENT_SECRET_JWT client_secret_jwt}
* authentication methods.
*/
public static final String TOKEN_ENDPOINT_AUTHENTICATION_SIGNING_ALGORITHM = CLIENT_SETTINGS_NAMESPACE
.concat("token-endpoint-authentication-signing-algorithm");
/**
* Set the expected subject distinguished name associated to the client
* {@code X509Certificate} received during client authentication when using the
* {@code tls_client_auth} method.
*/
public static final String X509_CERTIFICATE_SUBJECT_DN = CLIENT_SETTINGS_NAMESPACE
.concat("x509-certificate-subject-dn");
private Client() {
}
}
/**
* The names for authorization server configuration settings.
*/
public static final | Client |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java | {
"start": 6596,
"end": 7094
} | class ____ {
private final Map<String, Object> params;
private final List<Object> states;
public ReduceScript(Map<String, Object> params, List<Object> states) {
this.params = params;
this.states = states;
}
public Map<String, Object> getParams() {
return params;
}
public List<Object> getStates() {
return states;
}
public abstract Object execute();
public | ReduceScript |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-servlet/runtime/src/main/java/io/quarkus/resteasy/reactive/server/servlet/runtime/ResteasyReactiveServlet.java | {
"start": 580,
"end": 1327
} | class ____ extends HttpServlet {
private final RestInitialHandler initialHandler;
public ResteasyReactiveServlet(Deployment deployment) {
this.initialHandler = new RestInitialHandler(deployment);
}
@Override
protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
HttpServletRequest request = req;
while (request instanceof HttpServletRequestWrapper) {
request = (HttpServletRequest) ((HttpServletRequestWrapper) request).getRequest();
}
initialHandler.beginProcessing(
((HttpServletRequestImpl) request).getExchange().getAttachment(ServletRequestContext.ATTACHMENT_KEY));
}
}
| ResteasyReactiveServlet |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-triple/src/test/java/org/apache/dubbo/rpc/protocol/tri/test/TestRunner.java | {
"start": 882,
"end": 2150
} | interface ____ {
TestResponse run(TestRequest request);
<T> T run(TestRequest request, Class<T> type);
<T> T get(TestRequest request, Class<T> type);
String get(TestRequest request);
<T> T get(String path, Class<T> type);
<T> List<T> gets(String path, Class<T> type);
String get(String path);
List<String> gets(String path);
<T> T post(TestRequest request, Class<T> type);
String post(TestRequest request);
<T> T post(String path, Object body, Class<T> type);
<T> List<T> posts(String path, Object body, Class<T> type);
String post(String path, Object body);
List<String> posts(String path, Object body);
<T> T put(TestRequest request, Class<T> type);
String put(TestRequest request);
<T> T put(String path, Object body, Class<T> type);
String put(String path, Object body);
<T> T patch(TestRequest request, Class<T> type);
String patch(TestRequest request);
<T> T patch(String path, Object body, Class<T> type);
String patch(String path, Object body);
<T> T delete(TestRequest request, Class<T> type);
String delete(TestRequest request);
<T> T delete(String path, Class<T> type);
String delete(String path);
void destroy();
}
| TestRunner |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/lucene/queries/SpanMatchNoDocsQuery.java | {
"start": 1071,
"end": 2383
} | class ____ extends SpanQuery {
private final String field;
private final String reason;
public SpanMatchNoDocsQuery(String field, String reason) {
this.field = field;
this.reason = reason;
}
@Override
public String getField() {
return field;
}
@Override
public String toString(String field) {
return "SpanMatchNoDocsQuery(\"" + reason + "\")";
}
@Override
public boolean equals(Object o) {
return sameClassAs(o);
}
@Override
public int hashCode() {
return classHash();
}
@Override
public void visit(QueryVisitor visitor) {
visitor.visitLeaf(this);
}
@Override
public SpanWeight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
return new SpanWeight(this, searcher, Collections.emptyMap(), boost) {
@Override
public void extractTermStates(Map<Term, TermStates> contexts) {}
@Override
public Spans getSpans(LeafReaderContext ctx, Postings requiredPostings) {
return null;
}
@Override
public boolean isCacheable(LeafReaderContext ctx) {
return true;
}
};
}
}
| SpanMatchNoDocsQuery |
java | processing__processing4 | core/src/processing/core/PStyle.java | {
"start": 935,
"end": 1842
} | class ____ implements PConstants {
public int imageMode;
public int rectMode;
public int ellipseMode;
public int shapeMode;
public int blendMode;
public int colorMode;
public float colorModeX;
public float colorModeY;
public float colorModeZ;
public float colorModeA;
public boolean tint;
public int tintColor;
public boolean fill;
public int fillColor;
public boolean stroke;
public int strokeColor;
public float strokeWeight;
public int strokeCap;
public int strokeJoin;
// TODO these fellas are inconsistent, and may need to go elsewhere
public float ambientR, ambientG, ambientB;
public float specularR, specularG, specularB;
public float emissiveR, emissiveG, emissiveB;
public float shininess;
public PFont textFont;
public int textAlign;
public int textAlignY;
public int textMode;
public float textSize;
public float textLeading;
}
| PStyle |
java | apache__camel | core/camel-core-model/src/generated/java/org/apache/camel/model/cloud/CombinedServiceCallServiceFilterConfigurationConfigurer.java | {
"start": 737,
"end": 3694
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("Id", java.lang.String.class);
map.put("Properties", java.util.List.class);
map.put("ServiceFilterConfigurations", java.util.List.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.model.cloud.CombinedServiceCallServiceFilterConfiguration target = (org.apache.camel.model.cloud.CombinedServiceCallServiceFilterConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "id": target.setId(property(camelContext, java.lang.String.class, value)); return true;
case "properties": target.setProperties(property(camelContext, java.util.List.class, value)); return true;
case "servicefilterconfigurations":
case "serviceFilterConfigurations": target.setServiceFilterConfigurations(property(camelContext, java.util.List.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "id": return java.lang.String.class;
case "properties": return java.util.List.class;
case "servicefilterconfigurations":
case "serviceFilterConfigurations": return java.util.List.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.model.cloud.CombinedServiceCallServiceFilterConfiguration target = (org.apache.camel.model.cloud.CombinedServiceCallServiceFilterConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "id": return target.getId();
case "properties": return target.getProperties();
case "servicefilterconfigurations":
case "serviceFilterConfigurations": return target.getServiceFilterConfigurations();
default: return null;
}
}
@Override
public Object getCollectionValueType(Object target, String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "properties": return org.apache.camel.model.PropertyDefinition.class;
case "servicefilterconfigurations":
case "serviceFilterConfigurations": return org.apache.camel.model.cloud.ServiceCallServiceFilterConfiguration.class;
default: return null;
}
}
}
| CombinedServiceCallServiceFilterConfigurationConfigurer |
java | google__guice | core/test/com/google/inject/ScopesTest.java | {
"start": 13788,
"end": 16248
} | class ____ extends AbstractModule {
@Override
protected void configure() {
bindScope(CustomScoped.class, Scopes.SINGLETON);
}
}
@Test
public void testBindScopeTooManyTimes() {
try {
Guice.createInjector(new OuterScopeModule());
fail();
} catch (CreationException expected) {
assertContains(
expected.getMessage(),
"Scope Scopes.NO_SCOPE is already bound to ScopesTest$CustomScoped",
"ScopesTest$OuterScopeModule -> ScopesTest$CustomNoScopeModule",
"Cannot bind Scopes.SINGLETON.",
"at ScopesTest$CustomSingletonModule.configure",
"ScopesTest$OuterScopeModule -> ScopesTest$CustomSingletonModule");
}
}
@Test
public void testBindDuplicateScope() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bindScope(CustomScoped.class, Scopes.SINGLETON);
bindScope(CustomScoped.class, Scopes.SINGLETON);
}
});
assertSame(
injector.getInstance(AnnotatedCustomScoped.class),
injector.getInstance(AnnotatedCustomScoped.class));
}
@Test
public void testDuplicateScopeAnnotations() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bindScope(CustomScoped.class, Scopes.NO_SCOPE);
}
});
try {
injector.getInstance(SingletonAndCustomScoped.class);
fail();
} catch (ConfigurationException expected) {
assertContains(
expected.getMessage(),
"More than one scope annotation was found: Singleton and ScopesTest$CustomScoped.",
"while locating ScopesTest$SingletonAndCustomScoped");
}
}
@Test
public void testNullScopedAsASingleton() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
final Iterator<String> values = Arrays.asList(null, "A").iterator();
@Provides
@Singleton
String provideString() {
return values.next();
}
});
assertNull(injector.getInstance(String.class));
assertNull(injector.getInstance(String.class));
assertNull(injector.getInstance(String.class));
}
| CustomSingletonModule |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java | {
"start": 2452,
"end": 2684
} | class ____ extracted from
* {@code org.apache.hadoop.registry.client.binding.JsonSerDeser},
* which is now a subclass of this class.
* @param <T> Type to marshal.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public | was |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/transaction/Transaction.java | {
"start": 930,
"end": 1812
} | interface ____ {
/**
* Retrieve inner database connection.
*
* @return DataBase connection
*
* @throws SQLException
* the SQL exception
*/
Connection getConnection() throws SQLException;
/**
* Commit inner database connection.
*
* @throws SQLException
* the SQL exception
*/
void commit() throws SQLException;
/**
* Rollback inner database connection.
*
* @throws SQLException
* the SQL exception
*/
void rollback() throws SQLException;
/**
* Close inner database connection.
*
* @throws SQLException
* the SQL exception
*/
void close() throws SQLException;
/**
* Get transaction timeout if set.
*
* @return the timeout
*
* @throws SQLException
* the SQL exception
*/
Integer getTimeout() throws SQLException;
}
| Transaction |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/telemetry/endpoints/ontextmessage/ServerEndpointWithPathParams.java | {
"start": 270,
"end": 447
} | class ____ {
@OnTextMessage
public Uni<String> onMessage(String message) {
return Uni.createFrom().item("echo 0: " + message);
}
}
| ServerEndpointWithPathParams |
java | quarkusio__quarkus | integration-tests/maven/src/test/resources-filtered/projects/multi-build-mode-parallel/module-2/src/main/java/org/acme/HelloResourceBar.java | {
"start": 236,
"end": 373
} | class ____ {
@GET
@Produces(MediaType.TEXT_PLAIN)
public String hello() {
return "Hello bar 2";
}
}
| HelloResourceBar |
java | apache__camel | components/camel-twilio/src/generated/java/org/apache/camel/component/twilio/AvailablePhoneNumberCountryTollFreeEndpointConfigurationConfigurer.java | {
"start": 758,
"end": 3940
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("ApiName", org.apache.camel.component.twilio.internal.TwilioApiName.class);
map.put("MethodName", java.lang.String.class);
map.put("PathAccountSid", java.lang.String.class);
map.put("PathCountryCode", java.lang.String.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.component.twilio.AvailablePhoneNumberCountryTollFreeEndpointConfiguration target = (org.apache.camel.component.twilio.AvailablePhoneNumberCountryTollFreeEndpointConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "apiname":
case "apiName": target.setApiName(property(camelContext, org.apache.camel.component.twilio.internal.TwilioApiName.class, value)); return true;
case "methodname":
case "methodName": target.setMethodName(property(camelContext, java.lang.String.class, value)); return true;
case "pathaccountsid":
case "pathAccountSid": target.setPathAccountSid(property(camelContext, java.lang.String.class, value)); return true;
case "pathcountrycode":
case "pathCountryCode": target.setPathCountryCode(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "apiname":
case "apiName": return org.apache.camel.component.twilio.internal.TwilioApiName.class;
case "methodname":
case "methodName": return java.lang.String.class;
case "pathaccountsid":
case "pathAccountSid": return java.lang.String.class;
case "pathcountrycode":
case "pathCountryCode": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.component.twilio.AvailablePhoneNumberCountryTollFreeEndpointConfiguration target = (org.apache.camel.component.twilio.AvailablePhoneNumberCountryTollFreeEndpointConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "apiname":
case "apiName": return target.getApiName();
case "methodname":
case "methodName": return target.getMethodName();
case "pathaccountsid":
case "pathAccountSid": return target.getPathAccountSid();
case "pathcountrycode":
case "pathCountryCode": return target.getPathCountryCode();
default: return null;
}
}
}
| AvailablePhoneNumberCountryTollFreeEndpointConfigurationConfigurer |
java | spring-projects__spring-boot | core/spring-boot-testcontainers/src/test/java/org/springframework/boot/testcontainers/service/connection/ConnectionDetailsRegistrarTests.java | {
"start": 5180,
"end": 5253
} | class ____ extends TestConnectionDetails {
}
}
| CustomTestConnectionDetails |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/EntityNameResolver.java | {
"start": 227,
"end": 1377
} | class ____ be mapped as an entity multiple times, with each mapping
* distinguished by a distinct <em>entity name</em>. This is not currently possible
* using annotations, but it may be achieved using XML-based mappings. If a class
* is mapped multiple times, Hibernate needs a way to determine which entity is
* represented by a given instance of the class. There are two ways to provide this
* information:
* <ul>
* <li>by passing the entity name explicitly to methods of {@link Session}, for
* example, by calling {@link Session#persist(String, Object)}, but this can
* be verbose and lacking in typesafety, or
* <li>by having the entity instance itself carry around the information, for
* example, by giving it a field holding the entity name, and supplying an
* {@code EntityNameResolver} which is capable of extracting the information
* from the entity instance.
* </ul>
*
* @see org.hibernate.boot.SessionFactoryBuilder#addEntityNameResolver(EntityNameResolver...)
* @see org.hibernate.cfg.Configuration#addEntityNameResolver(EntityNameResolver)
*
* @author Steve Ebersole
*/
@FunctionalInterface
public | may |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLAssignItem.java | {
"start": 927,
"end": 3158
} | class ____ extends SQLExprImpl implements SQLReplaceable {
private SQLExpr target;
private SQLExpr value;
public SQLAssignItem() {
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
SQLAssignItem that = (SQLAssignItem) o;
if (target != null ? !target.equals(that.target) : that.target != null) {
return false;
}
return value != null ? value.equals(that.value) : that.value == null;
}
@Override
public int hashCode() {
int result = target != null ? target.hashCode() : 0;
result = 31 * result + (value != null ? value.hashCode() : 0);
return result;
}
public SQLAssignItem(SQLExpr target, SQLExpr value) {
setTarget(target);
setValue(value);
}
public SQLAssignItem clone() {
SQLAssignItem x = new SQLAssignItem();
if (target != null) {
x.setTarget(target.clone());
}
if (value != null) {
x.setValue(value.clone());
}
return x;
}
@Override
public List<SQLObject> getChildren() {
return null;
}
public SQLExpr getTarget() {
return target;
}
public void setTarget(SQLExpr x) {
if (x != null) {
x.setParent(this);
}
this.target = x;
}
public SQLExpr getValue() {
return value;
}
public void setValue(SQLExpr x) {
if (x != null) {
x.setParent(this);
}
this.value = x;
}
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, this.target);
acceptChild(visitor, this.value);
}
visitor.endVisit(this);
}
@Override
public boolean replace(SQLExpr expr, SQLExpr target) {
if (this.target == expr) {
setTarget(target);
return true;
}
if (this.value == expr) {
setValue(target);
return true;
}
return false;
}
}
| SQLAssignItem |
java | google__dagger | javatests/dagger/internal/codegen/MissingBindingValidationTest.java | {
"start": 10808,
"end": 10964
} | interface ____<T> {}");
Source bar =
CompilerTests.javaSource(
"test.Bar",
"package test;",
"",
" | Foo |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_field.java | {
"start": 483,
"end": 752
} | class ____ {
@JSONField(name = "ID", serialzeFeatures={SerializerFeature.WriteClassName})
private long id;
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
}
}
| VO |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/convert/support/CollectionToCollectionConverter.java | {
"start": 1536,
"end": 3462
} | class ____ implements ConditionalGenericConverter {
private final ConversionService conversionService;
public CollectionToCollectionConverter(ConversionService conversionService) {
this.conversionService = conversionService;
}
@Override
public Set<ConvertiblePair> getConvertibleTypes() {
return Collections.singleton(new ConvertiblePair(Collection.class, Collection.class));
}
@Override
public boolean matches(TypeDescriptor sourceType, TypeDescriptor targetType) {
return ConversionUtils.canConvertElements(
sourceType.getElementTypeDescriptor(), targetType.getElementTypeDescriptor(), this.conversionService);
}
@Override
public @Nullable Object convert(@Nullable Object source, TypeDescriptor sourceType, TypeDescriptor targetType) {
if (source == null) {
return null;
}
Collection<?> sourceCollection = (Collection<?>) source;
// Shortcut if possible...
boolean copyRequired = !targetType.getType().isInstance(source);
if (!copyRequired && sourceCollection.isEmpty()) {
return source;
}
TypeDescriptor elementDesc = targetType.getElementTypeDescriptor();
if (elementDesc == null && !copyRequired) {
return source;
}
// At this point, we need a collection copy in any case, even if just for finding out about element copies...
Collection<Object> target = CollectionFactory.createCollection(targetType.getType(),
(elementDesc != null ? elementDesc.getType() : null), sourceCollection.size());
if (elementDesc == null) {
target.addAll(sourceCollection);
}
else {
for (Object sourceElement : sourceCollection) {
Object targetElement = this.conversionService.convert(sourceElement,
sourceType.elementTypeDescriptor(sourceElement), elementDesc);
target.add(targetElement);
if (sourceElement != targetElement) {
copyRequired = true;
}
}
}
return (copyRequired ? target : source);
}
}
| CollectionToCollectionConverter |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/stream/StreamExecProcessTableFunction.java | {
"start": 5063,
"end": 17910
} | class ____ extends ExecNodeBase<RowData>
implements StreamExecNode<RowData>, SingleTransformationTranslator<RowData> {
public static final String PROCESS_TRANSFORMATION = "process";
public static final String FIELD_NAME_UID = "uid";
public static final String FIELD_NAME_FUNCTION_CALL = "functionCall";
public static final String FIELD_NAME_INPUT_CHANGELOG_MODES = "inputChangelogModes";
public static final String FIELD_NAME_OUTPUT_CHANGELOG_MODE = "outputChangelogMode";
@JsonProperty(FIELD_NAME_UID)
private final @Nullable String uid;
@JsonProperty(FIELD_NAME_FUNCTION_CALL)
private final RexCall invocation;
@JsonProperty(FIELD_NAME_INPUT_CHANGELOG_MODES)
private final List<ChangelogMode> inputChangelogModes;
@JsonProperty(FIELD_NAME_OUTPUT_CHANGELOG_MODE)
private final ChangelogMode outputChangelogMode;
public StreamExecProcessTableFunction(
ReadableConfig tableConfig,
List<InputProperty> inputProperties,
RowType outputType,
String description,
@Nullable String uid,
RexCall invocation,
List<ChangelogMode> inputChangelogModes,
ChangelogMode outputChangelogMode) {
this(
ExecNodeContext.newNodeId(),
ExecNodeContext.newContext(StreamExecProcessTableFunction.class),
ExecNodeContext.newPersistedConfig(
StreamExecProcessTableFunction.class, tableConfig),
inputProperties,
outputType,
description,
uid,
invocation,
inputChangelogModes,
outputChangelogMode);
}
@JsonCreator
public StreamExecProcessTableFunction(
@JsonProperty(FIELD_NAME_ID) int id,
@JsonProperty(FIELD_NAME_TYPE) ExecNodeContext context,
@JsonProperty(FIELD_NAME_CONFIGURATION) ReadableConfig persistedConfig,
@JsonProperty(FIELD_NAME_INPUT_PROPERTIES) List<InputProperty> inputProperties,
@JsonProperty(FIELD_NAME_OUTPUT_TYPE) RowType outputType,
@JsonProperty(FIELD_NAME_DESCRIPTION) String description,
@JsonProperty(FIELD_NAME_UID) @Nullable String uid,
@JsonProperty(FIELD_NAME_FUNCTION_CALL) RexNode invocation,
@JsonProperty(FIELD_NAME_INPUT_CHANGELOG_MODES) List<ChangelogMode> inputChangelogModes,
@JsonProperty(FIELD_NAME_OUTPUT_CHANGELOG_MODE) ChangelogMode outputChangelogMode) {
super(id, context, persistedConfig, inputProperties, outputType, description);
this.uid = uid;
this.invocation = (RexCall) invocation;
this.inputChangelogModes = inputChangelogModes;
this.outputChangelogMode = outputChangelogMode;
}
public @Nullable String getUid() {
return uid;
}
@Override
@SuppressWarnings("unchecked")
protected Transformation<RowData> translateToPlanInternal(
PlannerBase planner, ExecNodeConfig config) {
final List<Transformation<RowData>> inputTransforms =
getInputEdges().stream()
.map(e -> (Transformation<RowData>) e.translateToPlan(planner))
.collect(Collectors.toList());
final List<Ord<StaticArgument>> providedInputArgs =
StreamPhysicalProcessTableFunction.getProvidedInputArgs(invocation);
final List<RexNode> operands = invocation.getOperands();
final List<Integer> inputTimeColumns =
StreamPhysicalProcessTableFunction.toInputTimeColumns(invocation);
final List<RuntimeTableSemantics> runtimeTableSemantics =
providedInputArgs.stream()
.map(
providedInputArg -> {
final RexTableArgCall tableArgCall =
(RexTableArgCall) operands.get(providedInputArg.i);
final StaticArgument tableArg = providedInputArg.e;
return createRuntimeTableSemantics(
tableArg, tableArgCall, inputTimeColumns);
})
.collect(Collectors.toList());
final CodeGeneratorContext ctx =
new CodeGeneratorContext(config, planner.getFlinkContext().getClassLoader());
final RexCall udfCall = StreamPhysicalProcessTableFunction.toUdfCall(invocation);
final GeneratedRunnerResult generated =
ProcessTableRunnerGenerator.generate(
ctx, udfCall, inputTimeColumns, inputChangelogModes, outputChangelogMode);
final GeneratedProcessTableRunner generatedRunner = generated.runner();
final LinkedHashMap<String, StateInfo> stateInfos = generated.stateInfos();
final List<RuntimeStateInfo> runtimeStateInfos =
stateInfos.entrySet().stream()
.map(
stateInfo ->
createRuntimeStateInfo(
stateInfo.getKey(), stateInfo.getValue(), config))
.collect(Collectors.toList());
final GeneratedHashFunction[] stateHashCode =
runtimeStateInfos.stream()
.map(RuntimeStateInfo::getDataType)
.map(DataType::getLogicalType)
.map(
t ->
HashCodeGenerator.generateRowHash(
ctx,
t,
"StateHashCode",
IntStream.range(0, getFieldCount(t)).toArray()))
.toArray(GeneratedHashFunction[]::new);
final GeneratedRecordEqualiser[] stateEquals =
runtimeStateInfos.stream()
.map(RuntimeStateInfo::getDataType)
.map(DataType::getLogicalType)
.map(t -> EqualiserCodeGenerator.generateRowEquals(ctx, t, "StateEquals"))
.toArray(GeneratedRecordEqualiser[]::new);
final RuntimeChangelogMode producedChangelogMode =
RuntimeChangelogMode.serialize(outputChangelogMode);
final ProcessTableOperatorFactory operatorFactory =
new ProcessTableOperatorFactory(
runtimeTableSemantics,
runtimeStateInfos,
generatedRunner,
stateHashCode,
stateEquals,
producedChangelogMode);
final String effectiveUid =
uid != null ? uid : createTransformationUid(PROCESS_TRANSFORMATION, config);
final TransformationMetadata metadata =
new TransformationMetadata(
effectiveUid,
createTransformationName(config),
createTransformationDescription(config));
final Transformation<RowData> transform;
if (runtimeTableSemantics.stream().anyMatch(RuntimeTableSemantics::hasSetSemantics)) {
transform =
createKeyedTransformation(
inputTransforms,
metadata,
operatorFactory,
planner,
runtimeTableSemantics);
} else {
transform = createNonKeyedTransformation(inputTransforms, metadata, operatorFactory);
}
if (inputsContainSingleton()) {
transform.setParallelism(1);
transform.setMaxParallelism(1);
}
return transform;
}
private RuntimeTableSemantics createRuntimeTableSemantics(
StaticArgument tableArg, RexTableArgCall tableArgCall, List<Integer> inputTimeColumns) {
final RuntimeChangelogMode consumedChangelogMode =
RuntimeChangelogMode.serialize(
inputChangelogModes.get(tableArgCall.getInputIndex()));
final DataType dataType;
if (tableArg.getDataType().isPresent()) {
dataType = tableArg.getDataType().get();
} else {
dataType = DataTypes.of(FlinkTypeFactory.toLogicalRowType(tableArgCall.type));
}
final int timeColumn = inputTimeColumns.get(tableArgCall.getInputIndex());
return new RuntimeTableSemantics(
tableArg.getName(),
tableArgCall.getInputIndex(),
dataType,
tableArgCall.getPartitionKeys(),
consumedChangelogMode,
tableArg.is(StaticArgumentTrait.PASS_COLUMNS_THROUGH),
tableArg.is(StaticArgumentTrait.SET_SEMANTIC_TABLE),
timeColumn);
}
private Transformation<RowData> createKeyedTransformation(
List<Transformation<RowData>> inputTransforms,
TransformationMetadata metadata,
ProcessTableOperatorFactory operatorFactory,
PlannerBase planner,
List<RuntimeTableSemantics> runtimeTableSemantics) {
assert runtimeTableSemantics.size() == inputTransforms.size();
final List<KeySelector<RowData, RowData>> keySelectors =
runtimeTableSemantics.stream()
.map(
inputSemantics ->
KeySelectorUtil.getRowDataSelector(
planner.getFlinkContext().getClassLoader(),
inputSemantics.partitionByColumns(),
(InternalTypeInfo<RowData>)
inputTransforms
.get(inputSemantics.getInputIndex())
.getOutputType()))
.collect(Collectors.toList());
final KeyedMultipleInputTransformation<RowData> transform =
ExecNodeUtil.createKeyedMultiInputTransformation(
inputTransforms,
keySelectors,
((RowDataKeySelector) keySelectors.get(0)).getProducedType(),
metadata,
operatorFactory,
InternalTypeInfo.of(getOutputType()),
inputTransforms.get(0).getParallelism(),
false);
transform.setChainingStrategy(ChainingStrategy.HEAD_WITH_SOURCES);
return transform;
}
private Transformation<RowData> createNonKeyedTransformation(
List<Transformation<RowData>> inputTransforms,
TransformationMetadata metadata,
ProcessTableOperatorFactory operatorFactory) {
final Transformation<RowData> inputTransform = inputTransforms.get(0);
return ExecNodeUtil.createOneInputTransformation(
inputTransform,
metadata,
operatorFactory,
InternalTypeInfo.of(getOutputType()),
inputTransform.getParallelism(),
false);
}
private static RuntimeStateInfo createRuntimeStateInfo(
String name, StateInfo stateInfo, ExecNodeConfig config) {
return new RuntimeStateInfo(
name,
stateInfo.getDataType(),
deriveStateTimeToLive(
stateInfo.getTimeToLive().orElse(null), config.getStateRetentionTime()));
}
private static long deriveStateTimeToLive(
@Nullable Duration declaration, long globalRetentionTime) {
// User declaration take precedence. Including a potential 0.
if (declaration != null) {
return declaration.toMillis();
}
// This prepares the state layout of every PTF. It makes enabling state TTL at a later
// point in time possible. The past has shown that users often don't consider ever-growing
// state initially and would like to enable it later - without breaking the savepoint.
// Setting it to Long.MAX_VALUE is a better default than 0. It comes with overhead which is
// why a 0 declaration can override this for efficiency.
if (globalRetentionTime == 0) {
return Long.MAX_VALUE;
}
return globalRetentionTime;
}
}
| StreamExecProcessTableFunction |
java | apache__camel | components/camel-dns/src/test/java/org/apache/camel/component/dns/cloud/DnsServiceDiscoveryTest.java | {
"start": 1173,
"end": 1975
} | class ____ {
@Test
void testServiceDiscovery() throws IOException {
DnsConfiguration configuration = new DnsConfiguration();
try (DnsServiceDiscovery discovery = new DnsServiceDiscovery(configuration)) {
configuration.setDomain("jabber.com");
configuration.setProto("_tcp");
List<ServiceDefinition> services = discovery.getServices("_xmpp-server");
assertNotNull(services);
assertFalse(services.isEmpty());
for (ServiceDefinition service : services) {
assertFalse(service.getMetadata().isEmpty());
assertNotNull(service.getMetadata().get("priority"));
assertNotNull(service.getMetadata().get("weight"));
}
}
}
}
| DnsServiceDiscoveryTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.