language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
quarkusio__quarkus
integration-tests/kafka-avro-apicurio2/src/test/java/io/quarkus/it/kafka/KafkaAvroIT.java
{ "start": 487, "end": 901 }
class ____ extends KafkaAvroTestBase { AvroKafkaCreator creator; @Override AvroKafkaCreator creator() { return creator; } @BeforeAll public static void setUp() { // this is for the test JVM, which also uses Kafka client, which in turn also interacts with the registry RegistryClientFactory.setProvider(new VertxHttpClientProvider(Vertx.vertx())); } }
KafkaAvroIT
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java
{ "start": 24482, "end": 30144 }
class ____ implements Rewriteable<Rewriteable> { final ShardSearchRequest request; RequestRewritable(ShardSearchRequest request) { this.request = request; } @Override public Rewriteable rewrite(QueryRewriteContext ctx) throws IOException { SearchSourceBuilder newSource = request.source() == null ? null : Rewriteable.rewrite(request.source(), ctx); AliasFilter newAliasFilter = Rewriteable.rewrite(request.getAliasFilter(), ctx); SearchExecutionContext searchExecutionContext = ctx.convertToSearchExecutionContext(); if (searchExecutionContext != null) { final FieldSortBuilder primarySort = FieldSortBuilder.getPrimaryFieldSortOrNull(newSource); if (primarySort != null && primarySort.isBottomSortShardDisjoint(searchExecutionContext, request.getBottomSortValues())) { assert newSource != null : "source should contain a primary sort field"; newSource = newSource.shallowCopy(); int trackTotalHitsUpTo = SearchRequest.resolveTrackTotalHitsUpTo(request.scroll, request.source); if (trackTotalHitsUpTo == TRACK_TOTAL_HITS_DISABLED && newSource.suggest() == null && newSource.aggregations() == null) { newSource.query(new MatchNoneQueryBuilder()); } else { newSource.size(0); } request.source(newSource); request.setBottomSortValues(null); } } if (newSource == request.source() && newAliasFilter == request.getAliasFilter()) { return this; } else { request.source(newSource); request.setAliasFilter(newAliasFilter); return new RequestRewritable(request); } } } /** * Returns the filter associated with listed filtering aliases. * <p> * The list of filtering aliases should be obtained by calling Metadata.filteringAliases. * Returns {@code null} if no filtering is required.</p> */ public static QueryBuilder parseAliasFilter( CheckedFunction<BytesReference, QueryBuilder, IOException> filterParser, IndexMetadata metadata, String... 
aliasNames ) { if (aliasNames == null || aliasNames.length == 0) { return null; } Index index = metadata.getIndex(); Map<String, AliasMetadata> aliases = metadata.getAliases(); if (aliasNames.length == 1) { AliasMetadata alias = aliases.get(aliasNames[0]); if (alias == null) { // This shouldn't happen unless alias disappeared after filteringAliases was called. throw new InvalidAliasNameException(index, aliasNames[0], "Unknown alias name was passed to alias Filter"); } return parseAliasFilter(filterParser, alias, index); } else { // we need to bench here a bit, to see maybe it makes sense to use OrFilter BoolQueryBuilder combined = new BoolQueryBuilder(); for (String aliasName : aliasNames) { AliasMetadata alias = aliases.get(aliasName); if (alias == null) { // This shouldn't happen unless alias disappeared after filteringAliases was called. throw new InvalidAliasNameException(index, aliasNames[0], "Unknown alias name was passed to alias Filter"); } QueryBuilder parsedFilter = parseAliasFilter(filterParser, alias, index); if (parsedFilter != null) { combined.should(parsedFilter); } else { // The filter might be null only if filter was removed after filteringAliases was called return null; } } return combined; } } private static QueryBuilder parseAliasFilter( CheckedFunction<BytesReference, QueryBuilder, IOException> filterParser, AliasMetadata alias, Index index ) { if (alias.filter() == null) { return null; } try { return filterParser.apply(alias.filter().compressedReference()); } catch (IOException ex) { throw new AliasFilterParsingException(index, alias.getAlias(), "Invalid alias filter", ex); } } public final Map<String, Object> getRuntimeMappings() { return source == null ? emptyMap() : source.runtimeMappings(); } /** * Returns the minimum version of the channel that the request has been passed. 
If the request never passes around, then the channel * version is {@link TransportVersion#current()}; otherwise, it's the minimum transport version of the coordinating node and data node * (and the proxy node in case the request is sent to the proxy node of the remote cluster before reaching the data node). */ public TransportVersion getChannelVersion() { return channelVersion; } /** * Should this request force {@link SourceLoader.Synthetic synthetic source}? * Use this to test if the mapping supports synthetic _source and to get a sense * of the worst case performance. Fetches with this enabled will be slower the * enabling synthetic source natively in the index. */ public boolean isForceSyntheticSource() { return forceSyntheticSource; } }
RequestRewritable
java
netty__netty
transport-classes-kqueue/src/main/java/io/netty/channel/kqueue/KQueueDomainSocketChannel.java
{ "start": 4061, "end": 6528 }
class ____ extends KQueueStreamUnsafe { @Override void readReady(KQueueRecvByteAllocatorHandle allocHandle) { switch (config().getReadMode()) { case BYTES: super.readReady(allocHandle); break; case FILE_DESCRIPTORS: readReadyFd(); break; default: throw new Error("Unexpected read mode: " + config().getReadMode()); } } private void readReadyFd() { if (socket.isInputShutdown()) { super.clearReadFilter0(); return; } final ChannelConfig config = config(); final KQueueRecvByteAllocatorHandle allocHandle = recvBufAllocHandle(); final ChannelPipeline pipeline = pipeline(); allocHandle.reset(config); try { readLoop: do { // lastBytesRead represents the fd. We use lastBytesRead because it must be set so that the // KQueueRecvByteAllocatorHandle knows if it should try to read again or not when autoRead is // enabled. int recvFd = socket.recvFd(); switch(recvFd) { case 0: allocHandle.lastBytesRead(0); break readLoop; case -1: allocHandle.lastBytesRead(-1); close(voidPromise()); return; default: allocHandle.lastBytesRead(1); allocHandle.incMessagesRead(1); readPending = false; pipeline.fireChannelRead(new FileDescriptor(recvFd)); break; } } while (allocHandle.continueReading()); allocHandle.readComplete(); pipeline.fireChannelReadComplete(); } catch (Throwable t) { allocHandle.readComplete(); pipeline.fireChannelReadComplete(); pipeline.fireExceptionCaught(t); } finally { if (shouldStopReading(config)) { clearReadFilter0(); } } } } }
KQueueDomainUnsafe
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/bytecode/spi/NotInstrumentedException.java
{ "start": 261, "end": 341 }
class ____ not * instrumented/enhanced. * * @author Steve Ebersole */ public
was
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/test/components/Alias.java
{ "start": 327, "end": 969 }
class ____ { private Long id; private Name name; private String source; public Alias() { } public Alias(String firstName, String lastName, String source) { this( new Name( firstName, lastName ), source ); } public Alias(Name name, String source) { this.name = name; this.source = source; } @Id @GeneratedValue public Long getId() { return id; } public void setId(Long id) { this.id = id; } public Name getName() { return name; } public void setName(Name name) { this.name = name; } public String getSource() { return source; } public void setSource(String source) { this.source = source; } }
Alias
java
playframework__playframework
core/play/src/main/java/play/inject/BindingKey.java
{ "start": 3184, "end": 3632 }
class ____ { * {@literal @}Inject * MyController({@literal @}Named("cached") Foo foo) { * ... * } * ... * } * }</pre> * * In the above example, the controller will get the cached `Foo` service. */ public BindingKey<T> qualifiedWith(final String name) { return underlying.qualifiedWith(name).asJava(); } /** * Bind this binding key to the given implementation class. * * <p>This
MyController
java
spring-projects__spring-boot
module/spring-boot-flyway/src/main/java/org/springframework/boot/flyway/actuate/endpoint/FlywayEndpoint.java
{ "start": 1490, "end": 2428 }
class ____ { private final ApplicationContext context; public FlywayEndpoint(ApplicationContext context) { this.context = context; } @ReadOperation public FlywayBeansDescriptor flywayBeans() { ApplicationContext target = this.context; Map<@Nullable String, ContextFlywayBeansDescriptor> contextFlywayBeans = new HashMap<>(); while (target != null) { Map<String, FlywayDescriptor> flywayBeans = new HashMap<>(); target.getBeansOfType(Flyway.class) .forEach((name, flyway) -> flywayBeans.put(name, new FlywayDescriptor(flyway.info().all()))); ApplicationContext parent = target.getParent(); contextFlywayBeans.put(target.getId(), new ContextFlywayBeansDescriptor(flywayBeans, (parent != null) ? parent.getId() : null)); target = parent; } return new FlywayBeansDescriptor(contextFlywayBeans); } /** * Description of an application's {@link Flyway} beans. */ public static final
FlywayEndpoint
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/cache/FederationCache.java
{ "start": 15674, "end": 16318 }
class ____ extends CacheResponse<SubClusterId> { @Override public List<SubClusterId> getList() { return super.getList(); } @Override public void setList(List<SubClusterId> list) { super.setList(list); } @Override public SubClusterId getItem() { return super.getItem(); } @Override public void setItem(SubClusterId item) { super.setItem(item); } } public FederationStateStore getStateStore() { return stateStore; } public void setStateStore(FederationStateStore stateStore) { this.stateStore = stateStore; } }
ApplicationHomeSubClusterCacheResponse
java
spring-projects__spring-boot
module/spring-boot-elasticsearch/src/main/java/org/springframework/boot/elasticsearch/health/ElasticsearchRestClientHealthIndicator.java
{ "start": 1638, "end": 2899 }
class ____ extends AbstractHealthIndicator { private static final String RED_STATUS = "red"; private final Rest5Client client; private final JsonParser jsonParser; public ElasticsearchRestClientHealthIndicator(Rest5Client client) { super("Elasticsearch health check failed"); this.client = client; this.jsonParser = JsonParserFactory.getJsonParser(); } @Override protected void doHealthCheck(Health.Builder builder) throws Exception { Response response = this.client.performRequest(new Request("GET", "/_cluster/health/")); if (response.getStatusCode() != HttpStatus.SC_OK) { builder.down(); builder.withDetail("statusCode", response.getStatusCode()); builder.withDetail("warnings", response.getWarnings()); return; } try (InputStream inputStream = response.getEntity().getContent()) { doHealthCheck(builder, StreamUtils.copyToString(inputStream, StandardCharsets.UTF_8)); } } private void doHealthCheck(Health.Builder builder, String json) { Map<String, Object> response = this.jsonParser.parseMap(json); String status = (String) response.get("status"); builder.status((RED_STATUS.equals(status)) ? Status.OUT_OF_SERVICE : Status.UP); builder.withDetails(response); } }
ElasticsearchRestClientHealthIndicator
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/util/Assert.java
{ "start": 1578, "end": 1798 }
class ____ similar to JUnit's assertion library. If an argument value is * deemed invalid, an {@link IllegalArgumentException} is thrown (typically). * For example: * * <pre class="code"> * Assert.notNull(clazz, "The
is
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/collectionelement/EmbeddableCollectionElementWithLazyManyToOneTest.java
{ "start": 4054, "end": 4286 }
class ____ { @Id @GeneratedValue private int id; private ContainedChild containedChild; @ElementCollection private Set<ContainedChild> containedChildren = new HashSet<>(); } @Entity(name = "Child") public static
Parent
java
apache__flink
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/aggfunctions/LastValueWithRetractAggFunctionWithoutOrderTest.java
{ "start": 3118, "end": 3668 }
class ____ extends NumberLastValueWithRetractAggFunctionWithoutOrderTestBase<Short> { @Override protected Short getValue(String v) { return Short.valueOf(v); } @Override protected AggregateFunction<Short, LastValueWithRetractAccumulator<Short>> getAggregator() { return new LastValueWithRetractAggFunction<>(DataTypes.SMALLINT().getLogicalType()); } } /** Test for {@link IntType}. */ @Nested final
ShortLastValueWithRetractAggFunctionWithoutOrderTest
java
apache__camel
components/camel-braintree/src/generated/java/org/apache/camel/component/braintree/DisputeGatewayEndpointConfigurationConfigurer.java
{ "start": 743, "end": 10095 }
class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter { private static final Map<String, Object> ALL_OPTIONS; static { Map<String, Object> map = new CaseInsensitiveMap(); map.put("AccessToken", java.lang.String.class); map.put("ApiName", org.apache.camel.component.braintree.internal.BraintreeApiName.class); map.put("Content", java.lang.String.class); map.put("DisputeId", java.lang.String.class); map.put("DocumentId", java.lang.String.class); map.put("Environment", java.lang.String.class); map.put("EvidenceId", java.lang.String.class); map.put("FileEvidenceRequest", com.braintreegateway.FileEvidenceRequest.class); map.put("HttpLogLevel", java.lang.String.class); map.put("HttpLogName", java.lang.String.class); map.put("HttpReadTimeout", java.lang.Integer.class); map.put("Id", java.lang.String.class); map.put("LogHandlerEnabled", boolean.class); map.put("MerchantId", java.lang.String.class); map.put("MethodName", java.lang.String.class); map.put("PrivateKey", java.lang.String.class); map.put("ProxyHost", java.lang.String.class); map.put("ProxyPort", java.lang.Integer.class); map.put("PublicKey", java.lang.String.class); map.put("Query", com.braintreegateway.DisputeSearchRequest.class); map.put("TextEvidenceRequest", com.braintreegateway.TextEvidenceRequest.class); ALL_OPTIONS = map; } @Override public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) { org.apache.camel.component.braintree.DisputeGatewayEndpointConfiguration target = (org.apache.camel.component.braintree.DisputeGatewayEndpointConfiguration) obj; switch (ignoreCase ? 
name.toLowerCase() : name) { case "accesstoken": case "accessToken": target.setAccessToken(property(camelContext, java.lang.String.class, value)); return true; case "apiname": case "apiName": target.setApiName(property(camelContext, org.apache.camel.component.braintree.internal.BraintreeApiName.class, value)); return true; case "content": target.setContent(property(camelContext, java.lang.String.class, value)); return true; case "disputeid": case "disputeId": target.setDisputeId(property(camelContext, java.lang.String.class, value)); return true; case "documentid": case "documentId": target.setDocumentId(property(camelContext, java.lang.String.class, value)); return true; case "environment": target.setEnvironment(property(camelContext, java.lang.String.class, value)); return true; case "evidenceid": case "evidenceId": target.setEvidenceId(property(camelContext, java.lang.String.class, value)); return true; case "fileevidencerequest": case "fileEvidenceRequest": target.setFileEvidenceRequest(property(camelContext, com.braintreegateway.FileEvidenceRequest.class, value)); return true; case "httploglevel": case "httpLogLevel": target.setHttpLogLevel(property(camelContext, java.lang.String.class, value)); return true; case "httplogname": case "httpLogName": target.setHttpLogName(property(camelContext, java.lang.String.class, value)); return true; case "httpreadtimeout": case "httpReadTimeout": target.setHttpReadTimeout(property(camelContext, java.lang.Integer.class, value)); return true; case "id": target.setId(property(camelContext, java.lang.String.class, value)); return true; case "loghandlerenabled": case "logHandlerEnabled": target.setLogHandlerEnabled(property(camelContext, boolean.class, value)); return true; case "merchantid": case "merchantId": target.setMerchantId(property(camelContext, java.lang.String.class, value)); return true; case "methodname": case "methodName": target.setMethodName(property(camelContext, java.lang.String.class, value)); return true; 
case "privatekey": case "privateKey": target.setPrivateKey(property(camelContext, java.lang.String.class, value)); return true; case "proxyhost": case "proxyHost": target.setProxyHost(property(camelContext, java.lang.String.class, value)); return true; case "proxyport": case "proxyPort": target.setProxyPort(property(camelContext, java.lang.Integer.class, value)); return true; case "publickey": case "publicKey": target.setPublicKey(property(camelContext, java.lang.String.class, value)); return true; case "query": target.setQuery(property(camelContext, com.braintreegateway.DisputeSearchRequest.class, value)); return true; case "textevidencerequest": case "textEvidenceRequest": target.setTextEvidenceRequest(property(camelContext, com.braintreegateway.TextEvidenceRequest.class, value)); return true; default: return false; } } @Override public Map<String, Object> getAllOptions(Object target) { return ALL_OPTIONS; } @Override public Class<?> getOptionType(String name, boolean ignoreCase) { switch (ignoreCase ? 
name.toLowerCase() : name) { case "accesstoken": case "accessToken": return java.lang.String.class; case "apiname": case "apiName": return org.apache.camel.component.braintree.internal.BraintreeApiName.class; case "content": return java.lang.String.class; case "disputeid": case "disputeId": return java.lang.String.class; case "documentid": case "documentId": return java.lang.String.class; case "environment": return java.lang.String.class; case "evidenceid": case "evidenceId": return java.lang.String.class; case "fileevidencerequest": case "fileEvidenceRequest": return com.braintreegateway.FileEvidenceRequest.class; case "httploglevel": case "httpLogLevel": return java.lang.String.class; case "httplogname": case "httpLogName": return java.lang.String.class; case "httpreadtimeout": case "httpReadTimeout": return java.lang.Integer.class; case "id": return java.lang.String.class; case "loghandlerenabled": case "logHandlerEnabled": return boolean.class; case "merchantid": case "merchantId": return java.lang.String.class; case "methodname": case "methodName": return java.lang.String.class; case "privatekey": case "privateKey": return java.lang.String.class; case "proxyhost": case "proxyHost": return java.lang.String.class; case "proxyport": case "proxyPort": return java.lang.Integer.class; case "publickey": case "publicKey": return java.lang.String.class; case "query": return com.braintreegateway.DisputeSearchRequest.class; case "textevidencerequest": case "textEvidenceRequest": return com.braintreegateway.TextEvidenceRequest.class; default: return null; } } @Override public Object getOptionValue(Object obj, String name, boolean ignoreCase) { org.apache.camel.component.braintree.DisputeGatewayEndpointConfiguration target = (org.apache.camel.component.braintree.DisputeGatewayEndpointConfiguration) obj; switch (ignoreCase ? 
name.toLowerCase() : name) { case "accesstoken": case "accessToken": return target.getAccessToken(); case "apiname": case "apiName": return target.getApiName(); case "content": return target.getContent(); case "disputeid": case "disputeId": return target.getDisputeId(); case "documentid": case "documentId": return target.getDocumentId(); case "environment": return target.getEnvironment(); case "evidenceid": case "evidenceId": return target.getEvidenceId(); case "fileevidencerequest": case "fileEvidenceRequest": return target.getFileEvidenceRequest(); case "httploglevel": case "httpLogLevel": return target.getHttpLogLevel(); case "httplogname": case "httpLogName": return target.getHttpLogName(); case "httpreadtimeout": case "httpReadTimeout": return target.getHttpReadTimeout(); case "id": return target.getId(); case "loghandlerenabled": case "logHandlerEnabled": return target.isLogHandlerEnabled(); case "merchantid": case "merchantId": return target.getMerchantId(); case "methodname": case "methodName": return target.getMethodName(); case "privatekey": case "privateKey": return target.getPrivateKey(); case "proxyhost": case "proxyHost": return target.getProxyHost(); case "proxyport": case "proxyPort": return target.getProxyPort(); case "publickey": case "publicKey": return target.getPublicKey(); case "query": return target.getQuery(); case "textevidencerequest": case "textEvidenceRequest": return target.getTextEvidenceRequest(); default: return null; } } }
DisputeGatewayEndpointConfigurationConfigurer
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/UntypedResponseMessageHeaders.java
{ "start": 1233, "end": 1285 }
class ____ the request message. * * @return
of
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/error/ShouldHaveSizeGreaterThanOrEqualTo_create_Test.java
{ "start": 1407, "end": 2478 }
class ____ { private ErrorMessageFactory factory; @BeforeEach void setUp() { factory = shouldHaveSizeGreaterThanOrEqualTo("ab", 2, 4); } @Test void should_create_error_message() { // WHEN String message = factory.create(new TextDescription("Test"), STANDARD_REPRESENTATION); // THEN then(message).isEqualTo(format("[Test] %n" + "Expecting size of:%n" + " \"ab\"%n" + "to be greater than or equal to 4 but was 2")); } @Test void should_create_error_message_with_hexadecimal_representation() { // WHEN String message = factory.create(new TextDescription("Test"), new HexadecimalRepresentation()); // THEN then(message).isEqualTo(format("[Test] %n" + "Expecting size of:%n" + " \"['0x0061', '0x0062']\"%n" + "to be greater than or equal to 4 but was 2")); } }
ShouldHaveSizeGreaterThanOrEqualTo_create_Test
java
apache__kafka
clients/src/main/java/org/apache/kafka/common/requests/ResponseHeader.java
{ "start": 1142, "end": 4636 }
class ____ implements AbstractRequestResponse { private static final int SIZE_NOT_INITIALIZED = -1; private final ResponseHeaderData data; private final short headerVersion; private int size = SIZE_NOT_INITIALIZED; public ResponseHeader(int correlationId, short headerVersion) { this(new ResponseHeaderData().setCorrelationId(correlationId), headerVersion); } public ResponseHeader(ResponseHeaderData data, short headerVersion) { this.data = data; this.headerVersion = headerVersion; } /** * Calculates the size of {@link ResponseHeader} in bytes. * * This method to calculate size should be only when it is immediately followed by * {@link #write(ByteBuffer, ObjectSerializationCache)} method call. In such cases, ObjectSerializationCache * helps to avoid the serialization twice. In all other cases, {@link #size()} should be preferred instead. * * Calls to this method leads to calculation of size every time it is invoked. {@link #size()} should be preferred * instead. * * Visible for testing. */ int size(ObjectSerializationCache serializationCache) { return data().size(serializationCache, headerVersion); } /** * Returns the size of {@link ResponseHeader} in bytes. * * Calls to this method are idempotent and inexpensive since it returns the cached value of size after the first * invocation. 
*/ public int size() { if (this.size == SIZE_NOT_INITIALIZED) { this.size = size(new ObjectSerializationCache()); } return size; } public int correlationId() { return this.data.correlationId(); } public short headerVersion() { return headerVersion; } public ResponseHeaderData data() { return data; } // visible for testing void write(ByteBuffer buffer, ObjectSerializationCache serializationCache) { data.write(new ByteBufferAccessor(buffer), serializationCache, headerVersion); } @Override public String toString() { return "ResponseHeader(" + "correlationId=" + data.correlationId() + ", headerVersion=" + headerVersion + ")"; } public static ResponseHeader parse(ByteBuffer buffer, short headerVersion) { final int bufferStartPositionForHeader = buffer.position(); final ResponseHeader header = new ResponseHeader( new ResponseHeaderData(new ByteBufferAccessor(buffer), headerVersion), headerVersion); // Size of header is calculated by the shift in the position of buffer's start position during parsing. // Prior to parsing, the buffer's start position points to header data and after the parsing operation // the buffer's start position points to api message. For more information on how the buffer is // constructed, see RequestUtils#serialize() header.size = Math.max(buffer.position() - bufferStartPositionForHeader, 0); return header; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ResponseHeader that = (ResponseHeader) o; return headerVersion == that.headerVersion && Objects.equals(data, that.data); } @Override public int hashCode() { return Objects.hash(data, headerVersion); } }
ResponseHeader
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesResponse.java
{ "start": 913, "end": 2129 }
class ____ extends ActionResponse { private final Map<String, List<AliasMetadata>> aliases; private final Map<String, List<DataStreamAlias>> dataStreamAliases; public GetAliasesResponse(Map<String, List<AliasMetadata>> aliases, Map<String, List<DataStreamAlias>> dataStreamAliases) { this.aliases = aliases; this.dataStreamAliases = dataStreamAliases; } public Map<String, List<AliasMetadata>> getAliases() { return aliases; } public Map<String, List<DataStreamAlias>> getDataStreamAliases() { return dataStreamAliases; } @Override public void writeTo(StreamOutput out) throws IOException { TransportAction.localOnly(); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } GetAliasesResponse that = (GetAliasesResponse) o; return Objects.equals(aliases, that.aliases) && Objects.equals(dataStreamAliases, that.dataStreamAliases); } @Override public int hashCode() { return Objects.hash(aliases, dataStreamAliases); } }
GetAliasesResponse
java
apache__camel
core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedSendDynamicProcessor.java
{ "start": 1738, "end": 5224 }
class ____ extends ManagedProcessor implements ManagedSendDynamicProcessorMBean { private String uri; private boolean sanitize; public ManagedSendDynamicProcessor(CamelContext context, SendDynamicProcessor processor, ProcessorDefinition<?> definition) { super(context, processor, definition); } @Override public void init(ManagementStrategy strategy) { super.init(strategy); this.sanitize = strategy.getManagementAgent().getMask() != null ? strategy.getManagementAgent().getMask() : true; if (sanitize) { uri = URISupport.sanitizeUri(getProcessor().getUri()); } else { uri = getProcessor().getUri(); } } @Override public void reset() { super.reset(); if (getProcessor().getEndpointUtilizationStatistics() != null) { getProcessor().getEndpointUtilizationStatistics().clear(); } } @Override public Boolean getSupportExtendedInformation() { return true; } @Override public SendDynamicProcessor getProcessor() { return (SendDynamicProcessor) super.getProcessor(); } @Override public String getDestination() { return uri; } @Override public String getUri() { return uri; } @Override public String getVariableSend() { return getProcessor().getVariableSend(); } @Override public String getVariableReceive() { return getProcessor().getVariableReceive(); } @Override public String getMessageExchangePattern() { if (getProcessor().getPattern() != null) { return getProcessor().getPattern().name(); } else { return null; } } @Override public Integer getCacheSize() { return getProcessor().getCacheSize(); } @Override public Boolean isIgnoreInvalidEndpoint() { return getProcessor().isIgnoreInvalidEndpoint(); } @Override public Boolean isAllowOptimisedComponents() { return getProcessor().isAllowOptimisedComponents(); } @Override public Boolean isOptimised() { return getProcessor().getDynamicAware() != null; } @Override public TabularData extendedInformation() { try { TabularData answer = new TabularDataSupport(CamelOpenMBeanTypes.endpointsUtilizationTabularType()); EndpointUtilizationStatistics stats = 
getProcessor().getEndpointUtilizationStatistics(); if (stats != null) { for (Map.Entry<String, Long> entry : stats.getStatistics().entrySet()) { CompositeType ct = CamelOpenMBeanTypes.endpointsUtilizationCompositeType(); String url = entry.getKey(); if (sanitize) { url = URISupport.sanitizeUri(url); } Long hits = entry.getValue(); if (hits == null) { hits = 0L; } CompositeData data = new CompositeDataSupport(ct, new String[] { "url", "hits" }, new Object[] { url, hits }); answer.put(data); } } return answer; } catch (Exception e) { throw RuntimeCamelException.wrapRuntimeCamelException(e); } } }
ManagedSendDynamicProcessor
java
spring-projects__spring-boot
module/spring-boot-batch-jdbc/src/test/java/org/springframework/boot/batch/jdbc/autoconfigure/BatchJdbcAutoConfigurationTests.java
{ "start": 28476, "end": 29109 }
class ____ { @Autowired private JobRepository jobRepository; @Bean Job discreteJob() { AbstractJob job = new AbstractJob("discreteLocalJob") { @Override public Collection<String> getStepNames() { return Collections.emptySet(); } @Override public Step getStep(String stepName) { return mock(Step.class); } @Override protected void doExecute(JobExecution execution) { execution.setStatus(BatchStatus.COMPLETED); } }; job.setJobRepository(this.jobRepository); return job; } } @Configuration(proxyBeanMethods = false) static
NamedJobConfigurationWithLocalJob
java
spring-projects__spring-framework
spring-webflux/src/test/java/org/springframework/web/reactive/result/method/annotation/MessageWriterResultHandlerTests.java
{ "start": 10053, "end": 10663 }
class ____ { Resource resource() { return null; } String string() { return null; } void voidReturn() { } Mono<Void> monoVoid() { return null; } Completable completable() { return null; } Flux<Void> fluxVoid() { return null; } Observable<Void> observableVoid() { return null; } Flowable<Void> flowableVoid() { return null; } OutputStream outputStream() { return null; } List<ParentClass> listParentClass() { return null; } Identifiable identifiable() { return null; } List<Identifiable> listIdentifiable() { return null; } Object object() { return null; } } }
TestController
java
apache__flink
flink-python/src/main/java/org/apache/flink/table/runtime/arrow/sources/ArrowTableSourceFactory.java
{ "start": 1337, "end": 2314 }
class ____ implements DynamicTableSourceFactory { public static final String IDENTIFIER = "python-arrow-source"; @Override public String factoryIdentifier() { return IDENTIFIER; } @Override public Set<ConfigOption<?>> requiredOptions() { Set<ConfigOption<?>> options = new HashSet<>(); options.add(ArrowTableSourceOptions.DATA); return options; } @Override public Set<ConfigOption<?>> optionalOptions() { return new HashSet<>(); } @Override public DynamicTableSource createDynamicTableSource(Context context) { FactoryUtil.TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, context); ReadableConfig tableOptions = helper.getOptions(); String data = tableOptions.get(ArrowTableSourceOptions.DATA); DataType dataType = context.getPhysicalRowDataType(); return new ArrowTableSource(dataType, data); } }
ArrowTableSourceFactory
java
apache__rocketmq
tools/src/main/java/org/apache/rocketmq/tools/command/topic/RebalanceResult.java
{ "start": 988, "end": 1288 }
class ____ { private Map<String/*ip*/, List<MessageQueue>> result = new HashMap<>(); public Map<String, List<MessageQueue>> getResult() { return result; } public void setResult(final Map<String, List<MessageQueue>> result) { this.result = result; } }
RebalanceResult
java
apache__hadoop
hadoop-tools/hadoop-gcp/src/main/java/org/apache/hadoop/fs/gs/GoogleHadoopFileSystem.java
{ "start": 20986, "end": 27883 }
class ____ Hadoop 2's FsShell. */ @Override public Path makeQualified(final Path path) { Path qualifiedPath = super.makeQualified(path); URI uri = qualifiedPath.toUri(); checkState("".equals(uri.getPath()) || qualifiedPath.isAbsolute(), "Path '{}' must be fully qualified.", qualifiedPath); Path result; String upath = uri.getPath(); // Strip initial '..'s to make root is its own parent. int i = 0; while (upath.startsWith("/../", i)) { // Leave a preceding slash, so path is still absolute. i += 3; } if (i == upath.length() || upath.substring(i).equals("/..")) { // Allow a Path of gs://someBucket to map to gs://someBucket/ result = new Path(uri.getScheme(), uri.getAuthority(), "/"); } else if (i == 0) { result = qualifiedPath; } else { result = new Path(uri.getScheme(), uri.getAuthority(), upath.substring(i)); } LOG.trace("makeQualified(path: {}): {}", path, result); return result; } /** * Returns a URI of the root of this FileSystem. */ @Override public URI getUri() { return fsRoot.toUri(); } /** * The default port is listed as -1 as an indication that ports are not used. */ @Override protected int getDefaultPort() { int result = -1; LOG.trace("getDefaultPort(): {}", result); return result; } @Override public boolean hasPathCapability(final Path path, final String capability) { checkNotNull(path, "path must not be null"); checkArgument(!isNullOrEmpty(capability), "capability must not be null or empty string for {}", path); switch (Ascii.toLowerCase(capability)) { case CommonPathCapabilities.FS_APPEND: case CommonPathCapabilities.FS_CONCAT: return true; default: return false; } } /** * Gets the current working directory. * * @return The current working directory. 
*/ @Override public Path getWorkingDirectory() { LOG.trace("getWorkingDirectory(): {}", workingDirectory); return workingDirectory; } @Override public boolean mkdirs(final Path hadoopPath, final FsPermission permission) throws IOException { return runOperation( GcsStatistics.INVOCATION_MKDIRS, () -> { checkArgument(hadoopPath != null, "hadoopPath must not be null"); LOG.trace("mkdirs(hadoopPath: {}, permission: {}): true", hadoopPath, permission); checkOpen(); URI gcsPath = getGcsPath(hadoopPath); try { getGcsFs().mkdirs(gcsPath); } catch (java.nio.file.FileAlreadyExistsException faee) { // Need to convert to the Hadoop flavor of FileAlreadyExistsException. throw (FileAlreadyExistsException) new FileAlreadyExistsException( String.format( "mkdirs(hadoopPath: %s, permission: %s): failed", hadoopPath, permission)) .initCause(faee); } return true; }, String.format("mkdirs(%s)", hadoopPath)); } @Override public FileStatus getFileStatus(final Path path) throws IOException { return runOperation( GcsStatistics.INVOCATION_GET_FILE_STATUS, () -> { checkArgument(path != null, "path must not be null"); checkOpen(); URI gcsPath = getGcsPath(path); FileInfo fileInfo = getGcsFs().getFileInfo(gcsPath); if (!fileInfo.exists()) { throw new FileNotFoundException( String.format( "%s not found: %s", fileInfo.isDirectory() ? "Directory" : "File", path)); } String userName = getUgiUserName(); return getFileStatus(fileInfo, userName); }, String.format("getFileStatus(%s)", path)); } /** * Returns home directory of the current user. * * <p>Note: This directory is only used for Hadoop purposes. It is not the same as a user's OS * home directory. */ @Override public Path getHomeDirectory() { Path result = new Path(fsRoot, "user/" + System.getProperty("user.name")); LOG.trace("getHomeDirectory(): {}", result); return result; } /** * {@inheritDoc} * * <p>Returns the service if delegation tokens are configured, otherwise, null. 
*/ @Override public String getCanonicalServiceName() { // TODO: Add delegation token support return null; } /** * Gets GCS FS instance. */ GoogleCloudStorageFileSystem getGcsFs() { return gcsFs; } /** * Assert that the FileSystem has been initialized and not close()d. */ private void checkOpen() throws IOException { if (isClosed) { throw new IOException("GoogleHadoopFileSystem has been closed or not initialized."); } } @Override public void close() throws IOException { LOG.trace("close()"); if (isClosed) { return; } super.close(); getGcsFs().close(); this.isClosed = true; } @Override public long getUsed() throws IOException { long result = super.getUsed(); LOG.trace("getUsed(): {}", result); return result; } @Override public void setWorkingDirectory(final Path hadoopPath) { checkArgument(hadoopPath != null, "hadoopPath must not be null"); URI gcsPath = UriPaths.toDirectory(getGcsPath(hadoopPath)); workingDirectory = getHadoopPath(gcsPath); LOG.trace("setWorkingDirectory(hadoopPath: {}): {}", hadoopPath, workingDirectory); } /** * Get the instrumentation's IOStatistics. * @return statistics */ @Override public IOStatistics getIOStatistics() { return instrumentation != null ? instrumentation.getIOStatistics() : null; } /** * Get the storage statistics of this filesystem. * @return the storage statistics */ @Override public GcsStorageStatistics getStorageStatistics() { return this.storageStatistics; } private static String getUgiUserName() throws IOException { UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); return ugi.getShortUserName(); } private FileStatus getFileStatus(FileInfo fileInfo, String userName) { checkNotNull(fileInfo, "fileInfo should not be null"); // GCS does not provide modification time. It only provides creation time. // It works for objects because they are immutable once created. 
FileStatus status = new FileStatus( fileInfo.getSize(), fileInfo.isDirectory(), REPLICATION_FACTOR_DEFAULT, defaultBlockSize, fileInfo.getModificationTime(), fileInfo.getModificationTime(), reportedPermissions, userName, userName, getHadoopPath(fileInfo.getPath())); LOG.trace("FileStatus(path: {}, userName: {}): {}", fileInfo.getPath(), userName, status); return status; } }
of
java
spring-projects__spring-boot
loader/spring-boot-loader-tools/src/main/java/org/springframework/boot/loader/tools/MainClassFinder.java
{ "start": 12811, "end": 13879 }
class ____ annotated with the annotations with the given * {@code annotationNames}. * @param name the name of the class * @param annotationNames the names of the annotations on the class */ MainClass(String name, Set<String> annotationNames) { this.name = name; this.annotationNames = Collections.unmodifiableSet(new HashSet<>(annotationNames)); } String getName() { return this.name; } Set<String> getAnnotationNames() { return this.annotationNames; } @Override public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } MainClass other = (MainClass) obj; return this.name.equals(other.name); } @Override public int hashCode() { return this.name.hashCode(); } @Override public String toString() { return this.name; } } /** * Find a single main class, throwing an {@link IllegalStateException} if multiple * candidates exist. */ private static final
is
java
micronaut-projects__micronaut-core
core/src/main/java/io/micronaut/core/annotation/AnnotationValueProvider.java
{ "start": 844, "end": 1021 }
interface ____<A extends Annotation> { /** * @return The annotation value. Never null. */ @NonNull AnnotationValue<A> annotationValue(); }
AnnotationValueProvider
java
apache__kafka
tools/src/main/java/org/apache/kafka/tools/TopicCommand.java
{ "start": 16777, "end": 18966 }
class ____ { private final TopicCommandOptions opts; private final Set<Integer> liveBrokers; private final boolean describeConfigs; private final boolean describePartitions; public DescribeOptions(TopicCommandOptions opts, Set<Integer> liveBrokers) { this.opts = opts; this.liveBrokers = liveBrokers; this.describeConfigs = !opts.reportUnavailablePartitions() && !opts.reportUnderReplicatedPartitions() && !opts.reportUnderMinIsrPartitions() && !opts.reportAtMinIsrPartitions(); this.describePartitions = !opts.reportOverriddenConfigs(); } private boolean shouldPrintUnderReplicatedPartitions(PartitionDescription partitionDescription) { return opts.reportUnderReplicatedPartitions() && partitionDescription.isUnderReplicated(); } private boolean shouldPrintUnavailablePartitions(PartitionDescription partitionDescription) { return opts.reportUnavailablePartitions() && partitionDescription.hasUnavailablePartitions(liveBrokers); } private boolean shouldPrintUnderMinIsrPartitions(PartitionDescription partitionDescription) { return opts.reportUnderMinIsrPartitions() && partitionDescription.isUnderMinIsr(); } private boolean shouldPrintAtMinIsrPartitions(PartitionDescription partitionDescription) { return opts.reportAtMinIsrPartitions() && partitionDescription.isAtMinIsrPartitions(); } private boolean shouldPrintTopicPartition(PartitionDescription partitionDesc) { return describeConfigs || shouldPrintUnderReplicatedPartitions(partitionDesc) || shouldPrintUnavailablePartitions(partitionDesc) || shouldPrintUnderMinIsrPartitions(partitionDesc) || shouldPrintAtMinIsrPartitions(partitionDesc); } public void maybePrintPartitionDescription(PartitionDescription desc) { if (shouldPrintTopicPartition(desc)) { desc.printDescription(); } } } public static
DescribeOptions
java
google__guava
android/guava-tests/test/com/google/common/collect/ArrayListMultimapTest.java
{ "start": 1717, "end": 6751 }
class ____ extends TestCase { @GwtIncompatible // suite @J2ktIncompatible @AndroidIncompatible // test-suite builders public static Test suite() { TestSuite suite = new TestSuite(); suite.addTest( ListMultimapTestSuiteBuilder.using( new TestStringListMultimapGenerator() { @Override protected ListMultimap<String, String> create(Entry<String, String>[] entries) { ListMultimap<String, String> multimap = ArrayListMultimap.create(); for (Entry<String, String> entry : entries) { multimap.put(entry.getKey(), entry.getValue()); } return multimap; } }) .named("ArrayListMultimap") .withFeatures( MapFeature.ALLOWS_NULL_KEYS, MapFeature.ALLOWS_NULL_VALUES, MapFeature.ALLOWS_ANY_NULL_QUERIES, MapFeature.GENERAL_PURPOSE, MapFeature.FAILS_FAST_ON_CONCURRENT_MODIFICATION, CollectionFeature.SUPPORTS_ITERATOR_REMOVE, CollectionFeature.SERIALIZABLE, CollectionSize.ANY) .createTestSuite()); suite.addTestSuite(ArrayListMultimapTest.class); return suite; } protected ListMultimap<String, Integer> create() { return ArrayListMultimap.create(); } /** Confirm that get() returns a List implementing RandomAccess. */ public void testGetRandomAccess() { Multimap<String, Integer> multimap = create(); multimap.put("foo", 1); multimap.put("foo", 3); assertTrue(multimap.get("foo") instanceof RandomAccess); assertTrue(multimap.get("bar") instanceof RandomAccess); } /** Confirm that removeAll() returns a List implementing RandomAccess. */ public void testRemoveAllRandomAccess() { Multimap<String, Integer> multimap = create(); multimap.put("foo", 1); multimap.put("foo", 3); assertTrue(multimap.removeAll("foo") instanceof RandomAccess); assertTrue(multimap.removeAll("bar") instanceof RandomAccess); } /** Confirm that replaceValues() returns a List implementing RandomAccess. 
*/ public void testReplaceValuesRandomAccess() { Multimap<String, Integer> multimap = create(); multimap.put("foo", 1); multimap.put("foo", 3); assertTrue(multimap.replaceValues("foo", asList(2, 4)) instanceof RandomAccess); assertTrue(multimap.replaceValues("bar", asList(2, 4)) instanceof RandomAccess); } /** Test throwing ConcurrentModificationException when a sublist's ancestor's delegate changes. */ public void testSublistConcurrentModificationException() { ListMultimap<String, Integer> multimap = create(); multimap.putAll("foo", asList(1, 2, 3, 4, 5)); List<Integer> list = multimap.get("foo"); assertThat(multimap.get("foo")).containsExactly(1, 2, 3, 4, 5).inOrder(); List<Integer> sublist = list.subList(0, 5); assertThat(sublist).containsExactly(1, 2, 3, 4, 5).inOrder(); sublist.clear(); assertTrue(sublist.isEmpty()); multimap.put("foo", 6); assertThrows(ConcurrentModificationException.class, () -> sublist.isEmpty()); } public void testCreateFromMultimap() { Multimap<String, Integer> multimap = create(); multimap.put("foo", 1); multimap.put("foo", 3); multimap.put("bar", 2); ArrayListMultimap<String, Integer> copy = ArrayListMultimap.create(multimap); assertEquals(multimap, copy); } public void testCreate() { ArrayListMultimap<String, Integer> multimap = ArrayListMultimap.create(); assertEquals(3, multimap.expectedValuesPerKey); } public void testCreateFromSizes() { ArrayListMultimap<String, Integer> multimap = ArrayListMultimap.create(15, 20); assertEquals(20, multimap.expectedValuesPerKey); } public void testCreateFromIllegalSizes() { assertThrows(IllegalArgumentException.class, () -> ArrayListMultimap.create(15, -2)); assertThrows(IllegalArgumentException.class, () -> ArrayListMultimap.create(-15, 2)); } public void testCreateFromHashMultimap() { Multimap<String, Integer> original = HashMultimap.create(); ArrayListMultimap<String, Integer> multimap = ArrayListMultimap.create(original); assertEquals(3, multimap.expectedValuesPerKey); } public void 
testCreateFromArrayListMultimap() { ArrayListMultimap<String, Integer> original = ArrayListMultimap.create(15, 20); ArrayListMultimap<String, Integer> multimap = ArrayListMultimap.create(original); assertEquals(20, multimap.expectedValuesPerKey); } public void testTrimToSize() { ArrayListMultimap<String, Integer> multimap = ArrayListMultimap.create(); multimap.put("foo", 1); multimap.put("foo", 2); multimap.put("bar", 3); multimap.trimToSize(); assertEquals(3, multimap.size()); assertThat(multimap.get("foo")).containsExactly(1, 2).inOrder(); assertThat(multimap.get("bar")).contains(3); } }
ArrayListMultimapTest
java
apache__kafka
streams/src/main/java/org/apache/kafka/streams/processor/internals/PunctuationSchedule.java
{ "start": 3607, "end": 3951 }
class ____ implements Cancellable { private PunctuationSchedule schedule; synchronized void setSchedule(final PunctuationSchedule schedule) { this.schedule = schedule; } @Override public synchronized void cancel() { schedule.markCancelled(); } } }
RepointableCancellable
java
spring-projects__spring-boot
module/spring-boot-kafka/src/test/java/org/springframework/boot/kafka/autoconfigure/KafkaAutoConfigurationTests.java
{ "start": 53697, "end": 53897 }
class ____ { @Bean RecordMessageConverter myMessageConverter() { return mock(RecordMessageConverter.class); } } @Configuration(proxyBeanMethods = false) static
MessageConverterConfiguration
java
apache__camel
components/camel-servlet/src/test/java/org/apache/camel/component/servlet/rest/RestServletBindingModeJsonTest.java
{ "start": 1282, "end": 3326 }
class ____ extends ServletCamelRouterTestSupport { @Test public void testBindingMode() throws Exception { MockEndpoint mock = getMockEndpoint("mock:input"); mock.expectedMessageCount(1); mock.message(0).body().isInstanceOf(UserJaxbPojo.class); String body = "{\"id\": 123, \"name\": \"Donald Duck\"}"; WebRequest req = new PostMethodWebRequest( contextUrl + "/services/users/new", new ByteArrayInputStream(body.getBytes()), "application/json"); WebResponse response = query(req, false); assertEquals(200, response.getResponseCode()); MockEndpoint.assertIsSatisfied(context); UserJaxbPojo user = mock.getReceivedExchanges().get(0).getIn().getBody(UserJaxbPojo.class); assertNotNull(user); assertEquals(123, user.getId()); assertEquals("Donald Duck", user.getName()); } @Test public void testBindingModeWrong() throws Exception { MockEndpoint mock = getMockEndpoint("mock:input"); mock.expectedMessageCount(0); String body = "<user name=\"Donald Duck\" id=\"123\"></user>"; WebRequest req = new PostMethodWebRequest( contextUrl + "/services/users/new", new ByteArrayInputStream(body.getBytes()), "application/xml"); WebResponse response = query(req, false); assertEquals(500, response.getResponseCode()); MockEndpoint.assertIsSatisfied(context); } @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { restConfiguration().component("servlet").bindingMode(RestBindingMode.json); // use the rest DSL to define the rest services rest("/users/") .post("new").type(UserJaxbPojo.class) .to("mock:input"); } }; } }
RestServletBindingModeJsonTest
java
elastic__elasticsearch
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RejectableTask.java
{ "start": 319, "end": 408 }
interface ____ extends InferenceRequest { void onRejection(Exception e); }
RejectableTask
java
netty__netty
resolver-dns/src/main/java/io/netty/resolver/dns/RoundRobinDnsAddressResolverGroup.java
{ "start": 1367, "end": 2779 }
class ____ extends DnsAddressResolverGroup { public RoundRobinDnsAddressResolverGroup(DnsNameResolverBuilder dnsResolverBuilder) { super(dnsResolverBuilder); } public RoundRobinDnsAddressResolverGroup( Class<? extends DatagramChannel> channelType, DnsServerAddressStreamProvider nameServerProvider) { super(channelType, nameServerProvider); } public RoundRobinDnsAddressResolverGroup( ChannelFactory<? extends DatagramChannel> channelFactory, DnsServerAddressStreamProvider nameServerProvider) { super(channelFactory, nameServerProvider); } /** * We need to override this method, not * {@link #newNameResolver(EventLoop, ChannelFactory, DnsServerAddressStreamProvider)}, * because we need to eliminate possible caching of {@link io.netty.resolver.NameResolver#resolve} * by {@link InflightNameResolver} created in * {@link #newResolver(EventLoop, ChannelFactory, DnsServerAddressStreamProvider)}. */ @Override protected final AddressResolver<InetSocketAddress> newAddressResolver(EventLoop eventLoop, NameResolver<InetAddress> resolver) throws Exception { return new RoundRobinInetAddressResolver(eventLoop, resolver).asAddressResolver(); } }
RoundRobinDnsAddressResolverGroup
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
{ "start": 3157, "end": 3910 }
class ____<T> { String src; INodeLink<T> target; MountPoint(String srcPath, INodeLink<T> mountLink) { src = srcPath; target = mountLink; } /** * Returns the source of mount point. * @return The source */ public String getSource() { return this.src; } /** * Returns the target INode link. * @return The target INode link */ public INodeLink<T> getTarget() { return this.target; } } /** * Breaks file path into component names. * @param path * @return array of names component names */ static String[] breakIntoPathComponents(final String path) { return path == null ? null : path.split(Path.SEPARATOR); } /** * Internal
MountPoint
java
micronaut-projects__micronaut-core
inject/src/main/java/io/micronaut/context/DefaultBeanContext.java
{ "start": 161979, "end": 163296 }
class ____<T> { private final Argument<T> beanType; private final Qualifier<T> qualifier; private final boolean throwNonUnique; private final int hashCode; /** * A bean key for the given bean definition. * * @param argument The argument * @param qualifier The qualifier * @param throwNonUnique The throwNonUnique */ BeanCandidateKey(Argument<T> argument, Qualifier<T> qualifier, boolean throwNonUnique) { this.beanType = argument; this.qualifier = qualifier; this.hashCode = argument.typeHashCode(); this.throwNonUnique = throwNonUnique; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } BeanCandidateKey<?> beanKey = (BeanCandidateKey<?>) o; return beanType.equalsType(beanKey.beanType) && Objects.equals(qualifier, beanKey.qualifier) && throwNonUnique == beanKey.throwNonUnique; } @Override public int hashCode() { return hashCode; } } private final
BeanCandidateKey
java
micronaut-projects__micronaut-core
http-server-tck/src/main/java/io/micronaut/http/server/tck/tests/filter/RequestFilterCompletableFutureFutureProceedTest.java
{ "start": 2461, "end": 2626 }
class ____ { //end::clazz[] */ @Requires(property = "spec.name", value = SPEC_NAME) @ServerFilter(ServerFilter.MATCH_ALL_PATTERN) static
FooBarFilter
java
alibaba__fastjson
src/test/java/com/alibaba/fastjson/deserializer/issues3796/bean/ObjectT1.java
{ "start": 72, "end": 995 }
class ____ { private int a; private int b; private int c; private int d; private int e; private int f; private int g; private int h; private long i; public int getA() { return a; } public void setA(int a) { this.a = a; } public int getB() { return b; } public void setB(int b) { this.b = b; } public int getC() { return c; } public void setC(int c) { this.c = c; } public int getD() { return d; } public void setD(int d) { this.d = d; } public int getE() { return e; } public void setE(int e) { this.e = e; } public int getF() { return f; } public void setF(int f) { this.f = f; } public int getG() { return g; } public void setG(int g) { this.g = g; } public int getH() { return h; } public void setH(int h) { this.h = h; } public long getI() { return i; } public void setI(long i) { this.i = i; } }
ObjectT1
java
apache__flink
flink-test-utils-parent/flink-connector-test-utils/src/main/java/org/apache/flink/connector/testframe/environment/TestEnvironmentSettings.java
{ "start": 1191, "end": 1950 }
class ____ { private final List<URL> connectorJarPaths; @Nullable private final String savepointRestorePath; public static Builder builder() { return new Builder(); } private TestEnvironmentSettings( List<URL> connectorJarPaths, @Nullable String savepointRestorePath) { this.connectorJarPaths = connectorJarPaths; this.savepointRestorePath = savepointRestorePath; } /** List of connector JARs paths. */ public List<URL> getConnectorJarPaths() { return connectorJarPaths; } /** Path of savepoint that the job should recover from. */ @Nullable public String getSavepointRestorePath() { return savepointRestorePath; } /** Builder
TestEnvironmentSettings
java
apache__hadoop
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/Csvout.java
{ "start": 967, "end": 1172 }
class ____ write out rows to a CSV/TSV file. * It does not do any escaping of written text, so don't write entries * containing separators. * Quoting must be done external to this class. */ public final
to
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValueTests.java
{ "start": 781, "end": 3268 }
class ____ extends InternalAggregationTestCase<InternalBucketMetricValue> { @Override protected InternalBucketMetricValue createTestInstance(String name, Map<String, Object> metadata) { double value = frequently() ? randomDoubleBetween(-10000, 100000, true) : randomFrom(new Double[] { Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NaN }); String[] keys = new String[randomIntBetween(0, 5)]; for (int i = 0; i < keys.length; i++) { keys[i] = randomAlphaOfLength(10); } return new InternalBucketMetricValue(name, keys, value, randomNumericDocValueFormat(), metadata); } @Override public void testReduceRandom() { expectThrows(UnsupportedOperationException.class, () -> createTestInstance("name", null).getReducer(null, 0)); } @Override protected void assertReduced(InternalBucketMetricValue reduced, List<InternalBucketMetricValue> inputs) { // no test since reduce operation is unsupported } @Override protected InternalBucketMetricValue mutateInstance(InternalBucketMetricValue instance) { String name = instance.getName(); String[] keys = instance.keys(); double value = instance.value(); DocValueFormat formatter = instance.formatter(); Map<String, Object> metadata = instance.getMetadata(); switch (between(0, 3)) { case 0: name += randomAlphaOfLength(5); break; case 1: if (Double.isFinite(value)) { value += between(1, 100); } else { value = randomDoubleBetween(0, 100000, true); } break; case 2: keys = Arrays.copyOf(keys, keys.length + 1); keys[keys.length - 1] = randomAlphaOfLengthBetween(1, 20); break; case 3: if (metadata == null) { metadata = Maps.newMapWithExpectedSize(1); } else { metadata = new HashMap<>(instance.getMetadata()); } metadata.put(randomAlphaOfLength(15), randomInt()); break; default: throw new AssertionError("Illegal randomisation branch"); } return new InternalBucketMetricValue(name, keys, value, formatter, metadata); } }
InternalBucketMetricValueTests
java
quarkusio__quarkus
extensions/devui/deployment/src/main/java/io/quarkus/vertx/http/deployment/devmode/ArcDevProcessor.java
{ "start": 1076, "end": 5314 }
class ____ { private static final String BEAN_DEPENDENCIES = "io.quarkus.arc.beanDependencies"; @Record(ExecutionTime.RUNTIME_INIT) @BuildStep(onlyIf = IsDevelopment.class) void registerRoutes(ArcConfig arcConfig, ArcDevRecorder recorder, BuildProducer<RouteBuildItem> routes, BuildProducer<NotFoundPageDisplayableEndpointBuildItem> displayableEndpoints, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, ValidationPhaseBuildItem validationPhase, BuildProducer<ValidationErrorBuildItem> errors) { List<BeanInfo> removed = new ArrayList<>(); Collection<InterceptorInfo> removedInterceptors = validationPhase.getContext() .get(BuildExtension.Key.REMOVED_INTERCEPTORS); if (removedInterceptors != null) { removed.addAll(removedInterceptors); } Collection<DecoratorInfo> removedDecorators = validationPhase.getContext().get(BuildExtension.Key.REMOVED_DECORATORS); if (removedDecorators != null) { removed.addAll(removedDecorators); } List<String[]> removedInterceptorsDecorators; if (removed.isEmpty()) { removedInterceptorsDecorators = Collections.emptyList(); } else { removedInterceptorsDecorators = new ArrayList<>(); for (BeanInfo r : removed) { removedInterceptorsDecorators.add(new String[] { r.isInterceptor() ? 
InjectableBean.Kind.INTERCEPTOR.toString() : InjectableBean.Kind.DECORATOR.toString(), r.getImplClazz().name().toString() }); } } String basePath = "arc"; String beansPath = basePath + "/beans"; String removedBeansPath = basePath + "/removed-beans"; String observersPath = basePath + "/observers"; routes.produce(nonApplicationRootPathBuildItem.routeBuilder() .route(basePath) .displayOnNotFoundPage("CDI Overview") .handler(recorder.createSummaryHandler(getConfigProperties(arcConfig), nonApplicationRootPathBuildItem.getNonApplicationRootPath(), removedInterceptorsDecorators.size())) .build()); routes.produce(nonApplicationRootPathBuildItem.routeBuilder() .route(beansPath) .displayOnNotFoundPage("Active CDI Beans") .handler(recorder.createBeansHandler(BEAN_DEPENDENCIES)).build()); routes.produce(nonApplicationRootPathBuildItem.routeBuilder() .route(removedBeansPath) .displayOnNotFoundPage("Removed CDI Beans") .handler(recorder.createRemovedBeansHandler(removedInterceptorsDecorators)).build()); routes.produce(nonApplicationRootPathBuildItem.routeBuilder() .route(observersPath) .displayOnNotFoundPage("Active CDI Observers") .handler(recorder.createObserversHandler()).build()); } // Note that we can't turn ArcConfig into BUILD_AND_RUN_TIME_FIXED because it's referencing IndexDependencyConfig // And we can't split the config due to compatibility reasons private Map<String, String> getConfigProperties(ArcConfig arcConfig) { Map<String, String> props = new HashMap<>(); props.put("quarkus.arc.remove-unused-beans", arcConfig.removeUnusedBeans()); props.put("quarkus.arc.unremovable-types", arcConfig.unremovableTypes().map(Object::toString).orElse("")); props.put("quarkus.arc.detect-unused-false-positives", "" + arcConfig.detectUnusedFalsePositives()); props.put("quarkus.arc.transform-unproxyable-classes", "" + arcConfig.transformUnproxyableClasses()); props.put("quarkus.arc.auto-inject-fields", "" + arcConfig.autoInjectFields()); props.put("quarkus.arc.auto-producer-methods", 
"" + arcConfig.autoProducerMethods()); props.put("quarkus.arc.selected-alternatives", "" + arcConfig.selectedAlternatives().map(Object::toString).orElse("")); props.put("quarkus.arc.exclude-types", "" + arcConfig.excludeTypes().map(Object::toString).orElse("")); return props; } }
ArcDevProcessor
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
{ "start": 88146, "end": 93403 }
class ____ extends BaseTransition { @Override public void transition(RMAppAttemptImpl appAttempt, RMAppAttemptEvent event) { if (appAttempt.targetedFinalState.equals(RMAppAttemptState.FAILED) || appAttempt.targetedFinalState.equals(RMAppAttemptState.KILLED)) { // ignore Container_Finished Event if we were supposed to reach // FAILED/KILLED state. return; } // pass in the earlier AMUnregistered Event also, as this is needed for // AMFinishedAfterFinalSavingTransition later on appAttempt.rememberTargetTransitions(event, new AMFinishedAfterFinalSavingTransition( appAttempt.eventCausingFinalSaving), RMAppAttemptState.FINISHED); } } @Override public long getStartTime() { this.readLock.lock(); try { return this.startTime; } finally { this.readLock.unlock(); } } @Override public RMAppAttemptState getState() { this.readLock.lock(); try { return this.stateMachine.getCurrentState(); } finally { this.readLock.unlock(); } } @Override public RMAppAttemptState getPreviousState() { this.readLock.lock(); try { return this.stateMachine.getPreviousState(); } finally { this.readLock.unlock(); } } @Override public YarnApplicationAttemptState createApplicationAttemptState() { return RMServerUtils.convertRmAppAttemptStateToYarnApplicationAttemptState( getState(), stateBeforeFinalSaving ); } private void launchAttempt(){ launchAMStartTime = System.currentTimeMillis(); // Send event to launch the AM Container eventHandler.handle(new AMLauncherEvent(AMLauncherEventType.LAUNCH, this)); } private void attemptLaunched() { // Register with AMLivelinessMonitor rmContext.getAMLivelinessMonitor().register(getAppAttemptId()); } private void storeAttempt() { // store attempt data in a non-blocking manner to prevent dispatcher // thread starvation and wait for state to be saved LOG.info("Storing attempt: AppId: " + getAppAttemptId().getApplicationId() + " AttemptId: " + getAppAttemptId() + " MasterContainer: " + masterContainer); rmContext.getStateStore().storeNewApplicationAttempt(this); } private 
void removeCredentials(RMAppAttemptImpl appAttempt) { // Unregister from the ClientToAMTokenSecretManager if (UserGroupInformation.isSecurityEnabled()) { appAttempt.rmContext.getClientToAMTokenSecretManager() .unRegisterApplication(appAttempt.getAppAttemptId()); } // Remove the AppAttempt from the AMRMTokenSecretManager appAttempt.rmContext.getAMRMTokenSecretManager() .applicationMasterFinished(appAttempt.getAppAttemptId()); } private static String sanitizeTrackingUrl(String url) { return (url == null || url.trim().isEmpty()) ? "N/A" : url; } @Override public ApplicationAttemptReport createApplicationAttemptReport() { this.readLock.lock(); ApplicationAttemptReport attemptReport = null; try { // AM container maybe not yet allocated. and also unmangedAM doesn't have // am container. ContainerId amId = masterContainer == null ? null : masterContainer.getId(); attemptReport = ApplicationAttemptReport.newInstance( this.getAppAttemptId(), this.getHost(), this.getRpcPort(), this.getTrackingUrl(), this.getOriginalTrackingUrl(), this.getDiagnostics(), createApplicationAttemptState(), amId, this.startTime, this.finishTime); } finally { this.readLock.unlock(); } return attemptReport; } @Override public RMAppAttemptMetrics getRMAppAttemptMetrics() { // didn't use read/write lock here because RMAppAttemptMetrics has its own // lock return attemptMetrics; } @Override public long getFinishTime() { this.readLock.lock(); try { return this.finishTime; } finally { this.readLock.unlock(); } } private void setFinishTime(long finishTime) { this.writeLock.lock(); try { this.finishTime = finishTime; } finally { this.writeLock.unlock(); } } @Override public void updateAMLaunchDiagnostics(String amLaunchDiagnostics) { this.amLaunchDiagnostics = amLaunchDiagnostics; } public RMAppAttemptState getRecoveredFinalState() { return recoveredFinalState; } public void setRecoveredFinalState(RMAppAttemptState finalState) { this.recoveredFinalState = finalState; } @Override public Set<String> 
getBlacklistedNodes() { if (scheduler instanceof AbstractYarnScheduler) { AbstractYarnScheduler ayScheduler = (AbstractYarnScheduler) scheduler; SchedulerApplicationAttempt attempt = ayScheduler.getApplicationAttempt(applicationAttemptId); if (attempt != null) { return attempt.getBlacklistedNodes(); } } return Collections.emptySet(); } protected void onInvalidTranstion(RMAppAttemptEventType rmAppAttemptEventType, RMAppAttemptState state){ /* TODO fail the application on the failed transition */ } }
AMExpiredAtFinalSavingTransition
java
apache__hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MapReduceTestUtil.java
{ "start": 8120, "end": 8393 }
class ____ implements WritableComparable<Object> { public void write(DataOutput out) { } public void readFields(DataInput in) { } public int compareTo(Object o) { throw new RuntimeException("Should never see this."); } } public static
IncomparableKey
java
apache__camel
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/dto/RecordTypeInfo.java
{ "start": 866, "end": 1932 }
class ____ extends AbstractDTOBase { private String name; private Boolean available; private String recordTypeId; private Boolean defaultRecordTypeMapping; private InfoUrls urls; public String getName() { return name; } public void setName(String name) { this.name = name; } public Boolean isAvailable() { return available; } public void setAvailable(Boolean available) { this.available = available; } public String getRecordTypeId() { return recordTypeId; } public void setRecordTypeId(String recordTypeId) { this.recordTypeId = recordTypeId; } public Boolean isDefaultRecordTypeMapping() { return defaultRecordTypeMapping; } public void setDefaultRecordTypeMapping(Boolean defaultRecordTypeMapping) { this.defaultRecordTypeMapping = defaultRecordTypeMapping; } public InfoUrls getUrls() { return urls; } public void setUrls(InfoUrls urls) { this.urls = urls; } }
RecordTypeInfo
java
apache__camel
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/StaticEndpointBuilders.java
{ "start": 385710, "end": 386610 }
interface ____ which to listen for Lumberjack * * Path parameter: port * Network port on which to listen for Lumberjack * Default value: 5044 * * @param componentName to use a custom component name for the endpoint * instead of the default name * @param path host:port * @return the dsl builder */ public static LumberjackEndpointBuilderFactory.LumberjackEndpointBuilder lumberjack(String componentName, String path) { return LumberjackEndpointBuilderFactory.endpointBuilder(componentName, path); } /** * MapStruct (camel-mapstruct) * Type Conversion using Mapstruct * * Category: transformation * Since: 3.19 * Maven coordinates: org.apache.camel:camel-mapstruct * * Syntax: <code>mapstruct:className</code> * * Path parameter: className (required) * The fully qualified
on
java
dropwizard__dropwizard
dropwizard-benchmarks/src/main/java/io/dropwizard/benchmarks/jersey/DropwizardResourceConfigBenchmark.java
{ "start": 4349, "end": 4923 }
class ____ { @POST public String insert(String cluster) { return "code"; } @GET @Path("{code}") public String get(@PathParam("code") String code) { return "cluster_by_code"; } @GET public List<String> getAll() { return Arrays.asList("first_cluster", "second_cluster", "third_cluster"); } @DELETE @Path("{code}") public void delete(@PathParam("code") String code) { // stub implementation } } }
ClustersResource
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/nullness/UnnecessaryCheckNotNullTest.java
{ "start": 20126, "end": 21587 }
class ____ { private final ImmutableMap.Builder<String, Match> builder = ImmutableMap.builder(); @CanIgnoreReturnValue Builder add(String expression, String... expected) { builder.put(expression, new Match(expected)); return this; } TestScanner build() { return new TestScanner(builder.buildOrThrow()); } } private final ImmutableMap<String, Match> matches; private TestScanner(ImmutableMap<String, Match> matches) { this.matches = matches; } @Override public Void visitExpressionStatement(ExpressionStatementTree node, VisitorState state) { ExpressionTree expression = node.getExpression(); Match match = matches.get(expression.toString()); if (match != null) { assertMatch(expression, match.expected); match.found = true; } return super.visitExpressionStatement(node, state); } private static void assertMatch(ExpressionTree node, List<String> expected) { List<IdentifierTree> uses = UnnecessaryCheckNotNull.getVariableUses(node); assertWithMessage("variables used in %s", node) .that(Lists.transform(uses, Functions.toStringFunction())) .isEqualTo(expected); } void assertFoundAll() { for (Map.Entry<String, Match> entry : matches.entrySet()) { assertWithMessage("found %s", entry.getKey()).that(entry.getValue().found).isTrue(); } } } }
Builder
java
spring-projects__spring-security
ldap/src/main/java/org/springframework/security/ldap/jackson/LdapAuthorityMixin.java
{ "start": 1436, "end": 1641 }
class ____ { @JsonCreator LdapAuthorityMixin(@JsonProperty("role") String role, @JsonProperty("dn") String dn, @JsonProperty("attributes") Map<String, List<String>> attributes) { } }
LdapAuthorityMixin
java
spring-projects__spring-boot
smoke-test/spring-boot-smoke-test-actuator-extension/src/main/java/smoketest/actuator/extension/MyExtensionWebMvcEndpointHandlerMapping.java
{ "start": 1478, "end": 2312 }
class ____ extends AbstractWebMvcEndpointHandlerMapping { private static final String PATH = "/myextension"; private final EndpointLinksResolver linksResolver; MyExtensionWebMvcEndpointHandlerMapping(Collection<ExposableWebEndpoint> endpoints, EndpointMediaTypes endpointMediaTypes, @Nullable CorsConfiguration corsConfiguration) { super(new EndpointMapping(PATH), endpoints, endpointMediaTypes, corsConfiguration, true); this.linksResolver = new EndpointLinksResolver(endpoints, PATH); setOrder(-100); } @Override protected LinksHandler getLinksHandler() { return new WebMvcLinksHandler(); } @Override protected void extendInterceptors(List<Object> interceptors) { super.extendInterceptors(interceptors); interceptors.add(0, new MyExtensionSecurityInterceptor()); }
MyExtensionWebMvcEndpointHandlerMapping
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageActionTests.java
{ "start": 18904, "end": 20878 }
class ____ extends TransportService { private final Queue<CapturingRequest> capturedRequests = ConcurrentCollections.newQueue(); private final Consumer<CapturingRequest> onRequestSent; TestTransportService(ThreadPool threadPool, Consumer<CapturingRequest> onRequestSent) { super( Settings.EMPTY, new MockTransport(), threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, addr -> DiscoveryNodeUtils.builder("node_0").roles(emptySet()).build(), null, Collections.emptySet() ); this.onRequestSent = onRequestSent; } @Override @SuppressWarnings("unchecked") public <T extends TransportResponse> void sendRequest( DiscoveryNode node, String action, TransportRequest request, TransportResponseHandler<T> handler ) { CapturingRequest capturingRequest = new CapturingRequest( node, (AnalyzeDiskUsageShardRequest) request, (TransportResponseHandler<AnalyzeDiskUsageShardResponse>) handler ); capturedRequests.add(capturingRequest); onRequestSent.accept(capturingRequest); } List<CapturingRequest> getCapturedRequests(boolean clear) { final List<CapturingRequest> requests = new ArrayList<>(capturedRequests); if (clear) { capturedRequests.clear(); } return requests; } Map<DiscoveryNode, Integer> getRequestsSentPerNode() { Map<DiscoveryNode, Integer> sentRequests = new HashMap<>(); for (CapturingRequest r : getCapturedRequests(false)) { sentRequests.compute(r.node, (k, v) -> v == null ? 1 : v + 1); } return sentRequests; } } }
TestTransportService
java
junit-team__junit5
platform-tests/src/test/java/org/junit/platform/commons/util/AnnotationUtilsTests.java
{ "start": 26738, "end": 26879 }
class ____ extends NonInheritedCompositionOfInheritedAnnotationClass { } @Annotation1
SubNonInheritedCompositionOfInheritedAnnotationClass
java
apache__dubbo
dubbo-common/src/main/java/org/apache/dubbo/common/utils/Stack.java
{ "start": 957, "end": 3261 }
class ____<E> { private int mSize = 0; private final List<E> mElements = new ArrayList<>(); public Stack() {} /** * push. * * @param ele */ public void push(E ele) { if (mElements.size() > mSize) { mElements.set(mSize, ele); } else { mElements.add(ele); } mSize++; } /** * pop. * * @return the last element. */ public E pop() { if (mSize == 0) { throw new EmptyStackException(); } return mElements.set(--mSize, null); } /** * peek. * * @return the last element. */ public E peek() { if (mSize == 0) { throw new EmptyStackException(); } return mElements.get(mSize - 1); } /** * get. * * @param index index. * @return element. */ public E get(int index) { if (index >= mSize || index + mSize < 0) { throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + mSize); } return index < 0 ? mElements.get(index + mSize) : mElements.get(index); } /** * set. * * @param index index. * @param value element. * @return old element. */ public E set(int index, E value) { if (index >= mSize || index + mSize < 0) { throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + mSize); } return mElements.set(index < 0 ? index + mSize : index, value); } /** * remove. * * @param index * @return element */ public E remove(int index) { if (index >= mSize || index + mSize < 0) { throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + mSize); } E ret = mElements.remove(index < 0 ? index + mSize : index); mSize--; return ret; } /** * get stack size. * * @return size. */ public int size() { return mSize; } /** * is empty. * * @return empty or not. */ public boolean isEmpty() { return mSize == 0; } /** * clear stack. */ public void clear() { mSize = 0; mElements.clear(); } }
Stack
java
apache__commons-lang
src/test/java/org/apache/commons/lang3/function/FailableTest.java
{ "start": 98204, "end": 98731 }
interface ____ properly defined to throw any exception using the top level generic types * Object and Throwable. */ @Test void testThrows_FailableLongToDoubleFunction_Throwable() { assertThrows(IOException.class, () -> new FailableLongToDoubleFunction<Throwable>() { @Override public double applyAsDouble(final long value) throws Throwable { throw new IOException("test"); } }.applyAsDouble(0)); } /** * Tests that our failable
is
java
elastic__elasticsearch
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java
{ "start": 104657, "end": 108284 }
class ____ { final String principal; final String realm; final String action; final Supplier<Stream<String>> roles; final Supplier<Stream<String>> indices; // empty is used for events can be filtered out only by the lack of a field static final AuditEventMetaInfo EMPTY = new AuditEventMetaInfo( Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty() ); /** * If a field is missing for an event, its value for filtering purposes is the * empty string or a singleton stream of the empty string. This a allows a * policy to filter by the missing value using the empty string, ie * `ignore_filters.users: ["", "elastic"]` will filter events with a missing * user field (such as `anonymous_access_denied`) as well as events from the * "elastic" username. */ AuditEventMetaInfo( Optional<User> user, Optional<String> realm, Optional<AuthorizationInfo> authorizationInfo, Optional<String[]> indices, Optional<String> action ) { this.principal = user.map(u -> u.principal()).orElse(""); this.realm = realm.orElse(""); this.action = action.orElse(""); // Supplier indirection and lazy generation of Streams serves 2 purposes: // 1. streams might not get generated due to short circuiting logical // conditions on the `principal` and `realm` fields // 2. 
reusability of the AuditEventMetaInfo instance: in this case Streams have // to be regenerated as they cannot be operated upon twice this.roles = () -> authorizationInfo.filter(info -> { final Object value = info.asMap().get("user.roles"); return value instanceof String[] && ((String[]) value).length != 0 && Arrays.stream((String[]) value).anyMatch(Objects::nonNull); }).map(info -> Arrays.stream((String[]) info.asMap().get("user.roles"))).orElse(Stream.of("")); this.indices = () -> indices.filter(i -> i.length > 0) .filter(a -> Arrays.stream(a).anyMatch(Objects::nonNull)) .map(Arrays::stream) .orElse(Stream.of("")); } AuditEventMetaInfo( Optional<AuthenticationToken> authenticationToken, Optional<String> realm, Optional<String[]> indices, Optional<String> action ) { this.principal = authenticationToken.map(u -> u.principal()).orElse(""); this.realm = realm.orElse(""); this.action = action.orElse(""); this.roles = () -> Stream.of(""); this.indices = () -> indices.filter(r -> r.length != 0).map(i -> Arrays.stream(i)).orElse(Stream.of("")); } } @Override public void clusterChanged(ClusterChangedEvent event) { updateLocalNodeInfo(event.state().getNodes().getLocalNode()); } void updateLocalNodeInfo(DiscoveryNode newLocalNode) { // check if local node changed final EntryCommonFields localNodeInfo = this.entryCommonFields; if (localNodeInfo.localNode == null || localNodeInfo.localNode.equals(newLocalNode) == false) { // no need to synchronize, called only from the cluster state applier thread this.entryCommonFields = this.entryCommonFields.withNewLocalNode(newLocalNode); } } static
AuditEventMetaInfo
java
micronaut-projects__micronaut-core
benchmarks/src/jmh/java/io/micronaut/supplier/SupplierBenchmark.java
{ "start": 465, "end": 2833 }
class ____ { private Supplier<String> memoizedLambda = memoizedUsingLambda(() -> "test"); private Supplier<String> memoizedNonEmptyUsingLambda = memoizedNonEmptyUsingLambda(() -> "test"); private Supplier<String> memoized = SupplierUtil.memoized(() -> "test"); private Supplier<String> memoizedNonEmpty = SupplierUtil.memoizedNonEmpty(() -> "test"); @Benchmark public void memoizedLambda(Blackhole blackhole) { blackhole.consume(memoizedLambda.get()); } @Benchmark public void memoizedNonEmptyUsingLambda(Blackhole blackhole) { blackhole.consume(memoizedNonEmptyUsingLambda.get()); } @Benchmark public void memoized(Blackhole blackhole) { blackhole.consume(memoized.get()); } @Benchmark public void memoizedNonEmpty(Blackhole blackhole) { blackhole.consume(memoizedNonEmpty.get()); } private static <T> Supplier<T> memoizedUsingLambda(Supplier<T> actual) { return new Supplier<>() { Supplier<T> delegate = this::initialize; boolean initialized; @Override public T get() { return delegate.get(); } private synchronized T initialize() { if (!initialized) { T value = actual.get(); delegate = () -> value; initialized = true; } return delegate.get(); } }; } private static <T> Supplier<T> memoizedNonEmptyUsingLambda(Supplier<T> actual) { return new Supplier<>() { Supplier<T> delegate = this::initialize; boolean initialized; @Override public T get() { return delegate.get(); } private synchronized T initialize() { if (!initialized) { T value = actual.get(); if (value == null) { return null; } if (value instanceof Optional optional && !optional.isPresent()) { return value; } delegate = () -> value; initialized = true; } return delegate.get(); } }; } }
SupplierBenchmark
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/MemoryMappedBoundedDataTest.java
{ "start": 1016, "end": 1560 }
class ____ extends BoundedDataTestBase { @Override protected boolean isRegionBased() { return true; } @Override protected BoundedData createBoundedData(Path tempFilePath) throws IOException { return MemoryMappedBoundedData.create(tempFilePath); } @Override protected BoundedData createBoundedDataWithRegion(Path tempFilePath, int regionSize) throws IOException { return MemoryMappedBoundedData.createWithRegionSize(tempFilePath, regionSize); } }
MemoryMappedBoundedDataTest
java
quarkusio__quarkus
extensions/hibernate-search-backend-elasticsearch-common/runtime/src/main/java/io/quarkus/hibernate/search/backend/elasticsearch/common/runtime/HibernateSearchBackendElasticsearchRuntimeConfig.java
{ "start": 3012, "end": 4189 }
enum ____ { /** * Use clear-text HTTP, with SSL/TLS disabled. */ HTTP("http"), /** * Use HTTPS, with SSL/TLS enabled. */ HTTPS("https"); public static ElasticsearchClientProtocol of(String value) { return ParseUtils.parseDiscreteValues( values(), ElasticsearchClientProtocol::getHibernateSearchString, (invalidValue, validValues) -> new SearchException( String.format( Locale.ROOT, "Invalid protocol: '%1$s'. Valid protocols are: %2$s.", invalidValue, validValues)), value); } private final String hibernateSearchString; ElasticsearchClientProtocol(String hibernateSearchString) { this.hibernateSearchString = hibernateSearchString; } public String getHibernateSearchString() { return hibernateSearchString; } } @ConfigGroup
ElasticsearchClientProtocol
java
apache__kafka
server-common/src/main/java/org/apache/kafka/server/util/Scheduler.java
{ "start": 1129, "end": 2299 }
interface ____ { /** * Initialize this scheduler, so it is ready to accept scheduling of tasks */ void startup(); /** * Shutdown this scheduler. When this method is complete no more executions of background tasks will occur. * This includes tasks scheduled with a delayed execution. */ void shutdown() throws InterruptedException; default ScheduledFuture<?> scheduleOnce(String name, Runnable task) { return scheduleOnce(name, task, 0L); } default ScheduledFuture<?> scheduleOnce(String name, Runnable task, long delayMs) { return schedule(name, task, delayMs, -1); } /** * Schedule a task. * @param name The name of this task * @param task The task to run * @param delayMs The number of milliseconds to wait before the first execution * @param periodMs The period in milliseconds with which to execute the task. If &lt; 0 the task will execute only once. * @return A Future object to manage the task scheduled. */ ScheduledFuture<?> schedule(String name, Runnable task, long delayMs, long periodMs); void resizeThreadPool(int newSize); }
Scheduler
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/DefaultOperatorCoordinatorHandlerTest.java
{ "start": 2393, "end": 5151 }
class ____ { @RegisterExtension private static final TestExecutorExtension<ScheduledExecutorService> EXECUTOR_EXTENSION = TestingUtils.defaultExecutorExtension(); @Test void testRegisterAndStartNewCoordinators() throws Exception { final JobVertex[] jobVertices = createJobVertices(BLOCKING); OperatorID operatorId1 = OperatorID.fromJobVertexID(jobVertices[0].getID()); OperatorID operatorId2 = OperatorID.fromJobVertexID(jobVertices[1].getID()); ExecutionGraph executionGraph = createDynamicGraph(jobVertices); ExecutionJobVertex ejv1 = executionGraph.getJobVertex(jobVertices[0].getID()); ExecutionJobVertex ejv2 = executionGraph.getJobVertex(jobVertices[1].getID()); executionGraph.start(ComponentMainThreadExecutorServiceAdapter.forMainThread()); executionGraph.initializeJobVertex(ejv1, 0L); DefaultOperatorCoordinatorHandler handler = new DefaultOperatorCoordinatorHandler(executionGraph, throwable -> {}); assertThat(handler.getCoordinatorMap().keySet()).contains(operatorId1); executionGraph.initializeJobVertex(ejv2, 0L); handler.registerAndStartNewCoordinators( ejv2.getOperatorCoordinators(), executionGraph.getJobMasterMainThreadExecutor(), ejv2.getParallelism()); assertThat(handler.getCoordinatorMap().keySet()).contains(operatorId1, operatorId2); } private JobVertex[] createJobVertices(ResultPartitionType resultPartitionType) throws IOException { final JobVertex[] jobVertices = new JobVertex[2]; final int parallelism = 3; jobVertices[0] = createNoOpVertex(parallelism); jobVertices[1] = createNoOpVertex(parallelism); connectNewDataSetAsInput(jobVertices[1], jobVertices[0], ALL_TO_ALL, resultPartitionType); jobVertices[0].addOperatorCoordinator( new SerializedValue<>( new TestingOperatorCoordinator.Provider( OperatorID.fromJobVertexID(jobVertices[0].getID())))); jobVertices[1].addOperatorCoordinator( new SerializedValue<>( new TestingOperatorCoordinator.Provider( OperatorID.fromJobVertexID(jobVertices[1].getID())))); return jobVertices; } private DefaultExecutionGraph 
createDynamicGraph(JobVertex... jobVertices) throws Exception { return TestingDefaultExecutionGraphBuilder.newBuilder() .setJobGraph(new JobGraph(new JobID(), "TestJob", jobVertices)) .buildDynamicGraph(EXECUTOR_EXTENSION.getExecutor()); } }
DefaultOperatorCoordinatorHandlerTest
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInotifyEventInputStream.java
{ "start": 1694, "end": 8924 }
class ____ { public static final Logger LOG = LoggerFactory.getLogger( DFSInotifyEventInputStream.class); private final ClientProtocol namenode; private Iterator<EventBatch> it; private long lastReadTxid; /** * The most recent txid the NameNode told us it has sync'ed -- helps us * determine how far behind we are in the edit stream. */ private long syncTxid; /** * Used to generate wait times in {@link DFSInotifyEventInputStream#take()}. */ private Random rng = new Random(); private final Tracer tracer; private static final int INITIAL_WAIT_MS = 10; DFSInotifyEventInputStream(ClientProtocol namenode, Tracer tracer) throws IOException { // Only consider new transaction IDs. this(namenode, tracer, namenode.getCurrentEditLogTxid()); } DFSInotifyEventInputStream(ClientProtocol namenode, Tracer tracer, long lastReadTxid) { this.namenode = namenode; this.it = Collections.emptyIterator(); this.lastReadTxid = lastReadTxid; this.tracer = tracer; } /** * Returns the next batch of events in the stream or null if no new * batches are currently available. * * @throws IOException because of network error or edit log * corruption. Also possible if JournalNodes are unresponsive in the * QJM setting (even one unresponsive JournalNode is enough in rare cases), * so catching this exception and retrying at least a few times is * recommended. * @throws MissingEventsException if we cannot return the next batch in the * stream because the data for the events (and possibly some subsequent * events) has been deleted (generally because this stream is a very large * number of transactions behind the current state of the NameNode). It is * safe to continue reading from the stream after this exception is thrown * The next available batch of events will be returned. 
*/ public EventBatch poll() throws IOException, MissingEventsException { try (TraceScope ignored = tracer.newScope("inotifyPoll")) { // need to keep retrying until the NN sends us the latest committed txid if (lastReadTxid == -1) { LOG.debug("poll(): lastReadTxid is -1, reading current txid from NN"); lastReadTxid = namenode.getCurrentEditLogTxid(); return null; } if (!it.hasNext()) { EventBatchList el = namenode.getEditsFromTxid(lastReadTxid + 1); if (el.getLastTxid() != -1) { // we only want to set syncTxid when we were actually able to read some // edits on the NN -- otherwise it will seem like edits are being // generated faster than we can read them when the problem is really // that we are temporarily unable to read edits syncTxid = el.getSyncTxid(); it = el.getBatches().iterator(); long formerLastReadTxid = lastReadTxid; lastReadTxid = el.getLastTxid(); if (el.getFirstTxid() != formerLastReadTxid + 1) { throw new MissingEventsException(formerLastReadTxid + 1, el.getFirstTxid()); } } else { LOG.debug("poll(): read no edits from the NN when requesting edits " + "after txid {}", lastReadTxid); return null; } } if (it.hasNext()) { // can be empty if el.getLastTxid != -1 but none of the // newly seen edit log ops actually got converted to events return it.next(); } else { return null; } } } /** * Return a estimate of how many transaction IDs behind the NameNode's * current state this stream is. Clients should periodically call this method * and check if its result is steadily increasing, which indicates that they * are falling behind (i.e. transaction are being generated faster than the * client is reading them). If a client falls too far behind events may be * deleted before the client can read them. * <p> * A return value of -1 indicates that an estimate could not be produced, and * should be ignored. The value returned by this method is really only useful * when compared to previous or subsequent returned values. 
*/ public long getTxidsBehindEstimate() { if (syncTxid == 0) { return -1; } else { assert syncTxid >= lastReadTxid; // this gives the difference between the last txid we have fetched to the // client and syncTxid at the time we last fetched events from the // NameNode return syncTxid - lastReadTxid; } } /** * Returns the next event batch in the stream, waiting up to the specified * amount of time for a new batch. Returns null if one is not available at the * end of the specified amount of time. The time before the method returns may * exceed the specified amount of time by up to the time required for an RPC * to the NameNode. * * @param time number of units of the given TimeUnit to wait * @param tu the desired TimeUnit * @throws IOException see {@link DFSInotifyEventInputStream#poll()} * @throws MissingEventsException * see {@link DFSInotifyEventInputStream#poll()} * @throws InterruptedException if the calling thread is interrupted */ public EventBatch poll(long time, TimeUnit tu) throws IOException, InterruptedException, MissingEventsException { EventBatch next; try (TraceScope ignored = tracer.newScope("inotifyPollWithTimeout")) { long initialTime = Time.monotonicNow(); long totalWait = TimeUnit.MILLISECONDS.convert(time, tu); long nextWait = INITIAL_WAIT_MS; while ((next = poll()) == null) { long timeLeft = totalWait - (Time.monotonicNow() - initialTime); if (timeLeft <= 0) { LOG.debug("timed poll(): timed out"); break; } else if (timeLeft < nextWait * 2) { nextWait = timeLeft; } else { nextWait *= 2; } LOG.debug("timed poll(): poll() returned null, sleeping for {} ms", nextWait); Thread.sleep(nextWait); } } return next; } /** * Returns the next batch of events in the stream, waiting indefinitely if * a new batch is not immediately available. 
* * @throws IOException see {@link DFSInotifyEventInputStream#poll()} * @throws MissingEventsException see * {@link DFSInotifyEventInputStream#poll()} * @throws InterruptedException if the calling thread is interrupted */ public EventBatch take() throws IOException, InterruptedException, MissingEventsException { EventBatch next; try (TraceScope ignored = tracer.newScope("inotifyTake")) { int nextWaitMin = INITIAL_WAIT_MS; while ((next = poll()) == null) { // sleep for a random period between nextWaitMin and nextWaitMin * 2 // to avoid stampedes at the NN if there are multiple clients int sleepTime = nextWaitMin + rng.nextInt(nextWaitMin); LOG.debug("take(): poll() returned null, sleeping for {} ms", sleepTime); Thread.sleep(sleepTime); // the maximum sleep is 2 minutes nextWaitMin = Math.min(60000, nextWaitMin * 2); } } return next; } }
DFSInotifyEventInputStream
java
apache__hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/RouletteSelector.java
{ "start": 1435, "end": 2164 }
class ____ { private Random picker; RouletteSelector(Random rnd) { picker = rnd; } Operation select(List<OperationWeight> ops) { if (ops.isEmpty()) { return null; } double totalWeight = 0; for (OperationWeight w : ops) { if (w.getWeight() < 0) { throw new IllegalArgumentException("Negative weights not allowed"); } totalWeight += w.getWeight(); } // roulette wheel selection double sAm = picker.nextDouble() * totalWeight; int index = 0; for (int i = 0; i < ops.size(); ++i) { sAm -= ops.get(i).getWeight(); if (sAm <= 0) { index = i; break; } } return ops.get(index).getOperation(); } }
RouletteSelector
java
apache__camel
components/camel-graphql/src/generated/java/org/apache/camel/component/graphql/GraphqlEndpointUriFactory.java
{ "start": 517, "end": 2797 }
class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory { private static final String BASE = ":httpUri"; private static final Set<String> PROPERTY_NAMES; private static final Set<String> SECRET_PROPERTY_NAMES; private static final Map<String, String> MULTI_VALUE_PREFIXES; static { Set<String> props = new HashSet<>(16); props.add("accessToken"); props.add("headerFilterStrategy"); props.add("httpClient"); props.add("httpUri"); props.add("jwtAuthorizationType"); props.add("lazyStartProducer"); props.add("operationName"); props.add("password"); props.add("proxyHost"); props.add("query"); props.add("queryFile"); props.add("queryHeader"); props.add("throwExceptionOnFailure"); props.add("username"); props.add("variables"); props.add("variablesHeader"); PROPERTY_NAMES = Collections.unmodifiableSet(props); Set<String> secretProps = new HashSet<>(3); secretProps.add("accessToken"); secretProps.add("password"); secretProps.add("username"); SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps); MULTI_VALUE_PREFIXES = Collections.emptyMap(); } @Override public boolean isEnabled(String scheme) { return "graphql".equals(scheme); } @Override public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException { String syntax = scheme + BASE; String uri = syntax; Map<String, Object> copy = new HashMap<>(properties); uri = buildPathParameter(syntax, uri, "httpUri", null, true, copy); uri = buildQueryParameters(uri, copy, encode); return uri; } @Override public Set<String> propertyNames() { return PROPERTY_NAMES; } @Override public Set<String> secretPropertyNames() { return SECRET_PROPERTY_NAMES; } @Override public Map<String, String> multiValuePrefixes() { return MULTI_VALUE_PREFIXES; } @Override public boolean isLenientProperties() { return true; } }
GraphqlEndpointUriFactory
java
alibaba__druid
core/src/test/java/com/alibaba/druid/pvt/pool/Large10KTest.java
{ "start": 335, "end": 2251 }
class ____ extends TestCase { private DruidDataSource[] dataSources; private ScheduledExecutorService scheduler; protected void setUp() throws Exception { long xmx = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax() / (1000 * 1000); // m final int dataSourceCount; if (xmx <= 256) { dataSourceCount = 1024 * 1; } else if (xmx <= 512) { dataSourceCount = 1024 * 2; } else if (xmx <= 1024) { dataSourceCount = 1024 * 4; } else if (xmx <= 2048) { dataSourceCount = 1024 * 8; } else { dataSourceCount = 1024 * 16; } dataSources = new DruidDataSource[dataSourceCount]; scheduler = Executors.newScheduledThreadPool(10); for (int i = 0; i < dataSources.length; ++i) { DruidDataSource dataSource = new DruidDataSource(); dataSource.setUrl("jdbc:mock:xxx"); dataSource.setCreateScheduler(scheduler); dataSource.setDestroyScheduler(scheduler); dataSource.setTestOnBorrow(false); dataSource.setTestWhileIdle(false); dataSources[i] = dataSource; } } protected void tearDown() throws Exception { for (int i = 0; i < dataSources.length; ++i) { JdbcUtils.close(dataSources[i]); } scheduler.shutdown(); } public void test_large() throws Exception { Connection[] connections = new Connection[dataSources.length * 8]; for (int i = 0; i < dataSources.length; ++i) { for (int j = 0; j < 8; ++j) { connections[i * 8 + j] = dataSources[i].getConnection(); } } for (int i = 0; i < dataSources.length; ++i) { for (int j = 0; j < 8; ++j) { connections[i * 8 + j].close(); } } } }
Large10KTest
java
assertj__assertj-core
assertj-core/src/main/java/org/assertj/core/description/EmptyTextDescription.java
{ "start": 759, "end": 1018 }
class ____ extends TextDescription { private static final Description INSTANCE = new EmptyTextDescription(); public static Description emptyDescription() { return INSTANCE; } private EmptyTextDescription() { super(""); } }
EmptyTextDescription
java
google__truth
core/src/main/java/com/google/common/truth/Correspondence.java
{ "start": 3625, "end": 4825 }
class ____<A extends @Nullable Object, E extends @Nullable Object> { /** * Constructs a {@link Correspondence} that compares actual and expected elements using the given * binary predicate. * * <p>The correspondence does not support formatting of diffs (see {@link #formatDiff}). You can * add that behaviour by calling {@link Correspondence#formattingDiffsUsing}. * * <p>Note that, if the data you are asserting about contains nulls, your predicate may be invoked * with null arguments. If this causes it to throw a {@link NullPointerException}, then your test * will fail. (See {@link Correspondence#compare} for more detail on how exceptions are handled.) * In particular, if your predicate is an instance method reference on the actual value (as in the * {@code String::contains} example below), your test will fail if it sees null actual values. * * <p>Example using an instance method reference: * * <pre>{@code * static final Correspondence<String, String> CONTAINS_SUBSTRING = * Correspondence.from(String::contains, "contains"); * }</pre> * * <p>Example using a static method reference: * * <pre>{@code *
Correspondence
java
assertj__assertj-core
assertj-core/src/main/java/org/assertj/core/error/ShouldBeRegularFile.java
{ "start": 817, "end": 1210 }
class ____ extends BasicErrorMessageFactory { private static final String SHOULD_BE_REGULAR_FILE = "%nExpecting path:%n %s%nto be a regular file."; public static ErrorMessageFactory shouldBeRegularFile(final Path actual) { return new ShouldBeRegularFile(actual); } private ShouldBeRegularFile(final Path actual) { super(SHOULD_BE_REGULAR_FILE, actual); } }
ShouldBeRegularFile
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/template/erroneous/SourceTargetMapperAmbiguous2.java
{ "start": 547, "end": 1667 }
interface ____ { SourceTargetMapperAmbiguous2 INSTANCE = Mappers.getMapper( SourceTargetMapperAmbiguous2.class ); @Mappings({ @Mapping(target = "stringPropY", source = "stringPropX" ), @Mapping(target = "integerPropY", source = "integerPropX" ), @Mapping(target = "nestedResultProp", source = "nestedSourceProp.nested"), @Mapping(target = "constantProp", constant = "constant"), @Mapping(target = "expressionProp", expression = "java(\"expression\")") }) Target forwardCreate(Source source); @Mappings({ @Mapping(target = "stringPropY", source = "stringPropX" ), @Mapping(target = "integerPropY", source = "integerPropX" ), @Mapping(target = "nestedResultProp", source = "nestedSourceProp.nested"), @Mapping(target = "constantProp", constant = "constant"), @Mapping(target = "expressionProp", expression = "java(\"expression\")") }) Target forwardCreate1(Source source); @InheritConfiguration( name = "blah" ) void forwardUpdate(Source source, @MappingTarget Target target); }
SourceTargetMapperAmbiguous2
java
eclipse-vertx__vert.x
vertx-core/src/main/generated/io/vertx/core/net/NetClientOptionsConverter.java
{ "start": 333, "end": 2561 }
class ____ { static void fromJson(Iterable<java.util.Map.Entry<String, Object>> json, NetClientOptions obj) { for (java.util.Map.Entry<String, Object> member : json) { switch (member.getKey()) { case "reconnectAttempts": if (member.getValue() instanceof Number) { obj.setReconnectAttempts(((Number)member.getValue()).intValue()); } break; case "reconnectInterval": if (member.getValue() instanceof Number) { obj.setReconnectInterval(((Number)member.getValue()).longValue()); } break; case "hostnameVerificationAlgorithm": if (member.getValue() instanceof String) { obj.setHostnameVerificationAlgorithm((String)member.getValue()); } break; case "applicationLayerProtocols": if (member.getValue() instanceof JsonArray) { java.util.ArrayList<java.lang.String> list = new java.util.ArrayList<>(); ((Iterable<Object>)member.getValue()).forEach( item -> { if (item instanceof String) list.add((String)item); }); obj.setApplicationLayerProtocols(list); } break; case "registerWriteHandler": if (member.getValue() instanceof Boolean) { obj.setRegisterWriteHandler((Boolean)member.getValue()); } break; } } } static void toJson(NetClientOptions obj, JsonObject json) { toJson(obj, json.getMap()); } static void toJson(NetClientOptions obj, java.util.Map<String, Object> json) { json.put("reconnectAttempts", obj.getReconnectAttempts()); json.put("reconnectInterval", obj.getReconnectInterval()); if (obj.getHostnameVerificationAlgorithm() != null) { json.put("hostnameVerificationAlgorithm", obj.getHostnameVerificationAlgorithm()); } if (obj.getApplicationLayerProtocols() != null) { JsonArray array = new JsonArray(); obj.getApplicationLayerProtocols().forEach(item -> array.add(item)); json.put("applicationLayerProtocols", array); } json.put("registerWriteHandler", obj.isRegisterWriteHandler()); } }
NetClientOptionsConverter
java
apache__camel
components/camel-ai/camel-docling/src/test/java/org/apache/camel/component/docling/BatchProcessingTest.java
{ "start": 1198, "end": 6555 }
class ____ extends CamelTestSupport { @Test public void testBatchProcessingResultsCreation() { BatchProcessingResults results = new BatchProcessingResults(); BatchConversionResult result1 = new BatchConversionResult("doc-1", "/path/doc1.pdf"); result1.setSuccess(true); result1.setResult("Converted content 1"); result1.setProcessingTimeMs(1000); BatchConversionResult result2 = new BatchConversionResult("doc-2", "/path/doc2.pdf"); result2.setSuccess(true); result2.setResult("Converted content 2"); result2.setProcessingTimeMs(1500); results.addResult(result1); results.addResult(result2); assertEquals(2, results.getTotalDocuments()); assertEquals(2, results.getSuccessCount()); assertEquals(0, results.getFailureCount()); assertTrue(results.isAllSuccessful()); assertEquals(100.0, results.getSuccessRate()); } @Test public void testBatchProcessingWithFailures() { BatchProcessingResults results = new BatchProcessingResults(); BatchConversionResult result1 = new BatchConversionResult("doc-1", "/path/doc1.pdf"); result1.setSuccess(true); result1.setResult("Converted content 1"); BatchConversionResult result2 = new BatchConversionResult("doc-2", "/path/doc2.pdf"); result2.setSuccess(false); result2.setErrorMessage("File not found"); BatchConversionResult result3 = new BatchConversionResult("doc-3", "/path/doc3.pdf"); result3.setSuccess(true); result3.setResult("Converted content 3"); results.addResult(result1); results.addResult(result2); results.addResult(result3); assertEquals(3, results.getTotalDocuments()); assertEquals(2, results.getSuccessCount()); assertEquals(1, results.getFailureCount()); assertTrue(results.hasAnySuccessful()); assertTrue(results.hasAnyFailures()); assertEquals(66.67, results.getSuccessRate(), 0.01); List<BatchConversionResult> successful = results.getSuccessful(); assertEquals(2, successful.size()); List<BatchConversionResult> failed = results.getFailed(); assertEquals(1, failed.size()); assertEquals("doc-2", failed.get(0).getDocumentId()); } @Test 
public void testBatchConversionResultProperties() { BatchConversionResult result = new BatchConversionResult("test-doc", "/path/test.pdf"); result.setSuccess(true); result.setResult("Converted content"); result.setProcessingTimeMs(2000); result.setBatchIndex(5); assertEquals("test-doc", result.getDocumentId()); assertEquals("/path/test.pdf", result.getOriginalPath()); assertTrue(result.isSuccess()); assertEquals("Converted content", result.getResult()); assertEquals(2000, result.getProcessingTimeMs()); assertEquals(5, result.getBatchIndex()); } @Test public void testBatchOperationsEnumExists() { // Verify all batch operations are defined assertNotNull(DoclingOperations.BATCH_CONVERT_TO_MARKDOWN); assertNotNull(DoclingOperations.BATCH_CONVERT_TO_HTML); assertNotNull(DoclingOperations.BATCH_CONVERT_TO_JSON); assertNotNull(DoclingOperations.BATCH_EXTRACT_TEXT); assertNotNull(DoclingOperations.BATCH_EXTRACT_STRUCTURED_DATA); } @Test public void testBatchConfigurationDefaults() { DoclingConfiguration config = new DoclingConfiguration(); assertEquals(10, config.getBatchSize()); assertEquals(4, config.getBatchParallelism()); assertEquals(300000, config.getBatchTimeout()); assertTrue(config.isBatchFailOnFirstError()); assertEquals(false, config.isSplitBatchResults()); } @Test public void testBatchConfigurationSetters() { DoclingConfiguration config = new DoclingConfiguration(); config.setBatchSize(20); config.setBatchParallelism(8); config.setBatchTimeout(600000); config.setBatchFailOnFirstError(false); config.setSplitBatchResults(true); assertEquals(20, config.getBatchSize()); assertEquals(8, config.getBatchParallelism()); assertEquals(600000, config.getBatchTimeout()); assertEquals(false, config.isBatchFailOnFirstError()); assertTrue(config.isSplitBatchResults()); } @Test public void testBatchTimeoutConfiguration() { DoclingConfiguration config = new DoclingConfiguration(); // Test default timeout assertEquals(300000, config.getBatchTimeout()); // Test custom timeout 
config.setBatchTimeout(120000); assertEquals(120000, config.getBatchTimeout()); // Test minimum reasonable timeout config.setBatchTimeout(5000); assertEquals(5000, config.getBatchTimeout()); } @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { // Placeholder route for test infrastructure from("direct:batch-test") .log("Batch test route"); } }; } }
BatchProcessingTest
java
quarkusio__quarkus
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/cdi/bcextensions/SyntheticBeanInjectionPointTest.java
{ "start": 4133, "end": 4482 }
class ____ implements SyntheticBeanDisposer<MyDependentBean> { static InjectionPoint lookedUp = null; @Override public void dispose(MyDependentBean instance, Instance<Object> lookup, Parameters params) { lookedUp = lookup.select(InjectionPoint.class).get(); } } public static
MyDependentBeanDisposer
java
quarkusio__quarkus
extensions/keycloak-authorization/runtime/src/main/java/io/quarkus/keycloak/pep/runtime/KeycloakPolicyEnforcerTenantConfigBuilder.java
{ "start": 16657, "end": 22425 }
class ____ implements PathConfigBuilder { private final KeycloakPolicyEnforcerTenantConfigBuilder builder; private final Map<String, MethodConfig> methods = new HashMap<>(); private final Set<String> paths = new HashSet<>(); private ClaimInformationPointConfig claimInformationPointConfig = new ClaimInformationPointConfigImpl(Map.of(), Map.of()); private EnforcementMode enforcementMode = ENFORCING; private String name = null; private PathConfigBuilderImpl(KeycloakPolicyEnforcerTenantConfigBuilder builder, PathConfig pathConfig) { this.builder = builder; if (pathConfig != null) { this.methods.putAll(pathConfig.methods()); this.claimInformationPointConfig = pathConfig.claimInformationPoint(); this.paths.addAll(pathConfig.paths().orElse(List.of())); if (pathConfig.path().isPresent()) { this.paths.add(pathConfig.path().get()); } this.enforcementMode = pathConfig.enforcementMode(); } } @Override public KeycloakPolicyEnforcerTenantConfigBuilder claimInformationPoint( Map<String, Map<String, String>> simpleConfig) { claimInformationPointConfig = new ClaimInformationPointConfigImpl( simpleConfig == null ? Map.of() : Map.copyOf(simpleConfig), claimInformationPointConfig == null ? Map.of() : claimInformationPointConfig.complexConfig()); return builder; } @Override public KeycloakPolicyEnforcerTenantConfigBuilder claimInformationPoint(Map<String, Map<String, String>> simpleConfig, Map<String, Map<String, Map<String, String>>> complexConfig) { claimInformationPointConfig = new ClaimInformationPointConfigImpl( simpleConfig == null ? Map.of() : Map.copyOf(simpleConfig), complexConfig == null ? 
Map.of() : Map.copyOf(complexConfig)); return builder; } @Override public ClaimInformationPointConfigBuilder<PathConfigBuilder> claimInformationPoint() { return new ClaimInformationPointConfigBuilder<>() { @Override public PathConfigBuilder build() { if (simpleConfig != null || complexConfig != null) { PathConfigBuilderImpl.this.claimInformationPoint(simpleConfig, complexConfig); } return PathConfigBuilderImpl.this; } }; } @Override public PathConfigBuilder permissionName(String name) { this.name = name; return this; } @Override public KeycloakPolicyEnforcerTenantConfigBuilder enforcementMode(EnforcementMode enforcementMode) { Objects.requireNonNull(enforcementMode); this.enforcementMode = enforcementMode; return builder; } @Override public KeycloakPolicyEnforcerTenantConfigBuilder post(String... scopes) { return post(null, scopes); } @Override public KeycloakPolicyEnforcerTenantConfigBuilder post(ScopeEnforcementMode scopeEnforcementMode, String... scopes) { return method("POST", scopeEnforcementMode, scopes); } @Override public KeycloakPolicyEnforcerTenantConfigBuilder head(String... scopes) { return head(null, scopes); } @Override public KeycloakPolicyEnforcerTenantConfigBuilder head(ScopeEnforcementMode scopeEnforcementMode, String... scopes) { return method("HEAD", scopeEnforcementMode, scopes); } @Override public KeycloakPolicyEnforcerTenantConfigBuilder get(String... scopes) { return get(null, scopes); } @Override public KeycloakPolicyEnforcerTenantConfigBuilder get(ScopeEnforcementMode scopeEnforcementMode, String... scopes) { return method("GET", scopeEnforcementMode, scopes); } @Override public KeycloakPolicyEnforcerTenantConfigBuilder put(String... scopes) { return put(null, scopes); } @Override public KeycloakPolicyEnforcerTenantConfigBuilder put(ScopeEnforcementMode scopeEnforcementMode, String... scopes) { return method("PUT", scopeEnforcementMode, scopes); } @Override public KeycloakPolicyEnforcerTenantConfigBuilder patch(String... 
scopes) { return patch(null, scopes); } @Override public KeycloakPolicyEnforcerTenantConfigBuilder patch(ScopeEnforcementMode scopeEnforcementMode, String... scopes) { return method("PATCH", scopeEnforcementMode, scopes); } @Override public KeycloakPolicyEnforcerTenantConfigBuilder method(String method, ScopeEnforcementMode scopeEnforcementMode, String... scopes) { Objects.requireNonNull(method); if (scopeEnforcementMode == null) { // default enforcement scope is ALL scopeEnforcementMode = ScopeEnforcementMode.ALL; } methods.put(method.toLowerCase(), new MethodConfigImpl(method, List.of(scopes), scopeEnforcementMode)); return builder; } @Override public MethodConfigBuilder method() { return new MethodConfigBuilder(this); } @Override public KeycloakPolicyEnforcerTenantConfigBuilder parent() { return builder; } } public static final
PathConfigBuilderImpl
java
spring-projects__spring-framework
spring-core/src/test/java/org/springframework/util/PlaceholderParserTests.java
{ "start": 6385, "end": 10281 }
class ____ { private final PlaceholderParser parser = new PlaceholderParser("${", "}", ":", null, true); @ParameterizedTest(name = "{0} -> {1}") @MethodSource("placeholders") void placeholderIsReplaced(String text, String expected) { Map<String, String> properties = Map.of( "firstName", "John", "nested0", "first", "nested1", "Name"); assertThat(this.parser.replacePlaceholders(text, properties::get)).isEqualTo(expected); } static Stream<Arguments> placeholders() { return Stream.of( Arguments.of("${invalid:John}", "John"), Arguments.of("${first${invalid:Name}}", "John"), Arguments.of("${invalid:${firstName}}", "John"), Arguments.of("${invalid:${${nested0}${nested1}}}", "John"), Arguments.of("${invalid:$${firstName}}", "$John"), Arguments.of("${invalid: }${firstName}", " John"), Arguments.of("${invalid:}", ""), Arguments.of("${:}", "") ); } @ParameterizedTest(name = "{0} -> {1}") @MethodSource("nestedPlaceholders") void nestedPlaceholdersAreReplaced(String text, String expected) { Map<String, String> properties = Map.of( "p1", "v1", "p2", "v2", "p3", "${p1}:${p2}", // nested placeholders "p4", "${p3}", // deeply nested placeholders "p5", "${p1}:${p2}:${bogus}", // unresolvable placeholder "p6", "${p1}:${p2}:${bogus:def}"); // unresolvable w/ default assertThat(this.parser.replacePlaceholders(text, properties::get)).isEqualTo(expected); } static Stream<Arguments> nestedPlaceholders() { return Stream.of( Arguments.of("${p6}", "v1:v2:def"), Arguments.of("${p6:not-used}", "v1:v2:def"), Arguments.of("${p6:${invalid}}", "v1:v2:def"), Arguments.of("${invalid:${p1}:${p2}}", "v1:v2"), Arguments.of("${invalid:${p3}}", "v1:v2"), Arguments.of("${invalid:${p4}}", "v1:v2"), Arguments.of("${invalid:${p5}}", "v1:v2:${bogus}"), Arguments.of("${invalid:${p6}}", "v1:v2:def") ); } @ParameterizedTest(name = "{0} -> {1}") @MethodSource("exactMatchPlaceholders") void placeholdersWithExactMatchAreConsidered(String text, String expected) { Map<String, String> properties = Map.of( 
"prefix://my-service", "example-service", "px", "prefix", "p1", "${prefix://my-service}"); assertThat(this.parser.replacePlaceholders(text, properties::get)).isEqualTo(expected); } static Stream<Arguments> exactMatchPlaceholders() { return Stream.of( Arguments.of("${prefix://my-service}", "example-service"), Arguments.of("${p1}", "example-service") ); } @Test void parseWithKeyEqualsToText() { PlaceholderResolver resolver = mockPlaceholderResolver("firstName", "Steve"); assertThat(this.parser.replacePlaceholders("${firstName}", resolver)).isEqualTo("Steve"); verifyPlaceholderResolutions(resolver, "firstName"); } @Test void parseWithHardcodedFallback() { PlaceholderResolver resolver = mockPlaceholderResolver(); assertThat(this.parser.replacePlaceholders("${firstName:Steve}", resolver)).isEqualTo("Steve"); verifyPlaceholderResolutions(resolver, "firstName:Steve", "firstName"); } @Test void parseWithNestedPlaceholderInKeyUsingFallback() { PlaceholderResolver resolver = mockPlaceholderResolver("firstName", "John"); assertThat(this.parser.replacePlaceholders("${first${invalid:Name}}", resolver)).isEqualTo("John"); verifyPlaceholderResolutions(resolver, "invalid:Name", "invalid", "firstName"); } @Test void parseWithFallbackUsingPlaceholder() { PlaceholderResolver resolver = mockPlaceholderResolver("firstName", "John"); assertThat(this.parser.replacePlaceholders("${invalid:${firstName}}", resolver)).isEqualTo("John"); verifyPlaceholderResolutions(resolver, "invalid", "firstName"); } } /** * Tests that use the escape character. */ @Nested
DefaultValueTests
java
spring-projects__spring-framework
spring-beans/src/test/java/org/springframework/beans/factory/xml/FactoryMethodTests.java
{ "start": 16157, "end": 16547 }
class ____ { private Properties props; private MailSession() { } public void setProperties(Properties props) { this.props = props; } public static MailSession getDefaultInstance(Properties props) { MailSession session = new MailSession(); session.setProperties(props); return session; } public Object getProperty(String key) { return this.props.get(key); } }
MailSession
java
grpc__grpc-java
core/src/main/java/io/grpc/internal/AutoConfiguredLoadBalancerFactory.java
{ "start": 8804, "end": 9093 }
class ____ extends SubchannelPicker { private final Status failure; FailingPicker(Status failure) { this.failure = failure; } @Override public PickResult pickSubchannel(PickSubchannelArgs args) { return PickResult.withError(failure); } } }
FailingPicker
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/index/mapper/CompositeSyntheticFieldLoaderTests.java
{ "start": 885, "end": 9787 }
class ____ extends ESTestCase { public void testComposingMultipleStoredFields() throws IOException { var sut = new CompositeSyntheticFieldLoader( "foo", "bar.baz.foo", List.of(new CompositeSyntheticFieldLoader.StoredFieldLayer("foo.one") { @Override protected void writeValue(Object value, XContentBuilder b) throws IOException { b.value((long) value); } }, new CompositeSyntheticFieldLoader.StoredFieldLayer("foo.two") { @Override protected void writeValue(Object value, XContentBuilder b) throws IOException { b.value((long) value); } }) ); var storedFieldLoaders = sut.storedFieldLoaders().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); storedFieldLoaders.get("foo.one").load(List.of(45L, 46L)); storedFieldLoaders.get("foo.two").load(List.of(1L)); var result = XContentBuilder.builder(XContentType.JSON.xContent()); result.startObject(); sut.write(result); result.endObject(); assertEquals(""" {"foo":[45,46,1]}""", Strings.toString(result)); } public void testLoadStoredFieldAndReset() throws IOException { var sut = new CompositeSyntheticFieldLoader( "foo", "bar.baz.foo", List.of(new CompositeSyntheticFieldLoader.StoredFieldLayer("foo.one") { @Override protected void writeValue(Object value, XContentBuilder b) throws IOException { b.value((long) value); } }) ); var storedFieldLoaders = sut.storedFieldLoaders().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); storedFieldLoaders.get("foo.one").load(List.of(45L)); var result = XContentBuilder.builder(XContentType.JSON.xContent()); result.startObject(); sut.write(result); result.endObject(); assertEquals(""" {"foo":45}""", Strings.toString(result)); var empty = XContentBuilder.builder(XContentType.JSON.xContent()); empty.startObject(); // reset() should have been called after previous write sut.write(result); empty.endObject(); assertEquals("{}", Strings.toString(empty)); } public void testComposingMultipleDocValuesFields() throws IOException { var sut = new 
CompositeSyntheticFieldLoader("foo", "bar.baz.foo", List.of(new CompositeSyntheticFieldLoader.Layer() { @Override public Stream<Map.Entry<String, StoredFieldLoader>> storedFieldLoaders() { return Stream.empty(); } @Override public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf) throws IOException { return (docId -> true); } @Override public boolean hasValue() { return true; } @Override public void write(XContentBuilder b) throws IOException { b.value(45L); b.value(46L); } @Override public void reset() { } @Override public String fieldName() { return ""; } @Override public long valueCount() { return 2; } }, new CompositeSyntheticFieldLoader.Layer() { @Override public Stream<Map.Entry<String, StoredFieldLoader>> storedFieldLoaders() { return Stream.empty(); } @Override public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf) throws IOException { return (docId -> true); } @Override public boolean hasValue() { return true; } @Override public void write(XContentBuilder b) throws IOException { b.value(1L); } @Override public void reset() { } @Override public String fieldName() { return ""; } @Override public long valueCount() { return 1; } })); sut.docValuesLoader(null, new int[0]).advanceToDoc(0); var result = XContentBuilder.builder(XContentType.JSON.xContent()); result.startObject(); sut.write(result); result.endObject(); assertEquals(""" {"foo":[45,46,1]}""", Strings.toString(result)); } public void testComposingStoredFieldsWithDocValues() throws IOException { var sut = new CompositeSyntheticFieldLoader( "foo", "bar.baz.foo", List.of(new CompositeSyntheticFieldLoader.StoredFieldLayer("foo.one") { @Override protected void writeValue(Object value, XContentBuilder b) throws IOException { b.value((long) value); } }, new CompositeSyntheticFieldLoader.Layer() { @Override public Stream<Map.Entry<String, StoredFieldLoader>> storedFieldLoaders() { return Stream.empty(); } @Override public DocValuesLoader 
docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf) throws IOException { return (docId -> true); } @Override public boolean hasValue() { return true; } @Override public void write(XContentBuilder b) throws IOException { b.value(1L); } @Override public void reset() { } @Override public String fieldName() { return ""; } @Override public long valueCount() { return 1; } }) ); var storedFieldLoaders = sut.storedFieldLoaders().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); storedFieldLoaders.get("foo.one").load(List.of(45L, 46L)); sut.docValuesLoader(null, new int[0]).advanceToDoc(0); var result = XContentBuilder.builder(XContentType.JSON.xContent()); result.startObject(); sut.write(result); result.endObject(); assertEquals(""" {"foo":[45,46,1]}""", Strings.toString(result)); } public void testFieldName() { var sut = new CompositeSyntheticFieldLoader("foo", "bar.baz.foo"); assertEquals("bar.baz.foo", sut.fieldName()); } public void testMergeTwoFieldLoaders() throws IOException { // given var fieldLoader1 = new CompositeSyntheticFieldLoader( "foo", "bar.baz.foo", List.of(new CompositeSyntheticFieldLoader.StoredFieldLayer("foo.one") { @Override protected void writeValue(Object value, XContentBuilder b) throws IOException { b.value((long) value); } }, new CompositeSyntheticFieldLoader.StoredFieldLayer("foo.two") { @Override protected void writeValue(Object value, XContentBuilder b) throws IOException { b.value((long) value); } }) ); var fieldLoader2 = new CompositeSyntheticFieldLoader( "foo", "bar.baz.foo", List.of(new CompositeSyntheticFieldLoader.StoredFieldLayer("foo.three") { @Override protected void writeValue(Object value, XContentBuilder b) throws IOException { b.value((long) value); } }) ); var mergedFieldLoader = fieldLoader1.mergedWith(fieldLoader2); var storedFieldLoaders = mergedFieldLoader.storedFieldLoaders().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); storedFieldLoaders.get("foo.one").load(List.of(45L, 46L)); 
storedFieldLoaders.get("foo.two").load(List.of(1L)); storedFieldLoaders.get("foo.three").load(List.of(98L, 99L)); // when var result = XContentBuilder.builder(XContentType.JSON.xContent()); result.startObject(); mergedFieldLoader.write(result); result.endObject(); // then assertEquals(""" {"foo":[45,46,1,98,99]}""", Strings.toString(result)); } }
CompositeSyntheticFieldLoaderTests
java
apache__camel
core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/DefaultRouteController.java
{ "start": 1531, "end": 6526 }
class ____ extends ServiceSupport implements RouteController, NonManagedService { // mark this as non managed service as its registered specially as a route controller private CamelContext camelContext; private LoggingLevel loggingLevel = LoggingLevel.DEBUG; public DefaultRouteController() { this(null); } public DefaultRouteController(CamelContext camelContext) { this.camelContext = camelContext; } // *************************************************** // Properties // *************************************************** @Override public void setCamelContext(CamelContext camelContext) { this.camelContext = camelContext; } @Override public CamelContext getCamelContext() { return camelContext; } @Override public LoggingLevel getLoggingLevel() { return loggingLevel; } @Override public void setLoggingLevel(LoggingLevel loggingLevel) { this.loggingLevel = loggingLevel; } @Override public boolean isSupervising() { return this instanceof SupervisingRouteController; } // *************************************************** // Route management // *************************************************** protected RouteController getInternalRouteController() { return camelContext.getCamelContextExtension().getInternalRouteController(); } @Override public void startAllRoutes() throws Exception { getInternalRouteController().startAllRoutes(); } @Override public void stopAllRoutes() throws Exception { getInternalRouteController().stopAllRoutes(); } @Override public void removeAllRoutes() throws Exception { getInternalRouteController().removeAllRoutes(); } @Override public boolean isStartingRoutes() { return getInternalRouteController().isStartingRoutes(); } @Override public boolean hasUnhealthyRoutes() { return getInternalRouteController().hasUnhealthyRoutes(); } @Override public void reloadAllRoutes() throws Exception { getInternalRouteController().reloadAllRoutes(); } @Override public boolean isReloadingRoutes() { return getInternalRouteController().isReloadingRoutes(); } @Override 
public ServiceStatus getRouteStatus(String routeId) { return getInternalRouteController().getRouteStatus(routeId); } @Override public void startRoute(String routeId) throws Exception { getInternalRouteController().startRoute(routeId); } @Override public void stopRoute(String routeId) throws Exception { getInternalRouteController().stopRoute(routeId); } @Override public void stopRoute(String routeId, Throwable cause) throws Exception { getInternalRouteController().stopRoute(routeId, cause); } @Override public void stopRoute(String routeId, long timeout, TimeUnit timeUnit) throws Exception { getInternalRouteController().stopRoute(routeId, timeout, timeUnit); } @Override public boolean stopRoute(String routeId, long timeout, TimeUnit timeUnit, boolean abortAfterTimeout) throws Exception { return getInternalRouteController().stopRoute(routeId, timeout, timeUnit, abortAfterTimeout); } @Override public void suspendRoute(String routeId) throws Exception { getInternalRouteController().suspendRoute(routeId); } @Override public void suspendRoute(String routeId, long timeout, TimeUnit timeUnit) throws Exception { getInternalRouteController().suspendRoute(routeId, timeout, timeUnit); } @Override public void resumeRoute(String routeId) throws Exception { getInternalRouteController().resumeRoute(routeId); } @Override public void startRouteGroup(String routeGroup) throws Exception { getInternalRouteController().startRouteGroup(routeGroup); } @Override public void stopRouteGroup(String routeGroup) throws Exception { getInternalRouteController().stopRouteGroup(routeGroup); } // *************************************************** // // *************************************************** @Override public <T extends RouteController> T adapt(Class<T> type) { return type.cast(this); } @Override public SupervisingRouteController supervising() { if (this instanceof SupervisingRouteController src) { return src; } else { // change current route controller to be supervising 
SupervisingRouteController src = new DefaultSupervisingRouteController(); src.setCamelContext(camelContext); camelContext.setRouteController(src); return src; } } @Override public Collection<Route> getControlledRoutes() { return Collections.emptyList(); } }
DefaultRouteController
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/streaming/runtime/io/checkpointing/UnalignedCheckpointsTest.java
{ "start": 43957, "end": 44494 }
class ____ extends org.apache.flink.streaming.runtime.io.checkpointing .ValidatingCheckpointHandler { public ValidatingCheckpointHandler(long nextExpectedCheckpointId) { super(nextExpectedCheckpointId); } @Override public void abortCheckpointOnBarrier(long checkpointId, CheckpointException cause) { super.abortCheckpointOnBarrier(checkpointId, cause); nextExpectedCheckpointId = -1; } } static
ValidatingCheckpointHandler
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java
{ "start": 3378, "end": 6060 }
class ____<T extends Request<T>> extends TransportSingleShardAction<T, ActionResponse.Empty> { private final IndicesService indicesService; @Inject TransportRetentionLeaseAction( final String name, final ThreadPool threadPool, final ClusterService clusterService, final TransportService transportService, final ActionFilters actionFilters, final ProjectResolver projectResolver, final IndexNameExpressionResolver indexNameExpressionResolver, final IndicesService indicesService, final Writeable.Reader<T> requestSupplier ) { super( name, threadPool, clusterService, transportService, actionFilters, projectResolver, indexNameExpressionResolver, requestSupplier, threadPool.executor(ThreadPool.Names.MANAGEMENT) ); this.indicesService = Objects.requireNonNull(indicesService); } @Override protected ShardsIterator shards(final ProjectState state, final InternalRequest request) { return state.routingTable().shardRoutingTable(request.concreteIndex(), request.request().getShardId().id()).primaryShardIt(); } @Override protected void asyncShardOperation(T request, ShardId shardId, final ActionListener<ActionResponse.Empty> listener) { final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); final IndexShard indexShard = indexService.getShard(shardId.id()); indexShard.acquirePrimaryOperationPermit(listener.delegateFailureAndWrap((delegatedListener, releasable) -> { try (Releasable ignore = releasable) { doRetentionLeaseAction(indexShard, request, delegatedListener); } }), EsExecutors.DIRECT_EXECUTOR_SERVICE); } @Override protected ActionResponse.Empty shardOperation(final T request, final ShardId shardId) { throw new UnsupportedOperationException(); } abstract void doRetentionLeaseAction(IndexShard indexShard, T request, ActionListener<ActionResponse.Empty> listener); @Override protected final Writeable.Reader<ActionResponse.Empty> getResponseReader() { return in -> ActionResponse.Empty.INSTANCE; } @Override protected boolean resolveIndex(final T 
request) { return false; } } public static
TransportRetentionLeaseAction
java
resilience4j__resilience4j
resilience4j-retry/src/main/java/io/github/resilience4j/retry/internal/InMemoryRetryRegistry.java
{ "start": 1266, "end": 6819 }
class ____ extends AbstractRegistry<Retry, RetryConfig> implements RetryRegistry { /** * The constructor with default default. */ public InMemoryRetryRegistry() { this(RetryConfig.ofDefaults()); } /*public InMemoryRetryRegistry(Map<String, String> tags) { this(RetryConfig.ofDefaults(), tags); }*/ public InMemoryRetryRegistry(Map<String, RetryConfig> configs) { this(configs, Collections.emptyMap()); } public InMemoryRetryRegistry(Map<String, RetryConfig> configs, Map<String, String> tags) { this(configs.getOrDefault(DEFAULT_CONFIG, RetryConfig.ofDefaults()), tags); this.configurations.putAll(configs); } public InMemoryRetryRegistry(Map<String, RetryConfig> configs, RegistryEventConsumer<Retry> registryEventConsumer) { this(configs, registryEventConsumer, Collections.emptyMap()); } public InMemoryRetryRegistry(Map<String, RetryConfig> configs, RegistryEventConsumer<Retry> registryEventConsumer, Map<String, String> tags) { this(configs.getOrDefault(DEFAULT_CONFIG, RetryConfig.ofDefaults()), registryEventConsumer, tags); this.configurations.putAll(configs); } public InMemoryRetryRegistry(Map<String, RetryConfig> configs, List<RegistryEventConsumer<Retry>> registryEventConsumers) { this(configs, registryEventConsumers, Collections.emptyMap()); } public InMemoryRetryRegistry(Map<String, RetryConfig> configs, List<RegistryEventConsumer<Retry>> registryEventConsumers, Map<String, String> tags) { this(configs.getOrDefault(DEFAULT_CONFIG, RetryConfig.ofDefaults()), registryEventConsumers, tags); this.configurations.putAll(configs); } public InMemoryRetryRegistry(Map<String, RetryConfig> configs, List<RegistryEventConsumer<Retry>> registryEventConsumers, Map<String, String> tags, RegistryStore<Retry> registryStore) { super(configs.getOrDefault(DEFAULT_CONFIG, RetryConfig.ofDefaults()), registryEventConsumers, Optional.ofNullable(tags).orElse(Collections.emptyMap()), Optional.ofNullable(registryStore).orElse(new InMemoryRegistryStore<>())); this.configurations.putAll(configs); 
} /** * The constructor with custom default config. * * @param defaultConfig The default config. */ public InMemoryRetryRegistry(RetryConfig defaultConfig) { this(defaultConfig, Collections.emptyMap()); } public InMemoryRetryRegistry(RetryConfig defaultConfig, Map<String, String> tags) { super(defaultConfig, tags); } public InMemoryRetryRegistry(RetryConfig defaultConfig, RegistryEventConsumer<Retry> registryEventConsumer) { this(defaultConfig, registryEventConsumer, Collections.emptyMap()); } public InMemoryRetryRegistry(RetryConfig defaultConfig, RegistryEventConsumer<Retry> registryEventConsumer, Map<String, String> tags) { super(defaultConfig, registryEventConsumer, tags); } public InMemoryRetryRegistry(RetryConfig defaultConfig, List<RegistryEventConsumer<Retry>> registryEventConsumers) { this(defaultConfig, registryEventConsumers, Collections.emptyMap()); } public InMemoryRetryRegistry(RetryConfig defaultConfig, List<RegistryEventConsumer<Retry>> registryEventConsumers, Map<String, String> tags) { super(defaultConfig, registryEventConsumers, tags); } /** * {@inheritDoc} */ @Override public Set<Retry> getAllRetries() { return new HashSet<>(entryMap.values()); } /** * {@inheritDoc} */ @Override public Retry retry(String name) { return retry(name, getDefaultConfig()); } /** * {@inheritDoc} */ @Override public Retry retry(String name, Map<String, String> tags) { return retry(name, getDefaultConfig(), tags); } /** * {@inheritDoc} */ @Override public Retry retry(String name, RetryConfig config) { return retry(name, config, Collections.emptyMap()); } @Override public Retry retry(String name, RetryConfig config, Map<String, String> tags) { return computeIfAbsent(name, () -> Retry .of(name, Objects.requireNonNull(config, CONFIG_MUST_NOT_BE_NULL), getAllTags(tags))); } /** * {@inheritDoc} */ @Override public Retry retry(String name, Supplier<RetryConfig> retryConfigSupplier) { return retry(name, retryConfigSupplier, Collections.emptyMap()); } @Override public Retry 
retry(String name, Supplier<RetryConfig> retryConfigSupplier, Map<String, String> tags) { return computeIfAbsent(name, () -> Retry.of(name, Objects.requireNonNull( Objects.requireNonNull(retryConfigSupplier, SUPPLIER_MUST_NOT_BE_NULL).get(), CONFIG_MUST_NOT_BE_NULL), getAllTags(tags))); } /** * {@inheritDoc} */ @Override public Retry retry(String name, String configName) { return retry(name, configName, Collections.emptyMap()); } @Override public Retry retry(String name, String configName, Map<String, String> tags) { return computeIfAbsent(name, () -> Retry.of(name, getConfiguration(configName) .orElseThrow(() -> new ConfigurationNotFoundException(configName)), getAllTags(tags))); } }
InMemoryRetryRegistry
java
quarkusio__quarkus
integration-tests/gradle/src/main/resources/test-fixtures-client-exception-mapper/src/main/java/org/example/MyRemoteService.java
{ "start": 477, "end": 834 }
interface ____ { @GET @Path("/extensions") Set<Extension> getExtensionsById(@QueryParam("id") String id); @ClientExceptionMapper static RuntimeException toException(final Response response, final Method target) { return new WebApplicationException( "Request failed with status: " + response.getStatus(), response); }
MyRemoteService
java
quarkusio__quarkus
extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/JsonFormat.java
{ "start": 1047, "end": 1229 }
class ____ extends AnnotationLiteral<JsonFormat> implements JsonFormat { public static JsonFormat INSTANCE = new Literal(); private Literal() { } } }
Literal
java
quarkusio__quarkus
core/runtime/src/main/java/io/quarkus/runtime/configuration/Substitutions.java
{ "start": 17725, "end": 30043 }
class ____ { @Alias private Properties currentProperties; @Alias private void ensureAllPropertiesInitialized() { } @Alias private void ensurePropertyInitialized(String key) { } @Substitute public Properties getCurrentProperties() { return new Properties() { @Override public synchronized Object setProperty(final String key, final String value) { ensurePropertyInitialized(key); return currentProperties.setProperty(key, value); } @Override public synchronized void load(final Reader reader) throws IOException { currentProperties.load(reader); } @Override public synchronized void load(final InputStream inStream) throws IOException { currentProperties.load(inStream); } @Override public void save(final OutputStream out, final String comments) { ensureAllPropertiesInitialized(); currentProperties.save(out, comments); } @Override public void store(final Writer writer, final String comments) throws IOException { ensureAllPropertiesInitialized(); currentProperties.store(writer, comments); } @Override public void store(final OutputStream out, final String comments) throws IOException { ensureAllPropertiesInitialized(); currentProperties.store(out, comments); } @Override public synchronized void loadFromXML(final InputStream in) throws IOException, InvalidPropertiesFormatException { currentProperties.loadFromXML(in); } @Override public void storeToXML(final OutputStream os, final String comment) throws IOException { ensureAllPropertiesInitialized(); currentProperties.storeToXML(os, comment); } @Override public void storeToXML(final OutputStream os, final String comment, final String encoding) throws IOException { ensureAllPropertiesInitialized(); currentProperties.storeToXML(os, comment, encoding); } @Override public void storeToXML(final OutputStream os, final String comment, final Charset charset) throws IOException { ensureAllPropertiesInitialized(); currentProperties.storeToXML(os, comment, charset); } @Override public String getProperty(final String key) { 
ensurePropertyInitialized(key); return currentProperties.getProperty(key); } @Override public String getProperty(final String key, final String defaultValue) { ensurePropertyInitialized(key); return currentProperties.getProperty(key, defaultValue); } @Override public Enumeration<?> propertyNames() { return currentProperties.propertyNames(); } @Override public Set<String> stringPropertyNames() { return currentProperties.stringPropertyNames(); } @Override public void list(final PrintStream out) { ensureAllPropertiesInitialized(); currentProperties.list(out); } @Override public void list(final PrintWriter out) { ensureAllPropertiesInitialized(); currentProperties.list(out); } @Override public int size() { return currentProperties.size(); } @Override public boolean isEmpty() { return currentProperties.isEmpty(); } @Override public Enumeration<Object> keys() { return currentProperties.keys(); } @Override public Enumeration<Object> elements() { ensureAllPropertiesInitialized(); return currentProperties.elements(); } @Override public boolean contains(final Object value) { ensureAllPropertiesInitialized(); return currentProperties.contains(value); } @Override public boolean containsValue(final Object value) { ensureAllPropertiesInitialized(); return currentProperties.containsValue(value); } @Override public boolean containsKey(final Object key) { return currentProperties.containsKey(key); } @Override public Object get(final Object key) { if (key instanceof String) { ensurePropertyInitialized((String) key); } return currentProperties.get(key); } @Override public synchronized Object put(final Object key, final Object value) { if (key instanceof String) { ensurePropertyInitialized((String) key); } return currentProperties.put(key, value); } @Override public synchronized Object remove(final Object key) { if (key instanceof String) { ensurePropertyInitialized((String) key); } return currentProperties.remove(key); } @Override public synchronized void putAll(final Map<?, ?> t) { 
currentProperties.putAll(t); } @Override public synchronized void clear() { currentProperties.clear(); } @Override public synchronized String toString() { ensureAllPropertiesInitialized(); return currentProperties.toString(); } @Override public Set<Object> keySet() { return currentProperties.keySet(); } @Override public Collection<Object> values() { ensureAllPropertiesInitialized(); return currentProperties.values(); } @Override public Set<Map.Entry<Object, Object>> entrySet() { ensureAllPropertiesInitialized(); return currentProperties.entrySet(); } @Override public synchronized boolean equals(final Object o) { ensureAllPropertiesInitialized(); return currentProperties.equals(o); } @Override public synchronized int hashCode() { ensureAllPropertiesInitialized(); return currentProperties.hashCode(); } @Override public Object getOrDefault(final Object key, final Object defaultValue) { if (key instanceof String) { ensurePropertyInitialized((String) key); } return currentProperties.getOrDefault(key, defaultValue); } @Override public synchronized void forEach(final BiConsumer<? super Object, ? super Object> action) { ensureAllPropertiesInitialized(); currentProperties.forEach(action); } @Override public synchronized void replaceAll(final BiFunction<? super Object, ? 
super Object, ?> function) { ensureAllPropertiesInitialized(); currentProperties.replaceAll(function); } @Override public synchronized Object putIfAbsent(final Object key, final Object value) { if (key instanceof String) { ensurePropertyInitialized((String) key); } return currentProperties.putIfAbsent(key, value); } @Override public synchronized boolean remove(final Object key, final Object value) { if (key instanceof String) { ensurePropertyInitialized((String) key); } return currentProperties.remove(key, value); } @Override public synchronized boolean replace(final Object key, final Object oldValue, final Object newValue) { if (key instanceof String) { ensurePropertyInitialized((String) key); } return currentProperties.replace(key, oldValue, newValue); } @Override public synchronized Object replace(final Object key, final Object value) { if (key instanceof String) { ensurePropertyInitialized((String) key); } return currentProperties.replace(key, value); } @Override public synchronized Object computeIfAbsent( final Object key, final Function<? super Object, ?> mappingFunction) { if (key instanceof String) { ensurePropertyInitialized((String) key); } return currentProperties.computeIfAbsent(key, mappingFunction); } @Override public synchronized Object computeIfPresent( final Object key, final BiFunction<? super Object, ? super Object, ?> remappingFunction) { if (key instanceof String) { ensurePropertyInitialized((String) key); } return currentProperties.computeIfPresent(key, remappingFunction); } @Override public synchronized Object compute( final Object key, final BiFunction<? super Object, ? super Object, ?> remappingFunction) { if (key instanceof String) { ensurePropertyInitialized((String) key); } return currentProperties.compute(key, remappingFunction); } @Override public synchronized Object merge( final Object key, final Object value, final BiFunction<? super Object, ? 
super Object, ?> remappingFunction) { if (key instanceof String) { ensurePropertyInitialized((String) key); } return currentProperties.merge(key, value, remappingFunction); } @Override public synchronized Object clone() { ensureAllPropertiesInitialized(); return currentProperties.clone(); } }; } private static final
Target_SystemPropertiesSupport_post_21
java
elastic__elasticsearch
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/ScoreSort.java
{ "start": 469, "end": 1080 }
class ____ extends Sort { public ScoreSort(Direction direction, Missing missing) { super(direction, missing); } @Override public int hashCode() { return Objects.hash(direction(), missing()); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } ScriptSort other = (ScriptSort) obj; return Objects.equals(direction(), other.direction()) && Objects.equals(missing(), other.missing()); } }
ScoreSort
java
dropwizard__dropwizard
dropwizard-validation/src/test/java/io/dropwizard/validation/SelfValidationTest.java
{ "start": 3456, "end": 3637 }
class ____ extends FailingExample { @Override public void validateFail(ViolationCollector col) { } } @SelfValidating public static
OverridingExample
java
quarkusio__quarkus
integration-tests/hibernate-validator/src/main/java/io/quarkus/it/hibernate/validator/injection/InjectedRuntimeConstraintValidatorConstraint.java
{ "start": 459, "end": 697 }
interface ____ { String message() default "{InjectedRuntimeConstraintValidatorConstraint.message}"; Class<?>[] groups() default {}; Class<? extends Payload>[] payload() default {}; }
InjectedRuntimeConstraintValidatorConstraint
java
alibaba__druid
core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLDropTableGroupStatement.java
{ "start": 843, "end": 2371 }
class ____ extends SQLStatementImpl implements SQLDropStatement, SQLReplaceable { protected SQLName name; protected boolean ifExists; public SQLDropTableGroupStatement() { } public SQLDropTableGroupStatement(DbType dbType) { super(dbType); } @Override protected void accept0(SQLASTVisitor visitor) { if (visitor.visit(this)) { acceptChild(visitor, name); } visitor.endVisit(this); } @Override public List<SQLObject> getChildren() { List<SQLObject> children = new ArrayList<SQLObject>(); if (name != null) { children.add(name); } return children; } public SQLName getName() { return name; } public void setName(SQLName name) { this.name = name; } public String getTableGroupName() { if (name == null) { return null; } if (name instanceof SQLName) { return name.getSimpleName(); } return null; } public boolean isIfExists() { return ifExists; } public void setIfExists(boolean ifNotExists) { this.ifExists = ifNotExists; } public boolean replace(SQLExpr expr, SQLExpr target) { if (name == expr) { setName((SQLName) target); return true; } return false; } @Override public DDLObjectType getDDLObjectType() { return DDLObjectType.TABLE_GROUP; } }
SQLDropTableGroupStatement
java
apache__logging-log4j2
log4j-core-test/src/test/java/org/apache/logging/log4j/core/config/plugins/convert/TypeConverterRegistryTest.java
{ "start": 1247, "end": 3053 }
class ____ { @Test void testFindNullConverter() { assertThrows(NullPointerException.class, () -> TypeConverterRegistry.getInstance() .findCompatibleConverter(null)); } @Test void testFindBooleanConverter() throws Exception { final TypeConverter<?> converter = TypeConverterRegistry.getInstance().findCompatibleConverter(Boolean.class); assertNotNull(converter); assertTrue((Boolean) converter.convert("TRUE")); } @Test void testFindPrimitiveBooleanConverter() throws Exception { final TypeConverter<?> converter = TypeConverterRegistry.getInstance().findCompatibleConverter(Boolean.TYPE); assertNotNull(converter); assertTrue((Boolean) converter.convert("tRUe")); } @SuppressWarnings("unchecked") @Test void testFindCharSequenceConverterUsingStringConverter() throws Exception { final TypeConverter<CharSequence> converter = (TypeConverter<CharSequence>) TypeConverterRegistry.getInstance().findCompatibleConverter(CharSequence.class); assertNotNull(converter); assertThat(converter, instanceOf(TypeConverters.StringConverter.class)); final CharSequence expected = "This is a test sequence of characters"; final CharSequence actual = converter.convert(expected.toString()); assertEquals(expected, actual); } @SuppressWarnings("unchecked") @Test void testFindNumberConverter() { final TypeConverter<Number> numberTypeConverter = (TypeConverter<Number>) TypeConverterRegistry.getInstance().findCompatibleConverter(Number.class); assertNotNull(numberTypeConverter); // TODO: is there a specific converter this should return? } public
TypeConverterRegistryTest
java
hibernate__hibernate-orm
hibernate-vector/src/main/java/org/hibernate/vector/internal/SparseDoubleVectorJavaType.java
{ "start": 985, "end": 3789 }
class ____ extends AbstractClassJavaType<SparseDoubleVector> implements BasicPluralJavaType<Double> { public static final SparseDoubleVectorJavaType INSTANCE = new SparseDoubleVectorJavaType(); public SparseDoubleVectorJavaType() { super( SparseDoubleVector.class, new SparseVectorMutabilityPlan() ); } @Override public JavaType<Double> getElementJavaType() { return DoubleJavaType.INSTANCE; } @Override public BasicType<?> resolveType(TypeConfiguration typeConfiguration, Dialect dialect, BasicType<Double> elementType, ColumnTypeInformation columnTypeInformation, JdbcTypeIndicators stdIndicators) { final int arrayTypeCode = stdIndicators.getPreferredSqlTypeCodeForArray( elementType.getJdbcType().getDefaultSqlTypeCode() ); final JdbcType arrayJdbcType = typeConfiguration.getJdbcTypeRegistry() .resolveTypeConstructorDescriptor( arrayTypeCode, elementType, columnTypeInformation ); if ( elementType.getValueConverter() != null ) { throw new IllegalArgumentException( "Can't convert element type of sparse vector" ); } return typeConfiguration.getBasicTypeRegistry() .resolve( this, arrayJdbcType, () -> new BasicCollectionType<>( elementType, arrayJdbcType, this, "sparse_double_vector" ) ); } @Override public JdbcType getRecommendedJdbcType(JdbcTypeIndicators indicators) { return indicators.getJdbcType( SqlTypes.SPARSE_VECTOR_INT8 ); } @Override public <X> X unwrap(SparseDoubleVector value, Class<X> type, WrapperOptions options) { if ( value == null ) { return null; } else if ( type.isInstance( value ) ) { //noinspection unchecked return (X) value; } else if ( double[].class.isAssignableFrom( type ) ) { return (X) value.toDenseVector(); } else if ( Object[].class.isAssignableFrom( type ) ) { //noinspection unchecked return (X) value.toArray(); } else if ( String.class.isAssignableFrom( type ) ) { //noinspection unchecked return (X) value.toString(); } else { throw unknownUnwrap( type ); } } @Override public <X> SparseDoubleVector wrap(X value, WrapperOptions options) { if ( 
value == null ) { return null; } else if (value instanceof SparseDoubleVector vector) { return vector; } else if (value instanceof List<?> list) { //noinspection unchecked return new SparseDoubleVector( (List<Double>) list ); } else if (value instanceof Object[] array) { //noinspection unchecked return new SparseDoubleVector( (List<Double>) (List<?>) Arrays.asList( array ) ); } else if (value instanceof double[] vector) { return new SparseDoubleVector( vector ); } else if (value instanceof String vector) { return new SparseDoubleVector( vector ); } else { throw unknownWrap( value.getClass() ); } } private static
SparseDoubleVectorJavaType
java
quarkusio__quarkus
extensions/spring-boot-properties/deployment/src/main/java/io/quarkus/spring/boot/properties/deployment/YamlListObjectHandler.java
{ "start": 10924, "end": 11236 }
class ____ extends Member { public FieldMember(FieldInfo fieldInfo) { super(fieldInfo.declaringClass(), fieldInfo.type(), fieldInfo.name()); } @Override protected String phraseUsage() { return "field '" + name() + "'"; } } static
FieldMember
java
spring-projects__spring-boot
module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/jmx/annotation/JmxEndpointDiscovererTests.java
{ "start": 3043, "end": 15270 }
class ____ { @Test void getEndpointsWhenNoEndpointBeansShouldReturnEmptyCollection() { load(EmptyConfiguration.class, (discoverer) -> assertThat(discoverer.getEndpoints()).isEmpty()); } @Test void getEndpointsShouldDiscoverStandardEndpoints() { load(TestEndpoint.class, (discoverer) -> { Map<EndpointId, ExposableJmxEndpoint> endpoints = discover(discoverer); assertThat(endpoints).containsOnlyKeys(EndpointId.of("test")); Map<String, JmxOperation> operationByName = mapOperations(getJmxEndpoint(endpoints).getOperations()); assertThat(operationByName).containsOnlyKeys("getAll", "getSomething", "update", "deleteSomething"); JmxOperation getAll = operationByName.get("getAll"); assertThat(getAll).isNotNull(); assertThat(getAll.getDescription()).isEqualTo("Invoke getAll for endpoint test"); assertThat(getAll.getOutputType()).isEqualTo(Object.class); assertThat(getAll.getParameters()).isEmpty(); JmxOperation getSomething = operationByName.get("getSomething"); assertThat(getSomething).isNotNull(); assertThat(getSomething.getDescription()).isEqualTo("Invoke getSomething for endpoint test"); assertThat(getSomething.getOutputType()).isEqualTo(String.class); assertThat(getSomething.getParameters()).hasSize(1); assertThat(getSomething.getParameters().get(0).getType()).isEqualTo(String.class); JmxOperation update = operationByName.get("update"); assertThat(update).isNotNull(); assertThat(update.getDescription()).isEqualTo("Invoke update for endpoint test"); assertThat(update.getOutputType()).isEqualTo(Void.TYPE); assertThat(update.getParameters()).hasSize(2); assertThat(update.getParameters().get(0).getType()).isEqualTo(String.class); assertThat(update.getParameters().get(1).getType()).isEqualTo(String.class); JmxOperation deleteSomething = operationByName.get("deleteSomething"); assertThat(deleteSomething).isNotNull(); assertThat(deleteSomething.getDescription()).isEqualTo("Invoke deleteSomething for endpoint test"); 
assertThat(deleteSomething.getOutputType()).isEqualTo(Void.TYPE); assertThat(deleteSomething.getParameters()).hasSize(1); assertThat(deleteSomething.getParameters().get(0).getType()).isEqualTo(String.class); }); } @Test void getEndpointsWhenHasFilteredEndpointShouldOnlyDiscoverJmxEndpoints() { load(MultipleEndpointsConfiguration.class, (discoverer) -> { Map<EndpointId, ExposableJmxEndpoint> endpoints = discover(discoverer); assertThat(endpoints).containsOnlyKeys(EndpointId.of("test"), EndpointId.of("jmx")); }); } @Test void getEndpointsWhenJmxExtensionIsMissingEndpointShouldThrowException() { load(TestJmxEndpointExtension.class, (discoverer) -> assertThatIllegalStateException() .isThrownBy(discoverer::getEndpoints) .withMessageContaining( "Invalid extension 'jmxEndpointDiscovererTests.TestJmxEndpointExtension': no endpoint found with id 'test'")); } @Test void getEndpointsWhenHasJmxExtensionShouldOverrideStandardEndpoint() { load(OverriddenOperationJmxEndpointConfiguration.class, (discoverer) -> { Map<EndpointId, ExposableJmxEndpoint> endpoints = discover(discoverer); assertThat(endpoints).containsOnlyKeys(EndpointId.of("test")); assertJmxTestEndpoint(getJmxEndpoint(endpoints)); }); } @Test void getEndpointsWhenHasJmxExtensionWithNewOperationAddsExtraOperation() { load(AdditionalOperationJmxEndpointConfiguration.class, (discoverer) -> { Map<EndpointId, ExposableJmxEndpoint> endpoints = discover(discoverer); assertThat(endpoints).containsOnlyKeys(EndpointId.of("test")); Map<String, JmxOperation> operationByName = mapOperations(getJmxEndpoint(endpoints).getOperations()); assertThat(operationByName).containsOnlyKeys("getAll", "getSomething", "update", "deleteSomething", "getAnother"); JmxOperation getAnother = operationByName.get("getAnother"); assertThat(getAnother).isNotNull(); assertThat(getAnother.getDescription()).isEqualTo("Get another thing"); assertThat(getAnother.getOutputType()).isEqualTo(Object.class); assertThat(getAnother.getParameters()).isEmpty(); }); } 
@Test void getEndpointsWhenHasCacheWithTtlShouldCacheReadOperationWithTtlValue() { load(TestEndpoint.class, (id) -> 500L, (discoverer) -> { Map<EndpointId, ExposableJmxEndpoint> endpoints = discover(discoverer); assertThat(endpoints).containsOnlyKeys(EndpointId.of("test")); Map<String, JmxOperation> operationByName = mapOperations(getJmxEndpoint(endpoints).getOperations()); assertThat(operationByName).containsOnlyKeys("getAll", "getSomething", "update", "deleteSomething"); JmxOperation getAll = operationByName.get("getAll"); assertThat(getAll).isNotNull(); assertThat(getInvoker(getAll)).isInstanceOf(CachingOperationInvoker.class); assertThat(((CachingOperationInvoker) getInvoker(getAll)).getTimeToLive()).isEqualTo(500); }); } @Test void getEndpointsShouldCacheReadOperations() { load(AdditionalOperationJmxEndpointConfiguration.class, (id) -> 500L, (discoverer) -> { Map<EndpointId, ExposableJmxEndpoint> endpoints = discover(discoverer); assertThat(endpoints).containsOnlyKeys(EndpointId.of("test")); Map<String, JmxOperation> operationByName = mapOperations(getJmxEndpoint(endpoints).getOperations()); assertThat(operationByName).containsOnlyKeys("getAll", "getSomething", "update", "deleteSomething", "getAnother"); JmxOperation getAll = operationByName.get("getAll"); assertThat(getAll).isNotNull(); assertThat(getInvoker(getAll)).isInstanceOf(CachingOperationInvoker.class); assertThat(((CachingOperationInvoker) getInvoker(getAll)).getTimeToLive()).isEqualTo(500); JmxOperation getAnother = operationByName.get("getAnother"); assertThat(getAnother).isNotNull(); assertThat(getInvoker(getAnother)).isInstanceOf(CachingOperationInvoker.class); assertThat(((CachingOperationInvoker) getInvoker(getAnother)).getTimeToLive()).isEqualTo(500); }); } @Test void getEndpointsWhenTwoExtensionsHaveTheSameEndpointTypeShouldThrowException() { load(ClashingJmxEndpointConfiguration.class, (discoverer) -> assertThatIllegalStateException() .isThrownBy(discoverer::getEndpoints) 
.withMessageContaining( "Found multiple extensions for the endpoint bean testEndpoint (testExtensionOne, testExtensionTwo)")); } @Test void getEndpointsWhenTwoStandardEndpointsHaveTheSameIdShouldThrowException() { load(ClashingStandardEndpointConfiguration.class, (discoverer) -> assertThatIllegalStateException().isThrownBy(discoverer::getEndpoints) .withMessageContaining("Found two endpoints with the id 'test': ")); } @Test void getEndpointsWhenWhenEndpointHasTwoOperationsWithTheSameNameShouldThrowException() { load(ClashingOperationsEndpoint.class, (discoverer) -> assertThatIllegalStateException() .isThrownBy(discoverer::getEndpoints) .withMessageContaining( "Unable to map duplicate endpoint operations: [MBean call 'getAll'] to jmxEndpointDiscovererTests.ClashingOperationsEndpoint")); } @Test void getEndpointsWhenWhenExtensionHasTwoOperationsWithTheSameNameShouldThrowException() { load(AdditionalClashingOperationsConfiguration.class, (discoverer) -> assertThatIllegalStateException() .isThrownBy(discoverer::getEndpoints) .withMessageContaining( "Unable to map duplicate endpoint operations: [MBean call 'getAll'] to testEndpoint (clashingOperationsJmxEndpointExtension)")); } @Test void getEndpointsWhenExtensionIsNotCompatibleWithTheEndpointTypeShouldThrowException() { load(InvalidJmxExtensionConfiguration.class, (discoverer) -> assertThatIllegalStateException() .isThrownBy(discoverer::getEndpoints) .withMessageContaining( "Endpoint bean 'nonJmxEndpoint' cannot support the extension bean 'nonJmxJmxEndpointExtension'")); } @Test void shouldRegisterHints() { RuntimeHints runtimeHints = new RuntimeHints(); new JmxEndpointDiscovererRuntimeHints().registerHints(runtimeHints, getClass().getClassLoader()); assertThat(RuntimeHintsPredicates.reflection() .onType(JmxEndpointFilter.class) .withMemberCategories(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS)).accepts(runtimeHints); } private Object getInvoker(JmxOperation operation) { Object invoker = 
ReflectionTestUtils.getField(operation, "invoker"); assertThat(invoker).isNotNull(); return invoker; } private void assertJmxTestEndpoint(ExposableJmxEndpoint endpoint) { Map<String, JmxOperation> operationsByName = mapOperations(endpoint.getOperations()); assertThat(operationsByName).containsOnlyKeys("getAll", "getSomething", "update", "deleteSomething"); JmxOperation getAll = operationsByName.get("getAll"); assertThat(getAll).isNotNull(); assertThat(getAll.getDescription()).isEqualTo("Get all the things"); assertThat(getAll.getOutputType()).isEqualTo(Object.class); assertThat(getAll.getParameters()).isEmpty(); JmxOperation getSomething = operationsByName.get("getSomething"); assertThat(getSomething).isNotNull(); assertThat(getSomething.getDescription()).isEqualTo("Get something based on a timeUnit"); assertThat(getSomething.getOutputType()).isEqualTo(String.class); assertThat(getSomething.getParameters()).hasSize(1); hasDocumentedParameter(getSomething, 0, "unitMs", Long.class, "Number of milliseconds"); JmxOperation update = operationsByName.get("update"); assertThat(update).isNotNull(); assertThat(update.getDescription()).isEqualTo("Update something based on bar"); assertThat(update.getOutputType()).isEqualTo(Void.TYPE); assertThat(update.getParameters()).hasSize(2); hasDocumentedParameter(update, 0, "foo", String.class, "Foo identifier"); hasDocumentedParameter(update, 1, "bar", String.class, "Bar value"); JmxOperation deleteSomething = operationsByName.get("deleteSomething"); assertThat(deleteSomething).isNotNull(); assertThat(deleteSomething.getDescription()).isEqualTo("Delete something based on a timeUnit"); assertThat(deleteSomething.getOutputType()).isEqualTo(Void.TYPE); assertThat(deleteSomething.getParameters()).hasSize(1); hasDocumentedParameter(deleteSomething, 0, "unitMs", Long.class, "Number of milliseconds"); } private void hasDocumentedParameter(JmxOperation operation, int index, String name, Class<?> type, String description) { 
assertThat(index).isLessThan(operation.getParameters().size()); JmxOperationParameter parameter = operation.getParameters().get(index); assertThat(parameter.getName()).isEqualTo(name); assertThat(parameter.getType()).isEqualTo(type); assertThat(parameter.getDescription()).isEqualTo(description); } private Map<EndpointId, ExposableJmxEndpoint> discover(JmxEndpointDiscoverer discoverer) { Map<EndpointId, ExposableJmxEndpoint> byId = new HashMap<>(); discoverer.getEndpoints().forEach((endpoint) -> byId.put(endpoint.getEndpointId(), endpoint)); return byId; } private Map<String, JmxOperation> mapOperations(Collection<JmxOperation> operations) { Map<String, JmxOperation> byName = new HashMap<>(); operations.forEach((operation) -> byName.put(operation.getName(), operation)); return byName; } private void load(Class<?> configuration, Consumer<JmxEndpointDiscoverer> consumer) { load(configuration, (id) -> null, consumer); } private void load(Class<?> configuration, Function<EndpointId, @Nullable Long> timeToLive, Consumer<JmxEndpointDiscoverer> consumer) { try (AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(configuration)) { ConversionServiceParameterValueMapper parameterMapper = new ConversionServiceParameterValueMapper( DefaultConversionService.getSharedInstance()); JmxEndpointDiscoverer discoverer = new JmxEndpointDiscoverer(context, parameterMapper, Collections.singleton(new CachingOperationInvokerAdvisor(timeToLive)), Collections.emptyList(), Collections.emptyList()); consumer.accept(discoverer); } } private ExposableJmxEndpoint getJmxEndpoint(Map<EndpointId, ExposableJmxEndpoint> endpoints) { ExposableJmxEndpoint endpoint = endpoints.get(EndpointId.of("test")); assertThat(endpoint).isNotNull(); return endpoint; } @Configuration(proxyBeanMethods = false) static
JmxEndpointDiscovererTests
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/type/descriptor/ValueExtractor.java
{ "start": 690, "end": 1383 }
interface ____<X> { /** * Extract value from result set * * @throws SQLException Indicates a JDBC error occurred. */ X extract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException; /** * Extract value from a callable output parameter by index * * @throws SQLException Indicates a JDBC error occurred. */ X extract(CallableStatement statement, int paramIndex, WrapperOptions options) throws SQLException; /** * Extract value from a callable output parameter by name * * @throws SQLException Indicates a JDBC error occurred. */ X extract(CallableStatement statement, String paramName, WrapperOptions options) throws SQLException; }
ValueExtractor
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/hadoop/HadoopDependency.java
{ "start": 991, "end": 1734 }
class ____ { private static final Logger LOG = LoggerFactory.getLogger(HadoopDependency.class); public static boolean isHadoopCommonOnClasspath(ClassLoader classLoader) { try { LOG.debug("Checking whether hadoop common dependency in on classpath."); Class.forName("org.apache.hadoop.conf.Configuration", false, classLoader); Class.forName("org.apache.hadoop.security.UserGroupInformation", false, classLoader); LOG.debug("Hadoop common dependency found on classpath."); return true; } catch (ClassNotFoundException e) { LOG.debug("Hadoop common dependency cannot be found on classpath."); return false; } } }
HadoopDependency
java
quarkusio__quarkus
extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/resteasy/OpenTelemetryReactiveServerFilter.java
{ "start": 496, "end": 856 }
class ____ { @ServerRequestFilter public void filter(SimpleResourceInfo resourceInfo) throws IOException { Span localRootSpan = LocalRootSpan.current(); localRootSpan.setAttribute(CODE_FUNCTION_NAME, resourceInfo.getResourceClass().getName() + "." + resourceInfo.getMethodName()); } }
OpenTelemetryReactiveServerFilter
java
google__auto
common/src/test/java/com/google/auto/common/OverridesTest.java
{ "start": 7593, "end": 7655 }
interface ____<E> { boolean add(E x); }
GCollection
java
quarkusio__quarkus
extensions/resteasy-reactive/rest-client/runtime/src/main/java/io/quarkus/rest/client/reactive/runtime/ProxyAddressUtil.java
{ "start": 839, "end": 1055 }
class ____ { public final String host; public final int port; public HostAndPort(String host, int port) { this.host = host; this.port = port; } } }
HostAndPort
java
apache__camel
components/camel-jackson-avro/src/main/java/org/apache/camel/component/jackson/avro/transform/Avro.java
{ "start": 1208, "end": 2143 }
class ____ { private static final AvroMapper MAPPER; static { MAPPER = AvroMapper.builder() .disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES) .enable(DeserializationFeature.READ_ENUMS_USING_TO_STRING) .enable(SerializationFeature.WRITE_ENUMS_USING_TO_STRING) .disable(JsonParser.Feature.AUTO_CLOSE_SOURCE) .enable(MapperFeature.BLOCK_UNSAFE_POLYMORPHIC_BASE_TYPES) .build(); MAPPER.setDefaultPropertyInclusion( JsonInclude.Value.construct(JsonInclude.Include.NON_EMPTY, JsonInclude.Include.NON_EMPTY)); } private Avro() { // prevent instantiation of utility class } /** * Provides access to the default object mapper instance. * * @return the default object mapper. */ public static AvroMapper mapper() { return MAPPER; } }
Avro
java
apache__rocketmq
tools/src/main/java/org/apache/rocketmq/tools/admin/MQAdminUtils.java
{ "start": 2722, "end": 19705 }
class ____ { public static ClientMetadata getBrokerMetadata(DefaultMQAdminExt defaultMQAdminExt) throws InterruptedException, RemotingConnectException, RemotingTimeoutException, RemotingSendRequestException, MQBrokerException { ClientMetadata clientMetadata = new ClientMetadata(); refreshClusterInfo(defaultMQAdminExt, clientMetadata); return clientMetadata; } public static ClientMetadata getBrokerAndTopicMetadata(String topic, DefaultMQAdminExt defaultMQAdminExt) throws InterruptedException, RemotingException, MQBrokerException { ClientMetadata clientMetadata = new ClientMetadata(); refreshClusterInfo(defaultMQAdminExt, clientMetadata); refreshTopicRouteInfo(topic, defaultMQAdminExt, clientMetadata); return clientMetadata; } public static void refreshClusterInfo(DefaultMQAdminExt defaultMQAdminExt, ClientMetadata clientMetadata) throws InterruptedException, MQBrokerException, RemotingTimeoutException, RemotingSendRequestException, RemotingConnectException { ClusterInfo clusterInfo = defaultMQAdminExt.examineBrokerClusterInfo(); if (clusterInfo == null || clusterInfo.getClusterAddrTable().isEmpty()) { throw new RuntimeException("The Cluster info is empty"); } clientMetadata.refreshClusterInfo(clusterInfo); } public static void refreshTopicRouteInfo(String topic, DefaultMQAdminExt defaultMQAdminExt, ClientMetadata clientMetadata) throws RemotingException, InterruptedException, MQBrokerException { TopicRouteData routeData = null; try { routeData = defaultMQAdminExt.examineTopicRouteInfo(topic); } catch (MQClientException exception) { if (exception.getResponseCode() != ResponseCode.TOPIC_NOT_EXIST) { throw new MQBrokerException(exception.getResponseCode(), exception.getErrorMessage()); } } if (routeData != null && !routeData.getQueueDatas().isEmpty()) { clientMetadata.freshTopicRoute(topic, routeData); } } public static Set<String> getAllBrokersInSameCluster(Collection<String> brokers, DefaultMQAdminExt defaultMQAdminExt) throws InterruptedException, MQBrokerException, 
RemotingTimeoutException, RemotingSendRequestException, RemotingConnectException { ClusterInfo clusterInfo = defaultMQAdminExt.examineBrokerClusterInfo(); if (clusterInfo == null || clusterInfo.getClusterAddrTable().isEmpty()) { throw new RuntimeException("The Cluster info is empty"); } Set<String> allBrokers = new HashSet<>(); for (String broker: brokers) { if (allBrokers.contains(broker)) { continue; } for (Set<String> clusterBrokers : clusterInfo.getClusterAddrTable().values()) { if (clusterBrokers.contains(broker)) { allBrokers.addAll(clusterBrokers); break; } } } return allBrokers; } public static void completeNoTargetBrokers(Map<String, TopicConfigAndQueueMapping> brokerConfigMap, DefaultMQAdminExt defaultMQAdminExt) throws InterruptedException, RemotingConnectException, RemotingTimeoutException, RemotingSendRequestException, MQBrokerException { TopicConfigAndQueueMapping configMapping = brokerConfigMap.values().iterator().next(); String topic = configMapping.getTopicName(); int queueNum = configMapping.getMappingDetail().getTotalQueues(); long newEpoch = configMapping.getMappingDetail().getEpoch(); Set<String> allBrokers = getAllBrokersInSameCluster(brokerConfigMap.keySet(), defaultMQAdminExt); for (String broker: allBrokers) { if (!brokerConfigMap.containsKey(broker)) { brokerConfigMap.put(broker, new TopicConfigAndQueueMapping(new TopicConfig(topic, 0, 0), new TopicQueueMappingDetail(topic, queueNum, broker, newEpoch))); } } } public static void checkIfMasterAlive(Collection<String> brokers, DefaultMQAdminExt defaultMQAdminExt, ClientMetadata clientMetadata) { for (String broker : brokers) { String addr = clientMetadata.findMasterBrokerAddr(broker); if (addr == null) { throw new RuntimeException("Can't find addr for broker " + broker); } } } public static void updateTopicConfigMappingAll(Map<String, TopicConfigAndQueueMapping> brokerConfigMap, DefaultMQAdminExt defaultMQAdminExt, boolean force) throws Exception { ClientMetadata clientMetadata = 
getBrokerMetadata(defaultMQAdminExt); checkIfMasterAlive(brokerConfigMap.keySet(), defaultMQAdminExt, clientMetadata); //If some succeed, and others fail, it will cause inconsistent data for (Map.Entry<String, TopicConfigAndQueueMapping> entry : brokerConfigMap.entrySet()) { String broker = entry.getKey(); String addr = clientMetadata.findMasterBrokerAddr(broker); TopicConfigAndQueueMapping configMapping = entry.getValue(); defaultMQAdminExt.createStaticTopic(addr, defaultMQAdminExt.getCreateTopicKey(), configMapping, configMapping.getMappingDetail(), force); } } public static void remappingStaticTopic(String topic, Set<String> brokersToMapIn, Set<String> brokersToMapOut, Map<String, TopicConfigAndQueueMapping> brokerConfigMap, int blockSeqSize, boolean force, DefaultMQAdminExt defaultMQAdminExt) throws RemotingException, MQBrokerException, InterruptedException, MQClientException { ClientMetadata clientMetadata = MQAdminUtils.getBrokerMetadata(defaultMQAdminExt); MQAdminUtils.checkIfMasterAlive(brokerConfigMap.keySet(), defaultMQAdminExt, clientMetadata); // now do the remapping //Step1: let the new leader can be written without the logicOffset for (String broker: brokersToMapIn) { String addr = clientMetadata.findMasterBrokerAddr(broker); TopicConfigAndQueueMapping configMapping = brokerConfigMap.get(broker); defaultMQAdminExt.createStaticTopic(addr, defaultMQAdminExt.getCreateTopicKey(), configMapping, configMapping.getMappingDetail(), force); } //Step2: forbid to write of old leader for (String broker: brokersToMapOut) { String addr = clientMetadata.findMasterBrokerAddr(broker); TopicConfigAndQueueMapping configMapping = brokerConfigMap.get(broker); defaultMQAdminExt.createStaticTopic(addr, defaultMQAdminExt.getCreateTopicKey(), configMapping, configMapping.getMappingDetail(), force); } //Step3: decide the logic offset for (String broker: brokersToMapOut) { String addr = clientMetadata.findMasterBrokerAddr(broker); TopicStatsTable statsTable = 
defaultMQAdminExt.examineTopicStats(addr, topic); TopicConfigAndQueueMapping mapOutConfig = brokerConfigMap.get(broker); for (Map.Entry<Integer, List<LogicQueueMappingItem>> entry : mapOutConfig.getMappingDetail().getHostedQueues().entrySet()) { List<LogicQueueMappingItem> items = entry.getValue(); Integer globalId = entry.getKey(); if (items.size() < 2) { continue; } LogicQueueMappingItem newLeader = items.get(items.size() - 1); LogicQueueMappingItem oldLeader = items.get(items.size() - 2); if (newLeader.getLogicOffset() > 0) { continue; } TopicOffset topicOffset = statsTable.getOffsetTable().get(new MessageQueue(topic, oldLeader.getBname(), oldLeader.getQueueId())); if (topicOffset == null) { throw new RuntimeException("Cannot get the max offset for old leader " + oldLeader); } //TO DO check the max offset, will it return -1? if (topicOffset.getMaxOffset() < oldLeader.getStartOffset()) { throw new RuntimeException("The max offset is smaller then the start offset " + oldLeader + " " + topicOffset.getMaxOffset()); } newLeader.setLogicOffset(TopicQueueMappingUtils.blockSeqRoundUp(oldLeader.computeStaticQueueOffsetStrictly(topicOffset.getMaxOffset()), blockSeqSize)); TopicConfigAndQueueMapping mapInConfig = brokerConfigMap.get(newLeader.getBname()); //fresh the new leader TopicQueueMappingDetail.putMappingInfo(mapInConfig.getMappingDetail(), globalId, items); } } //Step4: write to the new leader with logic offset for (String broker: brokersToMapIn) { String addr = clientMetadata.findMasterBrokerAddr(broker); TopicConfigAndQueueMapping configMapping = brokerConfigMap.get(broker); defaultMQAdminExt.createStaticTopic(addr, defaultMQAdminExt.getCreateTopicKey(), configMapping, configMapping.getMappingDetail(), force); } //Step5: write the non-target brokers for (String broker: brokerConfigMap.keySet()) { if (brokersToMapIn.contains(broker) || brokersToMapOut.contains(broker)) { continue; } String addr = clientMetadata.findMasterBrokerAddr(broker); 
TopicConfigAndQueueMapping configMapping = brokerConfigMap.get(broker); defaultMQAdminExt.createStaticTopic(addr, defaultMQAdminExt.getCreateTopicKey(), configMapping, configMapping.getMappingDetail(), force); } } public static Map<String, TopicConfigAndQueueMapping> examineTopicConfigAll(String topic, DefaultMQAdminExt defaultMQAdminExt) throws RemotingException, InterruptedException, MQBrokerException { Map<String, TopicConfigAndQueueMapping> brokerConfigMap = new HashMap<>(); ClientMetadata clientMetadata = new ClientMetadata(); //check all the brokers ClusterInfo clusterInfo = defaultMQAdminExt.examineBrokerClusterInfo(); if (clusterInfo != null && clusterInfo.getBrokerAddrTable() != null) { clientMetadata.refreshClusterInfo(clusterInfo); } for (String broker : clientMetadata.getBrokerAddrTable().keySet()) { String addr = clientMetadata.findMasterBrokerAddr(broker); try { TopicConfigAndQueueMapping mapping = (TopicConfigAndQueueMapping) defaultMQAdminExt.examineTopicConfig(addr, topic); //allow the config is null if (mapping != null) { if (mapping.getMappingDetail() != null) { assert mapping.getMappingDetail().getBname().equals(broker); } brokerConfigMap.put(broker, mapping); } } catch (MQBrokerException exception1) { if (exception1.getResponseCode() != ResponseCode.TOPIC_NOT_EXIST) { throw exception1; } } } return brokerConfigMap; } public static Map<String, TopicConfigAndQueueMapping> examineTopicConfigFromRoute(String topic, TopicRouteData topicRouteData, DefaultMQAdminExt defaultMQAdminExt) throws RemotingException, InterruptedException, MQBrokerException { Map<String, TopicConfigAndQueueMapping> brokerConfigMap = new HashMap<>(); for (BrokerData bd : topicRouteData.getBrokerDatas()) { String broker = bd.getBrokerName(); String addr = bd.selectBrokerAddr(); if (addr == null) { continue; } try { TopicConfigAndQueueMapping mapping = (TopicConfigAndQueueMapping) defaultMQAdminExt.examineTopicConfig(addr, topic); //allow the config is null if (mapping != null) 
{ if (mapping.getMappingDetail() != null) { assert mapping.getMappingDetail().getBname().equals(broker); } brokerConfigMap.put(broker, mapping); } } catch (MQBrokerException exception) { if (exception.getResponseCode() != ResponseCode.TOPIC_NOT_EXIST) { throw exception; } } } return brokerConfigMap; } public static void convertPhysicalTopicStats(String topic, Map<String, TopicConfigAndQueueMapping> brokerConfigMap, TopicStatsTable topicStatsTable) { Map<Integer, TopicQueueMappingOne> globalIdMap = checkAndBuildMappingItems(getMappingDetailFromConfig(brokerConfigMap.values()), true, false); for (Map.Entry<Integer, TopicQueueMappingOne> entry: globalIdMap.entrySet()) { Integer qid = entry.getKey(); TopicQueueMappingOne mappingOne = entry.getValue(); LogicQueueMappingItem minItem = TopicQueueMappingUtils.findLogicQueueMappingItem(mappingOne.getItems(), 0, true); LogicQueueMappingItem maxItem = TopicQueueMappingUtils.findLogicQueueMappingItem(mappingOne.getItems(), Long.MAX_VALUE, true); assert minItem != null && maxItem != null; TopicOffset minTopicOffset = topicStatsTable.getOffsetTable().get(new MessageQueue(topic, minItem.getBname(), minItem.getQueueId())); TopicOffset maxTopicOffset = topicStatsTable.getOffsetTable().get(new MessageQueue(topic, maxItem.getBname(), maxItem.getQueueId())); if (minTopicOffset == null || maxTopicOffset == null) { continue; } long min = minItem.computeStaticQueueOffsetLoosely(minTopicOffset.getMinOffset()); if (min < 0) min = 0; long max = maxItem.computeStaticQueueOffsetStrictly(maxTopicOffset.getMaxOffset()); if (max < 0) max = 0; long timestamp = maxTopicOffset.getLastUpdateTimestamp(); TopicOffset topicOffset = new TopicOffset(); topicOffset.setMinOffset(min); topicOffset.setMaxOffset(max); topicOffset.setLastUpdateTimestamp(timestamp); topicStatsTable.getOffsetTable().put(new MessageQueue(topic, TopicQueueMappingUtils.getMockBrokerName(mappingOne.getMappingDetail().getScope()), qid), topicOffset); } } public static ConsumeStats 
convertPhysicalConsumeStats(Map<String, TopicConfigAndQueueMapping> brokerConfigMap, ConsumeStats physicalResult) { Map<Integer, TopicQueueMappingOne> globalIdMap = checkAndBuildMappingItems(getMappingDetailFromConfig(brokerConfigMap.values()), true, false); ConsumeStats result = new ConsumeStats(); result.setConsumeTps(physicalResult.getConsumeTps()); for (Map.Entry<Integer, TopicQueueMappingOne> entry : globalIdMap.entrySet()) { Integer qid = entry.getKey(); TopicQueueMappingOne mappingOne = entry.getValue(); MessageQueue messageQueue = new MessageQueue(mappingOne.getTopic(), TopicQueueMappingUtils.getMockBrokerName(mappingOne.getMappingDetail().getScope()), qid); OffsetWrapper offsetWrapper = new OffsetWrapper(); long brokerOffset = -1; long consumerOffset = -1; long lastTimestamp = -1; //maybe need to be polished for (int i = mappingOne.getItems().size() - 1; i >= 0; i--) { LogicQueueMappingItem item = mappingOne.getItems().get(i); MessageQueue phyQueue = new MessageQueue(mappingOne.getTopic(), item.getBname(), item.getQueueId()); OffsetWrapper phyOffsetWrapper = physicalResult.getOffsetTable().get(phyQueue); if (phyOffsetWrapper == null) { continue; } if (consumerOffset == -1 && phyOffsetWrapper.getConsumerOffset() >= 0) { consumerOffset = phyOffsetWrapper.getConsumerOffset(); lastTimestamp = phyOffsetWrapper.getLastTimestamp(); } if (brokerOffset == -1 && item.getLogicOffset() >= 0) { brokerOffset = item.computeStaticQueueOffsetStrictly(phyOffsetWrapper.getBrokerOffset()); } if (consumerOffset >= 0 && brokerOffset >= 0) { break; } } if (brokerOffset >= 0 && consumerOffset >= 0) { offsetWrapper.setBrokerOffset(brokerOffset); offsetWrapper.setConsumerOffset(consumerOffset); offsetWrapper.setLastTimestamp(lastTimestamp); result.getOffsetTable().put(messageQueue, offsetWrapper); } } return result; } }
MQAdminUtils
java
mockito__mockito
mockito-core/src/main/java/org/mockito/exceptions/verification/ArgumentsAreDifferent.java
{ "start": 301, "end": 996 }
class ____ extends MockitoAssertionError { private static final long serialVersionUID = 1L; public ArgumentsAreDifferent(String message) { super(message); } /** * Three-arg constructor for compatibility with ExceptionFactory's three-arg * create method. This implementation simply ignores the second and third * arguments. * * @param message * @param wanted ignored * @param actual ignored */ public ArgumentsAreDifferent(String message, String wanted, String actual) { this(message); } @Override public String getMessage() { return removeFirstLine(super.getMessage()); } }
ArgumentsAreDifferent
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/SelfAlwaysReturnsThisTest.java
{ "start": 6405, "end": 6768 }
class ____ { public String self() { return "hi"; } } """) .expectUnchanged() .doTest(); } @Test public void self_static() { helper .addInputLines( "Builder.java", """ package com.google.frobber; public final
Builder
java
apache__flink
flink-python/src/main/java/org/apache/flink/formats/csv/CsvRowDeserializationSchema.java
{ "start": 8440, "end": 15851 }
/**
 * Functional converter from a Jackson {@code JsonNode} (as produced by the CSV
 * mapper) to a Flink runtime object. Serializable so converters can ship with
 * the deserialization schema.
 */
interface ____ extends Serializable {
    Object convert(JsonNode node);
}

/** Builds a converter for a whole row described by {@code rowTypeInfo}. */
private static RuntimeConverter createRowRuntimeConverter(
        RowTypeInfo rowTypeInfo, boolean ignoreParseErrors, boolean isTopLevel) {
    final TypeInformation<?>[] fieldTypes = rowTypeInfo.getFieldTypes();
    final String[] fieldNames = rowTypeInfo.getFieldNames();
    // One nullable converter per field, in declaration order.
    final RuntimeConverter[] fieldConverters =
            createFieldRuntimeConverters(ignoreParseErrors, fieldTypes);
    return assembleRowRuntimeConverter(
            ignoreParseErrors, isTopLevel, fieldNames, fieldConverters);
}

/** Creates a null-tolerant converter for each of the given field types. */
static RuntimeConverter[] createFieldRuntimeConverters(
        boolean ignoreParseErrors, TypeInformation<?>[] fieldTypes) {
    final RuntimeConverter[] fieldConverters = new RuntimeConverter[fieldTypes.length];
    for (int i = 0; i < fieldTypes.length; i++) {
        fieldConverters[i] = createNullableRuntimeConverter(fieldTypes[i], ignoreParseErrors);
    }
    return fieldConverters;
}

/**
 * Combines field converters into a row converter. Top-level rows are mapped by
 * field name (Jackson only supports name-based mapping at the first level);
 * nested rows are mapped by position.
 */
private static RuntimeConverter assembleRowRuntimeConverter(
        boolean ignoreParseErrors, boolean isTopLevel, String[] fieldNames, RuntimeConverter[] fieldConverters) {
    final int rowArity = fieldNames.length;
    return (node) -> {
        final int nodeSize = node.size();
        if (nodeSize != 0) {
            // Arity mismatch either throws or is tolerated, per ignoreParseErrors.
            validateArity(rowArity, nodeSize, ignoreParseErrors);
        } else {
            // An empty node yields a null row rather than an empty one.
            return null;
        }
        final Row row = new Row(rowArity);
        for (int i = 0; i < Math.min(rowArity, nodeSize); i++) {
            // Jackson only supports mapping by name in the first level
            if (isTopLevel) {
                row.setField(i, fieldConverters[i].convert(node.get(fieldNames[i])));
            } else {
                row.setField(i, fieldConverters[i].convert(node.get(i)));
            }
        }
        return row;
    };
}

/**
 * Wraps a converter so that JSON nulls map to Java null, and — when
 * ignoreParseErrors is set — any conversion failure also yields null
 * instead of propagating.
 */
private static RuntimeConverter createNullableRuntimeConverter(
        TypeInformation<?> info, boolean ignoreParseErrors) {
    final RuntimeConverter valueConverter = createRuntimeConverter(info, ignoreParseErrors);
    return (node) -> {
        if (node.isNull()) {
            return null;
        }
        try {
            return valueConverter.convert(node);
        } catch (Throwable t) {
            if (!ignoreParseErrors) {
                throw t;
            }
            return null;
        }
    };
}

/**
 * Dispatches on the Flink type information to the matching scalar, temporal,
 * row, or array converter. Most scalar conversions trim the raw text first;
 * SQL date/time/timestamp do not — TODO confirm whether that asymmetry is
 * intentional upstream.
 */
private static RuntimeConverter createRuntimeConverter(
        TypeInformation<?> info, boolean ignoreParseErrors) {
    if (info.equals(Types.VOID)) {
        return (node) -> null;
    } else if (info.equals(Types.STRING)) {
        return JsonNode::asText;
    } else if (info.equals(Types.BOOLEAN)) {
        return (node) -> Boolean.valueOf(node.asText().trim());
    } else if (info.equals(Types.BYTE)) {
        return (node) -> Byte.valueOf(node.asText().trim());
    } else if (info.equals(Types.SHORT)) {
        return (node) -> Short.valueOf(node.asText().trim());
    } else if (info.equals(Types.INT)) {
        return (node) -> Integer.valueOf(node.asText().trim());
    } else if (info.equals(Types.LONG)) {
        return (node) -> Long.valueOf(node.asText().trim());
    } else if (info.equals(Types.FLOAT)) {
        return (node) -> Float.valueOf(node.asText().trim());
    } else if (info.equals(Types.DOUBLE)) {
        return (node) -> Double.valueOf(node.asText().trim());
    } else if (info.equals(Types.BIG_DEC)) {
        return (node) -> new BigDecimal(node.asText().trim());
    } else if (info.equals(Types.BIG_INT)) {
        return (node) -> new BigInteger(node.asText().trim());
    } else if (info.equals(Types.SQL_DATE)) {
        return (node) -> Date.valueOf(node.asText());
    } else if (info.equals(Types.SQL_TIME)) {
        return (node) -> Time.valueOf(node.asText());
    } else if (info.equals(Types.SQL_TIMESTAMP)) {
        return (node) -> Timestamp.valueOf(node.asText());
    } else if (info.equals(Types.LOCAL_DATE)) {
        return (node) -> Date.valueOf(node.asText()).toLocalDate();
    } else if (info.equals(Types.LOCAL_TIME)) {
        return (node) -> Time.valueOf(node.asText()).toLocalTime();
    } else if (info.equals(Types.LOCAL_DATE_TIME)) {
        return (node) -> LocalDateTime.parse(node.asText().trim(), SQL_TIMESTAMP_FORMAT);
    } else if (info.equals(Types.INSTANT)) {
        // Parsed as a local date-time, then pinned to UTC.
        return (node) ->
                LocalDateTime.parse(node.asText(), SQL_TIMESTAMP_WITH_LOCAL_TIMEZONE_FORMAT)
                        .toInstant(ZoneOffset.UTC);
    } else if (info instanceof RowTypeInfo) {
        final RowTypeInfo rowTypeInfo = (RowTypeInfo) info;
        // Nested rows are never top-level: mapped by position, not by name.
        return createRowRuntimeConverter(rowTypeInfo, ignoreParseErrors, false);
    } else if (info instanceof BasicArrayTypeInfo) {
        return createObjectArrayRuntimeConverter(
                ((BasicArrayTypeInfo<?, ?>) info).getComponentInfo(), ignoreParseErrors);
    } else if (info instanceof ObjectArrayTypeInfo) {
        return createObjectArrayRuntimeConverter(
                ((ObjectArrayTypeInfo<?, ?>) info).getComponentInfo(), ignoreParseErrors);
    } else if (info instanceof PrimitiveArrayTypeInfo
            && ((PrimitiveArrayTypeInfo) info).getComponentType() == Types.BYTE) {
        // Only byte[] primitive arrays are supported (CSV binary fields).
        return createByteArrayRuntimeConverter(ignoreParseErrors);
    } else {
        throw new RuntimeException("Unsupported type information '" + info + "'.");
    }
}

/** Converts a JSON array node into a typed Object[] using the element converter. */
private static RuntimeConverter createObjectArrayRuntimeConverter(
        TypeInformation<?> elementType, boolean ignoreParseErrors) {
    final Class<?> elementClass = elementType.getTypeClass();
    final RuntimeConverter elementConverter =
            createNullableRuntimeConverter(elementType, ignoreParseErrors);
    return (node) -> {
        final int nodeSize = node.size();
        // Reflectively create an array of the concrete element class.
        final Object[] array = (Object[]) Array.newInstance(elementClass, nodeSize);
        for (int i = 0; i < nodeSize; i++) {
            array[i] = elementConverter.convert(node.get(i));
        }
        return array;
    };
}

/** Extracts the node's binary value; failures yield null when errors are ignored. */
private static RuntimeConverter createByteArrayRuntimeConverter(boolean ignoreParseErrors) {
    return (node) -> {
        try {
            return node.binaryValue();
        } catch (IOException e) {
            if (!ignoreParseErrors) {
                throw new RuntimeException("Unable to deserialize byte array.", e);
            }
            return null;
        }
    };
}

/** Throws on row-arity mismatch unless parse errors are being ignored. */
static void validateArity(int expected, int actual, boolean ignoreParseErrors) {
    if (expected != actual && !ignoreParseErrors) {
        throw new RuntimeException(
                "Row length mismatch. " + expected + " fields expected but was " + actual + ".");
    }
}
}
RuntimeConverter
java
junit-team__junit5
junit-jupiter-api/src/main/java/org/junit/jupiter/api/DynamicNode.java
{ "start": 991, "end": 2723 }
/**
 * Base representation of a dynamically created test or container: it carries a
 * display name, an optional custom test-source URI, and an optional execution
 * mode, all fixed at construction time.
 */
class ____ {

    // Mandatory display name; validated non-blank in the constructor.
    private final String displayName;

    /** Custom test source {@link URI} associated with this node; potentially {@code null}. */
    private final @Nullable URI testSourceUri;

    // Optional execution mode; exposed as an Optional via getExecutionMode().
    private final @Nullable ExecutionMode executionMode;

    /**
     * Package-private constructor: nodes are built from a configuration object
     * (builder-style; declared elsewhere in this file).
     */
    DynamicNode(AbstractConfiguration<?> configuration) {
        this.displayName = Preconditions.notBlank(configuration.displayName,
            "displayName must not be null or blank");
        this.testSourceUri = configuration.testSourceUri;
        this.executionMode = configuration.executionMode;
    }

    /**
     * Get the display name of this {@code DynamicNode}.
     *
     * @return the display name
     */
    public String getDisplayName() {
        return this.displayName;
    }

    /**
     * Get the custom test source {@link URI} of this {@code DynamicNode}.
     *
     * @return an {@code Optional} containing the custom test source {@link URI};
     * never {@code null} but potentially empty
     * @since 5.3
     */
    public Optional<URI> getTestSourceUri() {
        return Optional.ofNullable(testSourceUri);
    }

    /**
     * {@return the {@link ExecutionMode} of this {@code DynamicNode}}
     *
     * @since 6.1
     * @see DynamicContainer#getChildExecutionMode()
     */
    @API(status = EXPERIMENTAL, since = "6.1")
    public Optional<ExecutionMode> getExecutionMode() {
        return Optional.ofNullable(executionMode);
    }

    // Diagnostic representation only; executionMode is deliberately omitted here —
    // TODO confirm that omission is intentional upstream.
    @Override
    public String toString() {
        return new ToStringBuilder(this) //
            .append("displayName", displayName) //
            .append("testSourceUri", testSourceUri) //
            .toString();
    }

    /**
     * {@code Configuration} of a {@link DynamicNode} or one of its
     * subinterfaces.
     *
     * @since 6.1
     * @see DynamicTest.Configuration
     * @see DynamicContainer.Configuration
     */
    @API(status = EXPERIMENTAL, since = "6.1")
    // NOTE(review): the declaration below is truncated in this chunk; it continues
    // past the visible source.
    public sealed
DynamicNode