language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/source/spi/SingularAttributeSource.java
|
{
"start": 313,
"end": 1807
}
|
interface ____ extends AttributeSource {
/**
* Determine whether this is a virtual attribute or whether it physically exists on the users domain model.
*
* @return {@code true} indicates the attribute is virtual, meaning it does NOT exist on the domain model;
* {@code false} indicates the attribute physically exists.
*/
boolean isVirtualAttribute();
/**
* Obtain the nature of this attribute type.
*
* @return The attribute type nature
*/
SingularAttributeNature getSingularAttributeNature();
/**
* Obtain a description of if/when the attribute value is generated by the database.
*
* @return The attribute value generation information
*/
GenerationTiming getGenerationTiming();
/**
* Did the mapping specify that the given attribute value(s) should be inserted into the database?
*
* @return {@code true} indicates value(s) should be inserted; {@code false} indicates not.
*/
Boolean isInsertable();
/**
* Did the mapping specify that the given attribute value(s) should be updated in the database?
*
* @return {@code true} indicates value(s) should be updated; {@code false} indicates not.
*/
Boolean isUpdatable();
/**
* Should the attribute be lazily loaded by bytecode enhancement?
*
* @return {@code true} to indicate the attribute should be lazily loaded by bytecode enhancement?
*/
boolean isBytecodeLazy();
/**
* Retrieve the natural id mutability
*
* @return The mutability, see
|
SingularAttributeSource
|
java
|
apache__camel
|
components/camel-disruptor/src/test/java/org/apache/camel/component/disruptor/DisruptorFromRouteIdTest.java
|
{
"start": 1119,
"end": 2295
}
|
class ____ extends CamelTestSupport {
@Test
void testDisruptorFromRouteId() throws Exception {
final MockEndpoint foo = getMockEndpoint("mock:foo");
foo.expectedMessageCount(1);
final MockEndpoint bar = getMockEndpoint("mock:bar");
bar.expectedMessageCount(1);
template.sendBody("disruptor:foo", "Hello World");
MockEndpoint.assertIsSatisfied(context);
assertEquals("foo", foo.getReceivedExchanges().get(0).getFromRouteId());
assertEquals("disruptor://foo", foo.getReceivedExchanges().get(0).getFromEndpoint().getEndpointUri());
assertEquals("bar", bar.getReceivedExchanges().get(0).getFromRouteId());
assertEquals("disruptor://bar", bar.getReceivedExchanges().get(0).getFromEndpoint().getEndpointUri());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("disruptor:foo").routeId("foo").to("mock:foo").to("disruptor:bar");
from("disruptor:bar").routeId("bar").to("mock:bar");
}
};
}
}
|
DisruptorFromRouteIdTest
|
java
|
apache__maven
|
impl/maven-core/src/main/java/org/apache/maven/project/DuplicateArtifactAttachmentException.java
|
{
"start": 1191,
"end": 1995
}
|
class ____ extends RuntimeException {
private static final String DEFAULT_MESSAGE = "Duplicate artifact attachment detected.";
private Artifact artifact;
private final MavenProject project;
public DuplicateArtifactAttachmentException(MavenProject project, Artifact artifact) {
super(constructMessage(project, artifact));
this.project = project;
this.artifact = artifact;
}
private static String constructMessage(MavenProject project, Artifact artifact) {
return DEFAULT_MESSAGE + " (project: " + project.getId() + "; illegal attachment: " + artifact.getId() + ")";
}
public MavenProject getProject() {
return project;
}
public Artifact getArtifact() {
return artifact;
}
}
|
DuplicateArtifactAttachmentException
|
java
|
apache__maven
|
compat/maven-toolchain-builder/src/main/java/org/apache/maven/toolchain/merge/MavenToolchainMerger.java
|
{
"start": 1255,
"end": 3086
}
|
class ____ {
public void merge(PersistedToolchains dominant, PersistedToolchains recessive, String recessiveSourceLevel) {
if (dominant == null || recessive == null) {
return;
}
recessive.setSourceLevel(recessiveSourceLevel);
shallowMerge(dominant.getToolchains(), recessive.getToolchains(), recessiveSourceLevel);
}
private void shallowMerge(
List<ToolchainModel> dominant, List<ToolchainModel> recessive, String recessiveSourceLevel) {
Map<Object, ToolchainModel> merged = new LinkedHashMap<>();
for (ToolchainModel dominantModel : dominant) {
Object key = getToolchainModelKey(dominantModel);
merged.put(key, dominantModel);
}
for (ToolchainModel recessiveModel : recessive) {
Object key = getToolchainModelKey(recessiveModel);
ToolchainModel dominantModel = merged.get(key);
if (dominantModel == null) {
recessiveModel.setSourceLevel(recessiveSourceLevel);
dominant.add(recessiveModel);
} else {
mergeToolchainModelConfiguration(dominantModel, recessiveModel);
}
}
}
protected void mergeToolchainModelConfiguration(ToolchainModel target, ToolchainModel source) {
Xpp3Dom src = (Xpp3Dom) source.getConfiguration();
if (src != null) {
Xpp3Dom tgt = (Xpp3Dom) target.getConfiguration();
if (tgt == null) {
tgt = Xpp3Dom.mergeXpp3Dom(new Xpp3Dom(src), tgt);
} else {
tgt = Xpp3Dom.mergeXpp3Dom(tgt, src);
}
target.setConfiguration(tgt);
}
}
protected Object getToolchainModelKey(ToolchainModel model) {
return model;
}
}
|
MavenToolchainMerger
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/cache/ClassLevelDirtiesContextTestNGTests.java
|
{
"start": 2199,
"end": 2226
}
|
class ____.
*
* <p>This
|
level
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java
|
{
"start": 1855,
"end": 6872
}
|
class ____ extends AbstractDataFrameAnalyticsStep {
private static final Logger LOGGER = LogManager.getLogger(InferenceStep.class);
private final ThreadPool threadPool;
private final InferenceRunner inferenceRunner;
public InferenceStep(
NodeClient client,
DataFrameAnalyticsTask task,
DataFrameAnalyticsAuditor auditor,
DataFrameAnalyticsConfig config,
ThreadPool threadPool,
InferenceRunner inferenceRunner
) {
super(client, task, auditor, config);
this.threadPool = Objects.requireNonNull(threadPool);
this.inferenceRunner = Objects.requireNonNull(inferenceRunner);
}
@Override
public Name name() {
return Name.INFERENCE;
}
@Override
protected void doExecute(ActionListener<StepResponse> listener) {
if (config.getAnalysis().supportsInference() == false) {
LOGGER.debug(() -> format("[%s] Inference step completed immediately as analysis does not support inference", config.getId()));
listener.onResponse(new StepResponse(false));
return;
}
refreshDestAsync(
listener.delegateFailureAndWrap(
(delegate, refreshResponse) -> searchIfTestDocsExist(delegate.delegateFailureAndWrap((delegate2, testDocsExist) -> {
if (testDocsExist) {
getModelId(delegate2.delegateFailureAndWrap((l, modelId) -> runInference(modelId, l)));
} else {
// no need to run inference at all so let us skip
// loading the model in memory.
LOGGER.debug(() -> "[" + config.getId() + "] Inference step completed immediately as there are no test docs");
task.getStatsHolder().getProgressTracker().updateInferenceProgress(100);
delegate2.onResponse(new StepResponse(isTaskStopping()));
}
}))
)
);
}
private void runInference(String modelId, ActionListener<StepResponse> listener) {
threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(ActionRunnable.wrap(listener, delegate -> {
inferenceRunner.run(modelId, ActionListener.wrap(aVoid -> delegate.onResponse(new StepResponse(isTaskStopping())), e -> {
if (task.isStopping()) {
delegate.onResponse(new StepResponse(false));
} else {
delegate.onFailure(e);
}
}));
}));
}
private void searchIfTestDocsExist(ActionListener<Boolean> listener) {
SearchRequest searchRequest = new SearchRequest(config.getDest().getIndex());
searchRequest.indicesOptions(MlIndicesUtils.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS));
searchRequest.source()
.query(
QueryBuilders.boolQuery()
.mustNot(QueryBuilders.termQuery(config.getDest().getResultsField() + "." + DestinationIndex.IS_TRAINING, true))
);
searchRequest.source().size(0);
searchRequest.source().trackTotalHitsUpTo(1);
executeAsyncWithOrigin(
client,
ML_ORIGIN,
TransportSearchAction.TYPE,
searchRequest,
listener.delegateFailureAndWrap((l, searchResponse) -> l.onResponse(searchResponse.getHits().getTotalHits().value() > 0))
);
}
private void getModelId(ActionListener<String> listener) {
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.size(1);
searchSourceBuilder.fetchSource(false);
searchSourceBuilder.query(
QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(TrainedModelConfig.TAGS.getPreferredName(), config.getId()))
);
searchSourceBuilder.sort(TrainedModelConfig.CREATE_TIME.getPreferredName(), SortOrder.DESC);
SearchRequest searchRequest = new SearchRequest(InferenceIndexConstants.INDEX_PATTERN);
searchRequest.source(searchSourceBuilder);
executeAsyncWithOrigin(
client,
ML_ORIGIN,
TransportSearchAction.TYPE,
searchRequest,
listener.delegateFailureAndWrap((l, searchResponse) -> {
SearchHit[] hits = searchResponse.getHits().getHits();
if (hits.length == 0) {
l.onFailure(new ResourceNotFoundException("No model could be found to perform inference"));
} else {
l.onResponse(hits[0].getId());
}
})
);
}
@Override
public void cancel(String reason, TimeValue timeout) {
inferenceRunner.cancel();
}
@Override
public void updateProgress(ActionListener<Void> listener) {
// Inference runner updates progress directly
listener.onResponse(null);
}
}
|
InferenceStep
|
java
|
spring-projects__spring-security
|
core/src/main/java/org/springframework/security/authorization/method/HandleAuthorizationDenied.java
|
{
"start": 1434,
"end": 1717
}
|
interface ____ {
/**
* The {@link MethodAuthorizationDeniedHandler} used to handle denied authorization
* results
* @return
*/
Class<? extends MethodAuthorizationDeniedHandler> handlerClass() default ThrowingMethodAuthorizationDeniedHandler.class;
}
|
HandleAuthorizationDenied
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/sqm/mutation/internal/temptable/LocalTemporaryTableInsertStrategy.java
|
{
"start": 1161,
"end": 3151
}
|
class ____ extends LocalTemporaryTableStrategy implements SqmMultiTableInsertStrategy {
public LocalTemporaryTableInsertStrategy(EntityMappingType rootEntityDescriptor, RuntimeModelCreationContext runtimeModelCreationContext) {
this(
rootEntityDescriptor,
requireLocalTemporaryTableStrategy( runtimeModelCreationContext.getDialect() ),
runtimeModelCreationContext
);
}
private LocalTemporaryTableInsertStrategy(
EntityMappingType rootEntityDescriptor,
TemporaryTableStrategy temporaryTableStrategy,
RuntimeModelCreationContext runtimeModelCreationContext) {
this(
TemporaryTable.createEntityTable(
runtimeModelCreationContext.getMetadata()
.getEntityBinding( rootEntityDescriptor.getEntityName() ),
basename -> temporaryTableStrategy.adjustTemporaryTableName( TemporaryTable.ENTITY_TABLE_PREFIX + basename ),
TemporaryTableKind.LOCAL,
runtimeModelCreationContext.getDialect(),
runtimeModelCreationContext
),
runtimeModelCreationContext.getSessionFactory()
);
}
public LocalTemporaryTableInsertStrategy(
TemporaryTable entityTable,
SessionFactoryImplementor sessionFactory) {
super( entityTable, sessionFactory );
}
@Override
public MultiTableHandlerBuildResult buildHandler(SqmInsertStatement<?> sqmInsertStatement, DomainParameterXref domainParameterXref, DomainQueryExecutionContext context) {
final MutableObject<JdbcParameterBindings> firstJdbcParameterBindings = new MutableObject<>();
final MultiTableHandler multiTableHandler = new TableBasedInsertHandler(
sqmInsertStatement,
domainParameterXref,
getTemporaryTable(),
getTemporaryTableStrategy(),
isDropIdTables(),
session -> {
throw new UnsupportedOperationException( "Unexpected call to access Session uid" );
},
context,
firstJdbcParameterBindings
);
return new MultiTableHandlerBuildResult( multiTableHandler, firstJdbcParameterBindings.get() );
}
}
|
LocalTemporaryTableInsertStrategy
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/ref/RefTest14.java
|
{
"start": 1970,
"end": 2817
}
|
class ____ {
private String name;
private List<Group> groups = new ArrayList<Group>();
private User reportTo;
public User(){
}
public User getReportTo() {
return reportTo;
}
public void setReportTo(User reportTo) {
this.reportTo = reportTo;
}
public User(String name){
this.name = name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<Group> getGroups() {
return groups;
}
public void setGroups(List<Group> groups) {
this.groups = groups;
}
public String toString() {
return this.name;
}
}
}
|
User
|
java
|
quarkusio__quarkus
|
extensions/oidc-client-registration/deployment/src/test/java/io/quarkus/oidc/client/registration/OidcClientRegistrationKeycloakDevServiceTest.java
|
{
"start": 2276,
"end": 3057
}
|
class ____ {
private volatile ClientMetadata defaultClientMetadata;
private volatile ClientMetadata namedClientMetadata;
void prepareDefaultClientMetadata(@Observes StartupEvent event, OidcClientRegistrations clientRegistrations) {
var clientRegistration = clientRegistrations.getClientRegistration();
var registeredClient = clientRegistration.registeredClient().await().indefinitely();
defaultClientMetadata = registeredClient.metadata();
clientRegistration = clientRegistrations.getClientRegistration("named");
registeredClient = clientRegistration.registeredClient().await().indefinitely();
namedClientMetadata = registeredClient.metadata();
}
}
}
|
TestClientRegistrations
|
java
|
apache__camel
|
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/internal/client/DefaultRestClient.java
|
{
"start": 2177,
"end": 27103
}
|
class ____ extends AbstractClientBase implements RestClient {
private static final String SERVICES_DATA = "/services/data/";
private static final String TOKEN_HEADER = "Authorization";
private static final String TOKEN_PREFIX = "Bearer ";
private static final String SERVICES_APEXREST = "/services/apexrest/";
private static final String GET_SCHEMA_MINIMUM_VERSION = "40.0";
public DefaultRestClient(final SalesforceHttpClient httpClient, final String version,
final SalesforceSession session,
final SalesforceLoginConfig loginConfig) throws SalesforceException {
super(version, session, httpClient, loginConfig);
}
@Override
protected void doHttpRequest(Request request, ClientResponseCallback callback) {
// set standard headers for all requests
request.headers(h -> h.add(HttpHeader.ACCEPT, APPLICATION_JSON_UTF8));
request.headers(h -> h.add(HttpHeader.ACCEPT_CHARSET, StandardCharsets.UTF_8.name()));
// request content type and charset is set by the request entity
super.doHttpRequest(request, callback);
}
@Override
public void approval(final InputStream request, Map<String, List<String>> headers, final ResponseCallback callback) {
final Request post = getRequest(HttpMethod.POST, versionUrl() + "process/approvals/", headers);
// authorization
setAccessToken(post);
// input stream as entity content
post.body(new InputStreamRequestContent(request));
post.headers(h -> h.add(HttpHeader.CONTENT_TYPE, APPLICATION_JSON_UTF8));
doHttpRequest(post, new DelegatingClientCallback(callback));
}
@Override
public void approvals(Map<String, List<String>> headers, final ResponseCallback callback) {
final Request get = getRequest(HttpMethod.GET, versionUrl() + "process/approvals/", headers);
// authorization
setAccessToken(get);
doHttpRequest(get, new DelegatingClientCallback(callback));
}
@Override
public void getVersions(Map<String, List<String>> headers, final ResponseCallback callback) {
Request get = getRequest(HttpMethod.GET, servicesDataUrl(), headers);
// does not require authorization token
doHttpRequest(get, new DelegatingClientCallback(callback));
}
@Override
public void getResources(Map<String, List<String>> headers, ResponseCallback callback) {
Request get = getRequest(HttpMethod.GET, versionUrl(), headers);
// requires authorization token
setAccessToken(get);
doHttpRequest(get, new DelegatingClientCallback(callback));
}
@Override
public void getGlobalObjects(Map<String, List<String>> headers, ResponseCallback callback) {
Request get = getRequest(HttpMethod.GET, sobjectsUrl(""), headers);
// requires authorization token
setAccessToken(get);
doHttpRequest(get, new DelegatingClientCallback(callback));
}
@Override
public void getBasicInfo(String sObjectName, Map<String, List<String>> headers, ResponseCallback callback) {
Request get = getRequest(HttpMethod.GET, sobjectsUrl(sObjectName + "/"), headers);
// requires authorization token
setAccessToken(get);
doHttpRequest(get, new DelegatingClientCallback(callback));
}
@Override
public void getDescription(String sObjectName, Map<String, List<String>> headers, ResponseCallback callback) {
Request get = getRequest(HttpMethod.GET, sobjectsUrl(sObjectName + "/describe/"), headers);
// requires authorization token
setAccessToken(get);
doHttpRequest(get, new DelegatingClientCallback(callback));
}
@Override
public void getSObject(
String sObjectName, String id, String[] fields, Map<String, List<String>> headers, ResponseCallback callback) {
// parse fields if set
String params = "";
if (fields != null && fields.length > 0) {
StringBuilder fieldsValue = new StringBuilder("?fields=");
for (int i = 0; i < fields.length; i++) {
fieldsValue.append(fields[i]);
if (i < (fields.length - 1)) {
fieldsValue.append(',');
}
}
params = fieldsValue.toString();
}
Request get = getRequest(HttpMethod.GET, sobjectsUrl(sObjectName + "/" + id + params), headers);
// requires authorization token
setAccessToken(get);
doHttpRequest(get, new DelegatingClientCallback(callback));
}
@Override
public void createSObject(
String sObjectName, InputStream sObject, Map<String, List<String>> headers, ResponseCallback callback) {
// post the sObject
final Request post = getRequest(HttpMethod.POST, sobjectsUrl(sObjectName), headers);
// authorization
setAccessToken(post);
// input stream as entity content
post.body(new InputStreamRequestContent(sObject));
post.headers(h -> h.add(HttpHeader.CONTENT_TYPE, APPLICATION_JSON_UTF8));
doHttpRequest(post, new DelegatingClientCallback(callback));
}
@Override
public void createSObjectMultipart(
String sObjectName, Object sObjectDto, InputStream sObject,
Map<String, List<String>> headers, ResponseCallback callback) {
try {
final Request post = getRequest(HttpMethod.POST, sobjectsUrl(sObjectName), headers);
setAccessToken(post);
try (MultiPartRequestContent multipartContent = new MultiPartRequestContent()) {
ObjectMapper objectMapper = new ObjectMapper();
JsonNode jsonNode = objectMapper.readTree(sObject);
if (jsonNode.isObject()) {
ObjectNode objectNode = (ObjectNode) jsonNode;
ObjectNode cleanJson = objectNode.deepCopy();
Map<String, InputStream> binaryFields = getBinaryFieldMap(sObjectDto);
if (!binaryFields.isEmpty()) {
// Remove binary field names from JSON first
for (String fieldName : binaryFields.keySet()) {
cleanJson.remove(fieldName);
}
// non-binary json data must be the first part of the multipart request
String cleanJsonString = objectMapper.writeValueAsString(cleanJson);
multipartContent.addPart(new MultiPart.ContentSourcePart(
"entity", null,
HttpFields.build().add(HttpHeader.CONTENT_TYPE, APPLICATION_JSON_UTF8),
new InputStreamRequestContent(
new ByteArrayInputStream(cleanJsonString.getBytes(StandardCharsets.UTF_8)))));
// Then add binary fields as subsequent parts
for (Map.Entry<String, InputStream> entry : binaryFields.entrySet()) {
String fieldName = entry.getKey();
InputStream binaryData = entry.getValue();
multipartContent.addPart(new MultiPart.ContentSourcePart(
fieldName, "temp-file-name.doc",
HttpFields.build().add(HttpHeader.CONTENT_TYPE, "application/octet-stream"),
new InputStreamRequestContent(binaryData)));
}
} else {
// No multipart data found - this shouldn't happen as processor should handle this case
callback.onResponse(null, Collections.emptyMap(),
new SalesforceException("createSObjectMultipart called but no binary fields found", null));
return;
}
} else {
// If not a JSON object, send as-is
multipartContent.addPart(new MultiPart.ContentSourcePart(
"entity", null,
HttpFields.build().add(HttpHeader.CONTENT_TYPE, APPLICATION_JSON_UTF8),
new InputStreamRequestContent(sObject)));
}
post.body(multipartContent);
}
doHttpRequest(post, new DelegatingClientCallback(callback));
} catch (Exception e) {
// If JSON parsing fails, fall back to regular processing
callback.onResponse(null, Collections.emptyMap(),
new SalesforceException("Failed to process multipart request: " + e.getMessage(), e));
}
}
private Map<String, InputStream> getBinaryFieldMap(Object sObject) {
Map<String, InputStream> binaryFields = new HashMap<>();
if (sObject == null) {
return binaryFields;
}
Class<?> clazz = sObject.getClass();
java.lang.reflect.Field[] fields = clazz.getDeclaredFields();
for (java.lang.reflect.Field field : fields) {
String fieldName = field.getName();
if (fieldName.endsWith("Binary") && field.isAnnotationPresent(JsonIgnore.class)) {
try {
field.setAccessible(true);
Object value = field.get(sObject);
if (value instanceof InputStream) {
String originalFieldName = fieldName.replace("Binary", "");
binaryFields.put(originalFieldName, (InputStream) value);
}
} catch (Exception e) {
// Skip inaccessible fields
}
}
}
return binaryFields;
}
@Override
public void updateSObject(
String sObjectName, String id, InputStream sObject, Map<String, List<String>> headers, ResponseCallback callback) {
final Request patch = getRequest("PATCH", sobjectsUrl(sObjectName + "/" + id), headers);
// requires authorization token
setAccessToken(patch);
// input stream as entity content
patch.body(new InputStreamRequestContent(sObject));
patch.headers(h -> h.add(HttpHeader.CONTENT_TYPE, APPLICATION_JSON_UTF8));
doHttpRequest(patch, new DelegatingClientCallback(callback));
}
@Override
public void updateSObjectMultipart(
String sObjectName, String id, Object sObjectDto, InputStream sObject,
Map<String, List<String>> headers, ResponseCallback callback) {
try {
final Request patch = getRequest("PATCH", sobjectsUrl(sObjectName + "/" + id), headers);
setAccessToken(patch);
try (MultiPartRequestContent multipartContent = new MultiPartRequestContent()) {
ObjectMapper objectMapper = new ObjectMapper();
JsonNode jsonNode = objectMapper.readTree(sObject);
if (jsonNode.isObject()) {
ObjectNode objectNode = (ObjectNode) jsonNode;
ObjectNode cleanJson = objectNode.deepCopy();
Map<String, InputStream> binaryFields = getBinaryFieldMap(sObjectDto);
if (!binaryFields.isEmpty()) {
// Remove binary field names from JSON first
for (String fieldName : binaryFields.keySet()) {
cleanJson.remove(fieldName);
}
// non-binary json data must be the first part of the multipart request
String cleanJsonString = objectMapper.writeValueAsString(cleanJson);
multipartContent.addPart(new MultiPart.ContentSourcePart(
"entity", null,
HttpFields.build().add(HttpHeader.CONTENT_TYPE, APPLICATION_JSON_UTF8),
new InputStreamRequestContent(
new ByteArrayInputStream(cleanJsonString.getBytes(StandardCharsets.UTF_8)))));
// Then add binary fields as subsequent parts
for (Map.Entry<String, InputStream> entry : binaryFields.entrySet()) {
String fieldName = entry.getKey();
InputStream binaryData = entry.getValue();
multipartContent.addPart(new MultiPart.ContentSourcePart(
fieldName, "temp-file-name.doc",
HttpFields.build().add(HttpHeader.CONTENT_TYPE, "application/octet-stream"),
new InputStreamRequestContent(binaryData)));
}
} else {
// No multipart data found - this shouldn't happen as processor should handle this case
callback.onResponse(null, Collections.emptyMap(),
new SalesforceException("updateSObjectMultipart called but no binary fields found", null));
return;
}
} else {
// If not a JSON object, send as-is
multipartContent.addPart(new MultiPart.ContentSourcePart(
"entity", null,
HttpFields.build().add(HttpHeader.CONTENT_TYPE, APPLICATION_JSON_UTF8),
new InputStreamRequestContent(sObject)));
}
patch.body(multipartContent);
}
doHttpRequest(patch, new DelegatingClientCallback(callback));
} catch (Exception e) {
// If JSON parsing fails, fall back to regular processing
callback.onResponse(null, Collections.emptyMap(),
new SalesforceException("Failed to process multipart update request: " + e.getMessage(), e));
}
}
@Override
public void deleteSObject(String sObjectName, String id, Map<String, List<String>> headers, ResponseCallback callback) {
final Request delete = getRequest(HttpMethod.DELETE, sobjectsUrl(sObjectName + "/" + id), headers);
// requires authorization token
setAccessToken(delete);
doHttpRequest(delete, new DelegatingClientCallback(callback));
}
@Override
public void getSObjectWithId(
String sObjectName, String fieldName, String fieldValue, Map<String, List<String>> headers,
ResponseCallback callback) {
final Request get = getRequest(HttpMethod.GET, sobjectsExternalIdUrl(sObjectName, fieldName, fieldValue), headers);
// requires authorization token
setAccessToken(get);
doHttpRequest(get, new DelegatingClientCallback(callback));
}
@Override
public void upsertSObject(
String sObjectName, String fieldName, String fieldValue, Map<String, List<String>> headers, InputStream sObject,
ResponseCallback callback) {
final Request patch = getRequest("PATCH", sobjectsExternalIdUrl(sObjectName, fieldName, fieldValue), headers);
// requires authorization token
setAccessToken(patch);
// input stream as entity content
patch.body(new InputStreamRequestContent(sObject));
// TODO will the encoding always be UTF-8??
patch.headers(h -> h.add(HttpHeader.CONTENT_TYPE, APPLICATION_JSON_UTF8));
doHttpRequest(patch, new DelegatingClientCallback(callback));
}
@Override
public void deleteSObjectWithId(
String sObjectName, String fieldName, String fieldValue, Map<String, List<String>> headers,
ResponseCallback callback) {
final Request delete
= getRequest(HttpMethod.DELETE, sobjectsExternalIdUrl(sObjectName, fieldName, fieldValue), headers);
// requires authorization token
setAccessToken(delete);
doHttpRequest(delete, new DelegatingClientCallback(callback));
}
@Override
public void getBlobField(
String sObjectName, String id, String blobFieldName, Map<String, List<String>> headers, ResponseCallback callback) {
final Request get = getRequest(HttpMethod.GET, sobjectsUrl(sObjectName + "/" + id + "/" + blobFieldName), headers);
// TODO this doesn't seem to be required, the response is always the
// content binary stream
// get.header(HttpHeader.ACCEPT_ENCODING, "base64");
// requires authorization token
setAccessToken(get);
doHttpRequest(get, new DelegatingClientCallback(callback));
}
@Override
public void query(String soqlQuery, Map<String, List<String>> headers, ResponseCallback callback) {
try {
String encodedQuery = urlEncode(soqlQuery);
final Request get = getRequest(HttpMethod.GET, versionUrl() + "query/?q=" + encodedQuery, headers);
// requires authorization token
setAccessToken(get);
doHttpRequest(get, new DelegatingClientCallback(callback));
} catch (UnsupportedEncodingException e) {
String msg = "Unexpected error: " + e.getMessage();
callback.onResponse(null, Collections.emptyMap(), new SalesforceException(msg, e));
}
}
@Override
public void queryMore(String nextRecordsUrl, Map<String, List<String>> headers, ResponseCallback callback) {
final Request get = getRequest(HttpMethod.GET, instanceUrl + nextRecordsUrl, headers);
// requires authorization token
setAccessToken(get);
doHttpRequest(get, new DelegatingClientCallback(callback));
}
@Override
public void queryAll(String soqlQuery, Map<String, List<String>> headers, ResponseCallback callback) {
try {
String encodedQuery = urlEncode(soqlQuery);
final Request get = getRequest(HttpMethod.GET, versionUrl() + "queryAll/?q=" + encodedQuery, headers);
// requires authorization token
setAccessToken(get);
doHttpRequest(get, new DelegatingClientCallback(callback));
} catch (UnsupportedEncodingException e) {
String msg = "Unexpected error: " + e.getMessage();
callback.onResponse(null, Collections.emptyMap(), new SalesforceException(msg, e));
}
}
@Override
public void search(String soslQuery, Map<String, List<String>> headers, ResponseCallback callback) {
try {
String encodedQuery = urlEncode(soslQuery);
final Request get = getRequest(HttpMethod.GET, versionUrl() + "search/?q=" + encodedQuery, headers);
// requires authorization token
setAccessToken(get);
doHttpRequest(get, new DelegatingClientCallback(callback));
} catch (UnsupportedEncodingException e) {
String msg = "Unexpected error: " + e.getMessage();
callback.onResponse(null, Collections.emptyMap(), new SalesforceException(msg, e));
}
}
@Override
public void apexCall(
String httpMethod, String apexUrl, Map<String, Object> queryParams, InputStream requestDto,
Map<String, List<String>> headers, ResponseCallback callback) {
// create APEX call request
final Request request;
try {
request = getRequest(httpMethod, apexCallUrl(apexUrl, queryParams), headers);
// set request SObject and content type
if (requestDto != null) {
// guard against requests that do not support bodies
switch (request.getMethod()) {
case "PUT":
case "PATCH":
case "POST":
request.body(new InputStreamRequestContent(requestDto));
request.headers(h -> h.add(HttpHeader.CONTENT_TYPE, APPLICATION_JSON_UTF8));
break;
default:
// ignore body for other methods
}
}
// requires authorization token
setAccessToken(request);
doHttpRequest(request, new DelegatingClientCallback(callback));
} catch (UnsupportedEncodingException | URISyntaxException e) {
String msg = "Unexpected error: " + e.getMessage();
callback.onResponse(null, Collections.emptyMap(), new SalesforceException(msg, e));
}
}
@Override
public void getEventSchemaByEventName(
String eventName, String payloadFormat, Map<String, List<String>> headers, ResponseCallback callback) {
validateMinimumVersion(GET_SCHEMA_MINIMUM_VERSION);
final Request request;
request = getRequest(HttpMethod.GET, sobjectsUrl(eventName) + "/eventSchema" + "?payloadFormat=" + payloadFormat,
headers);
// requires authorization token
setAccessToken(request);
doHttpRequest(request, new DelegatingClientCallback(callback));
}
@Override
public void getEventSchemaBySchemaId(
String schemaId, String payloadFormat, Map<String, List<String>> headers, ResponseCallback callback) {
validateMinimumVersion(GET_SCHEMA_MINIMUM_VERSION);
final Request request;
request = getRequest(HttpMethod.GET, versionUrl() + "event/eventSchema/" + schemaId + "?payloadFormat=" + payloadFormat,
headers);
// requires authorization token
setAccessToken(request);
doHttpRequest(request, new DelegatingClientCallback(callback));
}
private String apexCallUrl(String apexUrl, Map<String, Object> queryParams)
throws UnsupportedEncodingException, URISyntaxException {
if (queryParams != null && !queryParams.isEmpty()) {
apexUrl = URISupport.appendParametersToURI(apexUrl, queryParams);
}
return instanceUrl + SERVICES_APEXREST + apexUrl;
}
@Override
public void recent(final Integer limit, Map<String, List<String>> headers, final ResponseCallback responseCallback) {
final String param = Optional.ofNullable(limit).map(v -> "?limit=" + v).orElse("");
final Request get = getRequest(HttpMethod.GET, versionUrl() + "recent/" + param, headers);
// requires authorization token
setAccessToken(get);
doHttpRequest(get, new DelegatingClientCallback(responseCallback));
}
@Override
public void limits(Map<String, List<String>> headers, final ResponseCallback responseCallback) {
final Request get = getRequest(HttpMethod.GET, versionUrl() + "limits/", headers);
// requires authorization token
setAccessToken(get);
doHttpRequest(get, new DelegatingClientCallback(responseCallback));
}
private String servicesDataUrl() {
return instanceUrl + SERVICES_DATA;
}
private void validateMinimumVersion(String minimumVersion) {
if (Version.create(version).compareTo(Version.create(minimumVersion)) < 0) {
throw new IllegalArgumentException(
"Salesforce API version " + minimumVersion + " or newer is required, version " + version + " was detected");
}
}
private String versionUrl() {
ObjectHelper.notNull(version, "version");
return servicesDataUrl() + "v" + version + "/";
}
private String sobjectsUrl(String sObjectName) {
ObjectHelper.notNull(sObjectName, "sObjectName");
return versionUrl() + "sobjects/" + sObjectName;
}
private String sobjectsExternalIdUrl(String sObjectName, String fieldName, String fieldValue) {
if (fieldName == null || fieldValue == null) {
throw new IllegalArgumentException("External field name and value cannot be NULL");
}
try {
String encodedValue = urlEncode(fieldValue);
return sobjectsUrl(sObjectName + "/" + fieldName + "/" + encodedValue);
} catch (UnsupportedEncodingException e) {
String msg = "Unexpected error: " + e.getMessage();
throw new IllegalArgumentException(msg, e);
}
}
@Override
protected void setAccessToken(Request request) {
// replace old token
request.headers(h -> h.add(TOKEN_HEADER, TOKEN_PREFIX + accessToken));
}
private String urlEncode(String query) throws UnsupportedEncodingException {
String encodedQuery = URLEncoder.encode(query, StandardCharsets.UTF_8);
// URLEncoder likes to use '+' for spaces
encodedQuery = encodedQuery.replace("+", "%20");
return encodedQuery;
}
private static
|
DefaultRestClient
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/validator/BeanValidatorInputValidateTest.java
|
{
"start": 1233,
"end": 1834
}
|
class ____ extends ContextTestSupport {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
validator().type("toValidate").withBean("testValidator");
onException(ValidationException.class).handled(true).log("Invalid validation: ${exception.message}")
.to("mock:invalid");
from("direct:in").inputTypeWithValidate("toValidate").to("mock:out");
}
};
}
public static
|
BeanValidatorInputValidateTest
|
java
|
bumptech__glide
|
integration/volley/src/test/java/com/bumptech/glide/integration/volley/VolleyStreamFetcherServerTest.java
|
{
"start": 1866,
"end": 7988
}
|
class ____ {
private static final String DEFAULT_PATH = "/fakepath";
@Mock private DataFetcher.DataCallback<InputStream> callback;
private MockWebServer mockWebServer;
private RequestQueue requestQueue;
private ArgumentCaptor<InputStream> streamCaptor;
private CountDownLatch waitForResponseLatch;
@Before
public void setUp() throws IOException {
MockitoAnnotations.initMocks(this);
waitForResponseLatch = new CountDownLatch(1);
doAnswer(new CountDown()).when(callback).onDataReady(any(InputStream.class));
doAnswer(new CountDown()).when(callback).onLoadFailed(any(Exception.class));
requestQueue = Volley.newRequestQueue(ApplicationProvider.getApplicationContext());
mockWebServer = new MockWebServer();
mockWebServer.start();
streamCaptor = ArgumentCaptor.forClass(InputStream.class);
}
@After
public void tearDown() throws IOException {
mockWebServer.shutdown();
requestQueue.stop();
}
@Test
public void testReturnsInputStreamOnStatusOk() throws Exception {
String expected = "fakedata";
mockWebServer.enqueue(new MockResponse().setBody(expected).setResponseCode(200));
DataFetcher<InputStream> fetcher = getFetcher();
fetcher.loadData(Priority.HIGH, callback);
waitForResponseLatch.await();
verify(callback).onDataReady(streamCaptor.capture());
assertStreamOf(expected, streamCaptor.getValue());
}
@Test
public void testHandlesRedirect301s() throws Exception {
String expected = "fakedata";
mockWebServer.enqueue(
new MockResponse()
.setResponseCode(301)
.setHeader("Location", mockWebServer.url("/redirect").toString()));
mockWebServer.enqueue(new MockResponse().setResponseCode(200).setBody(expected));
getFetcher().loadData(Priority.LOW, callback);
waitForResponseLatch.await();
verify(callback).onDataReady(streamCaptor.capture());
assertStreamOf(expected, streamCaptor.getValue());
}
@Test
public void testHandlesRedirect302s() throws Exception {
String expected = "fakedata";
mockWebServer.enqueue(
new MockResponse()
.setResponseCode(302)
.setHeader("Location", mockWebServer.url("/redirect").toString()));
mockWebServer.enqueue(new MockResponse().setResponseCode(200).setBody(expected));
getFetcher().loadData(Priority.LOW, callback);
waitForResponseLatch.await();
verify(callback).onDataReady(streamCaptor.capture());
assertStreamOf(expected, streamCaptor.getValue());
}
@Test
public void testHandlesUpToFiveRedirects() throws Exception {
int numRedirects = 4;
String expected = "redirectedData";
String redirectBase = "/redirect";
for (int i = 0; i < numRedirects; i++) {
mockWebServer.enqueue(
new MockResponse()
.setResponseCode(301)
.setHeader("Location", mockWebServer.url(redirectBase + i).toString()));
}
mockWebServer.enqueue(new MockResponse().setResponseCode(200).setBody(expected));
getFetcher().loadData(Priority.NORMAL, callback);
waitForResponseLatch.await();
verify(callback).onDataReady(streamCaptor.capture());
assertStreamOf(expected, streamCaptor.getValue());
assertThat(mockWebServer.takeRequest().getPath()).contains(DEFAULT_PATH);
for (int i = 0; i < numRedirects; i++) {
assertThat(mockWebServer.takeRequest().getPath()).contains(redirectBase + i);
}
}
@Test
public void testCallsLoadFailedIfRedirectLocationIsEmpty() throws Exception {
for (int i = 0; i < 2; i++) {
mockWebServer.enqueue(new MockResponse().setResponseCode(301));
}
getFetcher().loadData(Priority.NORMAL, callback);
waitForResponseLatch.await();
verify(callback).onLoadFailed(isA(VolleyError.class));
}
@Test
public void testCallsLoadFailedIfStatusCodeIsNegativeOne() throws Exception {
mockWebServer.enqueue(new MockResponse().setResponseCode(-1));
getFetcher().loadData(Priority.LOW, callback);
waitForResponseLatch.await();
verify(callback).onLoadFailed(isA(VolleyError.class));
}
@Test
public void testCallsLoadFailedAfterTooManyRedirects() throws Exception {
for (int i = 0; i < 20; i++) {
mockWebServer.enqueue(
new MockResponse()
.setResponseCode(301)
.setHeader("Location", mockWebServer.url("/redirect" + i).toString()));
}
getFetcher().loadData(Priority.NORMAL, callback);
waitForResponseLatch.await();
verify(callback).onLoadFailed(isA(VolleyError.class));
}
@Test
public void testCallsLoadFailedIfStatusCodeIs500() throws Exception {
mockWebServer.enqueue(new MockResponse().setResponseCode(500).setBody("error"));
getFetcher().loadData(Priority.NORMAL, callback);
waitForResponseLatch.await();
verify(callback).onLoadFailed(isA(VolleyError.class));
}
@Test
public void testCallsLoadFailedIfStatusCodeIs400() throws Exception {
mockWebServer.enqueue(new MockResponse().setResponseCode(400).setBody("error"));
getFetcher().loadData(Priority.LOW, callback);
waitForResponseLatch.await();
verify(callback).onLoadFailed(isA(VolleyError.class));
}
@Test
public void testAppliesHeadersInGlideUrl() throws Exception {
mockWebServer.enqueue(new MockResponse().setResponseCode(200));
String headerField = "field";
String headerValue = "value";
Map<String, String> headersMap = new HashMap<>();
headersMap.put(headerField, headerValue);
Headers headers = mock(Headers.class);
when(headers.getHeaders()).thenReturn(headersMap);
getFetcher(headers).loadData(Priority.HIGH, callback);
waitForResponseLatch.await();
assertThat(mockWebServer.takeRequest().getHeader(headerField)).isEqualTo(headerValue);
}
private DataFetcher<InputStream> getFetcher() {
return getFetcher(Headers.DEFAULT);
}
private DataFetcher<InputStream> getFetcher(Headers headers) {
URL url = mockWebServer.url(DEFAULT_PATH).url();
return new VolleyStreamFetcher(requestQueue, new GlideUrl(url.toString(), headers));
}
private
|
VolleyStreamFetcherServerTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/HdfsSearchableSnapshotsIT.java
|
{
"start": 964,
"end": 2269
}
|
class ____ extends AbstractSearchableSnapshotsRestTestCase {
public static HdfsFixture hdfsFixture = new HdfsFixture();
public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
.distribution(DistributionType.DEFAULT)
.plugin("repository-hdfs")
.setting("xpack.searchable.snapshot.shared_cache.size", "16MB")
.setting("xpack.searchable.snapshot.shared_cache.region_size", "256KB")
.setting("xpack.license.self_generated.type", "trial")
.setting("xpack.security.enabled", "false")
.build();
@ClassRule
public static TestRule ruleChain = RuleChain.outerRule(hdfsFixture).around(cluster);
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
@Override
protected String writeRepositoryType() {
return "hdfs";
}
@Override
protected Settings writeRepositorySettings() {
final String uri = "hdfs://localhost:" + hdfsFixture.getPort();
final String path = "/user/elasticsearch/test/searchable_snapshots/simple";
Settings.Builder repositorySettings = Settings.builder().put("client", "searchable_snapshots").put("uri", uri).put("path", path);
return repositorySettings.build();
}
}
|
HdfsSearchableSnapshotsIT
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java
|
{
"start": 60468,
"end": 64901
}
|
class ____<T extends Number> extends Baz<T> {",
" private final int anInt;",
" private final byte[] aByteArray;",
" private final int @Nullable [] aNullableIntArray;",
" private final List<T> aList;",
" private final ImmutableMap<T, String> anImmutableMap;",
" private final Optional<String> anOptionalString;",
"",
" private AutoValue_Baz(",
" int anInt,",
" byte[] aByteArray,",
" int @Nullable [] aNullableIntArray,",
" List<T> aList,",
" ImmutableMap<T, String> anImmutableMap,",
" Optional<String> anOptionalString) {",
" this.anInt = anInt;",
" this.aByteArray = aByteArray;",
" this.aNullableIntArray = aNullableIntArray;",
" this.aList = aList;",
" this.anImmutableMap = anImmutableMap;",
" this.anOptionalString = anOptionalString;",
" }",
"",
" @Override public int anInt() {",
" return anInt;",
" }",
"",
" @SuppressWarnings(\"mutable\")",
" @Override public byte[] aByteArray() {",
" return aByteArray;",
" }",
"",
" @SuppressWarnings(\"mutable\")",
" @Override public int @Nullable [] aNullableIntArray() {",
" return aNullableIntArray;",
" }",
"",
" @Override public List<T> aList() {",
" return aList;",
" }",
"",
" @Override public ImmutableMap<T, String> anImmutableMap() {",
" return anImmutableMap;",
" }",
"",
" @Override public Optional<String> anOptionalString() {",
" return anOptionalString;",
" }",
"",
" @Override public String toString() {",
" return \"Baz{\"",
" + \"anInt=\" + anInt + \", \"",
" + \"aByteArray=\" + Arrays.toString(aByteArray) + \", \"",
" + \"aNullableIntArray=\" + Arrays.toString(aNullableIntArray) + \", \"",
" + \"aList=\" + aList + \", \"",
" + \"anImmutableMap=\" + anImmutableMap + \", \"",
" + \"anOptionalString=\" + anOptionalString",
" + \"}\";",
" }",
"",
" @Override public boolean equals(@Nullable Object o) {",
" if (o == this) {",
" return true;",
" }",
" if (o instanceof Baz) {",
" Baz<?> that = (Baz<?>) o;",
" return this.anInt == that.anInt()",
" && Arrays.equals(this.aByteArray, "
+ "(that instanceof AutoValue_Baz) "
+ "? ((AutoValue_Baz<?>) that).aByteArray : that.aByteArray())",
" && Arrays.equals(this.aNullableIntArray, "
+ "(that instanceof AutoValue_Baz) "
+ "? ((AutoValue_Baz<?>) that).aNullableIntArray : that.aNullableIntArray())",
" && this.aList.equals(that.aList())",
" && this.anImmutableMap.equals(that.anImmutableMap())",
" && this.anOptionalString.equals(that.anOptionalString());",
" }",
" return false;",
" }",
"",
" @Override public int hashCode() {",
" int h$ = 1;",
" h$ *= 1000003;",
" h$ ^= anInt;",
" h$ *= 1000003;",
" h$ ^= Arrays.hashCode(aByteArray);",
" h$ *= 1000003;",
" h$ ^= Arrays.hashCode(aNullableIntArray);",
" h$ *= 1000003;",
" h$ ^= aList.hashCode();",
" h$ *= 1000003;",
" h$ ^= anImmutableMap.hashCode();",
" h$ *= 1000003;",
" h$ ^= anOptionalString.hashCode();",
" return h$;",
" }",
"",
" @Override public Baz.Builder<T> toBuilder() {",
" return new AutoValue_Baz.Builder<T>(this);",
" }",
"",
" static final
|
AutoValue_Baz
|
java
|
apache__camel
|
components/camel-sjms/src/main/java/org/apache/camel/component/sjms/SessionMessageListener.java
|
{
"start": 963,
"end": 1075
}
|
interface ____ {
void onMessage(Message message, Session session) throws JMSException;
}
|
SessionMessageListener
|
java
|
apache__camel
|
test-infra/camel-test-infra-artemis/src/test/java/org/apache/camel/test/infra/artemis/services/ArtemisServiceFactory.java
|
{
"start": 5465,
"end": 5900
}
|
class ____ {
static final ArtemisService INSTANCE;
static {
SimpleTestServiceBuilder<ArtemisService> mqttInstanceBuilder = new SimpleTestServiceBuilder<>("artemis");
mqttInstanceBuilder
.addLocalMapping(() -> new SingletonArtemisService(new ArtemisMQTTService(), "artemis-mqtt"));
INSTANCE = mqttInstanceBuilder.build();
}
}
}
|
SingletonMQTTServiceHolder
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet2/HamletSpec.java
|
{
"start": 29341,
"end": 29562
}
|
interface ____ extends _Child {
/**
* Content of the element
* @param lines of content
* @return the current element builder
*/
_Content __(Object... lines);
}
/**
*
*/
public
|
_Content
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-ws/src/generated/java/org/apache/camel/component/spring/ws/SpringWebserviceEndpointConfigurer.java
|
{
"start": 736,
"end": 10608
}
|
class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
SpringWebserviceEndpoint target = (SpringWebserviceEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "allowresponseattachmentoverride":
case "allowResponseAttachmentOverride": target.getConfiguration().setAllowResponseAttachmentOverride(property(camelContext, boolean.class, value)); return true;
case "allowresponseheaderoverride":
case "allowResponseHeaderOverride": target.getConfiguration().setAllowResponseHeaderOverride(property(camelContext, boolean.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "endpointdispatcher":
case "endpointDispatcher": target.getConfiguration().setEndpointDispatcher(property(camelContext, org.apache.camel.component.spring.ws.bean.CamelEndpointDispatcher.class, value)); return true;
case "endpointmapping":
case "endpointMapping": target.getConfiguration().setEndpointMapping(property(camelContext, org.apache.camel.component.spring.ws.bean.CamelSpringWSEndpointMapping.class, value)); return true;
case "exceptionhandler":
case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
case "exchangepattern":
case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
case "expression": target.getConfiguration().setExpression(property(camelContext, java.lang.String.class, value)); return true;
case "faultaction":
case "faultAction": target.getConfiguration().setFaultAction(property(camelContext, java.net.URI.class, value)); return true;
case "faultto":
case "faultTo": target.getConfiguration().setFaultTo(property(camelContext, java.net.URI.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "messagefactory":
case "messageFactory": target.getConfiguration().setMessageFactory(property(camelContext, org.springframework.ws.WebServiceMessageFactory.class, value)); return true;
case "messagefilter":
case "messageFilter": target.getConfiguration().setMessageFilter(property(camelContext, org.apache.camel.component.spring.ws.filter.MessageFilter.class, value)); return true;
case "messageidstrategy":
case "messageIdStrategy": target.getConfiguration().setMessageIdStrategy(property(camelContext, org.springframework.ws.soap.addressing.messageid.MessageIdStrategy.class, value)); return true;
case "messagesender":
case "messageSender": target.getConfiguration().setMessageSender(property(camelContext, org.springframework.ws.transport.WebServiceMessageSender.class, value)); return true;
case "outputaction":
case "outputAction": target.getConfiguration().setOutputAction(property(camelContext, java.net.URI.class, value)); return true;
case "replyto":
case "replyTo": target.getConfiguration().setReplyTo(property(camelContext, java.net.URI.class, value)); return true;
case "soapaction":
case "soapAction": target.getConfiguration().setSoapAction(property(camelContext, java.lang.String.class, value)); return true;
case "sslcontextparameters":
case "sslContextParameters": target.getConfiguration().setSslContextParameters(property(camelContext, org.apache.camel.support.jsse.SSLContextParameters.class, value)); return true;
case "timeout": target.getConfiguration().setTimeout(property(camelContext, int.class, value)); return true;
case "webservicetemplate":
case "webServiceTemplate": target.getConfiguration().setWebServiceTemplate(property(camelContext, org.springframework.ws.client.core.WebServiceTemplate.class, value)); return true;
case "wsaddressingaction":
case "wsAddressingAction": target.getConfiguration().setWsAddressingAction(property(camelContext, java.net.URI.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "allowresponseattachmentoverride":
case "allowResponseAttachmentOverride": return boolean.class;
case "allowresponseheaderoverride":
case "allowResponseHeaderOverride": return boolean.class;
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "endpointdispatcher":
case "endpointDispatcher": return org.apache.camel.component.spring.ws.bean.CamelEndpointDispatcher.class;
case "endpointmapping":
case "endpointMapping": return org.apache.camel.component.spring.ws.bean.CamelSpringWSEndpointMapping.class;
case "exceptionhandler":
case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class;
case "exchangepattern":
case "exchangePattern": return org.apache.camel.ExchangePattern.class;
case "expression": return java.lang.String.class;
case "faultaction":
case "faultAction": return java.net.URI.class;
case "faultto":
case "faultTo": return java.net.URI.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "messagefactory":
case "messageFactory": return org.springframework.ws.WebServiceMessageFactory.class;
case "messagefilter":
case "messageFilter": return org.apache.camel.component.spring.ws.filter.MessageFilter.class;
case "messageidstrategy":
case "messageIdStrategy": return org.springframework.ws.soap.addressing.messageid.MessageIdStrategy.class;
case "messagesender":
case "messageSender": return org.springframework.ws.transport.WebServiceMessageSender.class;
case "outputaction":
case "outputAction": return java.net.URI.class;
case "replyto":
case "replyTo": return java.net.URI.class;
case "soapaction":
case "soapAction": return java.lang.String.class;
case "sslcontextparameters":
case "sslContextParameters": return org.apache.camel.support.jsse.SSLContextParameters.class;
case "timeout": return int.class;
case "webservicetemplate":
case "webServiceTemplate": return org.springframework.ws.client.core.WebServiceTemplate.class;
case "wsaddressingaction":
case "wsAddressingAction": return java.net.URI.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
SpringWebserviceEndpoint target = (SpringWebserviceEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "allowresponseattachmentoverride":
case "allowResponseAttachmentOverride": return target.getConfiguration().isAllowResponseAttachmentOverride();
case "allowresponseheaderoverride":
case "allowResponseHeaderOverride": return target.getConfiguration().isAllowResponseHeaderOverride();
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "endpointdispatcher":
case "endpointDispatcher": return target.getConfiguration().getEndpointDispatcher();
case "endpointmapping":
case "endpointMapping": return target.getConfiguration().getEndpointMapping();
case "exceptionhandler":
case "exceptionHandler": return target.getExceptionHandler();
case "exchangepattern":
case "exchangePattern": return target.getExchangePattern();
case "expression": return target.getConfiguration().getExpression();
case "faultaction":
case "faultAction": return target.getConfiguration().getFaultAction();
case "faultto":
case "faultTo": return target.getConfiguration().getFaultTo();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "messagefactory":
case "messageFactory": return target.getConfiguration().getMessageFactory();
case "messagefilter":
case "messageFilter": return target.getConfiguration().getMessageFilter();
case "messageidstrategy":
case "messageIdStrategy": return target.getConfiguration().getMessageIdStrategy();
case "messagesender":
case "messageSender": return target.getConfiguration().getMessageSender();
case "outputaction":
case "outputAction": return target.getConfiguration().getOutputAction();
case "replyto":
case "replyTo": return target.getConfiguration().getReplyTo();
case "soapaction":
case "soapAction": return target.getConfiguration().getSoapAction();
case "sslcontextparameters":
case "sslContextParameters": return target.getConfiguration().getSslContextParameters();
case "timeout": return target.getConfiguration().getTimeout();
case "webservicetemplate":
case "webServiceTemplate": return target.getConfiguration().getWebServiceTemplate();
case "wsaddressingaction":
case "wsAddressingAction": return target.getConfiguration().getWsAddressingAction();
default: return null;
}
}
}
|
SpringWebserviceEndpointConfigurer
|
java
|
elastic__elasticsearch
|
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/idea/IdeaXmlUtil.java
|
{
"start": 812,
"end": 2849
}
|
class ____ {
static Node parseXml(String xmlPath) throws IOException, SAXException, ParserConfigurationException {
File xmlFile = new File(xmlPath);
XmlParser xmlParser = new XmlParser(false, true, true);
xmlParser.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
Node xml = xmlParser.parse(xmlFile);
return xml;
}
/**
* Parses a given XML file, applies a set of changes, and writes those changes back to the original file.
*
* @param path Path to existing XML file
* @param action Action to perform on parsed XML document
* but before the XML document, e.g. a doctype or comment
*/
static void modifyXml(String xmlPath, Action<? super Node> action) throws IOException, ParserConfigurationException, SAXException {
modifyXml(xmlPath, action, null);
}
/**
* Parses a given XML file, applies a set of changes, and writes those changes back to the original file.
*
* @param path Path to existing XML file
* @param action Action to perform on parsed XML document
* @param preface optional front matter to add after the XML declaration
* but before the XML document, e.g. a doctype or comment
*/
static void modifyXml(String xmlPath, Action<? super Node> action, String preface) throws IOException, ParserConfigurationException,
SAXException {
File xmlFile = new File(xmlPath);
if (xmlFile.exists()) {
Node xml = parseXml(xmlPath);
action.execute(xml);
try (PrintWriter writer = new PrintWriter(xmlFile)) {
var printer = new XmlNodePrinter(writer);
printer.setNamespaceAware(true);
printer.setPreserveWhitespace(true);
writer.write("<?xml version=\"1.0\"?>\n");
if (preface != null) {
writer.write(preface);
}
printer.print(xml);
}
}
}
}
|
IdeaXmlUtil
|
java
|
google__gson
|
proto/src/main/java/com/google/gson/protobuf/ProtoTypeAdapter.java
|
{
"start": 5915,
"end": 6145
}
|
enum ____ proto annotation that, when set, overrides the default <b>enum</b> value
* serialization/deserialization of this adapter. For example, if you add the ' {@code
* serialized_value}' annotation and you define an
|
value
|
java
|
apache__dubbo
|
dubbo-metrics/dubbo-tracing/src/test/java/org/apache/dubbo/tracing/exporter/otlp/OTlpSpanExporterTest.java
|
{
"start": 1199,
"end": 1756
}
|
class ____ {
@Test
void getSpanExporter() {
ExporterConfig.OtlpConfig otlpConfig = mock(ExporterConfig.OtlpConfig.class);
when(otlpConfig.getEndpoint()).thenReturn("http://localhost:9411/api/v2/spans");
when(otlpConfig.getTimeout()).thenReturn(Duration.ofSeconds(5));
when(otlpConfig.getCompressionMethod()).thenReturn("gzip");
SpanExporter spanExporter = OTlpSpanExporter.getSpanExporter(ApplicationModel.defaultModel(), otlpConfig);
Assertions.assertNotNull(spanExporter);
}
}
|
OTlpSpanExporterTest
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/ClientStreamObserver.java
|
{
"start": 978,
"end": 1436
}
|
interface ____<T> extends CallStreamObserver<T> {
/**
* Swaps to manual flow control where no message will be delivered to {@link
* StreamObserver#onNext(Object)} unless it is {@link #request request()}ed. Since {@code
* request()} may not be called before the call is started, a number of initial requests may be
* specified.
*/
default void disableAutoRequest() {
disableAutoFlowControl();
}
}
|
ClientStreamObserver
|
java
|
apache__maven
|
impl/maven-impl/src/main/java/org/apache/maven/impl/model/profile/ConditionParser.java
|
{
"start": 1168,
"end": 1340
}
|
class ____ a recursive descent parser to handle various operations including
* arithmetic, logical, and comparison operations, as well as function calls.
*/
public
|
implements
|
java
|
alibaba__nacos
|
persistence/src/test/java/com/alibaba/nacos/persistence/repository/embedded/EmbeddedStorageContextHolderTest.java
|
{
"start": 1063,
"end": 2625
}
|
class ____ {
private static final String TEST_SQL = "SELECT * FROM config_info";
@BeforeEach
void setUp() {
}
@AfterEach
void tearDown() {
EmbeddedStorageContextHolder.cleanAllContext();
}
@Test
void testAddSqlContextRollbackOnUpdateFail() {
EmbeddedStorageContextHolder.addSqlContext(true, TEST_SQL, "test");
List<ModifyRequest> requests = EmbeddedStorageContextHolder.getCurrentSqlContext();
assertEquals(1, requests.size());
assertEquals(TEST_SQL, requests.get(0).getSql());
assertEquals(0, requests.get(0).getExecuteNo());
assertEquals("test", requests.get(0).getArgs()[0]);
assertTrue(requests.get(0).isRollBackOnUpdateFail());
}
@Test
void testPutExtendInfo() {
EmbeddedStorageContextHolder.putExtendInfo("testPutExtendInfo", "test_value");
assertTrue(EmbeddedStorageContextHolder.containsExtendInfo("testPutExtendInfo"));
assertEquals("test_value", EmbeddedStorageContextHolder.getCurrentExtendInfo().get("testPutExtendInfo"));
}
@Test
void testPutAllExtendInfo() {
Map<String, String> map = new HashMap<>();
map.put("testPutAllExtendInfo", "test_value");
EmbeddedStorageContextHolder.putAllExtendInfo(map);
assertTrue(EmbeddedStorageContextHolder.containsExtendInfo("testPutAllExtendInfo"));
assertEquals("test_value", EmbeddedStorageContextHolder.getCurrentExtendInfo().get("testPutAllExtendInfo"));
}
}
|
EmbeddedStorageContextHolderTest
|
java
|
quarkusio__quarkus
|
integration-tests/maven/src/test/resources-filtered/projects/project-with-extension/common/src/main/java/org/acme/CommonBean.java
|
{
"start": 99,
"end": 284
}
|
class ____ {
private final CommonTransitiveBean bean;
public CommonBean(CommonTransitiveBean bean) {
this.bean = java.util.Objects.requireNonNull(bean);
}
}
|
CommonBean
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/capacity/PreemptionCandidatesSelector.java
|
{
"start": 1343,
"end": 3427
}
|
class ____ {
protected CapacitySchedulerPreemptionContext preemptionContext;
protected ResourceCalculator rc;
private long maximumKillWaitTime = -1;
PreemptionCandidatesSelector(
CapacitySchedulerPreemptionContext preemptionContext) {
this.preemptionContext = preemptionContext;
this.rc = preemptionContext.getResourceCalculator();
}
/**
* Get preemption candidates from computed resource sharing and already
* selected candidates.
*
* @param selectedCandidates already selected candidates from previous policies
* @param clusterResource total resource
* @param totalPreemptedResourceAllowed how many resources allowed to be
* preempted in this round. Should be
* updated(in-place set) after the call
* @return merged selected candidates.
*/
public abstract Map<ApplicationAttemptId, Set<RMContainer>> selectCandidates(
Map<ApplicationAttemptId, Set<RMContainer>> selectedCandidates,
Resource clusterResource, Resource totalPreemptedResourceAllowed);
/**
* Compare by reversed priority order first, and then reversed containerId
* order.
*
* @param containers list of containers to sort for.
*/
@VisibleForTesting
static void sortContainers(List<RMContainer> containers) {
Collections.sort(containers, new Comparator<RMContainer>() {
@Override
public int compare(RMContainer a, RMContainer b) {
int schedKeyComp = b.getAllocatedSchedulerKey()
.compareTo(a.getAllocatedSchedulerKey());
if (schedKeyComp != 0) {
return schedKeyComp;
}
return b.getContainerId().compareTo(a.getContainerId());
}
});
}
public long getMaximumKillWaitTimeMs() {
if (maximumKillWaitTime > 0) {
return maximumKillWaitTime;
}
return preemptionContext.getDefaultMaximumKillWaitTimeout();
}
public void setMaximumKillWaitTime(long maximumKillWaitTime) {
this.maximumKillWaitTime = maximumKillWaitTime;
}
}
|
PreemptionCandidatesSelector
|
java
|
quarkusio__quarkus
|
extensions/security/deployment/src/test/java/io/quarkus/security/test/permissionsallowed/PermitAllWithPermissionsAllowedValidationFailureTest.java
|
{
"start": 364,
"end": 738
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.setExpectedException(IllegalStateException.class);
@Test
public void test() {
Assertions.fail();
}
@PermissionsAllowed(value = "ignored")
@PermitAll
@Singleton
public static
|
PermitAllWithPermissionsAllowedValidationFailureTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/DeleteLifecycleAction.java
|
{
"start": 1064,
"end": 2343
}
|
class ____ extends AcknowledgedRequest<Request> {
private final String policyName;
public Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, String policyName) {
super(masterNodeTimeout, ackTimeout);
this.policyName = policyName;
}
public Request(StreamInput in) throws IOException {
super(in);
policyName = in.readString();
}
public String getPolicyName() {
return policyName;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(policyName);
}
@Override
public int hashCode() {
return Objects.hash(policyName);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (obj.getClass() != getClass()) {
return false;
}
Request other = (Request) obj;
return Objects.equals(policyName, other.policyName);
}
@Override
public String toString() {
return format("delete lifecycle policy [%s]", policyName);
}
}
}
|
Request
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ModuleFactoryGeneratorTest.java
|
{
"start": 19556,
"end": 20371
}
|
class ____ {",
" @Provides @IntoSet List<List<?>> provideWildcardList() {",
" return new ArrayList<>();",
" }",
"}");
daggerCompiler(moduleFile)
.compile(
subject -> {
subject.hasErrorCount(0);
assertSourceMatchesGolden(subject, "test/TestModule_ProvideWildcardListFactory");
});
}
@Test public void providesSetValues() {
Source moduleFile =
CompilerTests.javaSource(
"test.TestModule",
"package test;",
"",
"import dagger.Module;",
"import dagger.Provides;",
"import dagger.multibindings.ElementsIntoSet;",
"import java.util.Set;",
"",
"@Module",
"final
|
TestModule
|
java
|
apache__camel
|
components/camel-undertow/src/main/java/org/apache/camel/component/undertow/handlers/CamelWebSocketHandler.java
|
{
"start": 15756,
"end": 16480
}
|
class ____ implements WebSocketConnectionCallback {
public UndertowWebSocketConnectionCallback() {
}
@Override
public void onConnect(WebSocketHttpExchange exchange, WebSocketChannel channel) {
LOG.trace("onConnect {}", exchange);
final String connectionKey = UUID.randomUUID().toString();
channel.setAttribute(UndertowConstants.CONNECTION_KEY, connectionKey);
channel.getReceiveSetter().set(receiveListener);
channel.addCloseTask(closeListener);
sendEventNotificationIfNeeded(connectionKey, exchange, channel, EventType.ONOPEN);
channel.resumeReceives();
}
}
}
|
UndertowWebSocketConnectionCallback
|
java
|
apache__camel
|
core/camel-support/src/main/java/org/apache/camel/support/scan/DefaultPackageScanResourceResolver.java
|
{
"start": 11785,
"end": 12390
}
|
class ____ by default
accept = !name.endsWith(".class");
}
if (accept) {
boolean match = PATH_MATCHER.match(subPattern, name);
LOG.debug("Found resource: {} matching pattern: {} -> {}", name, subPattern, match);
if (match) {
final ResourceLoader loader = PluginHelper.getResourceLoader(getCamelContext());
resources.add(loader.resolveResource("file:" + file.getPath()));
}
}
}
}
}
}
|
files
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/operators/over/NonTimeRangeUnboundedPrecedingFunctionTest.java
|
{
"start": 1802,
"end": 41554
}
|
class ____ extends NonTimeOverWindowTestBase {
private NonTimeRangeUnboundedPrecedingFunction<RowData>
getNonTimeRangeUnboundedPrecedingFunction(
long retentionTime, GeneratedRecordComparator generatedSortKeyComparator) {
return new NonTimeRangeUnboundedPrecedingFunction<>(
retentionTime,
aggsHandleFunction,
GENERATED_ROW_VALUE_EQUALISER,
GENERATED_SORT_KEY_EQUALISER,
generatedSortKeyComparator,
accTypes,
inputFieldTypes,
SORT_KEY_TYPES,
SORT_KEY_SELECTOR) {};
}
@Test
public void testInsertOnlyRecordsWithCustomSortKey() throws Exception {
KeyedProcessOperator<RowData, RowData, RowData> operator =
new KeyedProcessOperator<>(
getNonTimeRangeUnboundedPrecedingFunction(
0L, GENERATED_SORT_KEY_COMPARATOR_ASC));
OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
createTestHarness(operator);
testHarness.open();
// put some records
testHarness.processElement(insertRecord("key1", 1L, 100L));
testHarness.processElement(insertRecord("key1", 2L, 200L));
testHarness.processElement(insertRecord("key1", 5L, 500L));
testHarness.processElement(insertRecord("key1", 6L, 600L));
testHarness.processElement(insertRecord("key2", 1L, 100L));
testHarness.processElement(insertRecord("key2", 2L, 200L));
// out of order record should trigger updates for all records after its inserted position
testHarness.processElement(insertRecord("key1", 4L, 400L));
List<RowData> expectedRows =
Arrays.asList(
outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L),
outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L),
outputRecord(RowKind.INSERT, "key1", 5L, 500L, 8L),
outputRecord(RowKind.INSERT, "key1", 6L, 600L, 14L),
outputRecord(RowKind.INSERT, "key2", 1L, 100L, 1L),
outputRecord(RowKind.INSERT, "key2", 2L, 200L, 3L),
outputRecord(RowKind.INSERT, "key1", 4L, 400L, 7L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 8L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 12L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 14L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 18L));
List<RowData> actualRows = testHarness.extractOutputValues();
validateRows(actualRows, expectedRows);
}
@Test
public void testInsertOnlyRecordsWithCustomSortKeyAndLongSumAgg() throws Exception {
KeyedProcessOperator<RowData, RowData, RowData> operator =
new KeyedProcessOperator<>(
new NonTimeRangeUnboundedPrecedingFunction<>(
0L,
aggsSumLongHandleFunction,
GENERATED_ROW_VALUE_EQUALISER,
GENERATED_SORT_KEY_EQUALISER,
GENERATED_SORT_KEY_COMPARATOR_ASC,
accTypes,
inputFieldTypes,
SORT_KEY_TYPES,
SORT_KEY_SELECTOR) {});
OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
createTestHarness(operator);
testHarness.open();
// put some records
testHarness.processElement(insertRecord("key1", 1L, 100L));
testHarness.processElement(insertRecord("key1", 2L, 200L));
testHarness.processElement(insertRecord("key1", 5L, 500L));
testHarness.processElement(insertRecord("key1", 6L, 600L));
testHarness.processElement(insertRecord("key2", 1L, 100L));
testHarness.processElement(insertRecord("key2", 2L, 200L));
// out of order record should trigger updates for all records after its inserted position
testHarness.processElement(insertRecord("key1", 4L, 400L));
List<RowData> expectedRows =
Arrays.asList(
outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L),
outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L),
outputRecord(RowKind.INSERT, "key1", 5L, 500L, 8L),
outputRecord(RowKind.INSERT, "key1", 6L, 600L, 14L),
outputRecord(RowKind.INSERT, "key2", 1L, 100L, 1L),
outputRecord(RowKind.INSERT, "key2", 2L, 200L, 3L),
outputRecord(RowKind.INSERT, "key1", 4L, 400L, 7L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 8L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 12L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 14L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 18L));
List<RowData> actualRows = testHarness.extractOutputValues();
validateRows(actualRows, expectedRows);
}
@Test
public void testInsertOnlyRecordsWithDuplicateSortKeys() throws Exception {
KeyedProcessOperator<RowData, RowData, RowData> operator =
new KeyedProcessOperator<>(
getNonTimeRangeUnboundedPrecedingFunction(
0L, GENERATED_SORT_KEY_COMPARATOR_ASC));
OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
createTestHarness(operator);
testHarness.open();
// put some records
testHarness.processElement(insertRecord("key1", 1L, 100L));
testHarness.processElement(insertRecord("key1", 2L, 200L));
testHarness.processElement(insertRecord("key1", 5L, 500L));
testHarness.processElement(insertRecord("key1", 5L, 502L));
testHarness.processElement(insertRecord("key1", 5L, 501L));
testHarness.processElement(insertRecord("key1", 6L, 600L));
testHarness.processElement(insertRecord("key2", 1L, 100L));
testHarness.processElement(insertRecord("key2", 2L, 200L));
// out of order record should trigger updates for all records after its inserted position
testHarness.processElement(insertRecord("key1", 2L, 203L));
testHarness.processElement(insertRecord("key1", 2L, 201L));
testHarness.processElement(insertRecord("key1", 4L, 400L));
List<RowData> expectedRows =
Arrays.asList(
outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L),
outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L),
outputRecord(RowKind.INSERT, "key1", 5L, 500L, 8L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 8L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 13L),
outputRecord(RowKind.INSERT, "key1", 5L, 502L, 13L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 13L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 18L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 502L, 13L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 502L, 18L),
outputRecord(RowKind.INSERT, "key1", 5L, 501L, 18L),
outputRecord(RowKind.INSERT, "key1", 6L, 600L, 24L),
outputRecord(RowKind.INSERT, "key2", 1L, 100L, 1L),
outputRecord(RowKind.INSERT, "key2", 2L, 200L, 3L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 200L, 3L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 200L, 5L),
outputRecord(RowKind.INSERT, "key1", 2L, 203L, 5L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 18L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 20L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 502L, 18L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 502L, 20L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 501L, 18L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 501L, 20L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 24L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 26L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 200L, 5L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 200L, 7L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 203L, 5L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 203L, 7L),
outputRecord(RowKind.INSERT, "key1", 2L, 201L, 7L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 20L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 22L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 502L, 20L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 502L, 22L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 501L, 20L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 501L, 22L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 26L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 28L),
outputRecord(RowKind.INSERT, "key1", 4L, 400L, 11L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 22L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 26L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 502L, 22L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 502L, 26L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 501L, 22L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 501L, 26L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 28L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 32L));
List<RowData> actualRows = testHarness.extractOutputValues();
validateRows(actualRows, expectedRows);
}
@Test
public void testRetractingRecordsWithCustomSortKey() throws Exception {
KeyedProcessOperator<RowData, RowData, RowData> operator =
new KeyedProcessOperator<>(
getNonTimeRangeUnboundedPrecedingFunction(
0L, GENERATED_SORT_KEY_COMPARATOR_ASC));
OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
createTestHarness(operator);
testHarness.open();
// put some records
testHarness.processElement(insertRecord("key1", 1L, 100L));
testHarness.processElement(insertRecord("key1", 2L, 200L));
testHarness.processElement(insertRecord("key1", 5L, 500L));
testHarness.processElement(insertRecord("key1", 6L, 600L));
testHarness.processElement(updateBeforeRecord("key1", 2L, 200L));
testHarness.processElement(updateAfterRecord("key1", 3L, 200L));
testHarness.processElement(insertRecord("key2", 1L, 100L));
testHarness.processElement(insertRecord("key2", 2L, 200L));
testHarness.processElement(insertRecord("key3", 1L, 100L));
testHarness.processElement(insertRecord("key1", 4L, 400L));
testHarness.processElement(updateBeforeRecord("key1", 3L, 200L));
testHarness.processElement(updateAfterRecord("key1", 3L, 300L));
List<RowData> expectedRows =
Arrays.asList(
outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L),
outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L),
outputRecord(RowKind.INSERT, "key1", 5L, 500L, 8L),
outputRecord(RowKind.INSERT, "key1", 6L, 600L, 14L),
outputRecord(RowKind.DELETE, "key1", 2L, 200L, 3L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 8L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 6L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 14L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 12L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 3L, 200L, 4L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 6L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 9L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 12L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 15L),
outputRecord(RowKind.INSERT, "key2", 1L, 100L, 1L),
outputRecord(RowKind.INSERT, "key2", 2L, 200L, 3L),
outputRecord(RowKind.INSERT, "key3", 1L, 100L, 1L),
outputRecord(RowKind.INSERT, "key1", 4L, 400L, 8L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 9L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 13L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 15L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 19L),
outputRecord(RowKind.DELETE, "key1", 3L, 200L, 4L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 4L, 400L, 8L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 4L, 400L, 5L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 13L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 10L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 19L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 16L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 3L, 300L, 4L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 4L, 400L, 5L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 4L, 400L, 8L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 10L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 13L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 16L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 19L));
List<RowData> actualRows = testHarness.extractOutputValues();
validateRows(actualRows, expectedRows);
}
@Test
public void testRetractWithFirstDuplicateSortKey() throws Exception {
KeyedProcessOperator<RowData, RowData, RowData> operator =
new KeyedProcessOperator<>(
getNonTimeRangeUnboundedPrecedingFunction(
0L, GENERATED_SORT_KEY_COMPARATOR_ASC));
OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
createTestHarness(operator);
testHarness.open();
// put some records
testHarness.processElement(insertRecord("key1", 1L, 100L));
testHarness.processElement(insertRecord("key1", 2L, 200L));
testHarness.processElement(insertRecord("key1", 2L, 201L));
testHarness.processElement(insertRecord("key1", 5L, 500L));
testHarness.processElement(insertRecord("key1", 5L, 502L));
testHarness.processElement(insertRecord("key1", 5L, 501L));
testHarness.processElement(insertRecord("key1", 6L, 600L));
testHarness.processElement(updateBeforeRecord("key1", 5L, 500L));
List<RowData> expectedRows =
Arrays.asList(
outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L),
outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 200L, 3L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 200L, 5L),
outputRecord(RowKind.INSERT, "key1", 2L, 201L, 5L),
outputRecord(RowKind.INSERT, "key1", 5L, 500L, 10L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 10L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 15L),
outputRecord(RowKind.INSERT, "key1", 5L, 502L, 15L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 15L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 20L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 502L, 15L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 502L, 20L),
outputRecord(RowKind.INSERT, "key1", 5L, 501L, 20L),
outputRecord(RowKind.INSERT, "key1", 6L, 600L, 26L),
outputRecord(RowKind.DELETE, "key1", 5L, 500L, 20L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 502L, 20L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 502L, 15L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 501L, 20L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 501L, 15L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 26L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 21L));
List<RowData> actualRows = testHarness.extractOutputValues();
validateRows(actualRows, expectedRows);
}
@Test
public void testRetractWithMiddleDuplicateSortKey() throws Exception {
KeyedProcessOperator<RowData, RowData, RowData> operator =
new KeyedProcessOperator<>(
getNonTimeRangeUnboundedPrecedingFunction(
0L, GENERATED_SORT_KEY_COMPARATOR_ASC));
OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
createTestHarness(operator);
testHarness.open();
// put some records
testHarness.processElement(insertRecord("key1", 1L, 100L));
testHarness.processElement(insertRecord("key1", 2L, 200L));
testHarness.processElement(insertRecord("key1", 2L, 201L));
testHarness.processElement(insertRecord("key1", 5L, 500L));
testHarness.processElement(insertRecord("key1", 5L, 502L));
testHarness.processElement(insertRecord("key1", 5L, 501L));
testHarness.processElement(insertRecord("key1", 6L, 600L));
testHarness.processElement(updateBeforeRecord("key1", 5L, 502L));
List<RowData> expectedRows =
Arrays.asList(
outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L),
outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 200L, 3L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 200L, 5L),
outputRecord(RowKind.INSERT, "key1", 2L, 201L, 5L),
outputRecord(RowKind.INSERT, "key1", 5L, 500L, 10L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 10L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 15L),
outputRecord(RowKind.INSERT, "key1", 5L, 502L, 15L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 15L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 20L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 502L, 15L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 502L, 20L),
outputRecord(RowKind.INSERT, "key1", 5L, 501L, 20L),
outputRecord(RowKind.INSERT, "key1", 6L, 600L, 26L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 20L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 15L),
outputRecord(RowKind.DELETE, "key1", 5L, 502L, 20L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 501L, 20L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 501L, 15L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 26L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 21L));
List<RowData> actualRows = testHarness.extractOutputValues();
validateRows(actualRows, expectedRows);
}
@Test
public void testRetractWithLastDuplicateSortKey() throws Exception {
KeyedProcessOperator<RowData, RowData, RowData> operator =
new KeyedProcessOperator<>(
getNonTimeRangeUnboundedPrecedingFunction(
0L, GENERATED_SORT_KEY_COMPARATOR_ASC));
OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
createTestHarness(operator);
testHarness.open();
// put some records
testHarness.processElement(insertRecord("key1", 1L, 100L));
testHarness.processElement(insertRecord("key1", 2L, 200L));
testHarness.processElement(insertRecord("key1", 2L, 201L));
testHarness.processElement(insertRecord("key1", 5L, 500L));
testHarness.processElement(insertRecord("key1", 5L, 502L));
testHarness.processElement(insertRecord("key1", 5L, 501L));
testHarness.processElement(insertRecord("key1", 6L, 600L));
testHarness.processElement(updateBeforeRecord("key1", 5L, 501L));
List<RowData> expectedRows =
Arrays.asList(
outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L),
outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 200L, 3L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 200L, 5L),
outputRecord(RowKind.INSERT, "key1", 2L, 201L, 5L),
outputRecord(RowKind.INSERT, "key1", 5L, 500L, 10L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 10L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 15L),
outputRecord(RowKind.INSERT, "key1", 5L, 502L, 15L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 15L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 20L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 502L, 15L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 502L, 20L),
outputRecord(RowKind.INSERT, "key1", 5L, 501L, 20L),
outputRecord(RowKind.INSERT, "key1", 6L, 600L, 26L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 20L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 15L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 502L, 20L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 502L, 15L),
outputRecord(RowKind.DELETE, "key1", 5L, 501L, 20L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 26L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 21L));
List<RowData> actualRows = testHarness.extractOutputValues();
validateRows(actualRows, expectedRows);
}
@Test
public void testRetractWithDescendingSort() throws Exception {
KeyedProcessOperator<RowData, RowData, RowData> operator =
new KeyedProcessOperator<>(
getNonTimeRangeUnboundedPrecedingFunction(
0L, GENERATED_SORT_KEY_COMPARATOR_DESC));
OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
createTestHarness(operator);
testHarness.open();
// put some records
testHarness.processElement(insertRecord("key1", 1L, 100L));
testHarness.processElement(insertRecord("key1", 2L, 200L));
testHarness.processElement(insertRecord("key1", 2L, 201L));
testHarness.processElement(insertRecord("key1", 5L, 500L));
testHarness.processElement(insertRecord("key1", 6L, 600L));
testHarness.processElement(updateBeforeRecord("key1", 2L, 200L));
List<RowData> expectedRows =
Arrays.asList(
outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L),
outputRecord(RowKind.INSERT, "key1", 2L, 200L, 2L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 1L, 100L, 1L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 1L, 100L, 3L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 200L, 2L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 200L, 4L),
outputRecord(RowKind.INSERT, "key1", 2L, 201L, 4L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 1L, 100L, 3L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 1L, 100L, 5L),
outputRecord(RowKind.INSERT, "key1", 5L, 500L, 5L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 200L, 4L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 200L, 9L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 201L, 4L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 201L, 9L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 1L, 100L, 5L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 1L, 100L, 10L),
outputRecord(RowKind.INSERT, "key1", 6L, 600L, 6L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 5L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 11L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 200L, 9L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 200L, 15L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 201L, 9L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 201L, 15L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 1L, 100L, 10L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 1L, 100L, 16L),
outputRecord(RowKind.DELETE, "key1", 2L, 200L, 15L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 201L, 15L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 201L, 13L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 1L, 100L, 16L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 1L, 100L, 14L));
List<RowData> actualRows = testHarness.extractOutputValues();
validateRows(actualRows, expectedRows);
}
@Test
public void testRetractWithEarlyOut() throws Exception {
KeyedProcessOperator<RowData, RowData, RowData> operator =
new KeyedProcessOperator<>(
getNonTimeRangeUnboundedPrecedingFunction(
0L, GENERATED_SORT_KEY_COMPARATOR_ASC));
OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
createTestHarness(operator);
testHarness.open();
// put some records
testHarness.processElement(insertRecord("key1", 0L, 100L));
testHarness.processElement(insertRecord("key1", 0L, 101L));
testHarness.processElement(insertRecord("key1", 0L, 102L));
testHarness.processElement(insertRecord("key1", 1L, 100L));
testHarness.processElement(insertRecord("key1", 2L, 200L));
testHarness.processElement(insertRecord("key1", 2L, 201L));
testHarness.processElement(insertRecord("key1", 5L, 500L));
testHarness.processElement(insertRecord("key1", 5L, 502L));
testHarness.processElement(insertRecord("key1", 5L, 501L));
testHarness.processElement(insertRecord("key1", 6L, 600L));
testHarness.processElement(updateBeforeRecord("key1", 0L, 100L));
List<RowData> expectedRows =
Arrays.asList(
outputRecord(RowKind.INSERT, "key1", 0L, 100L, 0L),
outputRecord(RowKind.INSERT, "key1", 0L, 101L, 0L),
outputRecord(RowKind.INSERT, "key1", 0L, 102L, 0L),
outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L),
outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 200L, 3L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 200L, 5L),
outputRecord(RowKind.INSERT, "key1", 2L, 201L, 5L),
outputRecord(RowKind.INSERT, "key1", 5L, 500L, 10L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 10L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 15L),
outputRecord(RowKind.INSERT, "key1", 5L, 502L, 15L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 15L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 20L),
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 502L, 15L),
outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 502L, 20L),
outputRecord(RowKind.INSERT, "key1", 5L, 501L, 20L),
outputRecord(RowKind.INSERT, "key1", 6L, 600L, 26L),
outputRecord(RowKind.DELETE, "key1", 0L, 100L, 0L));
List<RowData> actualRows = testHarness.extractOutputValues();
validateRows(actualRows, expectedRows);
}
@Test
public void testInsertAndRetractAllWithStateValidation() throws Exception {
NonTimeRangeUnboundedPrecedingFunction<RowData> function =
getNonTimeRangeUnboundedPrecedingFunction(0L, GENERATED_SORT_KEY_COMPARATOR_ASC);
KeyedProcessOperator<RowData, RowData, RowData> operator =
new KeyedProcessOperator<>(function);
OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
createTestHarness(operator);
testHarness.open();
// put some records
GenericRowData firstRecord = GenericRowData.of("key1", 1L, 100L);
testHarness.processElement(insertRecord("key1", 1L, 100L));
validateState(function, firstRecord, 0, 1, 0, 1, 0, 1, true);
GenericRowData secondRecord = GenericRowData.of("key1", 2L, 200L);
testHarness.processElement(insertRecord("key1", 2L, 200L));
validateState(function, secondRecord, 1, 2, 0, 1, 1, 2, true);
GenericRowData thirdRecord = GenericRowData.of("key1", 2L, 201L);
testHarness.processElement(insertRecord("key1", 2L, 201L));
validateState(function, thirdRecord, 1, 2, 1, 2, 2, 3, true);
GenericRowData fourthRecord = GenericRowData.of("key1", 5L, 500L);
testHarness.processElement(insertRecord("key1", 5L, 500L));
validateState(function, fourthRecord, 2, 3, 0, 1, 3, 4, true);
GenericRowData fifthRecord = GenericRowData.of("key1", 5L, 502L);
testHarness.processElement(insertRecord("key1", 5L, 502L));
validateState(function, fifthRecord, 2, 3, 1, 2, 4, 5, true);
GenericRowData sixthRecord = GenericRowData.of("key1", 5L, 501L);
testHarness.processElement(insertRecord("key1", 5L, 501L));
validateState(function, sixthRecord, 2, 3, 2, 3, 5, 6, true);
GenericRowData seventhRecord = GenericRowData.of("key1", 6L, 600L);
testHarness.processElement(insertRecord("key1", 6L, 600L));
validateState(function, seventhRecord, 3, 4, 0, 1, 6, 7, true);
testHarness.processElement(updateBeforeRecord("key1", 5L, 502L));
validateState(function, fifthRecord, 2, 4, 1, 2, 4, 6, false);
testHarness.processElement(updateBeforeRecord("key1", 6L, 600L));
validateState(function, seventhRecord, 3, 3, 0, 0, 6, 5, false);
testHarness.processElement(updateBeforeRecord("key1", 2L, 201L));
validateState(function, thirdRecord, 1, 3, 1, 1, 2, 4, false);
testHarness.processElement(updateBeforeRecord("key1", 2L, 200L));
validateState(function, secondRecord, 1, 2, -1, 0, 1, 3, false);
testHarness.processElement(updateBeforeRecord("key1", 5L, 500L));
validateState(function, fourthRecord, 1, 2, 0, 1, 3, 2, false);
testHarness.processElement(updateBeforeRecord("key1", 5L, 501L));
validateState(function, sixthRecord, 1, 1, -1, 0, 5, 1, false);
testHarness.processElement(updateBeforeRecord("key1", 1L, 100L));
validateState(function, firstRecord, 0, 0, -1, 0, 0, 0, false);
List<RowData> actualRows = testHarness.extractOutputValues();
assertThat(actualRows.size()).isEqualTo(40);
assertThat(function.getNumOfSortKeysNotFound().getCount()).isEqualTo(0L);
assertThat(function.getNumOfIdsNotFound().getCount()).isEqualTo(0L);
}
@Test
public void testInsertWithStateTTLExpiration() throws Exception {
Duration stateTtlTime = Duration.ofMillis(10);
NonTimeRangeUnboundedPrecedingFunction<RowData> function =
getNonTimeRangeUnboundedPrecedingFunction(
stateTtlTime.toMillis(), GENERATED_SORT_KEY_COMPARATOR_ASC);
KeyedProcessOperator<RowData, RowData, RowData> operator =
new KeyedProcessOperator<>(function);
OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
createTestHarness(operator);
testHarness.open();
// put some records
GenericRowData firstRecord = GenericRowData.of("key1", 1L, 100L);
testHarness.processElement(insertRecord("key1", 1L, 100L));
validateState(function, firstRecord, 0, 1, 0, 1, 0, 1, true);
GenericRowData secondRecord = GenericRowData.of("key1", 2L, 200L);
testHarness.processElement(insertRecord("key1", 2L, 200L));
validateState(function, secondRecord, 1, 2, 0, 1, 1, 2, true);
GenericRowData thirdRecord = GenericRowData.of("key1", 2L, 201L);
testHarness.processElement(insertRecord("key1", 2L, 201L));
validateState(function, thirdRecord, 1, 2, 1, 2, 2, 3, true);
// expire the state
testHarness.setStateTtlProcessingTime(stateTtlTime.toMillis() + 1);
// After insertion of the following record, there should be only 1 record in state
// After insertion of the following record, there should be only 1 record in state
GenericRowData fourthRecord = GenericRowData.of("key1", 5L, 500L);
testHarness.processElement(insertRecord("key1", 5L, 500L));
validateState(function, fourthRecord, 0, 1, 0, 1, 0, 1, true);
List<RowData> actualRows = testHarness.extractOutputValues();
assertThat(actualRows.size()).isEqualTo(6);
assertThat(function.getNumOfSortKeysNotFound().getCount()).isEqualTo(0L);
assertThat(function.getNumOfIdsNotFound().getCount()).isEqualTo(0L);
}
@Test
public void testInsertAndRetractWithStateTTLExpiration() throws Exception {
Duration stateTtlTime = Duration.ofMillis(10);
NonTimeRangeUnboundedPrecedingFunction<RowData> function =
getNonTimeRangeUnboundedPrecedingFunction(
stateTtlTime.toMillis(), GENERATED_SORT_KEY_COMPARATOR_ASC);
KeyedProcessOperator<RowData, RowData, RowData> operator =
new KeyedProcessOperator<>(function);
OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
createTestHarness(operator);
testHarness.open();
// put some records
GenericRowData firstRecord = GenericRowData.of("key1", 1L, 100L);
testHarness.processElement(insertRecord("key1", 1L, 100L));
validateState(function, firstRecord, 0, 1, 0, 1, 0, 1, true);
GenericRowData secondRecord = GenericRowData.of("key1", 2L, 200L);
testHarness.processElement(insertRecord("key1", 2L, 200L));
validateState(function, secondRecord, 1, 2, 0, 1, 1, 2, true);
GenericRowData thirdRecord = GenericRowData.of("key1", 2L, 201L);
testHarness.processElement(insertRecord("key1", 2L, 201L));
validateState(function, thirdRecord, 1, 2, 1, 2, 2, 3, true);
GenericRowData fourthRecord = GenericRowData.of("key1", 5L, 500L);
testHarness.processElement(insertRecord("key1", 5L, 500L));
validateState(function, fourthRecord, 2, 3, 0, 1, 3, 4, true);
GenericRowData fifthRecord = GenericRowData.of("key1", 5L, 502L);
testHarness.processElement(insertRecord("key1", 5L, 502L));
validateState(function, fifthRecord, 2, 3, 1, 2, 4, 5, true);
// expire the state
testHarness.setStateTtlProcessingTime(stateTtlTime.toMillis() + 1);
// Retract a non-existent record due to state ttl expiration
testHarness.processElement(updateBeforeRecord("key1", 5L, 502L));
// Ensure state is null/empty
List<Tuple2<RowData, List<Long>>> sortedList =
function.getRuntimeContext().getState(function.sortedListStateDescriptor).value();
assertThat(sortedList).isNull();
MapState<RowData, RowData> mapState =
function.getRuntimeContext().getMapState(function.accStateDescriptor);
assertThat(mapState.isEmpty()).isTrue();
Long idValue = function.getRuntimeContext().getState(function.idStateDescriptor).value();
assertThat(idValue).isNull();
List<RowData> actualRows = testHarness.extractOutputValues();
assertThat(actualRows.size()).isEqualTo(9);
assertThat(function.getNumOfSortKeysNotFound().getCount()).isEqualTo(1L);
assertThat(function.getNumOfIdsNotFound().getCount()).isEqualTo(0L);
}
void validateNumAccRows(int numAccRows, int expectedNumAccRows, int totalRows) {
assertThat(numAccRows).isEqualTo(expectedNumAccRows);
}
void validateEntry(
AbstractNonTimeUnboundedPrecedingOver<RowData> function, RowData record, int idOffset)
throws Exception {
assertThat(
function.getRuntimeContext()
.getMapState(function.accStateDescriptor)
.get(SORT_KEY_SELECTOR.getKey(record)))
.isNotNull();
}
}
|
NonTimeRangeUnboundedPrecedingFunctionTest
|
java
|
apache__camel
|
components/camel-ai/camel-neo4j/src/main/java/org/apache/camel/component/neo4j/Neo4jConstants.java
|
{
"start": 1069,
"end": 1236
}
|
class ____ been moved to its own class. Use
* {@link org.apache.camel.component.neo4j.Neo4jHeaders} instead.
*/
@Deprecated
public static
|
has
|
java
|
apache__camel
|
components/camel-quartz/src/test/java/org/apache/camel/component/quartz/SpringQuartzConsumerTwoAppsClusteredFailoverTest.java
|
{
"start": 1433,
"end": 4658
}
|
class ____ {
protected final Logger log = LoggerFactory.getLogger(getClass());
@Test
public void testQuartzPersistentStoreClusteredApp() throws Exception {
// boot up the database the two apps are going to share inside a clustered quartz setup
AbstractXmlApplicationContext db = newAppContext("SpringQuartzConsumerClusteredAppDatabase.xml");
db.start();
// now launch the first clustered app which will acquire the quartz database lock and become the master
AbstractXmlApplicationContext app = newAppContext("SpringQuartzConsumerClusteredAppOne.xml");
app.start();
// as well as the second one which will run in slave mode as it will not be able to acquire the same lock
AbstractXmlApplicationContext app2 = newAppContext("SpringQuartzConsumerClusteredAppTwo.xml");
app2.start();
CamelContext camel = app.getBean("camelContext-" + getClass().getSimpleName(), CamelContext.class);
MockEndpoint mock = camel.getEndpoint("mock:result", MockEndpoint.class);
mock.expectedMinimumMessageCount(3);
mock.expectedMessagesMatches(new ClusteringPredicate(true));
mock.assertIsSatisfied();
// now let's simulate a crash of the first app (the quartz instance 'app-one')
log.warn("The first app is going to crash NOW!");
IOHelper.close(app);
log.warn("Crashed...");
log.warn("Crashed...");
log.warn("Crashed...");
// wait long enough until the second app takes it over...
Awaitility.await().untilAsserted(() -> {
CamelContext camel2 = app2.getBean("camelContext2-" + getClass().getSimpleName(), CamelContext.class);
MockEndpoint mock2 = camel2.getEndpoint("mock:result", MockEndpoint.class);
mock2.expectedMinimumMessageCount(3);
mock2.expectedMessagesMatches(new ClusteringPredicate(false));
mock2.assertIsSatisfied();
});
// inside the logs one can then clearly see how the route of the second app ('app-two') starts consuming:
// 2013-09-30 11:22:20,349 [main ] WARN erTwoAppsClusteredFailoverTest - Crashed...
// 2013-09-30 11:22:20,349 [main ] WARN erTwoAppsClusteredFailoverTest - Crashed...
// 2013-09-30 11:22:20,349 [main ] WARN erTwoAppsClusteredFailoverTest - Crashed...
// 2013-09-30 11:22:35,340 [_ClusterManager] INFO LocalDataSourceJobStore - ClusterManager: detected 1 failed or restarted instances.
// 2013-09-30 11:22:35,340 [_ClusterManager] INFO LocalDataSourceJobStore - ClusterManager: Scanning for instance "app-one"'s failed in-progress jobs.
// 2013-09-30 11:22:35,369 [eduler_Worker-1] INFO triggered - Exchange[ExchangePattern: InOnly, BodyType: String, Body: clustering PONGS!]
// and as the last step shutdown the second app as well as the database
IOHelper.close(app2, db);
}
private AbstractXmlApplicationContext newAppContext(String config) {
return CamelSpringTestSupport.newAppContext(config, getClass());
}
private static
|
SpringQuartzConsumerTwoAppsClusteredFailoverTest
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/health/RestGetHealthActionTests.java
|
{
"start": 595,
"end": 804
}
|
class ____ extends ESTestCase {
public void testHealthReportAPIDoesNotTripCircuitBreakers() {
assertThat(new RestGetHealthAction().canTripCircuitBreaker(), is(false));
}
}
|
RestGetHealthActionTests
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/MissingBindingValidationTest.java
|
{
"start": 15171,
"end": 16773
}
|
interface ____ {",
" @Provides",
" static Map<List<String>, Map<List<String>, Map<String, String>>> provideComplex() {",
" return null;",
" }",
"}");
CompilerTests.daggerCompiler(component, module)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining(
String.join(
"\n",
"Map<List,Map<List,Map>> cannot be provided without an @Provides-annotated "
+ "method.",
"",
" Map<List,Map<List,Map>> is requested at",
" [TestComponent] TestComponent.getRawComplex()",
"",
"Note: A similar binding is provided in the following other components:",
" Map<List<String>,Map<List<String>,Map<String,String>>> is provided at:",
" [TestComponent] TestModule.provideComplex()",
JVM_SUPPRESS_WILDCARDS_MESSAGE,
"",
"======================"));
});
}
@Test
public void noSimilarKey_withRawTypeArgument() {
Source component =
CompilerTests.javaSource(
"test.TestComponent",
"package test;",
"",
"import dagger.Component;",
"",
"@Component(modules = TestModule.class)",
"
|
TestModule
|
java
|
quarkusio__quarkus
|
integration-tests/rest-client/src/test/java/io/quarkus/it/rest/client/wronghost/ExternalWrongHostTestResourceUsingHostnameVerifier.java
|
{
"start": 244,
"end": 322
}
|
class ____ to propagate the properties when running the native tests
*/
public
|
is
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/multitenancy/schema/AbstractSchemaBasedMultiTenancyTest.java
|
{
"start": 1709,
"end": 9029
}
|
class ____<T extends MultiTenantConnectionProvider<String>, C extends ConnectionProvider> {
protected C acmeProvider;
protected C jbossProvider;
protected ServiceRegistryImplementor serviceRegistry;
protected SessionFactoryImplementor sessionFactory;
protected SessionFactoryImplementor getSessionFactory() {
return sessionFactory;
}
@BeforeEach
public void setUp() {
T multiTenantConnectionProvider = buildMultiTenantConnectionProvider();
Map<String, Object> settings = new HashMap<>();
settings.put( Environment.CACHE_REGION_FACTORY, CachingRegionFactory.class.getName() );
settings.put( Environment.GENERATE_STATISTICS, "true" );
serviceRegistry = (ServiceRegistryImplementor) ServiceRegistryUtil.serviceRegistryBuilder()
.applySettings( settings )
// Make sure to continue configuring the MultiTenantConnectionProvider by adding a service,
// rather than by setting 'hibernate.multi_tenant_connection_provider':
// that's important to reproduce the regression we test in 'testJdbcMetadataAccessible'.
.addService( MultiTenantConnectionProvider.class, multiTenantConnectionProvider )
.build();
MetadataSources ms = new MetadataSources( serviceRegistry );
ms.addAnnotatedClass( Customer.class );
ms.addAnnotatedClass( Invoice.class );
Metadata metadata = ms.buildMetadata();
final PersistentClass customerMapping = metadata.getEntityBinding( Customer.class.getName() );
customerMapping.setCached( true );
((RootClass) customerMapping).setCacheConcurrencyStrategy( "read-write" );
HibernateSchemaManagementTool tool = new HibernateSchemaManagementTool();
tool.injectServices( serviceRegistry );
new SchemaDropperImpl( serviceRegistry ).doDrop(
metadata,
serviceRegistry,
settings,
true,
new GenerationTargetToDatabase(
new DdlTransactionIsolatorTestingImpl(
serviceRegistry,
acmeProvider
)
),
new GenerationTargetToDatabase(
new DdlTransactionIsolatorTestingImpl(
serviceRegistry,
jbossProvider
)
)
);
new SchemaCreatorImpl( serviceRegistry ).doCreation(
metadata,
serviceRegistry,
settings,
true,
new GenerationTargetToDatabase(
new DdlTransactionIsolatorTestingImpl(
serviceRegistry,
acmeProvider
)
),
new GenerationTargetToDatabase(
new DdlTransactionIsolatorTestingImpl(
serviceRegistry,
jbossProvider
)
)
);
final SessionFactoryBuilder sfb = metadata.getSessionFactoryBuilder();
configure( sfb );
sessionFactory = (SessionFactoryImplementor) sfb.build();
}
protected void configure(SessionFactoryBuilder sfb) {
}
protected abstract T buildMultiTenantConnectionProvider();
@AfterEach
public void tearDown() {
if ( sessionFactory != null ) {
sessionFactory.close();
}
if ( serviceRegistry != null ) {
serviceRegistry.destroy();
}
if ( jbossProvider != null ) {
((Stoppable) jbossProvider).stop();
}
if ( acmeProvider != null ) {
((Stoppable) acmeProvider).stop();
}
}
@Test
@JiraKey(value = "HHH-16310")
public void testJdbcMetadataAccessible() {
assertThat( ((ExtractedDatabaseMetaDataImpl) sessionFactory.getJdbcServices().getJdbcEnvironment()
.getExtractedDatabaseMetaData()).isJdbcMetadataAccessible() )
.isTrue();
}
@Test
public void testBasicExpectedBehavior() {
Customer steve = doInHibernateSessionBuilder( this::jboss, session -> {
Customer _steve = new Customer( 1L, "steve" );
session.persist( _steve );
return _steve;
} );
doInHibernateSessionBuilder( this::acme, session -> {
Customer check = session.get( Customer.class, steve.getId() );
assertThat( check )
.describedAs( "tenancy not properly isolated" )
.isNull();
} );
doInHibernateSessionBuilder( this::jboss, session -> {
session.remove( steve );
} );
}
@Test
public void testSameIdentifiers() {
// create a customer 'steve' in jboss
Customer steve = doInHibernateSessionBuilder( this::jboss, session -> {
Customer _steve = new Customer( 1L, "steve" );
session.persist( _steve );
return _steve;
} );
// now, create a customer 'john' in acme
Customer john = doInHibernateSessionBuilder( this::acme, session -> {
Customer _john = new Customer( 1L, "john" );
session.persist( _john );
return _john;
} );
sessionFactory.getStatistics().clear();
// make sure we get the correct people back, from cache
// first, jboss
doInHibernateSessionBuilder( this::jboss, session -> {
Customer customer = session.getReference( Customer.class, 1L );
assertThat( customer.getName()).isEqualTo("steve");
// also, make sure this came from second level
assertThat( sessionFactory.getStatistics().getSecondLevelCacheHitCount()).isEqualTo(1);
} );
sessionFactory.getStatistics().clear();
// then, acme
doInHibernateSessionBuilder( this::acme, session -> {
Customer customer = session.getReference( Customer.class, 1L );
assertThat( customer.getName()).isEqualTo("john");
// also, make sure this came from second level
assertThat( sessionFactory.getStatistics().getSecondLevelCacheHitCount()).isEqualTo(1);
} );
// make sure the same works from datastore too
sessionFactory.getStatistics().clear();
sessionFactory.getCache().evictEntityData();
// first jboss
doInHibernateSessionBuilder( this::jboss, session -> {
Customer customer = session.getReference( Customer.class, 1L );
assertThat( customer.getName()).isEqualTo("steve");
// also, make sure this came from second level
assertThat( sessionFactory.getStatistics().getSecondLevelCacheHitCount()).isEqualTo(0);
} );
sessionFactory.getStatistics().clear();
// then, acme
doInHibernateSessionBuilder( this::acme, session -> {
Customer customer = session.getReference( Customer.class, 1L );
assertThat( customer.getName()).isEqualTo("john");
// also, make sure this came from second level
assertThat( sessionFactory.getStatistics().getSecondLevelCacheHitCount()).isEqualTo(0);
} );
doInHibernateSessionBuilder( this::jboss, session -> {
session.remove( steve );
} );
doInHibernateSessionBuilder( this::acme, session -> {
session.remove( john );
} );
}
@Test
public void testTableIdentifiers() {
Invoice orderJboss = doInHibernateSessionBuilder( this::jboss, session -> {
Invoice _orderJboss = new Invoice();
session.persist( _orderJboss );
assertThat( _orderJboss.getId()).isEqualTo(1L);
return _orderJboss;
} );
Invoice orderAcme = doInHibernateSessionBuilder( this::acme, session -> {
Invoice _orderAcme = new Invoice();
session.persist( _orderAcme );
assertThat( _orderAcme.getId()).isEqualTo(1L);
return _orderAcme;
} );
doInHibernateSessionBuilder( this::jboss, session -> {
session.remove( orderJboss );
} );
doInHibernateSessionBuilder( this::acme, session -> {
session.remove( orderAcme );
} );
sessionFactory.getStatistics().clear();
}
protected SessionBuilder newSession(String tenant) {
return sessionFactory
.withOptions()
.tenantIdentifier( (Object) tenant );
}
private SessionBuilder jboss() {
return newSession( "jboss" );
}
private SessionBuilder acme() {
return newSession( "acme" );
}
}
|
AbstractSchemaBasedMultiTenancyTest
|
java
|
netty__netty
|
handler/src/test/java/io/netty/handler/ssl/MockAlternativeKeyProvider.java
|
{
"start": 4404,
"end": 7901
}
|
class ____ extends SignatureSpi {
protected final String algorithm;
protected final String providerName;
protected final Signature realSignature;
protected MockSignature(String algorithm, String providerName)
throws NoSuchProviderException, NoSuchAlgorithmException {
this.algorithm = algorithm;
this.providerName = providerName;
this.realSignature = Signature.getInstance(algorithm, providerName);
}
@Override
protected void engineInitVerify(PublicKey publicKey) throws InvalidKeyException {
try {
realSignature.initVerify(publicKey);
} catch (Exception e) {
throw new InvalidKeyException("Failed to initialize signature", e);
}
}
@Override
protected void engineInitSign(PrivateKey privateKey) throws InvalidKeyException {
engineInitSign(privateKey, null);
}
@Override
protected void engineInitSign(PrivateKey privateKey, SecureRandom random) throws InvalidKeyException {
try {
// Extract the real key if it's wrapped
if (privateKey instanceof AlternativePrivateKeyWrapper) {
privateKey = ((AlternativePrivateKeyWrapper) privateKey).getDelegate();
realSignature.initSign(privateKey, random);
} else {
throw new InvalidKeyException("Unrecognized key type: " + privateKey.getClass().getName());
}
} catch (Exception e) {
throw new InvalidKeyException("Failed to initialize signature", e);
}
}
@Override
protected void engineUpdate(byte b) throws SignatureException {
realSignature.update(b);
}
@Override
protected void engineUpdate(byte[] b, int off, int len) throws SignatureException {
realSignature.update(b, off, len);
}
@Override
protected byte[] engineSign() throws SignatureException {
// Track signature operations
signatureOperations.incrementAndGet();
return realSignature.sign();
}
@Override
protected boolean engineVerify(byte[] sigBytes) throws SignatureException {
return realSignature.verify(sigBytes);
}
@Override
protected void engineSetParameter(String param, Object value) {
try {
realSignature.setParameter(param, value);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
protected void engineSetParameter(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
try {
realSignature.setParameter(params);
} catch (InvalidAlgorithmParameterException e) {
throw e;
} catch (Exception e) {
throw new InvalidAlgorithmParameterException("Failed to set parameter", e);
}
}
@Override
protected Object engineGetParameter(String param) {
try {
return realSignature.getParameter(param);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
// Concrete RSA signature implementations
public static final
|
MockSignature
|
java
|
apache__rocketmq
|
broker/src/main/java/org/apache/rocketmq/broker/processor/AdminBrokerProcessor.java
|
{
"start": 15065,
"end": 181375
}
|
class ____ implements NettyRequestProcessor {
private static final Logger LOGGER = LoggerFactory.getLogger(LoggerName.BROKER_LOGGER_NAME);
protected final BrokerController brokerController;
protected Set<String> configBlackList = new HashSet<>();
private final ExecutorService asyncExecuteWorker = new ThreadPoolExecutor(0, 4, 60L, TimeUnit.SECONDS, new SynchronousQueue<>());
public AdminBrokerProcessor(final BrokerController brokerController) {
this.brokerController = brokerController;
initConfigBlackList();
}
private void initConfigBlackList() {
configBlackList.add("brokerConfigPath");
configBlackList.add("rocketmqHome");
configBlackList.add("configBlackList");
String[] configArray = brokerController.getBrokerConfig().getConfigBlackList().split(";");
configBlackList.addAll(Arrays.asList(configArray));
}
@Override
public RemotingCommand processRequest(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
switch (request.getCode()) {
case RequestCode.UPDATE_AND_CREATE_TOPIC:
return this.updateAndCreateTopic(ctx, request);
case RequestCode.UPDATE_AND_CREATE_TOPIC_LIST:
return this.updateAndCreateTopicList(ctx, request);
case RequestCode.DELETE_TOPIC_IN_BROKER:
return this.deleteTopic(ctx, request);
case RequestCode.GET_ALL_TOPIC_CONFIG:
return this.getAllTopicConfig(ctx, request);
case RequestCode.GET_TIMER_CHECK_POINT:
return this.getTimerCheckPoint(ctx, request);
case RequestCode.GET_TIMER_METRICS:
return this.getTimerMetrics(ctx, request);
case RequestCode.UPDATE_BROKER_CONFIG:
return this.updateBrokerConfig(ctx, request);
case RequestCode.GET_BROKER_CONFIG:
return this.getBrokerConfig(ctx, request);
case RequestCode.UPDATE_COLD_DATA_FLOW_CTR_CONFIG:
return this.updateColdDataFlowCtrGroupConfig(ctx, request);
case RequestCode.REMOVE_COLD_DATA_FLOW_CTR_CONFIG:
return this.removeColdDataFlowCtrGroupConfig(ctx, request);
case RequestCode.GET_COLD_DATA_FLOW_CTR_INFO:
return this.getColdDataFlowCtrInfo(ctx);
case RequestCode.SET_COMMITLOG_READ_MODE:
return this.setCommitLogReadaheadMode(ctx, request);
case RequestCode.SEARCH_OFFSET_BY_TIMESTAMP:
return this.searchOffsetByTimestamp(ctx, request);
case RequestCode.GET_MAX_OFFSET:
return this.getMaxOffset(ctx, request);
case RequestCode.GET_MIN_OFFSET:
return this.getMinOffset(ctx, request);
case RequestCode.GET_EARLIEST_MSG_STORETIME:
return this.getEarliestMsgStoretime(ctx, request);
case RequestCode.GET_BROKER_RUNTIME_INFO:
return this.getBrokerRuntimeInfo(ctx, request);
case RequestCode.LOCK_BATCH_MQ:
return this.lockBatchMQ(ctx, request);
case RequestCode.UNLOCK_BATCH_MQ:
return this.unlockBatchMQ(ctx, request);
case RequestCode.UPDATE_AND_CREATE_SUBSCRIPTIONGROUP:
return this.updateAndCreateSubscriptionGroup(ctx, request);
case RequestCode.UPDATE_AND_CREATE_SUBSCRIPTIONGROUP_LIST:
return this.updateAndCreateSubscriptionGroupList(ctx, request);
case RequestCode.GET_ALL_SUBSCRIPTIONGROUP_CONFIG:
return this.getAllSubscriptionGroup(ctx, request);
case RequestCode.DELETE_SUBSCRIPTIONGROUP:
return this.deleteSubscriptionGroup(ctx, request);
case RequestCode.GET_TOPIC_STATS_INFO:
return this.getTopicStatsInfo(ctx, request);
case RequestCode.GET_CONSUMER_CONNECTION_LIST:
return this.getConsumerConnectionList(ctx, request);
case RequestCode.GET_PRODUCER_CONNECTION_LIST:
return this.getProducerConnectionList(ctx, request);
case RequestCode.GET_ALL_PRODUCER_INFO:
return this.getAllProducerInfo(ctx, request);
case RequestCode.GET_CONSUME_STATS:
return this.getConsumeStats(ctx, request);
case RequestCode.GET_ALL_CONSUMER_OFFSET:
return this.getAllConsumerOffset(ctx, request);
case RequestCode.GET_ALL_DELAY_OFFSET:
return this.getAllDelayOffset(ctx, request);
case RequestCode.GET_ALL_MESSAGE_REQUEST_MODE:
return this.getAllMessageRequestMode(ctx, request);
case RequestCode.INVOKE_BROKER_TO_RESET_OFFSET:
return this.resetOffset(ctx, request);
case RequestCode.INVOKE_BROKER_TO_GET_CONSUMER_STATUS:
return this.getConsumerStatus(ctx, request);
case RequestCode.QUERY_TOPIC_CONSUME_BY_WHO:
return this.queryTopicConsumeByWho(ctx, request);
case RequestCode.QUERY_TOPICS_BY_CONSUMER:
return this.queryTopicsByConsumer(ctx, request);
case RequestCode.QUERY_SUBSCRIPTION_BY_CONSUMER:
return this.querySubscriptionByConsumer(ctx, request);
case RequestCode.QUERY_CONSUME_TIME_SPAN:
return this.queryConsumeTimeSpan(ctx, request);
case RequestCode.GET_SYSTEM_TOPIC_LIST_FROM_BROKER:
return this.getSystemTopicListFromBroker(ctx, request);
case RequestCode.CLEAN_EXPIRED_CONSUMEQUEUE:
return this.cleanExpiredConsumeQueue();
case RequestCode.DELETE_EXPIRED_COMMITLOG:
return this.deleteExpiredCommitLog();
case RequestCode.CLEAN_UNUSED_TOPIC:
return this.cleanUnusedTopic();
case RequestCode.GET_CONSUMER_RUNNING_INFO:
return this.getConsumerRunningInfo(ctx, request);
case RequestCode.QUERY_CORRECTION_OFFSET:
return this.queryCorrectionOffset(ctx, request);
case RequestCode.CONSUME_MESSAGE_DIRECTLY:
return this.consumeMessageDirectly(ctx, request);
case RequestCode.CLONE_GROUP_OFFSET:
return this.cloneGroupOffset(ctx, request);
case RequestCode.VIEW_BROKER_STATS_DATA:
return ViewBrokerStatsData(ctx, request);
case RequestCode.GET_BROKER_CONSUME_STATS:
return fetchAllConsumeStatsInBroker(ctx, request);
case RequestCode.QUERY_CONSUME_QUEUE:
return queryConsumeQueue(ctx, request);
case RequestCode.CHECK_ROCKSDB_CQ_WRITE_PROGRESS:
return this.checkRocksdbCqWriteProgress(ctx, request);
case RequestCode.EXPORT_ROCKSDB_CONFIG_TO_JSON:
return this.exportRocksDBConfigToJson(ctx, request);
case RequestCode.UPDATE_AND_GET_GROUP_FORBIDDEN:
return this.updateAndGetGroupForbidden(ctx, request);
case RequestCode.GET_SUBSCRIPTIONGROUP_CONFIG:
return this.getSubscriptionGroup(ctx, request);
case RequestCode.RESUME_CHECK_HALF_MESSAGE:
return resumeCheckHalfMessage(ctx, request);
case RequestCode.GET_TOPIC_CONFIG:
return getTopicConfig(ctx, request);
case RequestCode.UPDATE_AND_CREATE_STATIC_TOPIC:
return this.updateAndCreateStaticTopic(ctx, request);
case RequestCode.NOTIFY_MIN_BROKER_ID_CHANGE:
return this.notifyMinBrokerIdChange(ctx, request);
case RequestCode.EXCHANGE_BROKER_HA_INFO:
return this.updateBrokerHaInfo(ctx, request);
case RequestCode.GET_BROKER_HA_STATUS:
return this.getBrokerHaStatus(ctx, request);
case RequestCode.RESET_MASTER_FLUSH_OFFSET:
return this.resetMasterFlushOffset(ctx, request);
case RequestCode.GET_BROKER_EPOCH_CACHE:
return this.getBrokerEpochCache(ctx, request);
case RequestCode.NOTIFY_BROKER_ROLE_CHANGED:
return this.notifyBrokerRoleChanged(ctx, request);
case RequestCode.AUTH_CREATE_USER:
return this.createUser(ctx, request);
case RequestCode.AUTH_UPDATE_USER:
return this.updateUser(ctx, request);
case RequestCode.AUTH_DELETE_USER:
return this.deleteUser(ctx, request);
case RequestCode.AUTH_GET_USER:
return this.getUser(ctx, request);
case RequestCode.AUTH_LIST_USER:
return this.listUser(ctx, request);
case RequestCode.AUTH_CREATE_ACL:
return this.createAcl(ctx, request);
case RequestCode.AUTH_UPDATE_ACL:
return this.updateAcl(ctx, request);
case RequestCode.AUTH_DELETE_ACL:
return this.deleteAcl(ctx, request);
case RequestCode.AUTH_GET_ACL:
return this.getAcl(ctx, request);
case RequestCode.AUTH_LIST_ACL:
return this.listAcl(ctx, request);
case RequestCode.POP_ROLLBACK:
return this.transferPopToFsStore(ctx, request);
default:
return getUnknownCmdResponse(ctx, request);
}
}
/**
* @param ctx
* @param request
* @return
* @throws RemotingCommandException
*/
private RemotingCommand getSubscriptionGroup(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
GetSubscriptionGroupConfigRequestHeader requestHeader = (GetSubscriptionGroupConfigRequestHeader) request.decodeCommandCustomHeader(GetSubscriptionGroupConfigRequestHeader.class);
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
SubscriptionGroupConfig groupConfig = this.brokerController.getSubscriptionGroupManager().findSubscriptionGroupConfig(requestHeader.getGroup());
if (groupConfig == null) {
LOGGER.error("No group in this broker, client: {} group: {}", ctx.channel().remoteAddress(), requestHeader.getGroup());
response.setCode(ResponseCode.SUBSCRIPTION_GROUP_NOT_EXIST);
response.setRemark("No group in this broker");
return response;
}
String content = JSONObject.toJSONString(groupConfig);
try {
response.setBody(content.getBytes(MixAll.DEFAULT_CHARSET));
} catch (UnsupportedEncodingException e) {
LOGGER.error("UnsupportedEncodingException getSubscriptionGroup: group=" + groupConfig.getGroupName(), e);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("UnsupportedEncodingException " + e.getMessage());
return response;
}
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
/**
* @param ctx
* @param request
* @return
*/
private RemotingCommand updateAndGetGroupForbidden(ChannelHandlerContext ctx, RemotingCommand request)
throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
UpdateGroupForbiddenRequestHeader requestHeader = (UpdateGroupForbiddenRequestHeader) //
request.decodeCommandCustomHeader(UpdateGroupForbiddenRequestHeader.class);
String group = requestHeader.getGroup();
String topic = requestHeader.getTopic();
LOGGER.info("updateAndGetGroupForbidden called by {} for object {}@{} readable={}",//
RemotingHelper.parseChannelRemoteAddr(ctx.channel()), group, //
topic, requestHeader.getReadable());
SubscriptionGroupManager groupManager = this.brokerController.getSubscriptionGroupManager();
if (requestHeader.getReadable() != null) {
groupManager.updateForbidden(group, topic, PermName.INDEX_PERM_READ, !requestHeader.getReadable());
}
response.setCode(ResponseCode.SUCCESS);
response.setRemark("");
GroupForbidden groupForbidden = new GroupForbidden();
groupForbidden.setGroup(group);
groupForbidden.setTopic(topic);
groupForbidden.setReadable(!groupManager.getForbidden(group, topic, PermName.INDEX_PERM_READ));
response.setBody(groupForbidden.toJson().getBytes(StandardCharsets.UTF_8));
return response;
}
private RemotingCommand checkRocksdbCqWriteProgress(ChannelHandlerContext ctx, RemotingCommand request) {
CheckRocksdbCqWriteResult result = new CheckRocksdbCqWriteResult();
result.setCheckStatus(CheckRocksdbCqWriteResult.CheckStatus.CHECK_IN_PROGRESS.getValue());
Runnable runnable = () -> {
try {
CheckRocksdbCqWriteResult checkResult = doCheckRocksdbCqWriteProgress(ctx, request);
LOGGER.info("checkRocksdbCqWriteProgress result: {}", JSON.toJSONString(checkResult));
} catch (Exception e) {
LOGGER.error("checkRocksdbCqWriteProgress error", e);
}
};
asyncExecuteWorker.submit(runnable);
RemotingCommand response = RemotingCommand.createResponseCommand(null);
response.setCode(ResponseCode.SUCCESS);
response.setBody(JSON.toJSONBytes(result));
return response;
}
private RemotingCommand exportRocksDBConfigToJson(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
ExportRocksDBConfigToJsonRequestHeader requestHeader = request.decodeCommandCustomHeader(ExportRocksDBConfigToJsonRequestHeader.class);
List<ExportRocksDBConfigToJsonRequestHeader.ConfigType> configTypes = requestHeader.fetchConfigType();
List<CompletableFuture<Void>> futureList = new ArrayList<>(configTypes.size());
for (ExportRocksDBConfigToJsonRequestHeader.ConfigType type : configTypes) {
switch (type) {
case TOPICS:
if (this.brokerController.getTopicConfigManager() instanceof RocksDBTopicConfigManager) {
RocksDBTopicConfigManager rocksDBTopicConfigManager = (RocksDBTopicConfigManager) this.brokerController.getTopicConfigManager();
futureList.add(CompletableFuture.runAsync(rocksDBTopicConfigManager::exportToJson, asyncExecuteWorker));
}
break;
case SUBSCRIPTION_GROUPS:
if (this.brokerController.getSubscriptionGroupManager() instanceof RocksDBSubscriptionGroupManager) {
RocksDBSubscriptionGroupManager rocksDBSubscriptionGroupManager = (RocksDBSubscriptionGroupManager) this.brokerController.getSubscriptionGroupManager();
futureList.add(CompletableFuture.runAsync(rocksDBSubscriptionGroupManager::exportToJson, asyncExecuteWorker));
}
break;
case CONSUMER_OFFSETS:
if (this.brokerController.getConsumerOffsetManager() instanceof RocksDBConsumerOffsetManager) {
RocksDBConsumerOffsetManager rocksDBConsumerOffsetManager = (RocksDBConsumerOffsetManager) this.brokerController.getConsumerOffsetManager();
futureList.add(CompletableFuture.runAsync(rocksDBConsumerOffsetManager::exportToJson, asyncExecuteWorker));
}
break;
default:
break;
}
}
try {
CompletableFuture.allOf(futureList.toArray(new CompletableFuture[0])).join();
} catch (CompletionException e) {
RemotingCommand response = RemotingCommand.createResponseCommand(null);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark(String.valueOf(e));
return response;
}
RemotingCommand response = RemotingCommand.createResponseCommand(null);
response.setCode(ResponseCode.SUCCESS);
response.setRemark("export done.");
return response;
}
@Override
public boolean rejectRequest() {
return false;
}
private synchronized RemotingCommand updateAndCreateTopic(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
long startTime = System.currentTimeMillis();
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
final CreateTopicRequestHeader requestHeader =
(CreateTopicRequestHeader) request.decodeCommandCustomHeader(CreateTopicRequestHeader.class);
LOGGER.info("Broker receive request to update or create topic={}, caller address={}",
requestHeader.getTopic(), RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
String topic = requestHeader.getTopic();
long executionTime;
try {
TopicValidator.ValidateResult result = TopicValidator.validateTopic(topic);
if (!result.isValid()) {
response.setCode(ResponseCode.INVALID_PARAMETER);
response.setRemark(result.getRemark());
return response;
}
if (brokerController.getBrokerConfig().isValidateSystemTopicWhenUpdateTopic()) {
if (TopicValidator.isSystemTopic(topic)) {
response.setCode(ResponseCode.INVALID_PARAMETER);
response.setRemark("The topic[" + topic + "] is conflict with system topic.");
return response;
}
}
TopicConfig topicConfig = new TopicConfig(topic);
topicConfig.setReadQueueNums(requestHeader.getReadQueueNums());
topicConfig.setWriteQueueNums(requestHeader.getWriteQueueNums());
topicConfig.setTopicFilterType(requestHeader.getTopicFilterTypeEnum());
topicConfig.setPerm(requestHeader.getPerm());
topicConfig.setTopicSysFlag(requestHeader.getTopicSysFlag() == null ? 0 : requestHeader.getTopicSysFlag());
topicConfig.setOrder(requestHeader.getOrder());
String attributesModification = requestHeader.getAttributes();
topicConfig.setAttributes(AttributeParser.parseToMap(attributesModification));
if (!brokerController.getBrokerConfig().isEnableMixedMessageType() && topicConfig.getAttributes() != null) {
// Get attribute by key with prefix sign
String msgTypeAttrKey = AttributeParser.ATTR_ADD_PLUS_SIGN + TopicAttributes.TOPIC_MESSAGE_TYPE_ATTRIBUTE.getName();
String msgTypeAttrValue = topicConfig.getAttributes().get(msgTypeAttrKey);
if (msgTypeAttrValue != null && msgTypeAttrValue.equals(TopicMessageType.MIXED.getValue())) {
response.setCode(ResponseCode.INVALID_PARAMETER);
response.setRemark("MIXED message type is not supported.");
return response;
}
}
if (topicConfig.equals(this.brokerController.getTopicConfigManager().getTopicConfigTable().get(topic))) {
LOGGER.info("Broker receive request to update or create topic={}, but topicConfig has no changes , so idempotent, caller address={}",
requestHeader.getTopic(), RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
response.setCode(ResponseCode.SUCCESS);
return response;
}
this.brokerController.getTopicConfigManager().updateTopicConfig(topicConfig);
if (brokerController.getBrokerConfig().isEnableSingleTopicRegister()) {
this.brokerController.registerSingleTopicAll(topicConfig);
} else {
this.brokerController.registerIncrementBrokerData(topicConfig, this.brokerController.getTopicConfigManager().getDataVersion());
}
response.setCode(ResponseCode.SUCCESS);
} catch (Exception e) {
LOGGER.error("Update / create topic failed for [{}]", request, e);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark(e.getMessage());
return response;
} finally {
executionTime = System.currentTimeMillis() - startTime;
InvocationStatus status = response.getCode() == ResponseCode.SUCCESS ?
InvocationStatus.SUCCESS : InvocationStatus.FAILURE;
Attributes attributes = this.brokerController.getBrokerMetricsManager().newAttributesBuilder()
.put(LABEL_INVOCATION_STATUS, status.getName())
.put(LABEL_IS_SYSTEM, TopicValidator.isSystemTopic(topic))
.build();
this.brokerController.getBrokerMetricsManager().getTopicCreateExecuteTime().record(executionTime, attributes);
}
LOGGER.info("executionTime of create topic:{} is {} ms", topic, executionTime);
return response;
}
    /**
     * Batch counterpart of updateAndCreateTopic: validates a list of topic configs
     * and installs them in one shot, then registers the broker metadata.
     * Validation is all-or-nothing — the first invalid entry aborts the whole
     * batch before any config is written.
     *
     * @param ctx     channel context, used for caller-address logging
     * @param request admin request whose body decodes to a {@link CreateTopicListRequestBody}
     * @return SUCCESS on success (including the idempotent early-return),
     *         INVALID_PARAMETER on a validation failure, SYSTEM_ERROR otherwise
     * @throws RemotingCommandException on request decoding failure
     */
    private synchronized RemotingCommand updateAndCreateTopicList(ChannelHandlerContext ctx,
        RemotingCommand request) throws RemotingCommandException {
        long startTime = System.currentTimeMillis();
        final CreateTopicListRequestBody requestBody = CreateTopicListRequestBody.decode(request.getBody(), CreateTopicListRequestBody.class);
        List<TopicConfig> topicConfigList = requestBody.getTopicConfigList();
        // Concatenated topic names are used only for logging and metric labels.
        StringBuilder builder = new StringBuilder();
        for (TopicConfig topicConfig : topicConfigList) {
            builder.append(topicConfig.getTopicName()).append(";");
        }
        String topicNames = builder.toString();
        LOGGER.info("AdminBrokerProcessor#updateAndCreateTopicList: topicNames: {}, called by {}", topicNames, RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
        final RemotingCommand response = RemotingCommand.createResponseCommand(null);
        long executionTime;
        try {
            // Valid topics
            for (TopicConfig topicConfig : topicConfigList) {
                String topic = topicConfig.getTopicName();
                TopicValidator.ValidateResult result = TopicValidator.validateTopic(topic);
                if (!result.isValid()) {
                    response.setCode(ResponseCode.INVALID_PARAMETER);
                    response.setRemark(result.getRemark());
                    return response;
                }
                // Optionally reject names colliding with reserved system topics.
                if (brokerController.getBrokerConfig().isValidateSystemTopicWhenUpdateTopic()) {
                    if (TopicValidator.isSystemTopic(topic)) {
                        response.setCode(ResponseCode.INVALID_PARAMETER);
                        response.setRemark("The topic[" + topic + "] is conflict with system topic.");
                        return response;
                    }
                }
                // MIXED message type is only allowed when explicitly enabled on the broker.
                if (!brokerController.getBrokerConfig().isEnableMixedMessageType() && topicConfig.getAttributes() != null) {
                    // Get attribute by key with prefix sign
                    String msgTypeAttrKey = AttributeParser.ATTR_ADD_PLUS_SIGN + TopicAttributes.TOPIC_MESSAGE_TYPE_ATTRIBUTE.getName();
                    String msgTypeAttrValue = topicConfig.getAttributes().get(msgTypeAttrKey);
                    if (msgTypeAttrValue != null && msgTypeAttrValue.equals(TopicMessageType.MIXED.getValue())) {
                        response.setCode(ResponseCode.INVALID_PARAMETER);
                        response.setRemark("MIXED message type is not supported.");
                        return response;
                    }
                }
                // NOTE(review): if ANY entry in the batch is unchanged the whole request
                // returns early as idempotent, even if other entries differ — confirm this
                // is the intended batch semantics.
                if (topicConfig.equals(this.brokerController.getTopicConfigManager().getTopicConfigTable().get(topic))) {
                    LOGGER.info("Broker receive request to update or create topic={}, but topicConfig has no changes , so idempotent, caller address={}",
                        topic, RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
                    response.setCode(ResponseCode.SUCCESS);
                    return response;
                }
            }
            this.brokerController.getTopicConfigManager().updateTopicConfigList(topicConfigList);
            if (brokerController.getBrokerConfig().isEnableSingleTopicRegister()) {
                // Register each topic with the name server individually.
                for (TopicConfig topicConfig : topicConfigList) {
                    this.brokerController.registerSingleTopicAll(topicConfig);
                }
            } else {
                this.brokerController.registerIncrementBrokerData(topicConfigList, this.brokerController.getTopicConfigManager().getDataVersion());
            }
            response.setCode(ResponseCode.SUCCESS);
        } catch (Exception e) {
            LOGGER.error("Update / create topic failed for [{}]", request, e);
            response.setCode(ResponseCode.SYSTEM_ERROR);
            response.setRemark(e.getMessage());
            return response;
        } finally {
            // Metrics are recorded on every exit path, including early returns.
            executionTime = System.currentTimeMillis() - startTime;
            InvocationStatus status = response.getCode() == ResponseCode.SUCCESS ?
                InvocationStatus.SUCCESS : InvocationStatus.FAILURE;
            Attributes attributes = this.brokerController.getBrokerMetricsManager().newAttributesBuilder()
                .put(LABEL_INVOCATION_STATUS, status.getName())
                .put(LABEL_IS_SYSTEM, TopicValidator.isSystemTopic(topicNames))
                .build();
            this.brokerController.getBrokerMetricsManager().getTopicCreateExecuteTime().record(executionTime, attributes);
        }
        LOGGER.info("executionTime of all topics:{} is {} ms", topicNames, executionTime);
        return response;
    }
    /**
     * Creates or updates a static (logical-queue) topic: installs the topic config
     * and its queue-mapping detail, then pushes the incremental registration.
     *
     * @param ctx     channel context, used for caller-address logging
     * @param request admin request; header is a {@link CreateTopicRequestHeader},
     *                body decodes to a {@link TopicQueueMappingDetail}
     * @return SUCCESS, INVALID_PARAMETER on a bad/system topic name, or
     *         SYSTEM_ERROR when installing the config/mapping fails
     * @throws RemotingCommandException on header decoding failure
     */
    private synchronized RemotingCommand updateAndCreateStaticTopic(ChannelHandlerContext ctx,
        RemotingCommand request) throws RemotingCommandException {
        final RemotingCommand response = RemotingCommand.createResponseCommand(null);
        final CreateTopicRequestHeader requestHeader =
            (CreateTopicRequestHeader) request.decodeCommandCustomHeader(CreateTopicRequestHeader.class);
        LOGGER.info("Broker receive request to update or create static topic={}, caller address={}", requestHeader.getTopic(), RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
        final TopicQueueMappingDetail topicQueueMappingDetail = RemotingSerializable.decode(request.getBody(), TopicQueueMappingDetail.class);
        String topic = requestHeader.getTopic();
        TopicValidator.ValidateResult result = TopicValidator.validateTopic(topic);
        if (!result.isValid()) {
            response.setCode(ResponseCode.INVALID_PARAMETER);
            response.setRemark(result.getRemark());
            return response;
        }
        // Optionally reject names colliding with reserved system topics.
        if (brokerController.getBrokerConfig().isValidateSystemTopicWhenUpdateTopic()) {
            if (TopicValidator.isSystemTopic(topic)) {
                response.setCode(ResponseCode.INVALID_PARAMETER);
                response.setRemark("The topic[" + topic + "] is conflict with system topic.");
                return response;
            }
        }
        // "force" lets the mapping update bypass consistency checks; null means false.
        boolean force = requestHeader.getForce() != null && requestHeader.getForce();
        // Build the physical topic config from the request header.
        TopicConfig topicConfig = new TopicConfig(topic);
        topicConfig.setReadQueueNums(requestHeader.getReadQueueNums());
        topicConfig.setWriteQueueNums(requestHeader.getWriteQueueNums());
        topicConfig.setTopicFilterType(requestHeader.getTopicFilterTypeEnum());
        topicConfig.setPerm(requestHeader.getPerm());
        topicConfig.setTopicSysFlag(requestHeader.getTopicSysFlag() == null ? 0 : requestHeader.getTopicSysFlag());
        try {
            this.brokerController.getTopicConfigManager().updateTopicConfig(topicConfig);
            this.brokerController.getTopicQueueMappingManager().updateTopicQueueMapping(topicQueueMappingDetail, force, false, true);
            this.brokerController.registerIncrementBrokerData(topicConfig, this.brokerController.getTopicConfigManager().getDataVersion());
            response.setCode(ResponseCode.SUCCESS);
        } catch (Exception e) {
            LOGGER.error("Update static topic failed for [{}]", request, e);
            response.setCode(ResponseCode.SYSTEM_ERROR);
            response.setRemark(e.getMessage());
        }
        return response;
    }
    /**
     * Deletes a topic from this broker. When clearRetryTopicWhenDeleteTopic is
     * enabled, the pop-retry topics (both v1 and v2 naming schemes) of every
     * consumer group that has offsets for the topic are deleted as well.
     *
     * @param ctx     channel context, used for caller-address logging
     * @param request admin request carrying a {@link DeleteTopicRequestHeader}
     * @return SUCCESS, INVALID_PARAMETER for a blank or system topic, or
     *         SYSTEM_ERROR when the deletion itself throws
     * @throws RemotingCommandException on header decoding failure
     */
    private synchronized RemotingCommand deleteTopic(ChannelHandlerContext ctx,
        RemotingCommand request) throws RemotingCommandException {
        final RemotingCommand response = RemotingCommand.createResponseCommand(null);
        DeleteTopicRequestHeader requestHeader =
            (DeleteTopicRequestHeader) request.decodeCommandCustomHeader(DeleteTopicRequestHeader.class);
        LOGGER.info("AdminBrokerProcessor#deleteTopic: broker receive request to delete topic={}, caller={}",
            requestHeader.getTopic(), RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
        String topic = requestHeader.getTopic();
        if (UtilAll.isBlank(topic)) {
            response.setCode(ResponseCode.INVALID_PARAMETER);
            response.setRemark("The specified topic is blank.");
            return response;
        }
        // System topics cannot be deleted when the broker validates them.
        if (brokerController.getBrokerConfig().isValidateSystemTopicWhenUpdateTopic()) {
            if (TopicValidator.isSystemTopic(topic)) {
                response.setCode(ResponseCode.INVALID_PARAMETER);
                response.setRemark("The topic[" + topic + "] is conflict with system topic.");
                return response;
            }
        }
        // Collect the topic plus any existing pop-retry companions to delete together.
        List<String> topicsToClean = new ArrayList<>();
        topicsToClean.add(topic);
        if (brokerController.getBrokerConfig().isClearRetryTopicWhenDeleteTopic()) {
            final Set<String> groups = this.brokerController.getConsumerOffsetManager().whichGroupByTopic(topic);
            for (String group : groups) {
                final String popRetryTopicV2 = KeyBuilder.buildPopRetryTopic(topic, group, true);
                if (brokerController.getTopicConfigManager().selectTopicConfig(popRetryTopicV2) != null) {
                    topicsToClean.add(popRetryTopicV2);
                }
                final String popRetryTopicV1 = KeyBuilder.buildPopRetryTopicV1(topic, group);
                if (brokerController.getTopicConfigManager().selectTopicConfig(popRetryTopicV1) != null) {
                    topicsToClean.add(popRetryTopicV1);
                }
            }
        }
        try {
            for (String topicToClean : topicsToClean) {
                // delete topic
                deleteTopicInBroker(topicToClean);
            }
        } catch (Throwable t) {
            return buildErrorResponse(ResponseCode.SYSTEM_ERROR, t.getMessage());
        }
        response.setCode(ResponseCode.SUCCESS);
        response.setRemark(null);
        return response;
    }
private void deleteTopicInBroker(String topic) {
this.brokerController.getTopicConfigManager().deleteTopicConfig(topic);
this.brokerController.getTopicQueueMappingManager().delete(topic);
this.brokerController.getConsumerOffsetManager().cleanOffsetByTopic(topic);
this.brokerController.getPopInflightMessageCounter().clearInFlightMessageNumByTopicName(topic);
this.brokerController.getMessageStore().deleteTopics(Sets.newHashSet(topic));
this.brokerController.getMessageStore().getTimerMessageStore().getTimerMetrics().removeTimingCount(topic);
}
private RemotingCommand getUnknownCmdResponse(ChannelHandlerContext ctx, RemotingCommand request) {
String error = " request type " + request.getCode() + " not supported";
final RemotingCommand response =
RemotingCommand.createResponseCommand(RemotingSysResponseCode.REQUEST_CODE_NOT_SUPPORTED, error);
return response;
}
    /**
     * Returns this broker's topic configs and static-topic queue mappings,
     * serialized as JSON in the response body. Modern clients that send a data
     * version, topic sequence and max-topic-num get a paged subset when split
     * metadata is enabled; old clients (all three fields null) get everything.
     *
     * @param ctx     channel context, used for error logging
     * @param request admin request carrying a {@link GetAllTopicConfigRequestHeader}
     * @return SUCCESS with the serialized wrapper, or SYSTEM_ERROR when
     *         serialization yields no content
     * @throws RemotingCommandException on header decoding failure
     */
    private RemotingCommand getAllTopicConfig(ChannelHandlerContext ctx, RemotingCommand request)
        throws RemotingCommandException {
        final RemotingCommand response = RemotingCommand.createResponseCommand(GetAllTopicConfigResponseHeader.class);
        final GetAllTopicConfigResponseHeader responseHeader =
            (GetAllTopicConfigResponseHeader) response.readCustomHeader();
        final GetAllTopicConfigRequestHeader requestHeader =
            request.decodeCommandCustomHeader(GetAllTopicConfigRequestHeader.class);
        String dataVersionStr = requestHeader.getDataVersion();
        Integer topicSeq = requestHeader.getTopicSeq();
        Integer maxTopicNum = requestHeader.getMaxTopicNum();
        TopicConfigManager tcManager = brokerController.getTopicConfigManager();
        TopicQueueMappingManager tqmManager = brokerController.getTopicQueueMappingManager();
        TopicConfigAndMappingSerializeWrapper topicConfigAndMappingSerializeWrapper = new TopicConfigAndMappingSerializeWrapper();
        if (!brokerController.getBrokerConfig().isEnableSplitMetadata()
            || ObjectUtils.allNull(dataVersionStr, topicSeq, maxTopicNum)) { // old client, return all topic config
            topicConfigAndMappingSerializeWrapper.setDataVersion(tcManager.getDataVersion());
            topicConfigAndMappingSerializeWrapper.setTopicConfigTable(tcManager.getTopicConfigTable());
            topicConfigAndMappingSerializeWrapper.setMappingDataVersion(tqmManager.getDataVersion());
            topicConfigAndMappingSerializeWrapper.setTopicQueueMappingDetailMap(tqmManager.getTopicQueueMappingTable());
        } else {
            // Paged path: cap the page size by the broker's split-metadata setting,
            // treating an absent maxTopicNum as "no client-side limit".
            int topicNum = Math.min(brokerController.getBrokerConfig().getSplitMetadataSize(),
                Optional.ofNullable(maxTopicNum).orElse(Integer.MAX_VALUE)); // use smaller value
            ConcurrentHashMap<String, TopicConfig> subTopicConfigTable =
                tcManager.subTopicConfigTable(dataVersionStr, topicSeq, topicNum);
            topicConfigAndMappingSerializeWrapper.setTopicConfigTable(subTopicConfigTable);
            topicConfigAndMappingSerializeWrapper.setDataVersion(tcManager.getDataVersion());
            topicConfigAndMappingSerializeWrapper.setMappingDataVersion(tqmManager.getDataVersion());
            // Only ship the queue mappings that belong to the returned page of topics.
            topicConfigAndMappingSerializeWrapper.setTopicQueueMappingDetailMap(
                tqmManager.subTopicQueueMappingTable(subTopicConfigTable.keySet()));
        }
        // Total count lets paging clients know when they have fetched everything.
        responseHeader.setTotalTopicNum(tcManager.getTopicConfigTable().size());
        String content = topicConfigAndMappingSerializeWrapper.toJson();
        if (StringUtils.isNotBlank(content)) {
            response.setCode(ResponseCode.SUCCESS);
            response.setRemark(null);
            response.setBody(content.getBytes(StandardCharsets.UTF_8));
        } else {
            LOGGER.error("No topic in this broker, client: {}", ctx.channel().remoteAddress());
            response.setCode(ResponseCode.SYSTEM_ERROR);
            response.setRemark("No topic in this broker");
        }
        return response;
    }
private RemotingCommand getTimerCheckPoint(ChannelHandlerContext ctx, RemotingCommand request) {
final RemotingCommand response = RemotingCommand.createResponseCommand(ResponseCode.SYSTEM_ERROR, "Unknown");
TimerCheckpoint timerCheckpoint = this.brokerController.getTimerCheckpoint();
if (null == timerCheckpoint) {
LOGGER.error("AdminBrokerProcessor#getTimerCheckPoint: checkpoint is null, caller={}", ctx.channel().remoteAddress());
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("The checkpoint is null");
return response;
}
response.setBody(TimerCheckpoint.encode(timerCheckpoint).array());
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand getTimerMetrics(ChannelHandlerContext ctx, RemotingCommand request) {
final RemotingCommand response = RemotingCommand.createResponseCommand(ResponseCode.SYSTEM_ERROR, "Unknown");
TimerMessageStore timerMessageStore = this.brokerController.getMessageStore().getTimerMessageStore();
if (null == timerMessageStore) {
LOGGER.error("The timer message store is null, client: {}", ctx.channel().remoteAddress());
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("The timer message store is null");
return response;
}
response.setBody(timerMessageStore.getTimerMetrics().encode().getBytes(StandardCharsets.UTF_8));
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
    /**
     * Adds or updates per-consumer-group cold-data flow-control thresholds.
     * The request body is a properties-format string mapping consumer group
     * names to numeric thresholds.
     *
     * @param ctx     channel context, used for caller-address logging
     * @param request admin request whose body holds the properties text
     * @return SUCCESS (also for an empty body), or SYSTEM_ERROR when the body
     *         cannot be parsed or decoded
     */
    private synchronized RemotingCommand updateColdDataFlowCtrGroupConfig(ChannelHandlerContext ctx,
        RemotingCommand request) {
        final RemotingCommand response = RemotingCommand.createResponseCommand(null);
        LOGGER.info("updateColdDataFlowCtrGroupConfig called by {}", RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
        byte[] body = request.getBody();
        if (body != null) {
            try {
                String bodyStr = new String(body, MixAll.DEFAULT_CHARSET);
                Properties properties = MixAll.string2Properties(bodyStr);
                if (properties != null) {
                    LOGGER.info("updateColdDataFlowCtrGroupConfig new config: {}, client: {}", properties, ctx.channel().remoteAddress());
                    properties.forEach((key, value) -> {
                        try {
                            String consumerGroup = String.valueOf(key);
                            Long threshold = Long.valueOf(String.valueOf(value));
                            this.brokerController.getColdDataCgCtrService()
                                .addOrUpdateGroupConfig(consumerGroup, threshold);
                        } catch (Exception e) {
                            // A bad entry (e.g. non-numeric threshold) is logged and skipped;
                            // the rest of the batch is still applied and the call still succeeds.
                            LOGGER.error("updateColdDataFlowCtrGroupConfig properties on entry error, key: {}, val: {}",
                                key, value, e);
                        }
                    });
                } else {
                    LOGGER.error("updateColdDataFlowCtrGroupConfig string2Properties error");
                    response.setCode(ResponseCode.SYSTEM_ERROR);
                    response.setRemark("string2Properties error");
                    return response;
                }
            } catch (UnsupportedEncodingException e) {
                LOGGER.error("updateColdDataFlowCtrGroupConfig UnsupportedEncodingException", e);
                response.setCode(ResponseCode.SYSTEM_ERROR);
                response.setRemark("UnsupportedEncodingException " + e);
                return response;
            }
        }
        response.setCode(ResponseCode.SUCCESS);
        response.setRemark(null);
        return response;
    }
private synchronized RemotingCommand removeColdDataFlowCtrGroupConfig(ChannelHandlerContext ctx,
RemotingCommand request) {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
LOGGER.info("removeColdDataFlowCtrGroupConfig called by {}", RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
byte[] body = request.getBody();
if (body != null) {
try {
String consumerGroup = new String(body, MixAll.DEFAULT_CHARSET);
if (consumerGroup != null) {
LOGGER.info("removeColdDataFlowCtrGroupConfig, consumerGroup: {} client: {}", consumerGroup, ctx.channel().remoteAddress());
this.brokerController.getColdDataCgCtrService().removeGroupConfig(consumerGroup);
} else {
LOGGER.error("removeColdDataFlowCtrGroupConfig string parse error");
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("string parse error");
return response;
}
} catch (UnsupportedEncodingException e) {
LOGGER.error("removeColdDataFlowCtrGroupConfig UnsupportedEncodingException", e);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("UnsupportedEncodingException " + e);
return response;
}
}
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand getColdDataFlowCtrInfo(ChannelHandlerContext ctx) {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
LOGGER.info("getColdDataFlowCtrInfo called by {}", RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
String content = this.brokerController.getColdDataCgCtrService().getColdDataFlowCtrInfo();
if (content != null) {
try {
response.setBody(content.getBytes(MixAll.DEFAULT_CHARSET));
} catch (UnsupportedEncodingException e) {
LOGGER.error("getColdDataFlowCtrInfo UnsupportedEncodingException", e);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("UnsupportedEncodingException " + e);
return response;
}
}
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand setCommitLogReadaheadMode(ChannelHandlerContext ctx, RemotingCommand request) {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
LOGGER.info("setCommitLogReadaheadMode called by {}", RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
try {
HashMap<String, String> extFields = request.getExtFields();
if (null == extFields) {
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("set commitlog readahead mode param error");
return response;
}
int mode = Integer.parseInt(extFields.get(FIleReadaheadMode.READ_AHEAD_MODE));
if (mode != LibC.MADV_RANDOM && mode != LibC.MADV_NORMAL) {
response.setCode(ResponseCode.INVALID_PARAMETER);
response.setRemark("set commitlog readahead mode param value error");
return response;
}
MessageStore messageStore = this.brokerController.getMessageStore();
if (messageStore instanceof DefaultMessageStore) {
DefaultMessageStore defaultMessageStore = (DefaultMessageStore) messageStore;
if (mode == LibC.MADV_NORMAL) {
defaultMessageStore.getMessageStoreConfig().setDataReadAheadEnable(true);
} else {
defaultMessageStore.getMessageStoreConfig().setDataReadAheadEnable(false);
}
defaultMessageStore.getCommitLog().scanFileAndSetReadMode(mode);
}
response.setCode(ResponseCode.SUCCESS);
response.setRemark("set commitlog readahead mode success, mode: " + mode);
} catch (Exception e) {
LOGGER.error("set commitlog readahead mode failed", e);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("set commitlog readahead mode failed");
}
return response;
}
    /**
     * Applies a properties-format config update to this broker. Keys on the
     * configuration black list are rejected wholesale. A change to
     * {@code brokerPermission} additionally bumps the topic-config data version
     * and forces a full re-registration with the name servers.
     *
     * @param ctx     channel context, used for caller-address logging
     * @param request admin request whose body holds the properties text
     * @return SUCCESS (also for an empty body), NO_PERMISSION for black-listed
     *         keys, or SYSTEM_ERROR on parse/decode failure
     */
    private synchronized RemotingCommand updateBrokerConfig(ChannelHandlerContext ctx, RemotingCommand request) {
        final RemotingCommand response = RemotingCommand.createResponseCommand(null);
        final String callerAddress = RemotingHelper.parseChannelRemoteAddr(ctx.channel());
        LOGGER.info("Broker receive request to update config, caller address={}", callerAddress);
        byte[] body = request.getBody();
        if (body != null) {
            try {
                String bodyStr = new String(body, MixAll.DEFAULT_CHARSET);
                Properties properties = MixAll.string2Properties(bodyStr);
                if (properties != null) {
                    LOGGER.info("updateBrokerConfig, new config: [{}] client: {} ", properties, callerAddress);
                    // Reject the whole update if any key is on the black list.
                    if (validateBlackListConfigExist(properties)) {
                        response.setCode(ResponseCode.NO_PERMISSION);
                        response.setRemark("Can not update config in black list.");
                        return response;
                    }
                    this.brokerController.getConfiguration().update(properties);
                    // Permission changes must be propagated to name servers immediately.
                    if (properties.containsKey("brokerPermission")) {
                        long stateMachineVersion = brokerController.getMessageStore() != null ? brokerController.getMessageStore().getStateMachineVersion() : 0;
                        this.brokerController.getTopicConfigManager().getDataVersion().nextVersion(stateMachineVersion);
                        this.brokerController.registerBrokerAll(false, false, true);
                    }
                } else {
                    LOGGER.error("string2Properties error");
                    response.setCode(ResponseCode.SYSTEM_ERROR);
                    response.setRemark("string2Properties error");
                    return response;
                }
            } catch (UnsupportedEncodingException e) {
                LOGGER.error("AdminBrokerProcessor#updateBrokerConfig: unexpected error, caller={}",
                    callerAddress, e);
                response.setCode(ResponseCode.SYSTEM_ERROR);
                response.setRemark("UnsupportedEncodingException " + e);
                return response;
            }
        }
        response.setCode(ResponseCode.SUCCESS);
        response.setRemark(null);
        return response;
    }
private RemotingCommand getBrokerConfig(ChannelHandlerContext ctx, RemotingCommand request) {
final RemotingCommand response = RemotingCommand.createResponseCommand(GetBrokerConfigResponseHeader.class);
final GetBrokerConfigResponseHeader responseHeader = (GetBrokerConfigResponseHeader) response.readCustomHeader();
String content = this.brokerController.getConfiguration().getAllConfigsFormatString();
if (content != null && content.length() > 0) {
try {
content = MixAll.adjustConfigForPlatform(content);
response.setBody(content.getBytes(MixAll.DEFAULT_CHARSET));
} catch (UnsupportedEncodingException e) {
LOGGER.error("AdminBrokerProcessor#getBrokerConfig: unexpected error, caller={}",
RemotingHelper.parseChannelRemoteAddr(ctx.channel()), e);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("UnsupportedEncodingException " + e);
return response;
}
}
responseHeader.setVersion(this.brokerController.getConfiguration().getDataVersionJson());
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
    /**
     * Static-topic variant of search-offset-by-timestamp. Walks the logic-queue
     * mapping items; a segment hosted on this broker is resolved locally, other
     * segments are resolved by forwarding a synchronous RPC to their host broker.
     * The physical offset found is converted back to the static (logical) offset.
     *
     * @param requestHeader  search-offset header; mutated in place when forwarding
     * @param mappingContext static-topic mapping context for the request
     * @return {@code null} when the topic is not static (caller falls through to
     *         the plain path); otherwise a complete response (SUCCESS,
     *         NOT_LEADER_FOR_QUEUE, or SYSTEM_ERROR)
     */
    private RemotingCommand rewriteRequestForStaticTopic(SearchOffsetRequestHeader requestHeader,
        TopicQueueMappingContext mappingContext) {
        try {
            // Not a static topic — signal the caller to use the normal path.
            if (mappingContext.getMappingDetail() == null) {
                return null;
            }
            TopicQueueMappingDetail mappingDetail = mappingContext.getMappingDetail();
            List<LogicQueueMappingItem> mappingItems = mappingContext.getMappingItemList();
            if (!mappingContext.isLeader()) {
                // NOTE(review): "does not exit" in this remark looks like a typo for
                // "does not exist" (same text appears in the sibling rewrite methods).
                return buildErrorResponse(ResponseCode.NOT_LEADER_FOR_QUEUE, String.format("%s-%d does not exit in request process of current broker %s", mappingContext.getTopic(), mappingContext.getGlobalId(), mappingDetail.getBname()));
            }
            //TO DO should make sure the timestampOfOffset is equal or bigger than the searched timestamp
            Long timestamp = requestHeader.getTimestamp();
            long offset = -1;
            for (int i = 0; i < mappingItems.size(); i++) {
                LogicQueueMappingItem item = mappingItems.get(i);
                // Skip segments whose logical start offset has not been decided yet.
                if (!item.checkIfLogicoffsetDecided()) {
                    continue;
                }
                if (mappingDetail.getBname().equals(item.getBname())) {
                    // Segment hosted locally — query the local message store.
                    offset = this.brokerController.getMessageStore().getOffsetInQueueByTime(mappingContext.getTopic(), item.getQueueId(), timestamp, requestHeader.getBoundaryType());
                    if (offset > 0) {
                        offset = item.computeStaticQueueOffsetStrictly(offset);
                        break;
                    }
                } else {
                    // Segment hosted remotely — forward the search to its broker.
                    // setLo(false) marks the forwarded request as physical (non-logical).
                    requestHeader.setLo(false);
                    requestHeader.setTimestamp(timestamp);
                    requestHeader.setQueueId(item.getQueueId());
                    requestHeader.setBrokerName(item.getBname());
                    RpcRequest rpcRequest = new RpcRequest(RequestCode.SEARCH_OFFSET_BY_TIMESTAMP, requestHeader, null);
                    RpcResponse rpcResponse = this.brokerController.getBrokerOuterAPI().getRpcClient().invoke(rpcRequest, this.brokerController.getBrokerConfig().getForwardTimeout()).get();
                    if (rpcResponse.getException() != null) {
                        throw rpcResponse.getException();
                    }
                    SearchOffsetResponseHeader offsetResponseHeader = (SearchOffsetResponseHeader) rpcResponse.getHeader();
                    if (offsetResponseHeader.getOffset() < 0
                        || item.checkIfEndOffsetDecided() && offsetResponseHeader.getOffset() >= item.getEndOffset()) {
                        continue;
                    } else {
                        // NOTE(review): unlike the local branch, this does not break; a
                        // later mapping item may overwrite this offset — confirm intended.
                        offset = item.computeStaticQueueOffsetStrictly(offsetResponseHeader.getOffset());
                    }
                }
            }
            final RemotingCommand response = RemotingCommand.createResponseCommand(SearchOffsetResponseHeader.class);
            final SearchOffsetResponseHeader responseHeader = (SearchOffsetResponseHeader) response.readCustomHeader();
            responseHeader.setOffset(offset);
            response.setCode(ResponseCode.SUCCESS);
            response.setRemark(null);
            return response;
        } catch (Throwable t) {
            return buildErrorResponse(ResponseCode.SYSTEM_ERROR, t.getMessage());
        }
    }
private RemotingCommand searchOffsetByTimestamp(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(SearchOffsetResponseHeader.class);
final SearchOffsetResponseHeader responseHeader = (SearchOffsetResponseHeader) response.readCustomHeader();
final SearchOffsetRequestHeader requestHeader =
(SearchOffsetRequestHeader) request.decodeCommandCustomHeader(SearchOffsetRequestHeader.class);
TopicQueueMappingContext mappingContext = this.brokerController.getTopicQueueMappingManager().buildTopicQueueMappingContext(requestHeader);
RemotingCommand rewriteResult = rewriteRequestForStaticTopic(requestHeader, mappingContext);
if (rewriteResult != null) {
return rewriteResult;
}
long offset = this.brokerController.getMessageStore().getOffsetInQueueByTime(requestHeader.getTopic(), requestHeader.getQueueId(),
requestHeader.getTimestamp(), requestHeader.getBoundaryType());
responseHeader.setOffset(offset);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
    /**
     * Static-topic variant of get-max-offset. The max offset lives in the last
     * (highest) mapping segment; it is read locally when that segment is hosted
     * here, otherwise fetched via RPC from the hosting broker, then converted to
     * the static (logical) offset.
     *
     * @param requestHeader  get-max-offset header; mutated in place when forwarding
     * @param mappingContext static-topic mapping context for the request
     * @return {@code null} when the topic is not static (caller falls through);
     *         otherwise a complete response (SUCCESS, NOT_LEADER_FOR_QUEUE, or
     *         SYSTEM_ERROR)
     */
    private RemotingCommand rewriteRequestForStaticTopic(GetMaxOffsetRequestHeader requestHeader,
        TopicQueueMappingContext mappingContext) {
        // Not a static topic — signal the caller to use the normal path.
        if (mappingContext.getMappingDetail() == null) {
            return null;
        }
        TopicQueueMappingDetail mappingDetail = mappingContext.getMappingDetail();
        LogicQueueMappingItem mappingItem = mappingContext.getLeaderItem();
        if (!mappingContext.isLeader()) {
            // NOTE(review): "does not exit" looks like a typo for "does not exist".
            return buildErrorResponse(ResponseCode.NOT_LEADER_FOR_QUEUE, String.format("%s-%d does not exit in request process of current broker %s", mappingContext.getTopic(), mappingContext.getGlobalId(), mappingDetail.getBname()));
        }
        try {
            // The item covering Long.MAX_VALUE is the newest segment of the queue.
            LogicQueueMappingItem maxItem = TopicQueueMappingUtils.findLogicQueueMappingItem(mappingContext.getMappingItemList(), Long.MAX_VALUE, true);
            assert maxItem != null;
            assert maxItem.getLogicOffset() >= 0;
            requestHeader.setBrokerName(maxItem.getBname());
            requestHeader.setLo(false);
            requestHeader.setQueueId(mappingItem.getQueueId());
            long maxPhysicalOffset = Long.MAX_VALUE;
            if (maxItem.getBname().equals(mappingDetail.getBname())) {
                //current broker
                maxPhysicalOffset = this.brokerController.getMessageStore().getMaxOffsetInQueue(mappingContext.getTopic(), mappingItem.getQueueId());
            } else {
                // Newest segment lives on another broker — ask it via RPC.
                RpcRequest rpcRequest = new RpcRequest(RequestCode.GET_MAX_OFFSET, requestHeader, null);
                RpcResponse rpcResponse = this.brokerController.getBrokerOuterAPI().getRpcClient().invoke(rpcRequest, this.brokerController.getBrokerConfig().getForwardTimeout()).get();
                if (rpcResponse.getException() != null) {
                    throw rpcResponse.getException();
                }
                GetMaxOffsetResponseHeader offsetResponseHeader = (GetMaxOffsetResponseHeader) rpcResponse.getHeader();
                maxPhysicalOffset = offsetResponseHeader.getOffset();
            }
            final RemotingCommand response = RemotingCommand.createResponseCommand(GetMaxOffsetResponseHeader.class);
            final GetMaxOffsetResponseHeader responseHeader = (GetMaxOffsetResponseHeader) response.readCustomHeader();
            // Convert the physical offset back to the static queue's logical offset.
            responseHeader.setOffset(maxItem.computeStaticQueueOffsetStrictly(maxPhysicalOffset));
            response.setCode(ResponseCode.SUCCESS);
            response.setRemark(null);
            return response;
        } catch (Throwable t) {
            return buildErrorResponse(ResponseCode.SYSTEM_ERROR, t.getMessage());
        }
    }
private RemotingCommand getMaxOffset(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(GetMaxOffsetResponseHeader.class);
final GetMaxOffsetResponseHeader responseHeader = (GetMaxOffsetResponseHeader) response.readCustomHeader();
final GetMaxOffsetRequestHeader requestHeader = request.decodeCommandCustomHeader(GetMaxOffsetRequestHeader.class);
TopicQueueMappingContext mappingContext = this.brokerController.getTopicQueueMappingManager().buildTopicQueueMappingContext(requestHeader);
RemotingCommand rewriteResult = rewriteRequestForStaticTopic(requestHeader, mappingContext);
if (rewriteResult != null) {
return rewriteResult;
}
try {
long offset = this.brokerController.getMessageStore().getMaxOffsetInQueue(requestHeader.getTopic(), requestHeader.getQueueId());
responseHeader.setOffset(offset);
} catch (ConsumeQueueException e) {
throw new RemotingCommandException("Failed to get max offset in queue", e);
}
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
    /**
     * Static-topic variant of get-min-offset, RPC-future flavored. The min offset
     * lives in the earliest mapping segment (the item covering offset 0); it is
     * read locally when hosted here, otherwise fetched via RPC, then converted
     * loosely back to the static (logical) offset.
     *
     * @param request        GET_MIN_OFFSET rpc request; its header is mutated in
     *                       place when forwarding
     * @param mappingContext static-topic mapping context for the request
     * @return {@code null} when the topic is not static (caller falls through);
     *         otherwise a completed future (SUCCESS, NOT_LEADER_FOR_QUEUE, or
     *         SYSTEM_ERROR)
     */
    private CompletableFuture<RpcResponse> handleGetMinOffsetForStaticTopic(RpcRequest request,
        TopicQueueMappingContext mappingContext) {
        // Not a static topic — signal the caller to use the normal path.
        if (mappingContext.getMappingDetail() == null) {
            return null;
        }
        TopicQueueMappingDetail mappingDetail = mappingContext.getMappingDetail();
        if (!mappingContext.isLeader()) {
            // This queue's leader is not on this broker; reject so the caller retries elsewhere.
            return CompletableFuture.completedFuture(new RpcResponse(new RpcException(ResponseCode.NOT_LEADER_FOR_QUEUE,
                String.format("%s-%d is not leader in broker %s, request code %d", mappingContext.getTopic(), mappingContext.getGlobalId(), mappingDetail.getBname(), request.getCode()))));
        }
        GetMinOffsetRequestHeader requestHeader = (GetMinOffsetRequestHeader) request.getHeader();
        // The item covering logical offset 0 is the oldest segment of the queue.
        LogicQueueMappingItem mappingItem = TopicQueueMappingUtils.findLogicQueueMappingItem(mappingContext.getMappingItemList(), 0L, true);
        assert mappingItem != null;
        try {
            requestHeader.setBrokerName(mappingItem.getBname());
            requestHeader.setLo(false);
            requestHeader.setQueueId(mappingItem.getQueueId());
            long physicalOffset;
            //run in local
            if (mappingItem.getBname().equals(mappingDetail.getBname())) {
                physicalOffset = this.brokerController.getMessageStore().getMinOffsetInQueue(mappingDetail.getTopic(), mappingItem.getQueueId());
            } else {
                // Oldest segment lives on another broker — ask it via RPC.
                RpcRequest rpcRequest = new RpcRequest(RequestCode.GET_MIN_OFFSET, requestHeader, null);
                RpcResponse rpcResponse = this.brokerController.getBrokerOuterAPI().getRpcClient().invoke(rpcRequest, this.brokerController.getBrokerConfig().getForwardTimeout()).get();
                if (rpcResponse.getException() != null) {
                    throw rpcResponse.getException();
                }
                GetMinOffsetResponseHeader offsetResponseHeader = (GetMinOffsetResponseHeader) rpcResponse.getHeader();
                physicalOffset = offsetResponseHeader.getOffset();
            }
            // "Loosely" conversion, in contrast to the strict conversion used for max offset.
            long offset = mappingItem.computeStaticQueueOffsetLoosely(physicalOffset);
            final GetMinOffsetResponseHeader responseHeader = new GetMinOffsetResponseHeader();
            responseHeader.setOffset(offset);
            return CompletableFuture.completedFuture(new RpcResponse(ResponseCode.SUCCESS, responseHeader, null));
        } catch (Throwable t) {
            LOGGER.error("rewriteRequestForStaticTopic failed", t);
            return CompletableFuture.completedFuture(new RpcResponse(new RpcException(ResponseCode.SYSTEM_ERROR, t.getMessage(), t)));
        }
    }
private CompletableFuture<RpcResponse> handleGetMinOffset(RpcRequest request) {
assert request.getCode() == RequestCode.GET_MIN_OFFSET;
GetMinOffsetRequestHeader requestHeader = (GetMinOffsetRequestHeader) request.getHeader();
TopicQueueMappingContext mappingContext = this.brokerController.getTopicQueueMappingManager().buildTopicQueueMappingContext(requestHeader, false);
CompletableFuture<RpcResponse> rewriteResult = handleGetMinOffsetForStaticTopic(request, mappingContext);
if (rewriteResult != null) {
return rewriteResult;
}
final GetMinOffsetResponseHeader responseHeader = new GetMinOffsetResponseHeader();
long offset = this.brokerController.getMessageStore().getMinOffsetInQueue(requestHeader.getTopic(), requestHeader.getQueueId());
responseHeader.setOffset(offset);
return CompletableFuture.completedFuture(new RpcResponse(ResponseCode.SUCCESS, responseHeader, null));
}
private RemotingCommand getMinOffset(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final GetMinOffsetRequestHeader requestHeader =
(GetMinOffsetRequestHeader) request.decodeCommandCustomHeader(GetMinOffsetRequestHeader.class);
try {
CompletableFuture<RpcResponse> responseFuture = handleGetMinOffset(new RpcRequest(RequestCode.GET_MIN_OFFSET, requestHeader, null));
RpcResponse rpcResponse = responseFuture.get();
return RpcClientUtils.createCommandForRpcResponse(rpcResponse);
} catch (Throwable t) {
return buildErrorResponse(ResponseCode.SYSTEM_ERROR, t.getMessage());
}
}
    /**
     * Static-topic variant of get-earliest-message-storetime. The earliest
     * message lives in the oldest mapping segment (the item covering offset 0);
     * its store time is fetched via RPC from the hosting broker.
     *
     * @param requestHeader  request header; mutated in place before forwarding
     * @param mappingContext static-topic mapping context for the request
     * @return {@code null} when the topic is not static (caller falls through);
     *         otherwise a complete response (SUCCESS, NOT_LEADER_FOR_QUEUE, or
     *         SYSTEM_ERROR)
     */
    private RemotingCommand rewriteRequestForStaticTopic(GetEarliestMsgStoretimeRequestHeader requestHeader,
        TopicQueueMappingContext mappingContext) {
        // Not a static topic — signal the caller to use the normal path.
        if (mappingContext.getMappingDetail() == null) {
            return null;
        }
        TopicQueueMappingDetail mappingDetail = mappingContext.getMappingDetail();
        if (!mappingContext.isLeader()) {
            // NOTE(review): "does not exit" looks like a typo for "does not exist".
            return buildErrorResponse(ResponseCode.NOT_LEADER_FOR_QUEUE, String.format("%s-%d does not exit in request process of current broker %s", mappingContext.getTopic(), mappingContext.getGlobalId(), mappingDetail.getBname()));
        }
        // The item covering logical offset 0 is the oldest segment of the queue.
        LogicQueueMappingItem mappingItem = TopicQueueMappingUtils.findLogicQueueMappingItem(mappingContext.getMappingItemList(), 0L, true);
        assert mappingItem != null;
        try {
            requestHeader.setBrokerName(mappingItem.getBname());
            requestHeader.setLo(false);
            RpcRequest rpcRequest = new RpcRequest(RequestCode.GET_EARLIEST_MSG_STORETIME, requestHeader, null);
            //TO DO check if it is in current broker
            // NOTE(review): unlike the sibling min/max rewrites there is no local
            // fast path here — the request is always forwarded, even when the oldest
            // segment is hosted on this broker (see the TO DO above).
            RpcResponse rpcResponse = this.brokerController.getBrokerOuterAPI().getRpcClient().invoke(rpcRequest, this.brokerController.getBrokerConfig().getForwardTimeout()).get();
            if (rpcResponse.getException() != null) {
                throw rpcResponse.getException();
            }
            GetEarliestMsgStoretimeResponseHeader offsetResponseHeader = (GetEarliestMsgStoretimeResponseHeader) rpcResponse.getHeader();
            final RemotingCommand response = RemotingCommand.createResponseCommand(GetEarliestMsgStoretimeResponseHeader.class);
            final GetEarliestMsgStoretimeResponseHeader responseHeader = (GetEarliestMsgStoretimeResponseHeader) response.readCustomHeader();
            responseHeader.setTimestamp(offsetResponseHeader.getTimestamp());
            response.setCode(ResponseCode.SUCCESS);
            response.setRemark(null);
            return response;
        } catch (Throwable t) {
            return buildErrorResponse(ResponseCode.SYSTEM_ERROR, t.getMessage());
        }
    }
private RemotingCommand getEarliestMsgStoretime(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(GetEarliestMsgStoretimeResponseHeader.class);
final GetEarliestMsgStoretimeResponseHeader responseHeader = (GetEarliestMsgStoretimeResponseHeader) response.readCustomHeader();
final GetEarliestMsgStoretimeRequestHeader requestHeader =
(GetEarliestMsgStoretimeRequestHeader) request.decodeCommandCustomHeader(GetEarliestMsgStoretimeRequestHeader.class);
TopicQueueMappingContext mappingContext = this.brokerController.getTopicQueueMappingManager().buildTopicQueueMappingContext(requestHeader, false);
RemotingCommand rewriteResult = rewriteRequestForStaticTopic(requestHeader, mappingContext);
if (rewriteResult != null) {
return rewriteResult;
}
long timestamp =
this.brokerController.getMessageStore().getEarliestMessageTime(requestHeader.getTopic(), requestHeader.getQueueId());
responseHeader.setTimestamp(timestamp);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand getBrokerRuntimeInfo(ChannelHandlerContext ctx, RemotingCommand request)
throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
HashMap<String, String> runtimeInfo = this.prepareRuntimeInfo();
KVTable kvTable = new KVTable();
kvTable.setTable(runtimeInfo);
byte[] body = kvTable.encode();
response.setBody(body);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand lockBatchMQ(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
LockBatchRequestBody requestBody = LockBatchRequestBody.decode(request.getBody(), LockBatchRequestBody.class);
Set<MessageQueue> lockOKMQSet = new HashSet<>();
Set<MessageQueue> selfLockOKMQSet = this.brokerController.getRebalanceLockManager().tryLockBatch(
requestBody.getConsumerGroup(),
requestBody.getMqSet(),
requestBody.getClientId());
if (requestBody.isOnlyThisBroker() || !brokerController.getBrokerConfig().isLockInStrictMode()) {
lockOKMQSet = selfLockOKMQSet;
} else {
requestBody.setOnlyThisBroker(true);
int replicaSize = this.brokerController.getMessageStoreConfig().getTotalReplicas();
int quorum = replicaSize / 2 + 1;
if (quorum <= 1) {
lockOKMQSet = selfLockOKMQSet;
} else {
final ConcurrentMap<MessageQueue, Integer> mqLockMap = new ConcurrentHashMap<>();
for (MessageQueue mq : selfLockOKMQSet) {
if (!mqLockMap.containsKey(mq)) {
mqLockMap.put(mq, 0);
}
mqLockMap.put(mq, mqLockMap.get(mq) + 1);
}
BrokerMemberGroup memberGroup = this.brokerController.getBrokerMemberGroup();
if (memberGroup != null) {
Map<Long, String> addrMap = new HashMap<>(memberGroup.getBrokerAddrs());
addrMap.remove(this.brokerController.getBrokerConfig().getBrokerId());
final CountDownLatch countDownLatch = new CountDownLatch(addrMap.size());
requestBody.setMqSet(selfLockOKMQSet);
requestBody.setOnlyThisBroker(true);
for (Long brokerId : addrMap.keySet()) {
try {
this.brokerController.getBrokerOuterAPI().lockBatchMQAsync(addrMap.get(brokerId),
requestBody, 1000, new LockCallback() {
@Override
public void onSuccess(Set<MessageQueue> lockOKMQSet) {
for (MessageQueue mq : lockOKMQSet) {
if (!mqLockMap.containsKey(mq)) {
mqLockMap.put(mq, 0);
}
mqLockMap.put(mq, mqLockMap.get(mq) + 1);
}
countDownLatch.countDown();
}
@Override
public void onException(Throwable e) {
LOGGER.warn("lockBatchMQAsync on {} failed, {}", addrMap.get(brokerId), e);
countDownLatch.countDown();
}
});
} catch (Exception e) {
LOGGER.warn("lockBatchMQAsync on {} failed, {}", addrMap.get(brokerId), e);
countDownLatch.countDown();
}
}
try {
countDownLatch.await(2000, TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
LOGGER.warn("lockBatchMQ exception on {}, {}", this.brokerController.getBrokerConfig().getBrokerName(), e);
}
}
for (MessageQueue mq : mqLockMap.keySet()) {
if (mqLockMap.get(mq) >= quorum) {
lockOKMQSet.add(mq);
}
}
}
}
LockBatchResponseBody responseBody = new LockBatchResponseBody();
responseBody.setLockOKMQSet(lockOKMQSet);
response.setBody(responseBody.encode());
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand unlockBatchMQ(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
UnlockBatchRequestBody requestBody = UnlockBatchRequestBody.decode(request.getBody(), UnlockBatchRequestBody.class);
if (requestBody.isOnlyThisBroker() || !this.brokerController.getBrokerConfig().isLockInStrictMode()) {
this.brokerController.getRebalanceLockManager().unlockBatch(
requestBody.getConsumerGroup(),
requestBody.getMqSet(),
requestBody.getClientId());
} else {
requestBody.setOnlyThisBroker(true);
BrokerMemberGroup memberGroup = this.brokerController.getBrokerMemberGroup();
if (memberGroup != null) {
Map<Long, String> addrMap = memberGroup.getBrokerAddrs();
for (Long brokerId : addrMap.keySet()) {
try {
this.brokerController.getBrokerOuterAPI().unlockBatchMQAsync(addrMap.get(brokerId), requestBody, 1000, new UnlockCallback() {
@Override
public void onSuccess() {
}
@Override
public void onException(Throwable e) {
LOGGER.warn("unlockBatchMQ exception on {}, {}", addrMap.get(brokerId), e);
}
});
} catch (Exception e) {
LOGGER.warn("unlockBatchMQ exception on {}, {}", addrMap.get(brokerId), e);
}
}
}
}
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand updateAndCreateSubscriptionGroup(ChannelHandlerContext ctx, RemotingCommand request)
throws RemotingCommandException {
long startTime = System.currentTimeMillis();
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
LOGGER.info("AdminBrokerProcessor#updateAndCreateSubscriptionGroup called by {}",
RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
SubscriptionGroupConfig config = RemotingSerializable.decode(request.getBody(), SubscriptionGroupConfig.class);
if (null != config) {
TopicValidator.ValidateResult result = TopicValidator.validateGroup(config.getGroupName());
if (!result.isValid()) {
response.setCode(ResponseCode.INVALID_PARAMETER);
response.setRemark(result.getRemark());
return response;
}
this.brokerController.getSubscriptionGroupManager().updateSubscriptionGroupConfig(config);
}
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
long executionTime = System.currentTimeMillis() - startTime;
if (null != config) {
LOGGER.info("executionTime of create subscriptionGroup:{} is {} ms", config.getGroupName(), executionTime);
}
InvocationStatus status = response.getCode() == ResponseCode.SUCCESS ?
InvocationStatus.SUCCESS : InvocationStatus.FAILURE;
Attributes attributes = this.brokerController.getBrokerMetricsManager().newAttributesBuilder()
.put(LABEL_INVOCATION_STATUS, status.getName())
.build();
this.brokerController.getBrokerMetricsManager().getConsumerGroupCreateExecuteTime().record(executionTime, attributes);
return response;
}
    /**
     * Batch variant of {@code updateAndCreateSubscriptionGroup}: validates every
     * group name first (rejecting the whole batch on the first invalid one), then
     * applies the list through the subscription group manager. Execution latency is
     * recorded in the finally block whether the update succeeded or threw.
     */
    private RemotingCommand updateAndCreateSubscriptionGroupList(ChannelHandlerContext ctx, RemotingCommand request) {
        final long startTime = System.nanoTime();
        final RemotingCommand response = RemotingCommand.createResponseCommand(null);
        final SubscriptionGroupList subscriptionGroupList = SubscriptionGroupList.decode(request.getBody(), SubscriptionGroupList.class);
        final List<SubscriptionGroupConfig> groupConfigList = subscriptionGroupList.getGroupConfigList();
        // Collect the names for logging while validating.
        final StringBuilder builder = new StringBuilder();
        for (SubscriptionGroupConfig config : groupConfigList) {
            TopicValidator.ValidateResult result = TopicValidator.validateGroup(config.getGroupName());
            if (!result.isValid()) {
                response.setCode(ResponseCode.INVALID_PARAMETER);
                response.setRemark(result.getRemark());
                return response;
            }
            builder.append(config.getGroupName()).append(";");
        }
        final String groupNames = builder.toString();
        LOGGER.info("AdminBrokerProcessor#updateAndCreateSubscriptionGroupList: groupNames: {}, called by {}",
            groupNames,
            RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
        try {
            this.brokerController.getSubscriptionGroupManager().updateSubscriptionGroupConfigList(groupConfigList);
            response.setCode(ResponseCode.SUCCESS);
            response.setRemark(null);
        } finally {
            // NOTE(review): if updateSubscriptionGroupConfigList throws, the metric is
            // still recorded (as FAILURE, since the code was never set to SUCCESS) and
            // the exception propagates past the return below.
            long executionTime = (System.nanoTime() - startTime) / 1000000L;
            LOGGER.info("executionTime of create updateAndCreateSubscriptionGroupList: {} is {} ms", groupNames, executionTime);
            InvocationStatus status = response.getCode() == ResponseCode.SUCCESS ?
                InvocationStatus.SUCCESS : InvocationStatus.FAILURE;
            Attributes attributes = this.brokerController.getBrokerMetricsManager().newAttributesBuilder()
                .put(LABEL_INVOCATION_STATUS, status.getName())
                .build();
            this.brokerController.getBrokerMetricsManager().getConsumerGroupCreateExecuteTime().record(executionTime, attributes);
        }
        return response;
    }
private void initConsumerOffset(String clientHost, String groupName, int mode, TopicConfig topicConfig)
throws ConsumeQueueException {
String topic = topicConfig.getTopicName();
for (int queueId = 0; queueId < topicConfig.getReadQueueNums(); queueId++) {
if (this.brokerController.getConsumerOffsetManager().queryOffset(groupName, topic, queueId) > -1) {
continue;
}
long offset = 0;
if (this.brokerController.getMessageStore().getConsumeQueue(topic, queueId) != null) {
if (ConsumeInitMode.MAX == mode) {
offset = this.brokerController.getMessageStore().getMaxOffsetInQueue(topic, queueId);
} else if (ConsumeInitMode.MIN == mode) {
offset = this.brokerController.getMessageStore().getMinOffsetInQueue(topic, queueId);
}
}
this.brokerController.getConsumerOffsetManager().commitOffset(clientHost, groupName, topic, queueId, offset);
LOGGER.info("AdminBrokerProcessor#initConsumerOffset: consumerGroup={}, topic={}, queueId={}, offset={}",
groupName, topic, queueId, offset);
}
}
    /**
     * Return subscription group configurations on this broker, either as one full
     * snapshot or — when split metadata is enabled and the client supplied paging
     * hints — as a bounded page. The response header always carries the total
     * group count so clients can detect when paging is complete.
     */
    private RemotingCommand getAllSubscriptionGroup(ChannelHandlerContext ctx,
        RemotingCommand request) throws RemotingCommandException {
        final RemotingCommand response = RemotingCommand.createResponseCommand(GetAllSubscriptionGroupResponseHeader.class);
        final GetAllSubscriptionGroupResponseHeader responseHeader =
            (GetAllSubscriptionGroupResponseHeader) response.readCustomHeader();
        final GetAllSubscriptionGroupRequestHeader requestHeader =
            request.decodeCommandCustomHeader(GetAllSubscriptionGroupRequestHeader.class);
        // Paging hints; all-null means the client wants the complete table.
        String dataVersionStr = requestHeader.getDataVersion();
        Integer groupSeq = requestHeader.getGroupSeq();
        Integer maxGroupNum = requestHeader.getMaxGroupNum();
        SubscriptionGroupManager sgManager = this.brokerController.getSubscriptionGroupManager();
        SubscriptionGroupWrapper subscriptionGroupWrapper = new SubscriptionGroupWrapper();
        if (!brokerController.getBrokerConfig().isEnableSplitMetadata()
            || ObjectUtils.allNull(dataVersionStr, groupSeq, maxGroupNum)) {
            // Full snapshot in one shot.
            subscriptionGroupWrapper.setSubscriptionGroupTable(sgManager.getSubscriptionGroupTable());
            subscriptionGroupWrapper.setForbiddenTable(sgManager.getForbiddenTable());
            subscriptionGroupWrapper.setDataVersion(sgManager.getDataVersion());
        } else {
            // Paged mode: page size is capped by the configured split size and by the
            // client-requested maximum (when provided).
            int groupNum = Math.min(brokerController.getBrokerConfig().getSplitMetadataSize(),
                Optional.ofNullable(maxGroupNum).orElse(Integer.MAX_VALUE));
            ConcurrentMap<String, SubscriptionGroupConfig> subGroupTable =
                sgManager.subGroupTable(dataVersionStr, groupSeq, groupNum);
            subscriptionGroupWrapper.setSubscriptionGroupTable(subGroupTable);
            subscriptionGroupWrapper.setDataVersion(sgManager.getDataVersion());
            // Only include forbidden entries for the groups in this page.
            subscriptionGroupWrapper.setForbiddenTable(sgManager.subForbiddenTable(subGroupTable.keySet()));
        }
        responseHeader.setTotalGroupNum(sgManager.getSubscriptionGroupTable().size());
        String content = subscriptionGroupWrapper.toJson();
        if (StringUtils.isNotBlank(content)) {
            response.setBody(content.getBytes(StandardCharsets.UTF_8));
            response.setCode(ResponseCode.SUCCESS);
            response.setRemark(null);
        } else {
            LOGGER.error("No subscription group in this broker, client:{} ", ctx.channel().remoteAddress());
            response.setCode(ResponseCode.SYSTEM_ERROR);
            response.setRemark("No subscription group in this broker");
        }
        return response;
    }
private RemotingCommand deleteSubscriptionGroup(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
DeleteSubscriptionGroupRequestHeader requestHeader =
(DeleteSubscriptionGroupRequestHeader) request.decodeCommandCustomHeader(DeleteSubscriptionGroupRequestHeader.class);
LOGGER.info("AdminBrokerProcessor#deleteSubscriptionGroup, caller={}",
RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
this.brokerController.getSubscriptionGroupManager().deleteSubscriptionGroupConfig(requestHeader.getGroupName());
if (requestHeader.isCleanOffset()) {
this.brokerController.getConsumerOffsetManager().removeOffset(requestHeader.getGroupName());
this.brokerController.getPopInflightMessageCounter().clearInFlightMessageNumByGroupName(requestHeader.getGroupName());
}
if (this.brokerController.getBrokerConfig().isAutoDeleteUnusedStats()) {
this.brokerController.getBrokerStatsManager().onGroupDeleted(requestHeader.getGroupName());
}
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand getTopicStatsInfo(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
final GetTopicStatsInfoRequestHeader requestHeader = request.decodeCommandCustomHeader(GetTopicStatsInfoRequestHeader.class);
final String topic = requestHeader.getTopic();
TopicConfig topicConfig = this.brokerController.getTopicConfigManager().selectTopicConfig(topic);
if (null == topicConfig) {
response.setCode(ResponseCode.TOPIC_NOT_EXIST);
response.setRemark("topic[" + topic + "] not exist");
return response;
}
TopicStatsTable topicStatsTable = new TopicStatsTable();
int maxQueueNums = Math.max(topicConfig.getWriteQueueNums(), topicConfig.getReadQueueNums());
try {
for (int i = 0; i < maxQueueNums; i++) {
MessageQueue mq = new MessageQueue();
mq.setTopic(topic);
mq.setBrokerName(this.brokerController.getBrokerConfig().getBrokerName());
mq.setQueueId(i);
TopicOffset topicOffset = new TopicOffset();
long min = this.brokerController.getMessageStore().getMinOffsetInQueue(topic, i);
if (min < 0) {
min = 0;
}
long max = this.brokerController.getMessageStore().getMaxOffsetInQueue(topic, i);
if (max < 0) {
max = 0;
}
long timestamp = 0;
if (max > 0) {
timestamp = this.brokerController.getMessageStore().getMessageStoreTimeStamp(topic, i, max - 1);
}
topicOffset.setMinOffset(min);
topicOffset.setMaxOffset(max);
topicOffset.setLastUpdateTimestamp(timestamp);
topicStatsTable.getOffsetTable().put(mq, topicOffset);
}
topicStatsTable.setTopicPutTps(this.brokerController.getBrokerStatsManager().tpsTopicPutNums(requestHeader.getTopic()));
byte[] body = topicStatsTable.encode();
response.setBody(body);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
} catch (ConsumeQueueException e) {
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark(e.getMessage());
}
return response;
}
private RemotingCommand getConsumerConnectionList(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
final GetConsumerConnectionListRequestHeader requestHeader =
(GetConsumerConnectionListRequestHeader) request.decodeCommandCustomHeader(GetConsumerConnectionListRequestHeader.class);
ConsumerGroupInfo consumerGroupInfo =
this.brokerController.getConsumerManager().getConsumerGroupInfo(requestHeader.getConsumerGroup());
if (consumerGroupInfo != null) {
ConsumerConnection bodydata = new ConsumerConnection();
bodydata.setConsumeFromWhere(consumerGroupInfo.getConsumeFromWhere());
bodydata.setConsumeType(consumerGroupInfo.getConsumeType());
bodydata.setMessageModel(consumerGroupInfo.getMessageModel());
bodydata.getSubscriptionTable().putAll(consumerGroupInfo.getSubscriptionTable());
Iterator<Map.Entry<Channel, ClientChannelInfo>> it = consumerGroupInfo.getChannelInfoTable().entrySet().iterator();
while (it.hasNext()) {
ClientChannelInfo info = it.next().getValue();
Connection connection = new Connection();
connection.setClientId(info.getClientId());
connection.setLanguage(info.getLanguage());
connection.setVersion(info.getVersion());
connection.setClientAddr(RemotingHelper.parseChannelRemoteAddr(info.getChannel()));
bodydata.getConnectionSet().add(connection);
}
byte[] body = bodydata.encode();
response.setBody(body);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
response.setCode(ResponseCode.CONSUMER_NOT_ONLINE);
response.setRemark("the consumer group[" + requestHeader.getConsumerGroup() + "] not online");
return response;
}
private RemotingCommand getAllProducerInfo(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
final GetAllProducerInfoRequestHeader requestHeader =
(GetAllProducerInfoRequestHeader) request.decodeCommandCustomHeader(GetAllProducerInfoRequestHeader.class);
ProducerTableInfo producerTable = this.brokerController.getProducerManager().getProducerTable();
if (producerTable != null) {
byte[] body = producerTable.encode();
response.setBody(body);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
response.setCode(ResponseCode.SYSTEM_ERROR);
return response;
}
private RemotingCommand getProducerConnectionList(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
final GetProducerConnectionListRequestHeader requestHeader =
(GetProducerConnectionListRequestHeader) request.decodeCommandCustomHeader(GetProducerConnectionListRequestHeader.class);
ProducerConnection bodydata = new ProducerConnection();
Map<Channel, ClientChannelInfo> channelInfoHashMap =
this.brokerController.getProducerManager().getGroupChannelTable().get(requestHeader.getProducerGroup());
if (channelInfoHashMap != null) {
Iterator<Map.Entry<Channel, ClientChannelInfo>> it = channelInfoHashMap.entrySet().iterator();
while (it.hasNext()) {
ClientChannelInfo info = it.next().getValue();
Connection connection = new Connection();
connection.setClientId(info.getClientId());
connection.setLanguage(info.getLanguage());
connection.setVersion(info.getVersion());
connection.setClientAddr(RemotingHelper.parseChannelRemoteAddr(info.getChannel()));
bodydata.getConnectionSet().add(connection);
}
byte[] body = bodydata.encode();
response.setBody(body);
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("the producer group[" + requestHeader.getProducerGroup() + "] not exist");
return response;
}
    /**
     * Collect consume progress (broker offset, consumer offset, pull offset, last
     * consumed-message timestamp and consume TPS) for a consumer group, over either
     * an explicit topic list, a single topic, or every topic the group has offsets for.
     */
    private RemotingCommand getConsumeStats(ChannelHandlerContext ctx,
        RemotingCommand request) throws RemotingCommandException {
        final RemotingCommand response = RemotingCommand.createResponseCommand(null);
        try {
            final GetConsumeStatsRequestHeader requestHeader = request.decodeCommandCustomHeader(GetConsumeStatsRequestHeader.class);
            List<String> topicListProvided = requestHeader.fetchTopicList();
            String topicProvided = requestHeader.getTopic();
            String group = requestHeader.getConsumerGroup();
            ConsumeStats consumeStats = new ConsumeStats();
            Set<String> topicsForCollecting = getTopicsForCollectingConsumeStats(topicListProvided, topicProvided, group);
            for (String topic : topicsForCollecting) {
                TopicConfig topicConfig = this.brokerController.getTopicConfigManager().selectTopicConfig(topic);
                if (null == topicConfig) {
                    LOGGER.warn("AdminBrokerProcessor#getConsumeStats: topic config does not exist, topic={}", topic);
                    continue;
                }
                // Non-null for static (logic) topics; affects offset clamping below.
                TopicQueueMappingDetail mappingDetail = this.brokerController.getTopicQueueMappingManager().getTopicQueueMapping(topic);
                for (int i = 0; i < topicConfig.getReadQueueNums(); i++) {
                    MessageQueue mq = new MessageQueue();
                    mq.setTopic(topic);
                    mq.setBrokerName(this.brokerController.getBrokerConfig().getBrokerName());
                    mq.setQueueId(i);
                    OffsetWrapper offsetWrapper = new OffsetWrapper();
                    // Queue high-water mark; clamp uninitialized (-1) to 0.
                    long brokerOffset = this.brokerController.getMessageStore().getMaxOffsetInQueue(topic, i);
                    if (brokerOffset < 0) {
                        brokerOffset = 0;
                    }
                    long consumerOffset = this.brokerController.getConsumerOffsetManager().queryOffset(
                        requestHeader.getConsumerGroup(), topic, i);
                    // the consumerOffset cannot be zero for static topic because of the "double read check" strategy
                    // just remain the logic for dynamic topic
                    // maybe we should remove it in the future
                    if (mappingDetail == null) {
                        if (consumerOffset < 0) {
                            consumerOffset = 0;
                        }
                    }
                    long pullOffset = this.brokerController.getConsumerOffsetManager().queryPullOffset(
                        requestHeader.getConsumerGroup(), topic, i);
                    offsetWrapper.setBrokerOffset(brokerOffset);
                    offsetWrapper.setConsumerOffset(consumerOffset);
                    // The pull offset never reports behind the committed consume offset.
                    offsetWrapper.setPullOffset(Math.max(consumerOffset, pullOffset));
                    // Store time of the last consumed message, when one exists.
                    long timeOffset = consumerOffset - 1;
                    if (timeOffset >= 0) {
                        long lastTimestamp = this.brokerController.getMessageStore().getMessageStoreTimeStamp(topic, i, timeOffset);
                        if (lastTimestamp > 0) {
                            offsetWrapper.setLastTimestamp(lastTimestamp);
                        }
                    }
                    consumeStats.getOffsetTable().put(mq, offsetWrapper);
                }
                // Accumulate per-topic consume TPS into the aggregate.
                double consumeTps = this.brokerController.getBrokerStatsManager().tpsGroupGetNums(requestHeader.getConsumerGroup(), topic);
                consumeTps += consumeStats.getConsumeTps();
                consumeStats.setConsumeTps(consumeTps);
            }
            byte[] body = consumeStats.encode();
            response.setBody(body);
            response.setCode(ResponseCode.SUCCESS);
            response.setRemark(null);
        } catch (ConsumeQueueException e) {
            response.setCode(ResponseCode.SYSTEM_ERROR);
            response.setRemark(e.getMessage());
        }
        return response;
    }
private Set<String> getTopicsForCollectingConsumeStats(List<String> topicListProvided, String topicProvided,
String group) {
Set<String> topicsForCollecting = new HashSet<>();
if (!topicListProvided.isEmpty()) {
// if topic list is provided, only collect the topics in the list
// and ignore subscription check
topicsForCollecting.addAll(topicListProvided);
} else {
// In order to be compatible with the old logic,
// even if the topic has been provided here, the subscription will be checked.
if (UtilAll.isBlank(topicProvided)) {
topicsForCollecting.addAll(
this.brokerController.getConsumerOffsetManager().whichTopicByConsumer(group));
} else {
topicsForCollecting.add(topicProvided);
}
int subscriptionCount = this.brokerController.getConsumerManager().findSubscriptionDataCount(group);
Iterator<String> iterator = topicsForCollecting.iterator();
while (iterator.hasNext()) {
String topic = iterator.next();
SubscriptionData findSubscriptionData = this.brokerController.getConsumerManager().findSubscriptionData(group, topic);
if (findSubscriptionData == null && subscriptionCount > 0) {
LOGGER.warn(
"AdminBrokerProcessor#getConsumeStats: topic does not exist in consumer group's subscription, topic={}, consumer group={}",
topic, group);
iterator.remove();
}
}
}
return topicsForCollecting;
}
private RemotingCommand getAllConsumerOffset(ChannelHandlerContext ctx, RemotingCommand request) {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
String content = this.brokerController.getConsumerOffsetManager().encode();
if (content != null && content.length() > 0) {
try {
response.setBody(content.getBytes(MixAll.DEFAULT_CHARSET));
} catch (UnsupportedEncodingException e) {
LOGGER.error("get all consumer offset from master error.", e);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("UnsupportedEncodingException " + e.getMessage());
return response;
}
} else {
LOGGER.error("No consumer offset in this broker, client: {} ", ctx.channel().remoteAddress());
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("No consumer offset in this broker");
return response;
}
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand getAllDelayOffset(ChannelHandlerContext ctx, RemotingCommand request) {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
String content = this.brokerController.getScheduleMessageService().encode();
if (content != null && content.length() > 0) {
try {
response.setBody(content.getBytes(MixAll.DEFAULT_CHARSET));
} catch (UnsupportedEncodingException e) {
LOGGER.error("AdminBrokerProcessor#getAllDelayOffset: unexpected error, caller={}.",
RemotingHelper.parseChannelRemoteAddr(ctx.channel()), e);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("UnsupportedEncodingException " + e);
return response;
}
} else {
LOGGER.error("AdminBrokerProcessor#getAllDelayOffset: no delay offset in this broker, caller={}",
RemotingHelper.parseChannelRemoteAddr(ctx.channel()));
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("No delay offset in this broker");
return response;
}
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
private RemotingCommand getAllMessageRequestMode(ChannelHandlerContext ctx, RemotingCommand request) {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
String content = this.brokerController.getQueryAssignmentProcessor().getMessageRequestModeManager().encode();
if (content != null && !content.isEmpty()) {
try {
response.setBody(content.getBytes(MixAll.DEFAULT_CHARSET));
} catch (UnsupportedEncodingException e) {
LOGGER.error("get all message request mode from master error.", e);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("UnsupportedEncodingException " + e);
return response;
}
} else {
LOGGER.error("No message request mode in this broker, client: {} ", ctx.channel().remoteAddress());
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("No message request mode in this broker");
return response;
}
response.setCode(ResponseCode.SUCCESS);
response.setRemark(null);
return response;
}
public RemotingCommand resetOffset(ChannelHandlerContext ctx,
RemotingCommand request) throws RemotingCommandException {
final ResetOffsetRequestHeader requestHeader =
(ResetOffsetRequestHeader) request.decodeCommandCustomHeader(ResetOffsetRequestHeader.class);
LOGGER.info("[reset-offset] reset offset started by {}. topic={}, group={}, timestamp={}, isForce={}",
RemotingHelper.parseChannelRemoteAddr(ctx.channel()), requestHeader.getTopic(), requestHeader.getGroup(),
requestHeader.getTimestamp(), requestHeader.isForce());
if (this.brokerController.getBrokerConfig().isUseServerSideResetOffset()) {
String topic = requestHeader.getTopic();
String group = requestHeader.getGroup();
int queueId = requestHeader.getQueueId();
long timestamp = requestHeader.getTimestamp();
Long offset = requestHeader.getOffset();
return resetOffsetInner(topic, group, queueId, timestamp, offset);
}
boolean isC = false;
LanguageCode language = request.getLanguage();
switch (language) {
case CPP:
isC = true;
break;
}
return this.brokerController.getBroker2Client().resetOffset(requestHeader.getTopic(), requestHeader.getGroup(),
requestHeader.getTimestamp(), requestHeader.isForce(), isC);
}
private Long searchOffsetByTimestamp(String topic, int queueId, long timestamp) throws ConsumeQueueException {
if (timestamp < 0) {
return brokerController.getMessageStore().getMaxOffsetInQueue(topic, queueId);
} else {
return brokerController.getMessageStore().getOffsetInQueueByTime(topic, queueId, timestamp);
}
}
    /**
     * Reset consumer offset.
     *
     * @param topic Required, not null.
     * @param group Required, not null.
     * @param queueId if target queue ID is negative, all message queues will be reset; otherwise, only the target queue
     * would get reset.
     * @param timestamp if timestamp is negative, offset would be reset to broker offset at the time being; otherwise,
     * binary search is performed to locate target offset.
     * @param offset Target offset to reset to if target queue ID is properly provided.
     * @return Affected queues and their new offset
     */
    private RemotingCommand resetOffsetInner(String topic, String group, int queueId, long timestamp, Long offset) {
        RemotingCommand response = RemotingCommand.createResponseCommand(ResponseCode.SUCCESS, null);

        // Only the master may mutate consume offsets.
        if (BrokerRole.SLAVE == brokerController.getMessageStoreConfig().getBrokerRole()) {
            response.setCode(ResponseCode.SYSTEM_ERROR);
            response.setRemark("Can not reset offset in slave broker");
            return response;
        }

        Map<Integer, Long> queueOffsetMap = new HashMap<>();

        // Reset offset for all queues belonging to the specified topic
        TopicConfig topicConfig = brokerController.getTopicConfigManager().selectTopicConfig(topic);
        if (null == topicConfig) {
            response.setCode(ResponseCode.TOPIC_NOT_EXIST);
            response.setRemark("Topic " + topic + " does not exist");
            LOGGER.warn("Reset offset failed, topic does not exist. topic={}, group={}", topic, group);
            return response;
        }

        if (!brokerController.getSubscriptionGroupManager().containsSubscriptionGroup(group)) {
            response.setCode(ResponseCode.SUBSCRIPTION_GROUP_NOT_EXIST);
            response.setRemark("Group " + group + " does not exist");
            LOGGER.warn("Reset offset failed, group does not exist. topic={}, group={}", topic, group);
            return response;
        }

        try {
            if (queueId >= 0) {
                // Single-queue reset: an explicit offset (other than the -1 sentinel)
                // must fall within the queue's current [min, max + 1] range.
                if (null != offset && -1 != offset) {
                    long min = brokerController.getMessageStore().getMinOffsetInQueue(topic, queueId);
                    long max = brokerController.getMessageStore().getMaxOffsetInQueue(topic, queueId);
                    if (min >= 0 && offset < min || offset > max + 1) {
                        response.setCode(ResponseCode.SYSTEM_ERROR);
                        response.setRemark(
                            String.format("Target offset %d not in consume queue range [%d-%d]", offset, min, max));
                        return response;
                    }
                } else {
                    // No explicit offset: resolve one from the timestamp.
                    offset = searchOffsetByTimestamp(topic, queueId, timestamp);
                }

                queueOffsetMap.put(queueId, offset);
            } else {
                // Negative queue ID: reset every read queue of the topic by timestamp.
                for (int index = 0; index < topicConfig.getReadQueueNums(); index++) {
                    offset = searchOffsetByTimestamp(topic, index, timestamp);
                    queueOffsetMap.put(index, offset);
                }
            }
        } catch (ConsumeQueueException e) {
            response.setCode(ResponseCode.SYSTEM_ERROR);
            response.setRemark(e.getMessage());
            return response;
        }

        if (queueOffsetMap.isEmpty()) {
            response.setCode(ResponseCode.SYSTEM_ERROR);
            response.setRemark("No queues to reset.");
            LOGGER.warn("Reset offset aborted: no queues to reset");
            return response;
        }

        for (Map.Entry<Integer, Long> entry : queueOffsetMap.entrySet()) {
            brokerController.getConsumerOffsetManager()
                .assignResetOffset(topic, group, entry.getKey(), entry.getValue());
        }

        // Prepare reset result.
        ResetOffsetBody body = new ResetOffsetBody();
        String brokerName = brokerController.getBrokerConfig().getBrokerName();
        for (Map.Entry<Integer, Long> entry : queueOffsetMap.entrySet()) {
            // Drop per-queue in-flight pop counters and any pop consumer cache so
            // stale state does not survive the reset.
            if (brokerController.getPopInflightMessageCounter() != null) {
                brokerController.getPopInflightMessageCounter().clearInFlightMessageNum(topic, group, entry.getKey());
            }
            if (brokerController.getBrokerConfig().isPopConsumerKVServiceEnable()) {
                brokerController.getPopConsumerService().clearCache(group, topic, entry.getKey());
                brokerController.getConsumerOffsetManager().clearPullOffset(group, topic);
            }
            body.getOffsetTable().put(new MessageQueue(topic, brokerName, entry.getKey()), entry.getValue());
        }
        LOGGER.info("Reset offset, topic={}, group={}, queues={}", topic, group, body.toJson(false));
        response.setBody(body.encode());
        return response;
    }
/**
 * Relays a consume-status query for {@code topic}/{@code group} to the consumer client
 * identified by the request header's client address, via the broker-to-client channel.
 */
public RemotingCommand getConsumerStatus(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final GetConsumerStatusRequestHeader header =
        (GetConsumerStatusRequestHeader) request.decodeCommandCustomHeader(GetConsumerStatusRequestHeader.class);
    final String topic = header.getTopic();
    final String group = header.getGroup();
    LOGGER.info("[get-consumer-status] get consumer status by {}. topic={}, group={}",
        RemotingHelper.parseChannelRemoteAddr(ctx.channel()), topic, group);
    return this.brokerController.getBroker2Client().getConsumeStatus(topic, group, header.getClientAddr());
}
/**
 * Answers which consumer groups consume the given topic: the union of groups currently
 * online (consumer manager) and groups with a persisted offset for the topic.
 */
private RemotingCommand queryTopicConsumeByWho(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    final QueryTopicConsumeByWhoRequestHeader requestHeader =
        (QueryTopicConsumeByWhoRequestHeader) request.decodeCommandCustomHeader(QueryTopicConsumeByWhoRequestHeader.class);
    final String topic = requestHeader.getTopic();
    // Online subscribers first...
    HashSet<String> groups = this.brokerController.getConsumerManager().queryTopicConsumeByWho(topic);
    // ...then merge in any group that has ever committed an offset for the topic.
    Set<String> groupsWithOffset = this.brokerController.getConsumerOffsetManager().whichGroupByTopic(topic);
    if (groupsWithOffset != null && !groupsWithOffset.isEmpty()) {
        groups.addAll(groupsWithOffset);
    }
    final GroupList groupList = new GroupList();
    groupList.setGroupList(groups);
    response.setBody(groupList.encode());
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Lists every topic for which the given consumer group has a persisted offset on this broker,
 * tagging the response with this broker's address.
 */
private RemotingCommand queryTopicsByConsumer(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    final QueryTopicsByConsumerRequestHeader header =
        (QueryTopicsByConsumerRequestHeader) request.decodeCommandCustomHeader(QueryTopicsByConsumerRequestHeader.class);
    final Set<String> consumedTopics =
        this.brokerController.getConsumerOffsetManager().whichTopicByConsumer(header.getGroup());
    final TopicList topicList = new TopicList();
    topicList.setTopicList(consumedTopics);
    topicList.setBrokerAddr(brokerController.getBrokerAddr());
    response.setBody(topicList.encode());
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Returns the current in-memory subscription data (filter expression etc.) registered by
 * the given group for the given topic; the subscription may be null if the group is offline.
 */
private RemotingCommand querySubscriptionByConsumer(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    final QuerySubscriptionByConsumerRequestHeader header =
        (QuerySubscriptionByConsumerRequestHeader) request.decodeCommandCustomHeader(QuerySubscriptionByConsumerRequestHeader.class);
    final String group = header.getGroup();
    final String topic = header.getTopic();
    final SubscriptionData subscription =
        this.brokerController.getConsumerManager().findSubscriptionData(group, topic);
    final QuerySubscriptionResponseBody responseBody = new QuerySubscriptionResponseBody();
    responseBody.setGroup(group);
    responseBody.setTopic(topic);
    responseBody.setSubscriptionData(subscription);
    response.setBody(responseBody.encode());
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * For each write queue of the topic, reports the min/max store timestamps of the queue,
 * the timestamp of the group's last consumed message, and (when the group lags) the
 * wall-clock delay behind the broker's max offset.
 *
 * @throws RemotingCommandException if the consume queue's max offset cannot be read
 */
private RemotingCommand queryConsumeTimeSpan(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    QueryConsumeTimeSpanRequestHeader requestHeader = request.decodeCommandCustomHeader(QueryConsumeTimeSpanRequestHeader.class);
    final String topic = requestHeader.getTopic();
    TopicConfig topicConfig = this.brokerController.getTopicConfigManager().selectTopicConfig(topic);
    if (null == topicConfig) {
        response.setCode(ResponseCode.TOPIC_NOT_EXIST);
        response.setRemark("topic[" + topic + "] not exist");
        return response;
    }
    List<QueueTimeSpan> timeSpanSet = new ArrayList<>();
    for (int i = 0; i < topicConfig.getWriteQueueNums(); i++) {
        QueueTimeSpan timeSpan = new QueueTimeSpan();
        MessageQueue mq = new MessageQueue();
        mq.setTopic(topic);
        mq.setBrokerName(this.brokerController.getBrokerConfig().getBrokerName());
        mq.setQueueId(i);
        timeSpan.setMessageQueue(mq);
        // Store timestamp of the earliest message still present in this queue.
        long minTime = this.brokerController.getMessageStore().getEarliestMessageTime(topic, i);
        timeSpan.setMinTimeStamp(minTime);
        long max;
        try {
            max = this.brokerController.getMessageStore().getMaxOffsetInQueue(topic, i);
        } catch (ConsumeQueueException e) {
            throw new RemotingCommandException("Failed to get max offset in queue", e);
        }
        // Store timestamp of the newest message (logical offset max - 1).
        long maxTime = this.brokerController.getMessageStore().getMessageStoreTimeStamp(topic, i, max - 1);
        timeSpan.setMaxTimeStamp(maxTime);
        long consumeTime;
        long consumerOffset = this.brokerController.getConsumerOffsetManager().queryOffset(
            requestHeader.getGroup(), topic, i);
        if (consumerOffset > 0) {
            // Timestamp of the last message the group has actually consumed.
            consumeTime = this.brokerController.getMessageStore().getMessageStoreTimeStamp(topic, i, consumerOffset - 1);
        } else {
            // No committed offset yet: report the queue's earliest timestamp instead.
            consumeTime = minTime;
        }
        timeSpan.setConsumeTimeStamp(consumeTime);
        long maxBrokerOffset;
        try {
            maxBrokerOffset = this.brokerController.getMessageStore().getMaxOffsetInQueue(requestHeader.getTopic(), i);
        } catch (ConsumeQueueException e) {
            throw new RemotingCommandException("Failed to get max offset in queue", e);
        }
        if (consumerOffset < maxBrokerOffset) {
            // Lag expressed as the age (now - store time) of the next not-yet-consumed message.
            long nextTime = this.brokerController.getMessageStore().getMessageStoreTimeStamp(topic, i, consumerOffset);
            timeSpan.setDelayTime(System.currentTimeMillis() - nextTime);
        }
        timeSpanSet.add(timeSpan);
    }
    QueryConsumeTimeSpanBody queryConsumeTimeSpanBody = new QueryConsumeTimeSpanBody();
    queryConsumeTimeSpanBody.setConsumeTimeSpanSet(timeSpanSet);
    response.setBody(queryConsumeTimeSpanBody.encode());
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Returns the fixed set of system topics known to this broker. The request carries no
 * parameters; ctx and request are unused beyond the handler signature.
 */
private RemotingCommand getSystemTopicListFromBroker(ChannelHandlerContext ctx, RemotingCommand request)
    throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    final TopicList topicList = new TopicList();
    topicList.setTopicList(TopicValidator.getSystemTopicSet());
    response.setBody(topicList.encode());
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Triggers the message store to remove expired consume-queue files. Any failure is
 * converted into a SYSTEM_ERROR response instead of propagating.
 */
public RemotingCommand cleanExpiredConsumeQueue() {
    LOGGER.info("AdminBrokerProcessor#cleanExpiredConsumeQueue: start.");
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    try {
        brokerController.getMessageStore().cleanExpiredConsumerQueue();
    } catch (Throwable t) {
        // Deliberately broad: surface any store failure to the caller as an error response.
        return buildErrorResponse(ResponseCode.SYSTEM_ERROR, t.getMessage());
    }
    LOGGER.info("AdminBrokerProcessor#cleanExpiredConsumeQueue: end.");
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Manually triggers deletion of expired commit-log files in the message store.
 * Always reports SUCCESS; the store performs the work asynchronously/best-effort.
 */
public RemotingCommand deleteExpiredCommitLog() {
    LOGGER.warn("invoke deleteExpiredCommitLog start.");
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    brokerController.getMessageStore().executeDeleteFilesManually();
    LOGGER.warn("invoke deleteExpiredCommitLog end.");
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Asks the message store to drop data for topics that are no longer present in the
 * topic-config table (the set of currently-configured topic names is the keep-list).
 */
public RemotingCommand cleanUnusedTopic() {
    LOGGER.warn("invoke cleanUnusedTopic start.");
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    final Set<String> retainedTopics = brokerController.getTopicConfigManager().getTopicConfigTable().keySet();
    brokerController.getMessageStore().cleanUnusedTopic(retainedTopics);
    LOGGER.warn("invoke cleanUnusedTopic end.");
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Forwards a "get running info" request to the specific consumer client instance
 * (group + clientId) through the broker-to-client channel.
 */
private RemotingCommand getConsumerRunningInfo(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final GetConsumerRunningInfoRequestHeader header =
        (GetConsumerRunningInfoRequestHeader) request.decodeCommandCustomHeader(GetConsumerRunningInfoRequestHeader.class);
    return this.callConsumer(RequestCode.GET_CONSUMER_RUNNING_INFO, request,
        header.getConsumerGroup(), header.getClientId());
}
/**
 * Computes "correction" offsets for a topic: the minimum committed offset per queue across
 * all groups except the filtered ones, then compares against the compareGroup's offsets.
 * A queue where the compare group lags behind that minimum is flagged with Long.MAX_VALUE.
 *
 * Fix: guard against a queueId present in {@code compareOffset} but absent from
 * {@code correctionOffset} — the original auto-unboxed a null Long and threw an NPE.
 */
private RemotingCommand queryCorrectionOffset(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    QueryCorrectionOffsetHeader requestHeader =
        (QueryCorrectionOffsetHeader) request.decodeCommandCustomHeader(QueryCorrectionOffsetHeader.class);
    // Per-queue minimum offset over every group except the filtered ones.
    Map<Integer, Long> correctionOffset = this.brokerController.getConsumerOffsetManager()
        .queryMinOffsetInAllGroup(requestHeader.getTopic(), requestHeader.getFilterGroups());
    Map<Integer, Long> compareOffset =
        this.brokerController.getConsumerOffsetManager().queryOffset(requestHeader.getCompareGroup(), requestHeader.getTopic());
    if (compareOffset != null && !compareOffset.isEmpty()) {
        for (Map.Entry<Integer, Long> entry : compareOffset.entrySet()) {
            Integer queueId = entry.getKey();
            Long minOffset = correctionOffset.get(queueId);
            if (minOffset == null) {
                // No offset data for this queue among the unfiltered groups; nothing to correct.
                continue;
            }
            // Compare group behind the minimum of the other groups => mark uncorrectable.
            correctionOffset.put(queueId, minOffset > entry.getValue() ? Long.MAX_VALUE : minOffset);
        }
    }
    QueryCorrectionOffsetBody body = new QueryCorrectionOffsetBody();
    body.setCorrectionOffsets(correctionOffset);
    response.setBody(body.encode());
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Asks a specific consumer client to consume one message directly: looks the message up
 * in the store by msgId, attaches its raw bytes (plus broker/topic/group flags) to the
 * request, and forwards it to the client.
 *
 * Fixes: the original silently swallowed UnknownHostException (malformed msgId) in an
 * empty catch block, and dereferenced selectMappedBufferResult without a null check
 * (NPE when the message is not found in the store).
 */
private RemotingCommand consumeMessageDirectly(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final ConsumeMessageDirectlyResultRequestHeader requestHeader = (ConsumeMessageDirectlyResultRequestHeader) request
        .decodeCommandCustomHeader(ConsumeMessageDirectlyResultRequestHeader.class);
    // brokerName
    request.getExtFields().put("brokerName", this.brokerController.getBrokerConfig().getBrokerName());
    // topicSysFlag
    if (StringUtils.isNotEmpty(requestHeader.getTopic())) {
        TopicConfig topicConfig = this.brokerController.getTopicConfigManager().getTopicConfigTable().get(requestHeader.getTopic());
        if (topicConfig != null) {
            request.addExtField("topicSysFlag", String.valueOf(topicConfig.getTopicSysFlag()));
        }
    }
    // groupSysFlag
    if (StringUtils.isNotEmpty(requestHeader.getConsumerGroup())) {
        SubscriptionGroupConfig groupConfig = brokerController.getSubscriptionGroupManager().findSubscriptionGroupConfig(requestHeader.getConsumerGroup());
        if (groupConfig != null) {
            request.addExtField("groupSysFlag", String.valueOf(groupConfig.getGroupSysFlag()));
        }
    }
    SelectMappedBufferResult selectMappedBufferResult = null;
    try {
        MessageId messageId = MessageDecoder.decodeMessageId(requestHeader.getMsgId());
        selectMappedBufferResult = this.brokerController.getMessageStore().selectOneMessageByOffset(messageId.getOffset());
        if (selectMappedBufferResult != null) {
            byte[] body = new byte[selectMappedBufferResult.getSize()];
            selectMappedBufferResult.getByteBuffer().get(body);
            request.setBody(body);
        } else {
            // Message not found in the store; forward the request without a body.
            LOGGER.warn("consumeMessageDirectly: message not found in store, msgId={}", requestHeader.getMsgId());
        }
    } catch (UnknownHostException e) {
        // msgId encodes host:port; a malformed id must not abort the call, but should be visible.
        LOGGER.warn("consumeMessageDirectly: malformed msgId {}", requestHeader.getMsgId(), e);
    } finally {
        if (selectMappedBufferResult != null) {
            selectMappedBufferResult.release();
        }
    }
    return this.callConsumer(RequestCode.CONSUME_MESSAGE_DIRECTLY, request, requestHeader.getConsumerGroup(),
        requestHeader.getClientId());
}
/**
 * Copies committed offsets from srcGroup to destGroup. When the request topic is blank,
 * every topic the source group has offsets for is cloned; otherwise only the named topic.
 * When the source group is online ({@code !isOffline()}), topics it is not actually
 * subscribed to are skipped.
 *
 * Fix: the clone call used {@code requestHeader.getTopic()} instead of the loop variable
 * {@code topic}, so in the "all topics" case (blank request topic) every iteration cloned
 * the blank topic rather than each discovered topic.
 */
private RemotingCommand cloneGroupOffset(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    CloneGroupOffsetRequestHeader requestHeader =
        (CloneGroupOffsetRequestHeader) request.decodeCommandCustomHeader(CloneGroupOffsetRequestHeader.class);
    Set<String> topics;
    if (UtilAll.isBlank(requestHeader.getTopic())) {
        // No topic named: clone offsets for every topic the source group consumes.
        topics = this.brokerController.getConsumerOffsetManager().whichTopicByConsumer(requestHeader.getSrcGroup());
    } else {
        topics = new HashSet<>();
        topics.add(requestHeader.getTopic());
    }
    for (String topic : topics) {
        TopicConfig topicConfig = this.brokerController.getTopicConfigManager().selectTopicConfig(topic);
        if (null == topicConfig) {
            LOGGER.warn("[cloneGroupOffset], topic config not exist, {}", topic);
            continue;
        }
        if (!requestHeader.isOffline()) {
            // Source group is online: only clone topics it currently subscribes to.
            SubscriptionData findSubscriptionData =
                this.brokerController.getConsumerManager().findSubscriptionData(requestHeader.getSrcGroup(), topic);
            if (this.brokerController.getConsumerManager().findSubscriptionDataCount(requestHeader.getSrcGroup()) > 0
                && findSubscriptionData == null) {
                LOGGER.warn(
                    "AdminBrokerProcessor#cloneGroupOffset: topic does not exist in consumer group's "
                        + "subscription, topic={}, consumer group={}", topic, requestHeader.getSrcGroup());
                continue;
            }
        }
        // Use the loop's topic (not the possibly-blank request topic) so the all-topics case works.
        this.brokerController.getConsumerOffsetManager().cloneOffset(requestHeader.getSrcGroup(), requestHeader.getDestGroup(),
            topic);
    }
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Returns minute/hour/day snapshots of one named broker stats item.
 * (Method name keeps its historical non-conventional capitalization because it is
 * referenced by the request-dispatch table elsewhere in this class.)
 *
 * Improvement: the three identical snapshot-to-item conversion blocks are extracted
 * into a single private helper.
 */
private RemotingCommand ViewBrokerStatsData(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final ViewBrokerStatsDataRequestHeader requestHeader =
        (ViewBrokerStatsDataRequestHeader) request.decodeCommandCustomHeader(ViewBrokerStatsDataRequestHeader.class);
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    MessageStore messageStore = this.brokerController.getMessageStore();
    StatsItem statsItem = messageStore.getBrokerStatsManager().getStatsItem(requestHeader.getStatsName(), requestHeader.getStatsKey());
    if (null == statsItem) {
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark(String.format("The stats <%s> <%s> not exist", requestHeader.getStatsName(), requestHeader.getStatsKey()));
        return response;
    }
    BrokerStatsData brokerStatsData = new BrokerStatsData();
    brokerStatsData.setStatsMinute(toBrokerStatsItem(statsItem.getStatsDataInMinute()));
    brokerStatsData.setStatsHour(toBrokerStatsItem(statsItem.getStatsDataInHour()));
    brokerStatsData.setStatsDay(toBrokerStatsItem(statsItem.getStatsDataInDay()));
    response.setBody(brokerStatsData.encode());
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}

/** Converts one stats snapshot (sum/tps/avgpt) into its wire representation. */
private static BrokerStatsItem toBrokerStatsItem(StatsSnapshot ss) {
    BrokerStatsItem it = new BrokerStatsItem();
    it.setSum(ss.getSum());
    it.setTps(ss.getTps());
    it.setAvgpt(ss.getAvgpt());
    return it;
}
/**
 * Aggregates consume statistics for every subscription group on this broker: per group,
 * per consumed topic, per write queue, records broker offset, consumer offset and the
 * timestamp of the last consumed message. Also totals the backlog (diff) and in-flight
 * diff across all groups. When {@code isOrder} is set, non-order topics are skipped.
 *
 * @throws RemotingCommandException if a queue's max offset cannot be read from the store
 */
private RemotingCommand fetchAllConsumeStatsInBroker(ChannelHandlerContext ctx, RemotingCommand request)
    throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    GetConsumeStatsInBrokerHeader requestHeader = request.decodeCommandCustomHeader(GetConsumeStatsInBrokerHeader.class);
    boolean isOrder = requestHeader.isOrder();
    ConcurrentMap<String, SubscriptionGroupConfig> subscriptionGroups =
        brokerController.getSubscriptionGroupManager().getSubscriptionGroupTable();
    List<Map<String/* subscriptionGroupName */, List<ConsumeStats>>> brokerConsumeStatsList =
        new ArrayList<>();
    long totalDiff = 0L;
    long totalInflightDiff = 0L;
    for (String group : subscriptionGroups.keySet()) {
        Map<String, List<ConsumeStats>> subscripTopicConsumeMap = new HashMap<>();
        // Topics this group has committed offsets for on this broker.
        Set<String> topics = this.brokerController.getConsumerOffsetManager().whichTopicByConsumer(group);
        List<ConsumeStats> consumeStatsList = new ArrayList<>();
        for (String topic : topics) {
            ConsumeStats consumeStats = new ConsumeStats();
            TopicConfig topicConfig = this.brokerController.getTopicConfigManager().selectTopicConfig(topic);
            if (null == topicConfig) {
                LOGGER.warn(
                    "AdminBrokerProcessor#fetchAllConsumeStatsInBroker: topic config does not exist, topic={}",
                    topic);
                continue;
            }
            if (isOrder && !topicConfig.isOrder()) {
                continue;
            }
            {
                // Skip topics the group is online for but not actually subscribed to.
                SubscriptionData findSubscriptionData = this.brokerController.getConsumerManager().findSubscriptionData(group, topic);
                if (null == findSubscriptionData
                    && this.brokerController.getConsumerManager().findSubscriptionDataCount(group) > 0) {
                    LOGGER.warn(
                        "AdminBrokerProcessor#fetchAllConsumeStatsInBroker: topic does not exist in consumer "
                            + "group's subscription, topic={}, consumer group={}", topic, group);
                    continue;
                }
            }
            for (int i = 0; i < topicConfig.getWriteQueueNums(); i++) {
                MessageQueue mq = new MessageQueue();
                mq.setTopic(topic);
                mq.setBrokerName(this.brokerController.getBrokerConfig().getBrokerName());
                mq.setQueueId(i);
                OffsetWrapper offsetWrapper = new OffsetWrapper();
                long brokerOffset;
                try {
                    brokerOffset = this.brokerController.getMessageStore().getMaxOffsetInQueue(topic, i);
                } catch (ConsumeQueueException e) {
                    throw new RemotingCommandException("Failed to get max offset", e);
                }
                // Clamp negative sentinel values to zero before reporting.
                if (brokerOffset < 0) {
                    brokerOffset = 0;
                }
                long consumerOffset = this.brokerController.getConsumerOffsetManager().queryOffset(
                    group,
                    topic,
                    i);
                if (consumerOffset < 0)
                    consumerOffset = 0;
                offsetWrapper.setBrokerOffset(brokerOffset);
                offsetWrapper.setConsumerOffset(consumerOffset);
                // Timestamp of the last consumed message (offset - 1), if any.
                long timeOffset = consumerOffset - 1;
                if (timeOffset >= 0) {
                    long lastTimestamp = this.brokerController.getMessageStore().getMessageStoreTimeStamp(topic, i, timeOffset);
                    if (lastTimestamp > 0) {
                        offsetWrapper.setLastTimestamp(lastTimestamp);
                    }
                }
                consumeStats.getOffsetTable().put(mq, offsetWrapper);
            }
            double consumeTps = this.brokerController.getBrokerStatsManager().tpsGroupGetNums(group, topic);
            consumeTps += consumeStats.getConsumeTps();
            consumeStats.setConsumeTps(consumeTps);
            totalDiff += consumeStats.computeTotalDiff();
            totalInflightDiff += consumeStats.computeInflightTotalDiff();
            consumeStatsList.add(consumeStats);
        }
        subscripTopicConsumeMap.put(group, consumeStatsList);
        brokerConsumeStatsList.add(subscripTopicConsumeMap);
    }
    ConsumeStatsList consumeStats = new ConsumeStatsList();
    consumeStats.setBrokerAddr(brokerController.getBrokerAddr());
    consumeStats.setConsumeStatsList(brokerConsumeStatsList);
    consumeStats.setTotalDiff(totalDiff);
    consumeStats.setTotalInflightDiff(totalInflightDiff);
    response.setBody(consumeStats.encode());
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Builds the broker runtime-info table returned to admin tools: starts from the message
 * store's own runtime info, lets attached plugins contribute, then adds broker stats,
 * timer-wheel metrics, store flush/commit backlogs, disk capacity and thread-pool queue
 * depths. The string keys form the wire contract with admin clients — do not rename them.
 *
 * @throws RemotingCommandException if schedule-message running stats cannot be built
 */
private HashMap<String, String> prepareRuntimeInfo() throws RemotingCommandException {
    HashMap<String, String> runtimeInfo = this.brokerController.getMessageStore().getRuntimeInfo();
    // Attached plugins may contribute their own runtime entries.
    for (BrokerAttachedPlugin brokerAttachedPlugin : brokerController.getBrokerAttachedPlugins()) {
        if (brokerAttachedPlugin != null) {
            brokerAttachedPlugin.buildRuntimeInfo(runtimeInfo);
        }
    }
    try {
        this.brokerController.getScheduleMessageService().buildRunningStats(runtimeInfo);
    } catch (ConsumeQueueException e) {
        throw new RemotingCommandException("Failed to get max offset in queue", e);
    }
    runtimeInfo.put("brokerActive", String.valueOf(this.brokerController.isSpecialServiceRunning()));
    runtimeInfo.put("brokerVersionDesc", MQVersion.getVersionDesc(MQVersion.CURRENT_VERSION));
    runtimeInfo.put("msgPutTotalYesterdayMorning",
        String.valueOf(this.brokerController.getBrokerStats().getMsgPutTotalYesterdayMorning()));
    runtimeInfo.put("msgPutTotalTodayMorning", String.valueOf(this.brokerController.getBrokerStats().getMsgPutTotalTodayMorning()));
    runtimeInfo.put("msgPutTotalTodayNow", String.valueOf(this.brokerController.getBrokerStats().getMsgPutTotalTodayNow()));
    runtimeInfo.put("msgGetTotalYesterdayMorning",
        String.valueOf(this.brokerController.getBrokerStats().getMsgGetTotalYesterdayMorning()));
    runtimeInfo.put("msgGetTotalTodayMorning", String.valueOf(this.brokerController.getBrokerStats().getMsgGetTotalTodayMorning()));
    runtimeInfo.put("msgGetTotalTodayNow", String.valueOf(this.brokerController.getBrokerStats().getMsgGetTotalTodayNow()));
    runtimeInfo.put("dispatchBehindBytes", String.valueOf(this.brokerController.getMessageStore().dispatchBehindBytes()));
    runtimeInfo.put("pageCacheLockTimeMills", String.valueOf(this.brokerController.getMessageStore().lockTimeMills()));
    runtimeInfo.put("earliestMessageTimeStamp", String.valueOf(this.brokerController.getMessageStore().getEarliestMessageTime()));
    runtimeInfo.put("startAcceptSendRequestTimeStamp", String.valueOf(this.brokerController.getBrokerConfig().getStartAcceptSendRequestTimeStamp()));
    // Timer-wheel metrics: real values when enabled, fixed zeros otherwise so keys stay present.
    if (this.brokerController.getMessageStoreConfig().isTimerWheelEnable()) {
        runtimeInfo.put("timerReadBehind", String.valueOf(this.brokerController.getMessageStore().getTimerMessageStore().getDequeueBehind()));
        runtimeInfo.put("timerOffsetBehind", String.valueOf(this.brokerController.getMessageStore().getTimerMessageStore().getEnqueueBehindMessages()));
        runtimeInfo.put("timerCongestNum", String.valueOf(this.brokerController.getMessageStore().getTimerMessageStore().getAllCongestNum()));
        runtimeInfo.put("timerEnqueueTps", String.valueOf(this.brokerController.getMessageStore().getTimerMessageStore().getEnqueueTps()));
        runtimeInfo.put("timerDequeueTps", String.valueOf(this.brokerController.getMessageStore().getTimerMessageStore().getDequeueTps()));
    } else {
        runtimeInfo.put("timerReadBehind", "0");
        runtimeInfo.put("timerOffsetBehind", "0");
        runtimeInfo.put("timerCongestNum", "0");
        runtimeInfo.put("timerEnqueueTps", "0.0");
        runtimeInfo.put("timerDequeueTps", "0.0");
    }
    MessageStore messageStore = this.brokerController.getMessageStore();
    runtimeInfo.put("remainTransientStoreBufferNumbs", String.valueOf(messageStore.remainTransientStoreBufferNumbs()));
    // Commit backlog is only meaningful when the transient store pool is in use.
    if (this.brokerController.getMessageStore() instanceof DefaultMessageStore && ((DefaultMessageStore) this.brokerController.getMessageStore()).isTransientStorePoolEnable()) {
        runtimeInfo.put("remainHowManyDataToCommit", MixAll.humanReadableByteCount(messageStore.remainHowManyDataToCommit(), false));
    }
    runtimeInfo.put("remainHowManyDataToFlush", MixAll.humanReadableByteCount(messageStore.remainHowManyDataToFlush(), false));
    java.io.File commitLogDir = new java.io.File(this.brokerController.getMessageStoreConfig().getStorePathRootDir());
    if (commitLogDir.exists()) {
        runtimeInfo.put("commitLogDirCapacity", String.format("Total : %s, Free : %s.", MixAll.humanReadableByteCount(commitLogDir.getTotalSpace(), false), MixAll.humanReadableByteCount(commitLogDir.getFreeSpace(), false)));
    }
    // Thread-pool queue depth / capacity / head wait time, per processing pool.
    runtimeInfo.put("sendThreadPoolQueueSize", String.valueOf(this.brokerController.getSendThreadPoolQueue().size()));
    runtimeInfo.put("sendThreadPoolQueueCapacity",
        String.valueOf(this.brokerController.getBrokerConfig().getSendThreadPoolQueueCapacity()));
    runtimeInfo.put("pullThreadPoolQueueSize", String.valueOf(this.brokerController.getPullThreadPoolQueue().size()));
    runtimeInfo.put("pullThreadPoolQueueCapacity",
        String.valueOf(this.brokerController.getBrokerConfig().getPullThreadPoolQueueCapacity()));
    runtimeInfo.put("litePullThreadPoolQueueSize", String.valueOf(brokerController.getLitePullThreadPoolQueue().size()));
    runtimeInfo.put("litePullThreadPoolQueueCapacity",
        String.valueOf(this.brokerController.getBrokerConfig().getLitePullThreadPoolQueueCapacity()));
    runtimeInfo.put("queryThreadPoolQueueSize", String.valueOf(this.brokerController.getQueryThreadPoolQueue().size()));
    runtimeInfo.put("queryThreadPoolQueueCapacity",
        String.valueOf(this.brokerController.getBrokerConfig().getQueryThreadPoolQueueCapacity()));
    runtimeInfo.put("ackThreadPoolQueueSize", String.valueOf(this.brokerController.getAckThreadPoolQueue().size()));
    runtimeInfo.put("ackThreadPoolQueueCapacity",
        String.valueOf(this.brokerController.getBrokerConfig().getAckThreadPoolQueueCapacity()));
    runtimeInfo.put("sendThreadPoolQueueHeadWaitTimeMills", String.valueOf(this.brokerController.headSlowTimeMills4SendThreadPoolQueue()));
    runtimeInfo.put("pullThreadPoolQueueHeadWaitTimeMills", String.valueOf(brokerController.headSlowTimeMills4PullThreadPoolQueue()));
    runtimeInfo.put("queryThreadPoolQueueHeadWaitTimeMills", String.valueOf(this.brokerController.headSlowTimeMills4QueryThreadPoolQueue()));
    runtimeInfo.put("litePullThreadPoolQueueHeadWaitTimeMills", String.valueOf(brokerController.headSlowTimeMills4LitePullThreadPoolQueue()));
    runtimeInfo.put("ackThreadPoolQueueHeadWaitTimeMills", String.valueOf(brokerController.headSlowTimeMills4AckThreadPoolQueue()));
    runtimeInfo.put("EndTransactionQueueSize", String.valueOf(this.brokerController.getEndTransactionThreadPoolQueue().size()));
    runtimeInfo.put("EndTransactionThreadPoolQueueCapacity",
        String.valueOf(this.brokerController.getBrokerConfig().getEndTransactionPoolQueueCapacity()));
    return runtimeInfo;
}
/**
 * Forwards an admin request to a specific consumer client over its registered channel.
 * Fails fast with SYSTEM_ERROR when the client is not online or its version predates
 * V3_1_8_SNAPSHOT (older clients cannot handle these callbacks). Timeouts map to
 * CONSUME_MSG_TIMEOUT; other failures map to SYSTEM_ERROR.
 *
 * @param requestCode request code for the forwarded command
 * @param request original admin request whose ext fields and body are copied over
 * @param consumerGroup target consumer group
 * @param clientId target client instance within the group
 */
private RemotingCommand callConsumer(
    final int requestCode,
    final RemotingCommand request,
    final String consumerGroup,
    final String clientId) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    ClientChannelInfo clientChannelInfo = this.brokerController.getConsumerManager().findChannel(consumerGroup, clientId);
    if (null == clientChannelInfo) {
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark(String.format("The Consumer <%s> <%s> not online", consumerGroup, clientId));
        return response;
    }
    if (clientChannelInfo.getVersion() < MQVersion.Version.V3_1_8_SNAPSHOT.ordinal()) {
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark(String.format("The Consumer <%s> Version <%s> too low to finish, please upgrade it to V3_1_8_SNAPSHOT",
            clientId,
            MQVersion.getVersionDesc(clientChannelInfo.getVersion())));
        return response;
    }
    try {
        // Re-wrap with the target request code; carry over ext fields and body unchanged.
        RemotingCommand newRequest = RemotingCommand.createRequestCommand(requestCode, null);
        newRequest.setExtFields(request.getExtFields());
        newRequest.setBody(request.getBody());
        return this.brokerController.getBroker2Client().callClient(clientChannelInfo.getChannel(), newRequest);
    } catch (RemotingTimeoutException e) {
        response.setCode(ResponseCode.CONSUME_MSG_TIMEOUT);
        response
            .setRemark(String.format("consumer <%s> <%s> Timeout: %s", consumerGroup, clientId, UtilAll.exceptionSimpleDesc(e)));
        return response;
    } catch (Exception e) {
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark(
            String.format("invoke consumer <%s> <%s> Exception: %s", consumerGroup, clientId, UtilAll.exceptionSimpleDesc(e)));
        return response;
    }
}
/**
 * Dumps up to {@code count} consume-queue entries of one topic/queue starting at the
 * requested logical index, including each entry's commit-log position, size and tags code.
 * If a consumer group is given, also attaches its subscription/filter data and evaluates
 * the filter against entries that carry extended (bitmap) data.
 */
private RemotingCommand queryConsumeQueue(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    QueryConsumeQueueRequestHeader requestHeader =
        (QueryConsumeQueueRequestHeader) request.decodeCommandCustomHeader(QueryConsumeQueueRequestHeader.class);
    RemotingCommand response = RemotingCommand.createResponseCommand(null);
    ConsumeQueueInterface consumeQueue = this.brokerController.getMessageStore().getConsumeQueue(requestHeader.getTopic(),
        requestHeader.getQueueId());
    if (consumeQueue == null) {
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark(String.format("%d@%s is not exist!", requestHeader.getQueueId(), requestHeader.getTopic()));
        return response;
    }
    response.setCode(ResponseCode.SUCCESS);
    QueryConsumeQueueResponseBody body = new QueryConsumeQueueResponseBody();
    body.setMaxQueueIndex(consumeQueue.getMaxOffsetInQueue());
    body.setMinQueueIndex(consumeQueue.getMinOffsetInQueue());
    MessageFilter messageFilter = null;
    if (requestHeader.getConsumerGroup() != null) {
        // Optional: include the group's subscription and build its filter for evaluation below.
        SubscriptionData subscriptionData = this.brokerController.getConsumerManager().findSubscriptionData(
            requestHeader.getConsumerGroup(), requestHeader.getTopic()
        );
        body.setSubscriptionData(subscriptionData);
        if (subscriptionData == null) {
            body.setFilterData(String.format("%s@%s is not online!", requestHeader.getConsumerGroup(), requestHeader.getTopic()));
        } else {
            ConsumerFilterData filterData = this.brokerController.getConsumerFilterManager()
                .get(requestHeader.getTopic(), requestHeader.getConsumerGroup());
            body.setFilterData(JSON.toJSONString(filterData, true));
            messageFilter = new ExpressionMessageFilter(subscriptionData, filterData,
                this.brokerController.getConsumerFilterManager());
        }
    }
    ReferredIterator<CqUnit> result = consumeQueue.iterateFrom(requestHeader.getIndex());
    if (result == null) {
        response.setRemark(String.format("Index %d of %d@%s is not exist!", requestHeader.getIndex(), requestHeader.getQueueId(), requestHeader.getTopic()));
        return response;
    }
    // The iterator holds a reference-counted buffer; release it in finally regardless of outcome.
    try {
        List<ConsumeQueueData> queues = new ArrayList<>();
        while (result.hasNext()) {
            CqUnit cqUnit = result.next();
            // Stop after the requested number of entries past the starting index.
            if (cqUnit.getQueueOffset() - requestHeader.getIndex() >= requestHeader.getCount()) {
                break;
            }
            ConsumeQueueData one = new ConsumeQueueData();
            one.setPhysicOffset(cqUnit.getPos());
            one.setPhysicSize(cqUnit.getSize());
            one.setTagsCode(cqUnit.getTagsCode());
            // Plain entry (no extended unit, tags code is a real hash): emit as-is.
            if (cqUnit.getCqExtUnit() == null && cqUnit.isTagsCodeValid()) {
                queues.add(one);
                continue;
            }
            if (cqUnit.getCqExtUnit() != null) {
                ConsumeQueueExt.CqExtUnit cqExtUnit = cqUnit.getCqExtUnit();
                one.setExtendDataJson(JSON.toJSONString(cqExtUnit));
                if (cqExtUnit.getFilterBitMap() != null) {
                    one.setBitMap(BitsArray.create(cqExtUnit.getFilterBitMap()).toString());
                }
                if (messageFilter != null) {
                    one.setEval(messageFilter.isMatchedByConsumeQueue(cqExtUnit.getTagsCode(), cqExtUnit));
                }
            } else {
                // Tags code points to an extended-store address, but the extend record is missing.
                one.setMsg("Cq extend not exist!addr: " + one.getTagsCode());
            }
            queues.add(one);
        }
        body.setQueueData(queues);
    } finally {
        result.release();
    }
    response.setBody(body.encode());
    return response;
}
/**
 * Re-enqueues a transactional half message (looked up by msgId) into the half topic with
 * its check-times counter reset to 0, so the transaction back-check resumes for it.
 *
 * Fix: the failure log previously dropped the caught exception; it is now logged as the
 * throwable argument so the cause and stack trace are preserved.
 */
private RemotingCommand resumeCheckHalfMessage(ChannelHandlerContext ctx,
    RemotingCommand request)
    throws RemotingCommandException {
    final ResumeCheckHalfMessageRequestHeader requestHeader = (ResumeCheckHalfMessageRequestHeader) request
        .decodeCommandCustomHeader(ResumeCheckHalfMessageRequestHeader.class);
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    SelectMappedBufferResult selectMappedBufferResult = null;
    try {
        MessageId messageId = MessageDecoder.decodeMessageId(requestHeader.getMsgId());
        selectMappedBufferResult = this.brokerController.getMessageStore()
            .selectOneMessageByOffset(messageId.getOffset());
        MessageExt msg = MessageDecoder.decode(selectMappedBufferResult.getByteBuffer());
        // Reset the back-check counter so the transaction check service re-examines this message.
        msg.putUserProperty(MessageConst.PROPERTY_TRANSACTION_CHECK_TIMES, String.valueOf(0));
        PutMessageResult putMessageResult = this.brokerController.getMessageStore()
            .putMessage(toMessageExtBrokerInner(msg));
        if (putMessageResult != null
            && putMessageResult.getPutMessageStatus() == PutMessageStatus.PUT_OK) {
            LOGGER.info(
                "Put message back to RMQ_SYS_TRANS_HALF_TOPIC. real topic={}",
                msg.getUserProperty(MessageConst.PROPERTY_REAL_TOPIC));
            response.setCode(ResponseCode.SUCCESS);
            response.setRemark(null);
        } else {
            LOGGER.error("Put message back to RMQ_SYS_TRANS_HALF_TOPIC failed.");
            response.setCode(ResponseCode.SYSTEM_ERROR);
            response.setRemark("Put message back to RMQ_SYS_TRANS_HALF_TOPIC failed.");
        }
    } catch (Exception e) {
        // Include the throwable so the root cause is not lost.
        LOGGER.error("Exception was thrown when putting message back to RMQ_SYS_TRANS_HALF_TOPIC.", e);
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark("Exception was thrown when putting message back to RMQ_SYS_TRANS_HALF_TOPIC.");
    } finally {
        if (selectMappedBufferResult != null) {
            selectMappedBufferResult.release();
        }
    }
    return response;
}
/**
 * Re-wraps a decoded message as a broker-inner message destined for the transactional
 * half topic, queue 0, preserving the original body, flags, properties and identity.
 * waitStoreMsgOK is forced off so the re-put does not block on replication/flush.
 */
private MessageExtBrokerInner toMessageExtBrokerInner(MessageExt msgExt) {
    MessageExtBrokerInner halfMessage = new MessageExtBrokerInner();
    // Always targets the half topic, queue 0.
    halfMessage.setTopic(TransactionalMessageUtil.buildHalfTopic());
    halfMessage.setQueueId(0);
    // Carry the original payload and metadata over unchanged.
    halfMessage.setBody(msgExt.getBody());
    halfMessage.setFlag(msgExt.getFlag());
    halfMessage.setSysFlag(msgExt.getSysFlag());
    MessageAccessor.setProperties(halfMessage, msgExt.getProperties());
    halfMessage.setPropertiesString(MessageDecoder.messageProperties2String(msgExt.getProperties()));
    halfMessage.setTagsCode(MessageExtBrokerInner.tagsString2tagsCode(msgExt.getTags()));
    halfMessage.setBornHost(msgExt.getBornHost());
    halfMessage.setBornTimestamp(msgExt.getBornTimestamp());
    halfMessage.setStoreHost(msgExt.getStoreHost());
    halfMessage.setReconsumeTimes(msgExt.getReconsumeTimes());
    halfMessage.setMsgId(msgExt.getMsgId());
    halfMessage.setWaitStoreMsgOK(false);
    return halfMessage;
}
/**
 * Handles GET_TOPIC_CONFIG: returns the {@link TopicConfig} of the requested topic,
 * optionally bundled with its static topic queue mapping when the request's "lo"
 * flag is set. Replies TOPIC_NOT_EXIST when this broker does not host the topic.
 */
private RemotingCommand getTopicConfig(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    GetTopicConfigRequestHeader requestHeader = (GetTopicConfigRequestHeader) request.decodeCommandCustomHeader(GetTopicConfigRequestHeader.class);
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    TopicConfig topicConfig = this.brokerController.getTopicConfigManager().selectTopicConfig(requestHeader.getTopic());
    if (topicConfig == null) {
        LOGGER.error("No topic in this broker, client: {} topic: {}", ctx.channel().remoteAddress(), requestHeader.getTopic());
        //be care of the response code, should set "not-exist" explicitly
        response.setCode(ResponseCode.TOPIC_NOT_EXIST);
        response.setRemark("No topic in this broker. topic: " + requestHeader.getTopic());
        return response;
    }
    TopicQueueMappingDetail topicQueueMappingDetail = null;
    if (Boolean.TRUE.equals(requestHeader.getLo())) {
        // "lo" flag set: also fetch the static topic queue mapping (may be null).
        topicQueueMappingDetail = this.brokerController.getTopicQueueMappingManager().getTopicQueueMapping(requestHeader.getTopic());
    }
    String content = JSONObject.toJSONString(new TopicConfigAndQueueMapping(topicConfig, topicQueueMappingDetail));
    try {
        response.setBody(content.getBytes(MixAll.DEFAULT_CHARSET));
    } catch (UnsupportedEncodingException e) {
        // Defensive: getBytes(String) declares this checked exception even for
        // charsets that are presumably always available -- TODO confirm DEFAULT_CHARSET.
        LOGGER.error("UnsupportedEncodingException getTopicConfig: topic=" + topicConfig.getTopicName(), e);
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark("UnsupportedEncodingException " + e.getMessage());
        return response;
    }
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Handles NOTIFY_MIN_BROKER_ID_CHANGE: records the group's new minimum broker id
 * and forwards the new min-broker / offline-broker / HA addresses to the controller
 * logic. Always answers SUCCESS.
 */
private RemotingCommand notifyMinBrokerIdChange(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final NotifyMinBrokerIdChangeRequestHeader header =
        (NotifyMinBrokerIdChangeRequestHeader) request.decodeCommandCustomHeader(NotifyMinBrokerIdChangeRequestHeader.class);
    LOGGER.warn("min broker id changed, prev {}, new {}",
        this.brokerController.getMinBrokerIdInGroup(), header.getMinBrokerId());
    this.brokerController.updateMinBroker(header.getMinBrokerId(), header.getMinBrokerAddr(),
        header.getOfflineBrokerAddr(), header.getHaBrokerAddr());
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Handles EXCHANGE_BROKER_HA_INFO in both directions:
 * when the request carries a master HA address, this broker (acting as slave)
 * records the master's HA/service addresses and, if configured, seeds its master
 * flush offset on first sync; otherwise, when this broker is the master, it fills
 * the response header with its own HA address, init max offset and broker address.
 */
private RemotingCommand updateBrokerHaInfo(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    RemotingCommand response = RemotingCommand.createResponseCommand(ExchangeHAInfoResponseHeader.class);
    ExchangeHAInfoRequestHeader requestHeader = (ExchangeHAInfoRequestHeader) request.decodeCommandCustomHeader(ExchangeHAInfoRequestHeader.class);
    if (requestHeader.getMasterHaAddress() != null) {
        // Slave path: adopt the master's addresses.
        this.brokerController.getMessageStore().updateHaMasterAddress(requestHeader.getMasterHaAddress());
        this.brokerController.getMessageStore().updateMasterAddress(requestHeader.getMasterAddress());
        // Seed the master flush offset only once (while still 0) and only when enabled.
        if (this.brokerController.getMessageStore().getMasterFlushedOffset() == 0
            && this.brokerController.getMessageStoreConfig().isSyncMasterFlushOffsetWhenStartup()) {
            LOGGER.info("Set master flush offset in slave to {}", requestHeader.getMasterFlushOffset());
            this.brokerController.getMessageStore().setMasterFlushedOffset(requestHeader.getMasterFlushOffset());
        }
    } else if (this.brokerController.getBrokerConfig().getBrokerId() == MixAll.MASTER_ID) {
        // Master path: advertise our HA endpoint and offsets back to the caller.
        final ExchangeHAInfoResponseHeader responseHeader = (ExchangeHAInfoResponseHeader) response.readCustomHeader();
        responseHeader.setMasterHaAddress(this.brokerController.getHAServerAddr());
        responseHeader.setMasterFlushOffset(this.brokerController.getMessageStore().getBrokerInitMaxOffset());
        responseHeader.setMasterAddress(this.brokerController.getBrokerAddr());
    }
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Returns this broker's HA (replication) runtime status. Fails with SYSTEM_ERROR
 * when the message store cannot provide it (e.g. duplicationEnable is true).
 */
private RemotingCommand getBrokerHaStatus(ChannelHandlerContext ctx, RemotingCommand request) {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    final HARuntimeInfo haInfo = this.brokerController.getMessageStore().getHARuntimeInfo();
    if (haInfo == null) {
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark("Can not get HARuntimeInfo, may be duplicationEnable is true");
        return response;
    }
    response.setBody(haInfo.encode());
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Returns this broker's replication epoch entries plus the current max physical
 * offset, used by the controller-mode replicas protocol. Rejected with
 * SYSTEM_ERROR when controller mode is not enabled.
 */
private RemotingCommand getBrokerEpochCache(ChannelHandlerContext ctx, RemotingCommand request) {
    final ReplicasManager replicasManager = this.brokerController.getReplicasManager();
    // NOTE(review): relies on JVM assertions (-ea) for the null check; with
    // assertions disabled a null manager surfaces later as an NPE -- confirm intended.
    assert replicasManager != null;
    final BrokerConfig brokerConfig = this.brokerController.getBrokerConfig();
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    if (!brokerConfig.isEnableControllerMode()) {
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark("this request only for controllerMode ");
        return response;
    }
    final EpochEntryCache entryCache = new EpochEntryCache(brokerConfig.getBrokerClusterName(),
        brokerConfig.getBrokerName(), brokerConfig.getBrokerId(), replicasManager.getEpochEntries(), this.brokerController.getMessageStore().getMaxPhyOffset());
    response.setBody(entryCache.encode());
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Resets the recorded master flush offset on a slave broker. A master broker
 * ignores the request, and a missing offset in the header is a no-op; the call
 * always answers SUCCESS.
 */
private RemotingCommand resetMasterFlushOffset(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    final boolean isSlave = this.brokerController.getBrokerConfig().getBrokerId() != MixAll.MASTER_ID;
    if (isSlave) {
        final ResetMasterFlushOffsetHeader header =
            (ResetMasterFlushOffsetHeader) request.decodeCommandCustomHeader(ResetMasterFlushOffsetHeader.class);
        if (header.getMasterFlushOffset() != null) {
            this.brokerController.getMessageStore().setMasterFlushedOffset(header.getMasterFlushOffset());
        }
    }
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Handles a controller-issued broker role change: applies the new master broker id,
 * address, master epoch and sync-state-set to the local {@link ReplicasManager}.
 * A missing replicas manager (controller mode disabled) is treated as a silent success.
 *
 * @throws RemotingCommandException if the role change fails; the underlying failure
 *         is preserved as the cause.
 */
private RemotingCommand notifyBrokerRoleChanged(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    NotifyBrokerRoleChangedRequestHeader requestHeader = (NotifyBrokerRoleChangedRequestHeader) request.decodeCommandCustomHeader(NotifyBrokerRoleChangedRequestHeader.class);
    SyncStateSet syncStateSetInfo = RemotingSerializable.decode(request.getBody(), SyncStateSet.class);
    RemotingCommand response = RemotingCommand.createResponseCommand(null);
    LOGGER.info("Receive notifyBrokerRoleChanged request, try to change brokerRole, request:{}", requestHeader);
    final ReplicasManager replicasManager = this.brokerController.getReplicasManager();
    if (replicasManager != null) {
        try {
            replicasManager.changeBrokerRole(requestHeader.getMasterBrokerId(), requestHeader.getMasterAddress(),
                requestHeader.getMasterEpoch(), requestHeader.getSyncStateSetEpoch(), syncStateSetInfo.getSyncStateSet());
        } catch (Exception e) {
            // Bug fix: previously only e.getMessage() survived, dropping the stack
            // trace; keep the original exception as the cause.
            throw new RemotingCommandException(e.getMessage(), e);
        }
    }
    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);
    return response;
}
/**
 * Creates a new auth user from the request header (username) and body (UserInfo).
 * Creating a SUPER user requires the caller itself to be logged in as a super user.
 * The asynchronous manager call is joined, so the response is final on return.
 */
private RemotingCommand createUser(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    RemotingCommand response = RemotingCommand.createResponseCommand(null);
    CreateUserRequestHeader requestHeader = request.decodeCommandCustomHeader(CreateUserRequestHeader.class);
    if (StringUtils.isEmpty(requestHeader.getUsername())) {
        response.setCode(ResponseCode.INVALID_PARAMETER);
        response.setRemark("The username is blank");
        return response;
    }
    UserInfo userInfo = RemotingSerializable.decode(request.getBody(), UserInfo.class);
    // The header's username is authoritative; overwrite whatever the body carried.
    userInfo.setUsername(requestHeader.getUsername());
    User user = UserConverter.convertUser(userInfo);
    // Only a super user may create another super user.
    if (user.getUserType() == UserType.SUPER && isNotSuperUserLogin(request)) {
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark("The super user can only be create by super user");
        return response;
    }
    this.brokerController.getAuthenticationMetadataManager().createUser(user)
        .thenAccept(nil -> response.setCode(ResponseCode.SUCCESS))
        .exceptionally(ex -> {
            LOGGER.error("create user {} error", user.getUsername(), ex);
            return handleAuthException(response, ex);
        })
        .join();
    return response;
}
/**
 * Updates an existing auth user. The super-user restriction is enforced twice:
 * once on the requested (new) user type, and again inside the async chain on the
 * stored user's current type, so a non-super caller can neither grant nor modify
 * SUPER users. The async chain is joined, so the response is final on return.
 */
private RemotingCommand updateUser(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    RemotingCommand response = RemotingCommand.createResponseCommand(null);
    UpdateUserRequestHeader requestHeader = request.decodeCommandCustomHeader(UpdateUserRequestHeader.class);
    if (StringUtils.isEmpty(requestHeader.getUsername())) {
        response.setCode(ResponseCode.INVALID_PARAMETER);
        response.setRemark("The username is blank");
        return response;
    }
    UserInfo userInfo = RemotingSerializable.decode(request.getBody(), UserInfo.class);
    // The header's username is authoritative; overwrite whatever the body carried.
    userInfo.setUsername(requestHeader.getUsername());
    User user = UserConverter.convertUser(userInfo);
    // Early reject: a non-super caller cannot set a user's type to SUPER.
    if (user.getUserType() == UserType.SUPER && isNotSuperUserLogin(request)) {
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark("The super user can only be update by super user");
        return response;
    }
    this.brokerController.getAuthenticationMetadataManager().getUser(requestHeader.getUsername())
        .thenCompose(old -> {
            if (old == null) {
                throw new AuthenticationException("The user is not exist");
            }
            // Second check: a non-super caller cannot modify an existing SUPER user.
            if (old.getUserType() == UserType.SUPER && isNotSuperUserLogin(request)) {
                throw new AuthenticationException("The super user can only be update by super user");
            }
            return this.brokerController.getAuthenticationMetadataManager().updateUser(user);
        }).thenAccept(nil -> response.setCode(ResponseCode.SUCCESS))
        .exceptionally(ex -> {
            LOGGER.error("update user {} error", requestHeader.getUsername(), ex);
            return handleAuthException(response, ex);
        })
        .join();
    return response;
}
/**
 * Deletes an auth user. A non-existent user is a silent success; deleting a SUPER
 * user requires the caller to be a super user. The async chain is joined, so the
 * response is final on return.
 */
private RemotingCommand deleteUser(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    DeleteUserRequestHeader requestHeader = request.decodeCommandCustomHeader(DeleteUserRequestHeader.class);
    this.brokerController.getAuthenticationMetadataManager().getUser(requestHeader.getUsername())
        .thenCompose(user -> {
            if (user == null) {
                // Nothing to delete; still completes successfully below.
                return CompletableFuture.completedFuture(null);
            }
            if (user.getUserType() == UserType.SUPER && isNotSuperUserLogin(request)) {
                throw new AuthenticationException("The super user can only be update by super user");
            }
            return this.brokerController.getAuthenticationMetadataManager().deleteUser(requestHeader.getUsername());
        }).thenAccept(nil -> response.setCode(ResponseCode.SUCCESS))
        .exceptionally(ex -> {
            LOGGER.error("delete user {} error", requestHeader.getUsername(), ex);
            return handleAuthException(response, ex);
        })
        .join();
    return response;
}
/**
 * Looks up a single auth user by username and returns it as JSON in the response
 * body. An unknown username still answers SUCCESS, just with an empty body.
 */
private RemotingCommand getUser(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    GetUserRequestHeader requestHeader = request.decodeCommandCustomHeader(GetUserRequestHeader.class);
    if (StringUtils.isBlank(requestHeader.getUsername())) {
        response.setCode(ResponseCode.INVALID_PARAMETER);
        response.setRemark("The username is blank");
        return response;
    }
    this.brokerController.getAuthenticationMetadataManager().getUser(requestHeader.getUsername())
        .thenAccept(user -> {
            response.setCode(ResponseCode.SUCCESS);
            if (user != null) {
                UserInfo userInfo = UserConverter.convertUser(user);
                response.setBody(JSON.toJSONString(userInfo).getBytes(StandardCharsets.UTF_8));
            }
        })
        .exceptionally(ex -> {
            LOGGER.error("get user {} error", requestHeader.getUsername(), ex);
            return handleAuthException(response, ex);
        })
        .join();
    return response;
}
/**
 * Lists auth users matching the header's filter and returns them as a JSON array
 * in the response body. An empty result still answers SUCCESS with no body.
 */
private RemotingCommand listUser(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    ListUsersRequestHeader requestHeader = request.decodeCommandCustomHeader(ListUsersRequestHeader.class);
    this.brokerController.getAuthenticationMetadataManager().listUser(requestHeader.getFilter())
        .thenAccept(users -> {
            response.setCode(ResponseCode.SUCCESS);
            if (CollectionUtils.isNotEmpty(users)) {
                List<UserInfo> userInfos = UserConverter.convertUsers(users);
                response.setBody(JSON.toJSONString(userInfos).getBytes(StandardCharsets.UTF_8));
            }
        })
        .exceptionally(ex -> {
            LOGGER.error("list user by {} error", requestHeader.getFilter(), ex);
            return handleAuthException(response, ex);
        })
        .join();
    return response;
}
/**
 * Creates an ACL for the subject named in the request header, with policies taken
 * from the request body. The subject from the header is used as a fallback when
 * the decoded ACL does not carry one.
 *
 * @throws AuthorizationException if the body decodes to no policies
 */
private RemotingCommand createAcl(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    RemotingCommand response = RemotingCommand.createResponseCommand(null);
    CreateAclRequestHeader requestHeader = request.decodeCommandCustomHeader(CreateAclRequestHeader.class);
    Subject subject = Subject.of(requestHeader.getSubject());
    AclInfo aclInfo = RemotingSerializable.decode(request.getBody(), AclInfo.class);
    if (aclInfo == null || CollectionUtils.isEmpty(aclInfo.getPolicies())) {
        throw new AuthorizationException("The body of acl is null");
    }
    Acl acl = AclConverter.convertAcl(aclInfo);
    // Fallback: adopt the header's subject when the body did not specify one.
    if (acl != null && acl.getSubject() == null) {
        acl.setSubject(subject);
    }
    this.brokerController.getAuthorizationMetadataManager().createAcl(acl)
        .thenAccept(nil -> response.setCode(ResponseCode.SUCCESS))
        .exceptionally(ex -> {
            LOGGER.error("create acl for {} error", requestHeader.getSubject(), ex);
            return handleAuthException(response, ex);
        })
        .join();
    return response;
}
/**
 * Updates an existing ACL for the subject named in the request header, with
 * policies taken from the request body. The header's subject is used as a
 * fallback when the decoded ACL does not carry one.
 *
 * @throws AuthorizationException if the body decodes to no policies
 */
private RemotingCommand updateAcl(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    RemotingCommand response = RemotingCommand.createResponseCommand(null);
    UpdateAclRequestHeader requestHeader = request.decodeCommandCustomHeader(UpdateAclRequestHeader.class);
    Subject subject = Subject.of(requestHeader.getSubject());
    AclInfo aclInfo = RemotingSerializable.decode(request.getBody(), AclInfo.class);
    if (aclInfo == null || CollectionUtils.isEmpty(aclInfo.getPolicies())) {
        throw new AuthorizationException("The body of acl is null");
    }
    Acl acl = AclConverter.convertAcl(aclInfo);
    // Fallback: adopt the header's subject when the body did not specify one.
    if (acl != null && acl.getSubject() == null) {
        acl.setSubject(subject);
    }
    this.brokerController.getAuthorizationMetadataManager().updateAcl(acl)
        .thenAccept(nil -> response.setCode(ResponseCode.SUCCESS))
        .exceptionally(ex -> {
            LOGGER.error("update acl for {} error", requestHeader.getSubject(), ex);
            return handleAuthException(response, ex);
        })
        .join();
    return response;
}
/**
 * Deletes the ACL entries of the given policy type and resource for a subject.
 * Failures from the authorization manager are translated into the response via
 * {@link #handleAuthException}. The async call is joined before returning.
 */
private RemotingCommand deleteAcl(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    final DeleteAclRequestHeader header = request.decodeCommandCustomHeader(DeleteAclRequestHeader.class);
    final Subject subject = Subject.of(header.getSubject());
    final PolicyType policyType = PolicyType.getByName(header.getPolicyType());
    final Resource resource = Resource.of(header.getResource());
    this.brokerController.getAuthorizationMetadataManager()
        .deleteAcl(subject, policyType, resource)
        .thenAccept(nothing -> response.setCode(ResponseCode.SUCCESS))
        .exceptionally(ex -> {
            LOGGER.error("delete acl for {} error", header.getSubject(), ex);
            return handleAuthException(response, ex);
        })
        .join();
    return response;
}
/**
 * Fetches the ACL of a subject and returns it as JSON in the response body.
 * A missing ACL still answers SUCCESS with an empty body.
 */
private RemotingCommand getAcl(ChannelHandlerContext ctx, RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    GetAclRequestHeader requestHeader = request.decodeCommandCustomHeader(GetAclRequestHeader.class);
    Subject subject = Subject.of(requestHeader.getSubject());
    this.brokerController.getAuthorizationMetadataManager().getAcl(subject)
        .thenAccept(acl -> {
            response.setCode(ResponseCode.SUCCESS);
            if (acl != null) {
                AclInfo aclInfo = AclConverter.convertAcl(acl);
                String body = JSON.toJSONString(aclInfo);
                response.setBody(body.getBytes(StandardCharsets.UTF_8));
            }
        })
        .exceptionally(ex -> {
            LOGGER.error("get acl for {} error", requestHeader.getSubject(), ex);
            return handleAuthException(response, ex);
        })
        .join();
    return response;
}
/**
 * Lists ACLs matching the subject/resource filters from the request header and
 * returns them as a JSON array in the response body. An empty result still
 * answers SUCCESS with no body.
 */
private RemotingCommand listAcl(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    ListAclsRequestHeader requestHeader = request.decodeCommandCustomHeader(ListAclsRequestHeader.class);
    this.brokerController.getAuthorizationMetadataManager()
        .listAcl(requestHeader.getSubjectFilter(), requestHeader.getResourceFilter())
        .thenAccept(acls -> {
            response.setCode(ResponseCode.SUCCESS);
            if (CollectionUtils.isNotEmpty(acls)) {
                List<AclInfo> aclInfos = AclConverter.convertAcls(acls);
                String body = JSON.toJSONString(aclInfos);
                response.setBody(body.getBytes(StandardCharsets.UTF_8));
            }
        })
        .exceptionally(ex -> {
            LOGGER.error("list acl error, subjectFilter:{}, resourceFilter:{}", requestHeader.getSubjectFilter(), requestHeader.getResourceFilter(), ex);
            return handleAuthException(response, ex);
        })
        .join();
    return response;
}
/**
 * Checks whether the caller of the given request is NOT logged in as a super user.
 * An absent AccessKey means authentication is likely disabled, in which case the
 * caller is not restricted (returns {@code false}).
 */
private boolean isNotSuperUserLogin(RemotingCommand request) {
    final String accessKey = request.getExtFields().get("AccessKey");
    if (StringUtils.isEmpty(accessKey)) {
        // No AccessKey: authentication may be disabled, so do not restrict.
        return false;
    }
    final boolean superUser = this.brokerController.getAuthenticationMetadataManager()
        .isSuperUser(accessKey)
        .join();
    return !superUser;
}
/**
 * Maps an asynchronous auth failure onto the response: authentication and
 * authorization failures become NO_PERMISSION carrying the original message;
 * anything else becomes a generic SYSTEM_ERROR. Returns {@code null} so it can be
 * used directly in {@code CompletableFuture.exceptionally}.
 */
private Void handleAuthException(RemotingCommand response, Throwable ex) {
    // Unwrap CompletionException/ExecutionException wrappers to the real cause.
    Throwable throwable = ExceptionUtils.getRealException(ex);
    if (throwable instanceof AuthenticationException || throwable instanceof AuthorizationException) {
        response.setCode(ResponseCode.NO_PERMISSION);
        response.setRemark(throwable.getMessage());
    } else {
        response.setCode(ResponseCode.SYSTEM_ERROR);
        // Bug fix: corrected grammar of the user-facing remark and log ("An system" -> "A system").
        response.setRemark("A system error occurred, please try again later.");
        LOGGER.error("A system error occurred when processing auth admin request.", ex);
    }
    return null;
}
/**
 * Rejects modifying admin operations on a slave broker. When this broker's role is
 * SLAVE, the response is filled with SYSTEM_ERROR and {@code true} is returned so
 * the caller can bail out immediately; otherwise {@code false}.
 */
private boolean validateSlave(RemotingCommand response) {
    final boolean isSlave =
        this.brokerController.getMessageStoreConfig().getBrokerRole().equals(BrokerRole.SLAVE);
    if (isSlave) {
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark("Can't modify topic or subscription group from slave broker, "
            + "please execute it from master broker.");
    }
    return isSlave;
}
/**
 * Returns {@code true} when the supplied properties contain any key from the
 * broker's configuration black list (keys that must not be changed online).
 */
private boolean validateBlackListConfigExist(Properties properties) {
    for (String forbiddenKey : configBlackList) {
        if (properties.containsKey(forbiddenKey)) {
            return true;
        }
    }
    return false;
}
/**
 * Checks the RocksDB consume-queue write progress against the default store for the
 * requested topic. Only meaningful when the broker runs a CombineConsumeQueueStore;
 * otherwise a CHECK_OK result stating that no check is needed is returned. Plugin
 * stores are unwrapped first to reach the underlying {@link DefaultMessageStore}.
 */
private CheckRocksdbCqWriteResult doCheckRocksdbCqWriteProgress(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    CheckRocksdbCqWriteProgressRequestHeader requestHeader = request.decodeCommandCustomHeader(CheckRocksdbCqWriteProgressRequestHeader.class);
    MessageStore messageStore = brokerController.getMessageStore();
    DefaultMessageStore defaultMessageStore;
    if (messageStore instanceof AbstractPluginMessageStore) {
        // Unwrap one plugin layer; the next store is assumed to be the
        // DefaultMessageStore -- TODO confirm no multi-level plugin chains occur here.
        defaultMessageStore = (DefaultMessageStore) ((AbstractPluginMessageStore) messageStore).getNext();
    } else {
        defaultMessageStore = (DefaultMessageStore) messageStore;
    }
    ConsumeQueueStoreInterface consumeQueueStore = defaultMessageStore.getQueueStore();
    if (!(consumeQueueStore instanceof CombineConsumeQueueStore)) {
        // No combined store: nothing to compare, report OK directly.
        CheckRocksdbCqWriteResult result = new CheckRocksdbCqWriteResult();
        result.setCheckResult("It is not CombineConsumeQueueStore, no need check");
        result.setCheckStatus(CheckRocksdbCqWriteResult.CheckStatus.CHECK_OK.getValue());
        return result;
    }
    return ((CombineConsumeQueueStore) consumeQueueStore).
        doCheckCqWriteProgress(requestHeader.getTopic(), requestHeader.getCheckStoreTime(), StoreType.DEFAULT, StoreType.DEFAULT_ROCKSDB);
}
/**
 * Triggers migration of the pop-consumer offset store from the KV store to the
 * file-system store. A missing PopConsumerService is treated as a no-op success;
 * any failure is reported as SYSTEM_ERROR with the exception message as remark.
 */
private RemotingCommand transferPopToFsStore(ChannelHandlerContext ctx, RemotingCommand request) {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    try {
        if (brokerController.getPopConsumerService() != null) {
            brokerController.getPopConsumerService().transferToFsStore();
        }
        response.setCode(ResponseCode.SUCCESS);
    } catch (Exception e) {
        // Bug fix: this is the failure path, but the old log text claimed the
        // transfer "finish"ed; log it as a failure instead.
        LOGGER.error("PopConsumerStore transfer from kvStore to fsStore failed [{}]", request, e);
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark(e.getMessage());
    }
    return response;
}
}
|
AdminBrokerProcessor
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/filter/GatewayMetricsFilter.java
|
{
"start": 1324,
"end": 3282
}
|
class ____ implements GlobalFilter, Ordered {
private static final Log log = LogFactory.getLog(GatewayMetricsFilter.class);
private final MeterRegistry meterRegistry;
private GatewayTagsProvider compositeTagsProvider;
private final String metricsPrefix;
public GatewayMetricsFilter(MeterRegistry meterRegistry, List<GatewayTagsProvider> tagsProviders,
String metricsPrefix) {
this.meterRegistry = meterRegistry;
this.compositeTagsProvider = tagsProviders.stream().reduce(exchange -> Tags.empty(), GatewayTagsProvider::and);
if (metricsPrefix.endsWith(".")) {
this.metricsPrefix = metricsPrefix.substring(0, metricsPrefix.length() - 1);
}
else {
this.metricsPrefix = metricsPrefix;
}
}
public String getMetricsPrefix() {
return metricsPrefix;
}
@Override
public int getOrder() {
// start the timer as soon as possible and report the metric event before we write
// response to client
return NettyWriteResponseFilter.WRITE_RESPONSE_FILTER_ORDER + 1;
}
@Override
public Mono<Void> filter(ServerWebExchange exchange, GatewayFilterChain chain) {
Sample sample = Timer.start(meterRegistry);
return chain.filter(exchange)
.doOnSuccess(aVoid -> endTimerRespectingCommit(exchange, sample))
.doOnError(throwable -> endTimerRespectingCommit(exchange, sample));
}
private void endTimerRespectingCommit(ServerWebExchange exchange, Sample sample) {
ServerHttpResponse response = exchange.getResponse();
if (response.isCommitted()) {
endTimerInner(exchange, sample);
}
else {
response.beforeCommit(() -> {
endTimerInner(exchange, sample);
return Mono.empty();
});
}
}
private void endTimerInner(ServerWebExchange exchange, Sample sample) {
Tags tags = compositeTagsProvider.apply(exchange);
if (log.isTraceEnabled()) {
log.trace(metricsPrefix + ".requests tags: " + tags);
}
sample.stop(meterRegistry.timer(metricsPrefix + ".requests", tags));
}
}
|
GatewayMetricsFilter
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/processor/internals/TaskExecutorTest.java
|
{
"start": 1265,
"end": 2702
}
|
class ____ {
@Test
public void testPunctuateWithPause() {
final Tasks tasks = mock(Tasks.class);
final TaskManager taskManager = mock(TaskManager.class);
final TaskExecutionMetadata metadata = mock(TaskExecutionMetadata.class);
final TaskExecutor taskExecutor = new TaskExecutor(tasks, taskManager, metadata, new LogContext());
taskExecutor.punctuate();
verify(tasks).activeTasks();
}
@Test
public void testCommitWithOpenTransactionButNoOffsetsEOSV2() {
final Tasks tasks = mock(Tasks.class);
final TaskManager taskManager = mock(TaskManager.class);
final ConsumerGroupMetadata groupMetadata = mock(ConsumerGroupMetadata.class);
when(taskManager.consumerGroupMetadata()).thenReturn(groupMetadata);
final TaskExecutionMetadata metadata = mock(TaskExecutionMetadata.class);
final StreamsProducer producer = mock(StreamsProducer.class);
when(metadata.processingMode()).thenReturn(EXACTLY_ONCE_V2);
when(taskManager.streamsProducer()).thenReturn(producer);
when(producer.transactionInFlight()).thenReturn(true);
final TaskExecutor taskExecutor = new TaskExecutor(tasks, taskManager, metadata, new LogContext());
taskExecutor.commitOffsetsOrTransaction(Collections.emptyMap());
verify(producer).commitTransaction(Collections.emptyMap(), groupMetadata);
}
}
|
TaskExecutorTest
|
java
|
apache__camel
|
components/camel-kubernetes/src/main/java/org/apache/camel/component/kubernetes/cluster/LeaseResourceType.java
|
{
"start": 866,
"end": 1031
}
|
enum ____ {
/**
* A Kubernetes ConfigMap.
*/
ConfigMap,
/**
* A Kubernetes Lease (coordination.k8s.io).
*/
Lease
}
|
LeaseResourceType
|
java
|
quarkusio__quarkus
|
integration-tests/rest-client-reactive/src/main/java/io/quarkus/it/rest/client/main/ClientWithClientLogger.java
|
{
"start": 249,
"end": 314
}
|
interface ____ {
@GET
String call();
}
|
ClientWithClientLogger
|
java
|
quarkusio__quarkus
|
integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/KubernetesWithConflictingEnvTest.java
|
{
"start": 407,
"end": 1449
}
|
class ____ {
private static final String APPLICATION_NAME = "conflicting";
@RegisterExtension
static final QuarkusProdModeTest config = new QuarkusProdModeTest()
.withApplicationRoot((jar) -> jar.addClasses(GreetingResource.class))
.setApplicationName(APPLICATION_NAME)
.setApplicationVersion("0.1-SNAPSHOT")
.assertBuildException(e -> assertThat(e)
.isInstanceOf(RuntimeException.class)
.hasMessageContaining(
"- 'envvar': first defined as 'envvar' env var with value 'value' redefined as 'envvar' env var with value from field 'field'"))
.withConfigurationResource("kubernetes-with-" + APPLICATION_NAME + "-env.properties");
@ProdBuildResults
private ProdModeTestResults prodModeTestResults;
@Test
public void buildShouldFail() throws IOException {
fail("Build should have failed and therefore this method should not have been called");
}
}
|
KubernetesWithConflictingEnvTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jackson/src/main/java/org/springframework/boot/jackson/JacksonMixinModule.java
|
{
"start": 1069,
"end": 1473
}
|
class ____ extends SimpleModule {
/**
* Register the specified {@link JacksonMixinModuleEntries entries}.
* @param entries the entries to register to this instance
* @param classLoader the classloader to use
*/
public void registerEntries(JacksonMixinModuleEntries entries, @Nullable ClassLoader classLoader) {
entries.doWithEntry(classLoader, this::setMixInAnnotation);
}
}
|
JacksonMixinModule
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/common/serialization/ListDeserializerTest.java
|
{
"start": 8248,
"end": 8920
}
|
class ____ using "
+ "\"" + CommonClientConfigs.DEFAULT_LIST_KEY_SERDE_INNER_CLASS + "\" property.", exception.getMessage());
}
@Test
public void testListValueDeserializerNoArgConstructorsShouldThrowKafkaExceptionDueInvalidInnerClass() {
props.put(CommonClientConfigs.DEFAULT_LIST_VALUE_SERDE_TYPE_CLASS, ArrayList.class);
props.put(CommonClientConfigs.DEFAULT_LIST_VALUE_SERDE_INNER_CLASS, new FakeObject());
final KafkaException exception = assertThrows(
KafkaException.class,
() -> listDeserializer.configure(props, false)
);
assertEquals("Could not determine the inner serde
|
instance
|
java
|
quarkusio__quarkus
|
extensions/elytron-security-properties-file/deployment/src/test/java/io/quarkus/security/test/CustomAuthEmbeddedTestCase.java
|
{
"start": 239,
"end": 881
}
|
class ____ extends CustomAuthEmbeddedBase {
static Class[] testClasses = {
TestSecureServlet.class, TestApplication.class, RolesEndpointClassLevel.class,
ParametrizedPathsResource.class, SubjectExposingResource.class
};
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(testClasses)
.addClasses(CustomAuth.class)
.addAsResource("application-custom-auth-embedded.properties",
"application.properties"));
}
|
CustomAuthEmbeddedTestCase
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java
|
{
"start": 65718,
"end": 67190
}
|
class ____ extends ContainerTransition {
@SuppressWarnings("unchecked")
@Override
public void transition(ContainerImpl container, ContainerEvent event) {
container.sendContainerMonitorStartEvent();
container.metrics.runningContainer();
container.wasLaunched = true;
if (container.isReInitializing()) {
NMAuditLogger.logSuccess(container.user,
AuditConstants.FINISH_CONTAINER_REINIT, "ContainerImpl",
container.containerId.getApplicationAttemptId().getApplicationId(),
container.containerId);
}
container.setIsReInitializing(false);
// Check if this launch was due to a re-initialization.
// If autocommit == true, then wipe the re-init context. This ensures
// that any subsequent failures do not trigger a rollback.
if (container.reInitContext != null
&& !container.reInitContext.canRollback()) {
container.reInitContext = null;
}
if (container.recoveredAsKilled) {
LOG.info("Killing " + container.containerId
+ " due to recovered as killed");
container.addDiagnostics("Container recovered as killed.\n");
container.dispatcher.getEventHandler().handle(
new ContainersLauncherEvent(container,
ContainersLauncherEventType.CLEANUP_CONTAINER));
}
}
}
/**
* Transition from SCHEDULED state to PAUSED state on recovery
*/
static
|
LaunchTransition
|
java
|
elastic__elasticsearch
|
plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceSeedHostsProvider.java
|
{
"start": 1552,
"end": 1973
}
|
class ____ implements SeedHostsProvider {
private static final Logger logger = LogManager.getLogger(GceSeedHostsProvider.class);
/**
* discovery.gce.tags: The gce discovery can filter machines to include in the cluster based on tags.
*/
public static final Setting<List<String>> TAGS_SETTING = Setting.stringListSetting("discovery.gce.tags", Property.NodeScope);
static final
|
GceSeedHostsProvider
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/jdbc/SqlScriptsTestExecutionListener.java
|
{
"start": 5647,
"end": 7987
}
|
class ____ extends AbstractTestExecutionListener implements AotTestExecutionListener {
/**
* The {@link #getOrder() order} value for this listener: {@value}.
* @since 6.2.3
*/
public static final int ORDER = 5000;
private static final String SLASH = "/";
private static final Log logger = LogFactory.getLog(SqlScriptsTestExecutionListener.class);
private static final MethodFilter sqlMethodFilter = ReflectionUtils.USER_DECLARED_METHODS
.and(method -> AnnotatedElementUtils.hasAnnotation(method, Sql.class));
/**
* Returns {@value #ORDER}, which ensures that the {@code SqlScriptsTestExecutionListener}
* is ordered after the
* {@link org.springframework.test.context.transaction.TransactionalTestExecutionListener
* TransactionalTestExecutionListener} and before the
* {@link org.springframework.test.context.event.EventPublishingTestExecutionListener
* EventPublishingTestExecutionListener}.
*/
@Override
public final int getOrder() {
return ORDER;
}
/**
* Execute SQL scripts configured via {@link Sql @Sql} for the supplied
* {@link TestContext} once per test class <em>before</em> any test method
* is run.
* @since 6.1
*/
@Override
public void beforeTestClass(TestContext testContext) throws Exception {
executeClassLevelSqlScripts(testContext, ExecutionPhase.BEFORE_TEST_CLASS);
}
/**
* Execute SQL scripts configured via {@link Sql @Sql} for the supplied
* {@link TestContext} once per test class <em>after</em> all test methods
* have been run.
* @since 6.1
*/
@Override
public void afterTestClass(TestContext testContext) throws Exception {
executeClassLevelSqlScripts(testContext, ExecutionPhase.AFTER_TEST_CLASS);
}
/**
* Execute SQL scripts configured via {@link Sql @Sql} for the supplied
* {@link TestContext} <em>before</em> the current test method.
*/
@Override
public void beforeTestMethod(TestContext testContext) {
executeSqlScripts(testContext, ExecutionPhase.BEFORE_TEST_METHOD);
}
/**
* Execute SQL scripts configured via {@link Sql @Sql} for the supplied
* {@link TestContext} <em>after</em> the current test method.
*/
@Override
public void afterTestMethod(TestContext testContext) {
executeSqlScripts(testContext, ExecutionPhase.AFTER_TEST_METHOD);
}
/**
* Process the supplied test
|
SqlScriptsTestExecutionListener
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/issues/RetryWhileStackOverflowIssueTest.java
|
{
"start": 1032,
"end": 2077
}
|
class ____ extends ContextTestSupport {
private static final boolean PRINT_STACK_TRACE = false;
@Test
public void testRetry() throws Exception {
getMockEndpoint("mock:error").expectedMessageCount(1);
getMockEndpoint("mock:error").message(0).body().isInstanceOf(MyCoolDude.class);
MyCoolDude dude = new MyCoolDude();
template.sendBody("direct:start", dude);
assertMockEndpointsSatisfied();
assertEquals(1000 + 1, dude.getCounter());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
onException(IllegalArgumentException.class).retryWhile(simple("${body.areWeCool} == 'no'")).redeliveryDelay(0)
.handled(true).to("mock:error");
from("direct:start")
.throwException(new IllegalArgumentException("Forced"));
}
};
}
public static
|
RetryWhileStackOverflowIssueTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/CheckpointMetricsBuilder.java
|
{
"start": 1204,
"end": 1317
}
|
class ____ not thread safe, but parts of it can actually be used from different threads.
*/
@NotThreadSafe
public
|
is
|
java
|
apache__camel
|
components/camel-bindy/src/main/java/org/apache/camel/dataformat/bindy/BindyFactory.java
|
{
"start": 1077,
"end": 1718
}
|
interface ____ {
/**
* Prior to bind or unbind the data to and from string or model classes, the factory must create a collection of
* objects representing the model
*
* @throws Exception can be thrown
*/
void initModel() throws Exception;
/**
* The bind allow to read the content of a record (expressed as a List<String>) and map it to the model classes.
*
* @param data List<String> represents the csv, ... data to transform
* @param model Map<String, object> is a collection of objects used to bind data. String is the key name of the
*
|
BindyFactory
|
java
|
google__dagger
|
javatests/dagger/functional/assisted/AssistedFactoryAsQualifiedBindingTest.java
|
{
"start": 2120,
"end": 3918
}
|
interface ____ {
@Provides
@AsProvides
static Bar providesBar(@AsComponentDependency Bar bar) {
return bar;
}
@Provides
@AsProvides
static BarFactory providesBarFactory(@AsComponentDependency BarFactory barFactory) {
return barFactory;
}
@Binds
@AsBinds
Bar bindsBar(@AsComponentDependency Bar bar);
@Binds
@AsBinds
BarFactory bindsBarFactory(@AsComponentDependency BarFactory barFactory);
@BindsOptionalOf
@AsOptional
Bar optionalBar();
@BindsOptionalOf
@AsOptional
BarFactory optionalBarFactory();
@Provides
@AsOptional
static Bar providesOptionalBar(@AsComponentDependency Bar bar) {
return bar;
}
@Provides
@AsOptional
static BarFactory providesOptionalBarFactory(@AsComponentDependency BarFactory barFactory) {
return barFactory;
}
@Multibinds
@AsMultibinding
Set<Bar> barSet();
@Multibinds
@AsMultibinding
Set<BarFactory> barFactorySet();
@Provides
@IntoSet
@AsMultibinding
static Bar providesMultibindingBar(@AsComponentDependency Bar bar) {
return bar;
}
@Provides
@IntoSet
@AsMultibinding
static BarFactory providesMultibindingBarFactory(@AsComponentDependency BarFactory barFactory) {
return barFactory;
}
@Multibinds
Set<Bar> unqualifiedBarSet();
@Multibinds
Set<BarFactory> unqualifiedBarFactorySet();
@Provides
@IntoSet
static Bar providesUnqualifiedMultibindingBar(@AsComponentDependency Bar bar) {
return bar;
}
@Provides
@IntoSet
static BarFactory providesUnqualifiedMultibindingBarFactory(
@AsComponentDependency BarFactory barFactory) {
return barFactory;
}
}
static
|
BarFactoryModule
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/constraint/PlacementConstraintParser.java
|
{
"start": 15562,
"end": 17080
}
|
class ____ extends ConstraintParser {
public TargetConstraintParser(String expression) {
super(new BaseStringTokenizer(expression,
String.valueOf(EXPRESSION_VAL_DELIM)));
}
@Override
public AbstractConstraint parse()
throws PlacementConstraintParseException {
PlacementConstraint.AbstractConstraint placementConstraints = null;
String op = nextToken();
if (op.equalsIgnoreCase(IN) || op.equalsIgnoreCase(NOT_IN)) {
String scope = nextToken();
scope = parseScope(scope);
Set<TargetExpression> targetExpressions = new HashSet<>();
while(hasMoreTokens()) {
String tag = nextToken();
TargetExpression t = parseNameSpace(tag);
targetExpressions.add(t);
}
TargetExpression[] targetArr = targetExpressions.toArray(
new TargetExpression[targetExpressions.size()]);
if (op.equalsIgnoreCase(IN)) {
placementConstraints = PlacementConstraints
.targetIn(scope, targetArr);
} else {
placementConstraints = PlacementConstraints
.targetNotIn(scope, targetArr);
}
} else {
throw new PlacementConstraintParseException(
"expecting " + IN + " or " + NOT_IN + ", but get " + op);
}
return placementConstraints;
}
}
/**
* Constraint parser used to parse a given target expression, such as
* "cardinality, NODE, foo, 0, 1".
*/
public static
|
TargetConstraintParser
|
java
|
spring-projects__spring-security
|
config/src/main/java/org/springframework/security/config/annotation/web/configurers/oauth2/client/OidcBackChannelLogoutTokenValidator.java
|
{
"start": 1981,
"end": 4586
}
|
class ____ implements OAuth2TokenValidator<Jwt> {
private static final String LOGOUT_VALIDATION_URL = "https://openid.net/specs/openid-connect-backchannel-1_0.html#Validation";
private static final String BACK_CHANNEL_LOGOUT_EVENT = "http://schemas.openid.net/event/backchannel-logout";
private final String audience;
private final String issuer;
OidcBackChannelLogoutTokenValidator(ClientRegistration clientRegistration) {
this.audience = clientRegistration.getClientId();
String issuer = clientRegistration.getProviderDetails().getIssuerUri();
Assert.hasText(issuer, "Provider issuer cannot be null");
this.issuer = issuer;
}
@Override
public OAuth2TokenValidatorResult validate(Jwt jwt) {
Collection<OAuth2Error> errors = new ArrayList<>();
LogoutTokenClaimAccessor logoutClaims = jwt::getClaims;
Map<String, Object> events = logoutClaims.getEvents();
if (events == null) {
errors.add(invalidLogoutToken("events claim must not be null"));
}
else if (events.get(BACK_CHANNEL_LOGOUT_EVENT) == null) {
errors.add(invalidLogoutToken("events claim map must contain \"" + BACK_CHANNEL_LOGOUT_EVENT + "\" key"));
}
String issuer = logoutClaims.getIssuer().toExternalForm();
if (issuer == null) {
errors.add(invalidLogoutToken("iss claim must not be null"));
}
else if (!this.issuer.equals(issuer)) {
errors.add(invalidLogoutToken(
"iss claim value must match `ClientRegistration#getProviderDetails#getIssuerUri`"));
}
List<String> audience = logoutClaims.getAudience();
if (audience == null) {
errors.add(invalidLogoutToken("aud claim must not be null"));
}
else if (!audience.contains(this.audience)) {
errors.add(invalidLogoutToken("aud claim value must include `ClientRegistration#getClientId`"));
}
Instant issuedAt = logoutClaims.getIssuedAt();
if (issuedAt == null) {
errors.add(invalidLogoutToken("iat claim must not be null"));
}
String jwtId = logoutClaims.getId();
if (jwtId == null) {
errors.add(invalidLogoutToken("jti claim must not be null"));
}
if (logoutClaims.getSubject() == null && logoutClaims.getSessionId() == null) {
errors.add(invalidLogoutToken("sub and sid claims must not both be null"));
}
if (logoutClaims.getClaim("nonce") != null) {
errors.add(invalidLogoutToken("nonce claim must not be present"));
}
return OAuth2TokenValidatorResult.failure(errors);
}
private static OAuth2Error invalidLogoutToken(String description) {
return new OAuth2Error(OAuth2ErrorCodes.INVALID_TOKEN, description, LOGOUT_VALIDATION_URL);
}
}
|
OidcBackChannelLogoutTokenValidator
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/annotation/Bean.java
|
{
"start": 6621,
"end": 7291
}
|
class ____ the factory methods.
*
* <p>In contrast to the semantics for bean methods in {@code @Configuration} classes,
* <em>'inter-bean references'</em> are not supported in <em>lite</em> mode. Instead,
* when one {@code @Bean}-method invokes another {@code @Bean}-method in <em>lite</em>
* mode, the invocation is a standard Java method invocation; Spring does not intercept
* the invocation via a CGLIB proxy. This is analogous to inter-{@code @Transactional}
* method calls where in proxy mode, Spring does not intercept the invocation —
* Spring does so only in AspectJ mode.
*
* <p>For example:
*
* <pre class="code">
* @Component
* public
|
or
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/ContainerLocalizationCleanupEvent.java
|
{
"start": 1188,
"end": 1799
}
|
class ____ extends
ContainerLocalizationEvent {
private final Map<LocalResourceVisibility, Collection<LocalResourceRequest>>
rsrc;
/**
* Event requesting the cleanup of the rsrc.
* @param c
* @param rsrc
*/
public ContainerLocalizationCleanupEvent(Container c,
Map<LocalResourceVisibility, Collection<LocalResourceRequest>> rsrc) {
super(LocalizationEventType.CLEANUP_CONTAINER_RESOURCES, c);
this.rsrc = rsrc;
}
public
Map<LocalResourceVisibility, Collection<LocalResourceRequest>>
getResources() {
return rsrc;
}
}
|
ContainerLocalizationCleanupEvent
|
java
|
apache__dubbo
|
dubbo-demo/dubbo-demo-spring-boot/dubbo-demo-spring-boot-servlet/src/main/java/org/apache/dubbo/springboot/demo/servlet/ProviderApplication.java
|
{
"start": 1088,
"end": 1234
}
|
class ____ {
public static void main(String[] args) {
SpringApplication.run(ProviderApplication.class, args);
}
}
|
ProviderApplication
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/invoker/basic/PrimitiveParametersInvokerTest.java
|
{
"start": 2866,
"end": 3126
}
|
class ____ {
public String hello(boolean b, char c, int i) {
return "foobar_" + b + "_" + c + "_" + i;
}
public static String helloStatic(long l, double d) {
return "quux_" + l + "_" + d;
}
}
}
|
MyService
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineParser.java
|
{
"start": 1141,
"end": 1418
}
|
interface ____ extends Closeable {
/**
* Method used for parsing.
*
* @return a {@link TimelineFilterList} object.
* @throws TimelineParseException if any problem occurs while parsing.
*/
TimelineFilterList parse() throws TimelineParseException;
}
|
TimelineParser
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/OperatorStreamStateHandle.java
|
{
"start": 1229,
"end": 4586
}
|
class ____ implements OperatorStateHandle {
private static final long serialVersionUID = 35876522969227335L;
/** unique state name -> offsets for available partitions in the handle stream */
private final Map<String, StateMetaInfo> stateNameToPartitionOffsets;
private final StreamStateHandle delegateStateHandle;
public OperatorStreamStateHandle(
Map<String, StateMetaInfo> stateNameToPartitionOffsets,
StreamStateHandle delegateStateHandle) {
this.delegateStateHandle = Preconditions.checkNotNull(delegateStateHandle);
this.stateNameToPartitionOffsets = Preconditions.checkNotNull(stateNameToPartitionOffsets);
}
@Override
public Map<String, StateMetaInfo> getStateNameToPartitionOffsets() {
return stateNameToPartitionOffsets;
}
@Override
public void discardState() throws Exception {
delegateStateHandle.discardState();
}
@Override
public long getStateSize() {
return delegateStateHandle.getStateSize();
}
@Override
public void collectSizeStats(StateObjectSizeStatsCollector collector) {
delegateStateHandle.collectSizeStats(collector);
}
@Override
public FSDataInputStream openInputStream() throws IOException {
return delegateStateHandle.openInputStream();
}
@Override
public Optional<byte[]> asBytesIfInMemory() {
return delegateStateHandle.asBytesIfInMemory();
}
@Override
public Optional<org.apache.flink.core.fs.Path> maybeGetPath() {
return delegateStateHandle.maybeGetPath();
}
@Override
public PhysicalStateHandleID getStreamStateHandleID() {
return delegateStateHandle.getStreamStateHandleID();
}
@Override
public StreamStateHandle getDelegateStateHandle() {
return delegateStateHandle;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof OperatorStreamStateHandle)) {
return false;
}
OperatorStreamStateHandle that = (OperatorStreamStateHandle) o;
if (stateNameToPartitionOffsets.size() != that.stateNameToPartitionOffsets.size()) {
return false;
}
for (Map.Entry<String, StateMetaInfo> entry : stateNameToPartitionOffsets.entrySet()) {
if (!entry.getValue().equals(that.stateNameToPartitionOffsets.get(entry.getKey()))) {
return false;
}
}
return delegateStateHandle.equals(that.delegateStateHandle);
}
@Override
public int hashCode() {
int result = delegateStateHandle.hashCode();
for (Map.Entry<String, StateMetaInfo> entry : stateNameToPartitionOffsets.entrySet()) {
int entryHash = entry.getKey().hashCode();
if (entry.getValue() != null) {
entryHash += entry.getValue().hashCode();
}
result = 31 * result + entryHash;
}
return result;
}
@Override
public String toString() {
return "OperatorStateHandle{"
+ "stateNameToPartitionOffsets="
+ stateNameToPartitionOffsets
+ ", delegateStateHandle="
+ delegateStateHandle
+ '}';
}
}
|
OperatorStreamStateHandle
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/AclSetuserArgs.java
|
{
"start": 23482,
"end": 23624
}
|
class ____ extends KeywordArgument {
public NoCommands() {
super(NOCOMMANDS);
}
}
private static
|
NoCommands
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/jaxb/mapping/spi/db/JaxbCommentable.java
|
{
"start": 184,
"end": 236
}
|
interface ____ {
String getComment();
}
|
JaxbCommentable
|
java
|
apache__camel
|
components/camel-coap/src/test/java/org/apache/camel/coap/CoAPTestSupport.java
|
{
"start": 1096,
"end": 1643
}
|
class ____ extends CamelTestSupport {
protected static final int PORT = AvailablePortFinder.getNextAvailable();
@Override
public void doPostSetup() {
Configuration.createStandardWithoutFile();
}
protected CoapClient createClient(String path) {
return createClient(path, PORT);
}
protected CoapClient createClient(String path, int port) {
String url = String.format("coap://localhost:%d/%s", port, FileUtil.stripLeadingSeparator(path));
return new CoapClient(url);
}
}
|
CoAPTestSupport
|
java
|
grpc__grpc-java
|
stub/src/main/java/io/grpc/stub/MetadataUtils.java
|
{
"start": 2546,
"end": 3405
}
|
class
____(ClientCall<ReqT, RespT> call) {
super(call);
}
@Override
public void start(Listener<RespT> responseListener, Metadata headers) {
headers.merge(extraHeaders);
super.start(responseListener, headers);
}
}
}
/**
* Captures the last received metadata on a channel. Useful for testing.
*
* @param headersCapture to record the last received headers
* @param trailersCapture to record the last received trailers
* @return an implementation of the channel with captures installed.
*/
public static ClientInterceptor newCaptureMetadataInterceptor(
AtomicReference<Metadata> headersCapture, AtomicReference<Metadata> trailersCapture) {
return new MetadataCapturingClientInterceptor(headersCapture, trailersCapture);
}
private static final
|
HeaderAttachingClientCall
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/operators/udf/RemoveRangeIndex.java
|
{
"start": 968,
"end": 1152
}
|
class ____<T> implements MapFunction<Tuple2<Integer, T>, T> {
@Override
public T map(Tuple2<Integer, T> value) throws Exception {
return value.f1;
}
}
|
RemoveRangeIndex
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
|
{
"start": 2948,
"end": 23554
}
|
class ____ {
private static final String TIMELINE_AUX_SERVICE_NAME = "timeline_collector";
private static final Logger LOG =
LoggerFactory.getLogger(TestMRTimelineEventHandling.class);
@Test
public void testTimelineServiceStartInMiniCluster() throws Exception {
Configuration conf = new YarnConfiguration();
/*
* Timeline service should not start if the config is set to false
* Regardless to the value of MAPREDUCE_JOB_EMIT_TIMELINE_DATA
*/
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, false);
conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
MiniMRYarnCluster cluster = null;
try {
cluster = new MiniMRYarnCluster(
TestMRTimelineEventHandling.class.getSimpleName(), 1);
cluster.init(conf);
cluster.start();
//verify that the timeline service is not started.
assertNull(cluster.getApplicationHistoryServer(),
"Timeline Service should not have been started");
}
finally {
if(cluster != null) {
cluster.stop();
}
}
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, false);
conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
cluster = null;
try {
cluster = new MiniMRYarnCluster(
TestJobHistoryEventHandler.class.getSimpleName(), 1);
cluster.init(conf);
cluster.start();
//verify that the timeline service is not started.
assertNull(cluster.getApplicationHistoryServer(),
"Timeline Service should not have been started");
}
finally {
if(cluster != null) {
cluster.stop();
}
}
}
@Test
public void testMRTimelineEventHandling() throws Exception {
Configuration conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
MiniMRYarnCluster cluster = null;
try {
cluster = new MiniMRYarnCluster(
TestMRTimelineEventHandling.class.getSimpleName(), 1);
cluster.init(conf);
cluster.start();
conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
MiniYARNCluster.getHostname() + ":"
+ cluster.getApplicationHistoryServer().getPort());
TimelineStore ts = cluster.getApplicationHistoryServer()
.getTimelineStore();
String localPathRoot = System.getProperty("test.build.data",
"build/test/data");
Path inDir = new Path(localPathRoot, "input");
Path outDir = new Path(localPathRoot, "output");
RunningJob job =
UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null,
null, null, null, null, null, null, null);
assertEquals(1, entities.getEntities().size());
TimelineEntity tEntity = entities.getEntities().get(0);
assertEquals(job.getID().toString(), tEntity.getEntityId());
assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
assertEquals(EventType.AM_STARTED.toString(),
tEntity.getEvents().get(tEntity.getEvents().size() - 1)
.getEventType());
assertEquals(EventType.JOB_FINISHED.toString(),
tEntity.getEvents().get(0).getEventType());
job = UtilsForTests.runJobFail(new JobConf(conf), inDir, outDir);
assertEquals(JobStatus.FAILED, job.getJobStatus().getState().getValue());
entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null,
null, null, null, null);
assertEquals(2, entities.getEntities().size());
tEntity = entities.getEntities().get(0);
assertEquals(job.getID().toString(), tEntity.getEntityId());
assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
assertEquals(EventType.AM_STARTED.toString(),
tEntity.getEvents().get(tEntity.getEvents().size() - 1)
.getEventType());
assertEquals(EventType.JOB_FAILED.toString(),
tEntity.getEvents().get(0).getEventType());
} finally {
if (cluster != null) {
cluster.stop();
}
}
}
@SuppressWarnings("deprecation")
@Test
public void testMRNewTimelineServiceEventHandling() throws Exception {
LOG.info("testMRNewTimelineServiceEventHandling start.");
String testDir =
new File("target", getClass().getSimpleName() +
"-test_dir").getAbsolutePath();
String storageDir =
testDir + File.separator + "timeline_service_data";
Configuration conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
// enable new timeline service
conf.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f);
conf.setClass(YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS,
FileSystemTimelineWriterImpl.class, TimelineWriter.class);
conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
// set the file system root directory
conf.set(FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_DIR_ROOT,
storageDir);
// enable aux-service based timeline collectors
conf.set(YarnConfiguration.NM_AUX_SERVICES, TIMELINE_AUX_SERVICE_NAME);
conf.set(YarnConfiguration.NM_AUX_SERVICES + "." + TIMELINE_AUX_SERVICE_NAME
+ ".class", PerNodeTimelineCollectorsAuxService.class.getName());
conf.setBoolean(YarnConfiguration.SYSTEM_METRICS_PUBLISHER_ENABLED, true);
MiniMRYarnCluster cluster = null;
try {
cluster = new MiniMRYarnCluster(
TestMRTimelineEventHandling.class.getSimpleName(), 1, true);
cluster.init(conf);
cluster.start();
LOG.info("A MiniMRYarnCluster get start.");
Path inDir = new Path(testDir, "input");
Path outDir = new Path(testDir, "output");
LOG.info("Run 1st job which should be successful.");
JobConf successConf = new JobConf(conf);
successConf.set("dummy_conf1",
UtilsForTests.createConfigValue(51 * 1024));
successConf.set("dummy_conf2",
UtilsForTests.createConfigValue(51 * 1024));
successConf.set("huge_dummy_conf1",
UtilsForTests.createConfigValue(101 * 1024));
successConf.set("huge_dummy_conf2",
UtilsForTests.createConfigValue(101 * 1024));
RunningJob job =
UtilsForTests.runJobSucceed(successConf, inDir, outDir);
assertEquals(JobStatus.SUCCEEDED,
job.getJobStatus().getState().getValue());
YarnClient yarnClient = YarnClient.createYarnClient();
yarnClient.init(new Configuration(cluster.getConfig()));
yarnClient.start();
EnumSet<YarnApplicationState> appStates =
EnumSet.allOf(YarnApplicationState.class);
ApplicationId firstAppId = null;
List<ApplicationReport> apps = yarnClient.getApplications(appStates);
assertEquals(apps.size(), 1);
ApplicationReport appReport = apps.get(0);
firstAppId = appReport.getApplicationId();
UtilsForTests.waitForAppFinished(job, cluster);
checkNewTimelineEvent(firstAppId, appReport, storageDir);
LOG.info("Run 2nd job which should be failed.");
job = UtilsForTests.runJobFail(new JobConf(conf), inDir, outDir);
assertEquals(JobStatus.FAILED,
job.getJobStatus().getState().getValue());
apps = yarnClient.getApplications(appStates);
assertEquals(apps.size(), 2);
appReport = apps.get(0).getApplicationId().equals(firstAppId) ?
apps.get(0) : apps.get(1);
checkNewTimelineEvent(firstAppId, appReport, storageDir);
} finally {
if (cluster != null) {
cluster.stop();
}
// Cleanup test file
File testDirFolder = new File(testDir);
if(testDirFolder.isDirectory()) {
FileUtils.deleteDirectory(testDirFolder);
}
}
}
private void checkNewTimelineEvent(ApplicationId appId,
ApplicationReport appReport, String storageDir) throws IOException {
String tmpRoot = storageDir + File.separator + "entities" + File.separator;
File tmpRootFolder = new File(tmpRoot);
assertTrue(tmpRootFolder.isDirectory());
String basePath = tmpRoot + YarnConfiguration.DEFAULT_RM_CLUSTER_ID +
File.separator +
UserGroupInformation.getCurrentUser().getShortUserName() +
File.separator + appReport.getName() +
File.separator + TimelineUtils.DEFAULT_FLOW_VERSION +
File.separator + appReport.getStartTime() +
File.separator + appId.toString();
// for this test, we expect MAPREDUCE_JOB and MAPREDUCE_TASK dirs
String outputDirJob =
basePath + File.separator + "MAPREDUCE_JOB" + File.separator;
File entityFolder = new File(outputDirJob);
assertTrue(entityFolder.isDirectory(),
"Job output directory: " + outputDirJob + " does not exist.");
// check for job event file
String jobEventFileName = appId.toString().replaceAll("application", "job")
+ FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
String jobEventFilePath = outputDirJob + jobEventFileName;
File jobEventFile = new File(jobEventFilePath);
assertTrue(jobEventFile.exists(),
"jobEventFilePath: " + jobEventFilePath + " does not exist.");
verifyEntity(jobEventFile, EventType.JOB_FINISHED.name(),
true, false, null, false);
Set<String> cfgsToCheck = Sets.newHashSet("dummy_conf1", "dummy_conf2",
"huge_dummy_conf1", "huge_dummy_conf2");
verifyEntity(jobEventFile, null, false, true, cfgsToCheck, false);
// for this test, we expect MR job metrics are published in YARN_APPLICATION
String outputAppDir =
basePath + File.separator + "YARN_APPLICATION" + File.separator;
entityFolder = new File(outputAppDir);
assertTrue(entityFolder.isDirectory(),
"Job output directory: " + outputAppDir + " does not exist.");
// check for job event file
String appEventFileName = appId.toString()
+ FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
String appEventFilePath = outputAppDir + appEventFileName;
File appEventFile = new File(appEventFilePath);
assertTrue(appEventFile.exists(),
"appEventFilePath: " + appEventFilePath +
" does not exist.");
verifyEntity(appEventFile, null, true, false, null, false);
verifyEntity(appEventFile, null, false, true, cfgsToCheck, false);
// check for task event file
String outputDirTask =
basePath + File.separator + "MAPREDUCE_TASK" + File.separator;
File taskFolder = new File(outputDirTask);
assertTrue(taskFolder.isDirectory(),
"Task output directory: " + outputDirTask +
" does not exist.");
String taskEventFileName =
appId.toString().replaceAll("application", "task") +
"_m_000000" +
FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
String taskEventFilePath = outputDirTask + taskEventFileName;
File taskEventFile = new File(taskEventFilePath);
assertTrue(taskEventFile.exists(),
"taskEventFileName: " + taskEventFilePath + " does not exist.");
verifyEntity(taskEventFile, EventType.TASK_FINISHED.name(),
true, false, null, true);
// check for task attempt event file
String outputDirTaskAttempt =
basePath + File.separator + "MAPREDUCE_TASK_ATTEMPT" + File.separator;
File taskAttemptFolder = new File(outputDirTaskAttempt);
assertTrue(taskAttemptFolder.isDirectory(),
"TaskAttempt output directory: " + outputDirTaskAttempt +
" does not exist.");
String taskAttemptEventFileName = appId.toString().replaceAll(
"application", "attempt") + "_m_000000_0" +
FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
String taskAttemptEventFilePath = outputDirTaskAttempt +
taskAttemptEventFileName;
File taskAttemptEventFile = new File(taskAttemptEventFilePath);
assertTrue(taskAttemptEventFile.exists(),
"taskAttemptEventFileName: " + taskAttemptEventFilePath +
" does not exist.");
verifyEntity(taskAttemptEventFile, EventType.MAP_ATTEMPT_FINISHED.name(),
true, false, null, true);
}
/**
* Verifies entity by reading the entity file written via FS impl.
* @param entityFile File to be read.
* @param eventId Event to be checked.
* @param chkMetrics If event is not null, this flag determines if metrics
* exist when the event is encountered. If event is null, we merely check
* if metrics exist in the entity file.
* @param chkCfg If event is not null, this flag determines if configs
* exist when the event is encountered. If event is null, we merely check
* if configs exist in the entity file.
* @param cfgsToVerify a set of configs which should exist in the entity file.
* @throws IOException
*/
private void verifyEntity(File entityFile, String eventId,
boolean chkMetrics, boolean chkCfg, Set<String> cfgsToVerify,
boolean checkIdPrefix) throws IOException {
BufferedReader reader = null;
String strLine;
try {
reader = new BufferedReader(new FileReader(entityFile));
long idPrefix = -1;
while ((strLine = reader.readLine()) != null) {
if (strLine.trim().length() > 0) {
org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
entity =
FileSystemTimelineReaderImpl.getTimelineRecordFromJSON(
strLine.trim(),
org.apache.hadoop.yarn.api.records.timelineservice.
TimelineEntity.class);
LOG.info("strLine.trim()= " + strLine.trim());
if (checkIdPrefix) {
assertTrue(entity.getIdPrefix() > 0,
"Entity ID prefix expected to be > 0");
if (idPrefix == -1) {
idPrefix = entity.getIdPrefix();
} else {
assertEquals(idPrefix, entity.getIdPrefix(),
"Entity ID prefix should be same across " +
"each publish of same entity");
}
}
if (eventId == null) {
// Job metrics are published without any events for
// ApplicationEntity. There is also possibility that some other
// ApplicationEntity is published without events, hence loop till
// its found. Same applies to configs.
if (chkMetrics && entity.getMetrics().size() > 0) {
return;
}
if (chkCfg && entity.getConfigs().size() > 0) {
if (cfgsToVerify == null) {
return;
} else {
// Have configs to verify. Keep on removing configs from the set
// of configs to verify as they are found. When the all the
// entities have been looped through, we will check if the set
// is empty or not(indicating if all configs have been found or
// not).
for (Iterator<String> itr =
cfgsToVerify.iterator(); itr.hasNext();) {
String config = itr.next();
if (entity.getConfigs().containsKey(config)) {
itr.remove();
}
}
// All the required configs have been verified, so return.
if (cfgsToVerify.isEmpty()) {
return;
}
}
}
} else {
for (TimelineEvent event : entity.getEvents()) {
if (event.getId().equals(eventId)) {
if (chkMetrics) {
assertTrue(entity.getMetrics().size() > 0);
}
if (chkCfg) {
assertTrue(entity.getConfigs().size() > 0);
if (cfgsToVerify != null) {
for (String cfg : cfgsToVerify) {
assertTrue(entity.getConfigs().containsKey(cfg));
}
}
}
return;
}
}
}
}
}
if (cfgsToVerify != null) {
assertTrue(cfgsToVerify.isEmpty());
return;
}
fail("Expected event : " + eventId + " not found in the file "
+ entityFile);
} finally {
reader.close();
}
}
@Test
public void testMapreduceJobTimelineServiceEnabled()
throws Exception {
Configuration conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
MiniMRYarnCluster cluster = null;
FileSystem fs = null;
Path inDir = new Path(GenericTestUtils.getTempPath("input"));
Path outDir = new Path(GenericTestUtils.getTempPath("output"));
try {
fs = FileSystem.get(conf);
cluster = new MiniMRYarnCluster(
TestMRTimelineEventHandling.class.getSimpleName(), 1);
cluster.init(conf);
cluster.start();
conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
MiniYARNCluster.getHostname() + ":"
+ cluster.getApplicationHistoryServer().getPort());
TimelineStore ts = cluster.getApplicationHistoryServer()
.getTimelineStore();
RunningJob job =
UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
assertEquals(JobStatus.SUCCEEDED,
job.getJobStatus().getState().getValue());
TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null,
null, null, null, null, null, null, null);
assertEquals(0, entities.getEntities().size());
conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
assertEquals(JobStatus.SUCCEEDED,
job.getJobStatus().getState().getValue());
entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null,
null, null, null, null);
assertEquals(1, entities.getEntities().size());
TimelineEntity tEntity = entities.getEntities().get(0);
assertEquals(job.getID().toString(), tEntity.getEntityId());
} finally {
if (cluster != null) {
cluster.stop();
}
deletePaths(fs, inDir, outDir);
}
conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
cluster = null;
try {
cluster = new MiniMRYarnCluster(
TestJobHistoryEventHandler.class.getSimpleName(), 1);
cluster.init(conf);
cluster.start();
conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
MiniYARNCluster.getHostname() + ":"
+ cluster.getApplicationHistoryServer().getPort());
TimelineStore ts = cluster.getApplicationHistoryServer()
.getTimelineStore();
conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
RunningJob job =
UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
assertEquals(JobStatus.SUCCEEDED,
job.getJobStatus().getState().getValue());
TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null,
null, null, null, null, null, null, null);
assertEquals(0, entities.getEntities().size());
conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
assertEquals(JobStatus.SUCCEEDED,
job.getJobStatus().getState().getValue());
entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null,
null, null, null, null);
assertEquals(1, entities.getEntities().size());
TimelineEntity tEntity = entities.getEntities().get(0);
assertEquals(job.getID().toString(), tEntity.getEntityId());
} finally {
if (cluster != null) {
cluster.stop();
}
deletePaths(fs, inDir, outDir);
}
}
/** Delete input paths recursively. Paths should not be null. */
private void deletePaths(FileSystem fs, Path... paths) {
if (fs == null) {
return;
}
for (Path path : paths) {
try {
fs.delete(path, true);
} catch (Exception ignored) {
}
}
}
}
|
TestMRTimelineEventHandling
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/impl/DefaultComponentReferencePropertiesTest.java
|
{
"start": 1908,
"end": 2963
}
|
class ____ extends DefaultEndpoint {
private Expression expression;
private String stringExpression;
private String name;
private Expression special;
private MyEndpoint(String endpointUri, Component component) {
super(endpointUri, component);
}
@Override
public boolean isSingleton() {
return true;
}
@Override
public Producer createProducer() {
return null;
}
@Override
public Consumer createConsumer(Processor processor) {
return null;
}
public void setExpression(Expression expression) {
this.expression = expression;
}
public void setExpression(String expression) {
stringExpression = expression;
}
public void setName(String name) {
this.name = name;
}
public void setSpecial(Expression special) {
this.special = special;
}
}
public static final
|
MyEndpoint
|
java
|
netty__netty
|
transport-classes-io_uring/src/main/java/io/netty/channel/uring/AbstractIoUringServerChannel.java
|
{
"start": 1286,
"end": 1489
}
|
class ____ extends AbstractIoUringChannel implements ServerChannel {
private static final ChannelMetadata METADATA = new ChannelMetadata(false, 16);
private static final
|
AbstractIoUringServerChannel
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/strategies/SequenceInputTypeStrategy.java
|
{
"start": 1825,
"end": 4860
}
|
class ____ implements InputTypeStrategy {
private final List<? extends ArgumentTypeStrategy> argumentStrategies;
private final @Nullable List<String> argumentNames;
public SequenceInputTypeStrategy(
List<? extends ArgumentTypeStrategy> argumentStrategies,
@Nullable List<String> argumentNames) {
Preconditions.checkArgument(
argumentNames == null || argumentNames.size() == argumentStrategies.size());
this.argumentStrategies = argumentStrategies;
this.argumentNames = argumentNames;
}
@Override
public ArgumentCount getArgumentCount() {
return ConstantArgumentCount.of(argumentStrategies.size());
}
@Override
public Optional<List<DataType>> inferInputTypes(
CallContext callContext, boolean throwOnFailure) {
final List<DataType> dataTypes = callContext.getArgumentDataTypes();
if (dataTypes.size() != argumentStrategies.size()) {
return Optional.empty();
}
final List<DataType> inferredDataTypes = new ArrayList<>(dataTypes.size());
for (int i = 0; i < argumentStrategies.size(); i++) {
final ArgumentTypeStrategy argumentTypeStrategy = argumentStrategies.get(i);
final Optional<DataType> inferredDataType =
argumentTypeStrategy.inferArgumentType(callContext, i, throwOnFailure);
if (!inferredDataType.isPresent()) {
return Optional.empty();
}
inferredDataTypes.add(inferredDataType.get());
}
return Optional.of(inferredDataTypes);
}
@Override
public List<Signature> getExpectedSignatures(FunctionDefinition definition) {
final List<Signature.Argument> arguments = new ArrayList<>();
for (int i = 0; i < argumentStrategies.size(); i++) {
if (argumentNames == null) {
arguments.add(argumentStrategies.get(i).getExpectedArgument(definition, i));
} else {
arguments.add(
Signature.Argument.of(
argumentNames.get(i),
argumentStrategies
.get(i)
.getExpectedArgument(definition, i)
.getType()));
}
}
return Collections.singletonList(Signature.of(arguments));
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
SequenceInputTypeStrategy that = (SequenceInputTypeStrategy) o;
return Objects.equals(argumentStrategies, that.argumentStrategies)
&& Objects.equals(argumentNames, that.argumentNames);
}
@Override
public int hashCode() {
return Objects.hash(argumentStrategies, argumentNames);
}
}
|
SequenceInputTypeStrategy
|
java
|
quarkusio__quarkus
|
extensions/elytron-security-jdbc/runtime/src/main/java/io/quarkus/elytron/security/jdbc/AttributeMappingConfig.java
|
{
"start": 283,
"end": 521
}
|
interface ____ {
/**
* The index (1 based numbering) of column to map
*/
@WithDefault("0")
int index();
/**
* The target attribute name
*/
String to();
String toString();
}
|
AttributeMappingConfig
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/main/java/org/springframework/web/reactive/result/condition/AbstractRequestCondition.java
|
{
"start": 804,
"end": 1115
}
|
class ____ {@link RequestCondition} types providing implementations of
* {@link #equals(Object)}, {@link #hashCode()}, and {@link #toString()}.
*
* @author Rossen Stoyanchev
* @since 5.0
* @param <T> the type of objects that this RequestCondition can be combined
* with and compared to
*/
public abstract
|
for
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/generated/Projection.java
|
{
"start": 999,
"end": 1089
}
|
interface ____<IN extends RowData, OUT extends RowData> {
OUT apply(IN row);
}
|
Projection
|
java
|
spring-projects__spring-boot
|
module/spring-boot-http-client/src/main/java/org/springframework/boot/http/client/autoconfigure/reactive/ClientHttpConnectorBuilderCustomizer.java
|
{
"start": 989,
"end": 1230
}
|
interface ____<B extends ClientHttpConnectorBuilder<?>> {
/**
* Customize the given builder.
* @param builder the builder to customize
* @return the customized builder
*/
B customize(B builder);
}
|
ClientHttpConnectorBuilderCustomizer
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java
|
{
"start": 7396,
"end": 8683
}
|
class ____ extends TransportRetentionLeaseAction<RenewRequest> {
@Inject
public TransportRenewAction(
final ThreadPool threadPool,
final ClusterService clusterService,
final TransportService transportService,
final ActionFilters actionFilters,
final ProjectResolver projectResolver,
final IndexNameExpressionResolver indexNameExpressionResolver,
final IndicesService indicesService
) {
super(
RENEW.name(),
threadPool,
clusterService,
transportService,
actionFilters,
projectResolver,
indexNameExpressionResolver,
indicesService,
RenewRequest::new
);
}
@Override
void doRetentionLeaseAction(
final IndexShard indexShard,
final RenewRequest request,
final ActionListener<ActionResponse.Empty> listener
) {
indexShard.renewRetentionLease(request.getId(), request.getRetainingSequenceNumber(), request.getSource());
listener.onResponse(ActionResponse.Empty.INSTANCE);
}
}
public static
|
TransportRenewAction
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/MockIntercept.java
|
{
"start": 1273,
"end": 1712
}
|
interface ____<T> {
/**
* Defines custom behavior for handling the mocked object during its execution.
*
* @param mockedObj the mocked `AbfsRestOperation` object
* @param answer the invocation details for the mock method
* @throws AbfsRestOperationException if an error occurs during the
* mock operation handling
*/
void answer(T mockedObj, InvocationOnMock answer) throws AbfsRestOperationException;
}
|
MockIntercept
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/util/concurrent/TestThread.java
|
{
"start": 11114,
"end": 11657
}
|
class ____ {
final String methodName;
final Object result;
final Throwable throwable;
Response(String methodName, @Nullable Object result, @Nullable Throwable throwable) {
this.methodName = methodName;
this.result = result;
this.throwable = throwable;
}
Object getResult() {
if (throwable != null) {
throw new AssertionError(throwable);
}
return result;
}
Throwable getThrowable() {
assertThat(throwable).isNotNull();
return throwable;
}
}
}
|
Response
|
java
|
apache__spark
|
common/sketch/src/main/java/org/apache/spark/util/sketch/BloomFilterImpl.java
|
{
"start": 860,
"end": 4229
}
|
class ____ extends BloomFilterBase implements Serializable {
BloomFilterImpl(int numHashFunctions, long numBits) {
super(numHashFunctions, numBits);
}
private BloomFilterImpl() {}
protected boolean scatterHashAndSetAllBits(HiLoHash inputHash) {
int h1 = inputHash.hi();
int h2 = inputHash.lo();
long bitSize = bits.bitSize();
boolean bitsChanged = false;
for (int i = 1; i <= numHashFunctions; i++) {
int combinedHash = h1 + (i * h2);
// Flip all the bits if it's negative (guaranteed positive number)
if (combinedHash < 0) {
combinedHash = ~combinedHash;
}
bitsChanged |= bits.set(combinedHash % bitSize);
}
return bitsChanged;
}
protected boolean scatterHashAndGetAllBits(HiLoHash inputHash) {
int h1 = inputHash.hi();
int h2 = inputHash.lo();
long bitSize = bits.bitSize();
for (int i = 1; i <= numHashFunctions; i++) {
int combinedHash = h1 + (i * h2);
// Flip all the bits if it's negative (guaranteed positive number)
if (combinedHash < 0) {
combinedHash = ~combinedHash;
}
if (!bits.get(combinedHash % bitSize)) {
return false;
}
}
return true;
}
protected BloomFilterImpl checkCompatibilityForMerge(BloomFilter other)
throws IncompatibleMergeException {
// Duplicates the logic of `isCompatible` here to provide better error message.
if (other == null) {
throw new IncompatibleMergeException("Cannot merge null bloom filter");
}
if (!(other instanceof BloomFilterImpl that)) {
throw new IncompatibleMergeException(
"Cannot merge bloom filter of class " + other.getClass().getName()
);
}
if (this.bitSize() != that.bitSize()) {
throw new IncompatibleMergeException("Cannot merge bloom filters with different bit size");
}
if (this.numHashFunctions != that.numHashFunctions) {
throw new IncompatibleMergeException(
"Cannot merge bloom filters with different number of hash functions"
);
}
return that;
}
@Override
public void writeTo(OutputStream out) throws IOException {
DataOutputStream dos = new DataOutputStream(out);
dos.writeInt(Version.V1.getVersionNumber());
dos.writeInt(numHashFunctions);
// ignore seed
bits.writeTo(dos);
}
private void readFrom0(InputStream in) throws IOException {
DataInputStream dis = new DataInputStream(in);
int version = dis.readInt();
if (version != Version.V1.getVersionNumber()) {
throw new IOException("Unexpected Bloom filter version number (" + version + ")");
}
this.numHashFunctions = dis.readInt();
this.seed = DEFAULT_SEED;
this.bits = BitArray.readFrom(dis);
}
public static BloomFilterImpl readFrom(InputStream in) throws IOException {
BloomFilterImpl filter = new BloomFilterImpl();
filter.readFrom0(in);
return filter;
}
// no longer necessary, but can't remove without triggering MIMA violations
@Deprecated
public static BloomFilter readFrom(byte[] bytes) throws IOException {
return BloomFilter.readFrom(bytes);
}
@Serial
private void writeObject(ObjectOutputStream out) throws IOException {
writeTo(out);
}
@Serial
private void readObject(ObjectInputStream in) throws IOException {
readFrom0(in);
}
}
|
BloomFilterImpl
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/alterTable/MySqlAlterTableTest23.java
|
{
"start": 911,
"end": 1416
}
|
class ____ extends TestCase {
public void test_alter_add_key() throws Exception {
String sql = "alter table xxxxx modify (xxx default '0');";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toMySqlString(stmt);
assertEquals("ALTER TABLE xxxxx"
+ "\n\tMODIFY COLUMN xxx DEFAULT '0';", output);
}
}
|
MySqlAlterTableTest23
|
java
|
apache__camel
|
core/camel-core-processor/src/main/java/org/apache/camel/processor/MulticastProcessor.java
|
{
"start": 3729,
"end": 4068
}
|
class ____ extends BaseProcessorSupport
implements Navigate<Processor>, Traceable, IdAware, RouteIdAware, ErrorHandlerAware {
private static final Logger LOG = LoggerFactory.getLogger(MulticastProcessor.class);
/**
* Class that represent each step in the multicast route to do
*/
static final
|
MulticastProcessor
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/serializer/DoubleFormatTest2.java
|
{
"start": 495,
"end": 582
}
|
class ____ {
@JSONField(format = "0.00")
public Double value;
}
}
|
Model
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryStringBuilderTest.java
|
{
"start": 5774,
"end": 5935
}
|
class ____ {
void f() {
new StringBuilder().append("foo");
}
}
""")
.doTest();
}
}
|
Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/mapping/PrimitiveArray.java
|
{
"start": 422,
"end": 1104
}
|
class ____ extends Array {
public PrimitiveArray(MetadataBuildingContext buildingContext, PersistentClass owner) {
super( buildingContext, owner );
}
public PrimitiveArray(Supplier<ManagedBean<? extends UserCollectionType>> customTypeBeanResolver, PersistentClass owner, MetadataBuildingContext buildingContext) {
super( customTypeBeanResolver, owner, buildingContext );
}
private PrimitiveArray(PrimitiveArray original) {
super( original );
}
@Override
public Array copy() {
return new PrimitiveArray( this );
}
public boolean isPrimitiveArray() {
return true;
}
public Object accept(ValueVisitor visitor) {
return visitor.accept(this);
}
}
|
PrimitiveArray
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/creators/FactoryAndConstructor2962Test.java
|
{
"start": 854,
"end": 1212
}
|
class ____ {
public int version;
}
}
private final ObjectMapper MAPPER = newJsonMapper();
// [databind#2962]
@Test
public void testImplicitCtorExplicitFactory() throws Exception
{
ExampleDto2962 result = MAPPER.readValue("42", ExampleDto2962.class);
assertEquals(42, result.version);
}
}
|
Json2962
|
java
|
grpc__grpc-java
|
core/src/test/java/io/grpc/internal/SpiffeUtilTest.java
|
{
"start": 3892,
"end": 4882
}
|
class ____ {
@Parameter
public String uri;
@Test
public void parseFailureTest() {
assertThrows(IllegalArgumentException.class, () -> SpiffeUtil.parse(uri));
}
@Parameters(name = "spiffeId={0}")
public static Collection<String> data() {
return Arrays.asList(
"spiffe:///",
"spiffe://example!com",
"spiffe://exampleя.com/workload-1",
"spiffe://example.com/us/florida/miamiя",
"spiffe:/trustdomain/path",
"spiffe:///path",
"spiffe://trust%20domain/path",
"spiffe://user@trustdomain/path",
"spiffe:// /",
"",
"http://trustdomain/path",
"//trustdomain/path",
"://trustdomain/path",
"piffe://trustdomain/path",
"://",
"://trustdomain",
"spiff",
"spiffe",
"spiffe:////",
"spiffe://trust.domain/../path"
);
}
}
public static
|
ParseFailureTest
|
java
|
quarkusio__quarkus
|
integration-tests/amazon-lambda/src/test/java/io/quarkus/it/amazon/lambda/profiles/EphemeralPortProfile.java
|
{
"start": 127,
"end": 336
}
|
class ____ implements QuarkusTestProfile {
@Override
public Map<String, String> getConfigOverrides() {
return Map.of("quarkus.lambda.mock-event-server.test-port", "0");
}
}
|
EphemeralPortProfile
|
java
|
quarkusio__quarkus
|
extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/typesafe/ObjectValidationSuccessTest.java
|
{
"start": 365,
"end": 1058
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(Movie.class, MovieExtensions.class)
.addAsResource(new StringAsset("{@java.lang.Object obj}"
+ "{@java.lang.Object anotherObj}"
+ "{obj.toString}:{anotherObj.raw}"),
"templates/object.html"));
@Inject
Template object;
@Test
public void testResult() {
assertEquals("hello:<strong>", object.data("obj", "hello").data("anotherObj", "<strong>").render());
}
}
|
ObjectValidationSuccessTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/injection/guice/Key.java
|
{
"start": 6009,
"end": 7523
}
|
interface ____ {
Annotation getAnnotation();
Class<? extends Annotation> getAnnotationType();
boolean hasAttributes();
AnnotationStrategy withoutAttributes();
}
/**
* Gets the strategy for an annotation.
*/
static AnnotationStrategy strategyFor(Annotation annotation) {
Objects.requireNonNull(annotation, "annotation");
Class<? extends Annotation> annotationType = annotation.annotationType();
ensureRetainedAtRuntime(annotationType);
ensureIsBindingAnnotation(annotationType);
if (annotationType.getMethods().length == 0) {
return new AnnotationTypeStrategy(annotationType, annotation);
}
return new AnnotationInstanceStrategy(annotation);
}
private static void ensureRetainedAtRuntime(Class<? extends Annotation> annotationType) {
if (Annotations.isRetainedAtRuntime(annotationType) == false) {
throw new IllegalArgumentException(
annotationType.getName() + " is not retained at runtime. Please annotate it with @Retention(RUNTIME)."
);
}
}
private static void ensureIsBindingAnnotation(Class<? extends Annotation> annotationType) {
if (isBindingAnnotation(annotationType) == false) {
throw new IllegalArgumentException(
annotationType.getName() + " is not a binding annotation. Please annotate it with @BindingAnnotation."
);
}
}
|
AnnotationStrategy
|
java
|
apache__rocketmq
|
client/src/main/java/org/apache/rocketmq/client/exception/OffsetNotFoundException.java
|
{
"start": 856,
"end": 1245
}
|
class ____ extends MQBrokerException {
public OffsetNotFoundException() {
}
public OffsetNotFoundException(int responseCode, String errorMessage) {
super(responseCode, errorMessage);
}
public OffsetNotFoundException(int responseCode, String errorMessage, String brokerAddr) {
super(responseCode, errorMessage, brokerAddr);
}
}
|
OffsetNotFoundException
|
java
|
quarkusio__quarkus
|
integration-tests/rest-client-reactive-stork/src/test/java/io/quarkus/it/rest/reactive/stork/SlowWiremockServer.java
|
{
"start": 339,
"end": 1087
}
|
class ____ extends WiremockBase {
static final String SLOW_RESPONSE = "hello, I'm a slow server";
@Override
int httpPort() {
return 8767;
}
@Override
int httpsPort() {
return 8444;
}
@Override
protected Map<String, String> initWireMock(WireMockServer server) {
server.stubFor(WireMock.get("/hello")
.willReturn(aResponse().withFixedDelay(1000)
.withBody(SLOW_RESPONSE).withStatus(200)));
server.stubFor(WireMock.get(urlPathTemplate("/hello/v2/{name}"))
.willReturn(aResponse().withFixedDelay(1000).withBody(SLOW_RESPONSE).withStatus(200)));
return Map.of("slow-service", "localhost:8444");
}
}
|
SlowWiremockServer
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webmvc/src/main/java/org/springframework/boot/webmvc/autoconfigure/WebMvcProperties.java
|
{
"start": 5799,
"end": 7205
}
|
class ____ {
/**
* Path of the dispatcher servlet. Setting a custom value for this property is not
* compatible with the PathPatternParser matching strategy.
*/
private String path = "/";
/**
* Load on startup priority of the dispatcher servlet.
*/
private int loadOnStartup = -1;
public String getPath() {
return this.path;
}
public void setPath(String path) {
Assert.notNull(path, "'path' must not be null");
Assert.isTrue(!path.contains("*"), "'path' must not contain wildcards");
this.path = path;
}
public int getLoadOnStartup() {
return this.loadOnStartup;
}
public void setLoadOnStartup(int loadOnStartup) {
this.loadOnStartup = loadOnStartup;
}
public String getServletMapping() {
if (this.path.isEmpty() || this.path.equals("/")) {
return "/";
}
if (this.path.endsWith("/")) {
return this.path + "*";
}
return this.path + "/*";
}
public String getPath(String path) {
String prefix = getServletPrefix();
if (!path.startsWith("/")) {
path = "/" + path;
}
return prefix + path;
}
public String getServletPrefix() {
String result = this.path;
int index = result.indexOf('*');
if (index != -1) {
result = result.substring(0, index);
}
if (result.endsWith("/")) {
result = result.substring(0, result.length() - 1);
}
return result;
}
}
public static
|
Servlet
|
java
|
apache__camel
|
test-infra/camel-test-infra-smb/src/test/java/org/apache/camel/test/infra/smb/services/SmbServiceFactory.java
|
{
"start": 1190,
"end": 2404
}
|
class ____ extends SingletonService<SmbService> implements SmbService {
public SingletonSmbService(SmbService service, String name) {
super(service, name);
}
@Override
public String address() {
return getService().address();
}
@Override
public String password() {
return getService().password();
}
@Override
public String shareName() {
return getService().shareName();
}
@Override
public String smbFile(String file) {
return getService().smbFile(file);
}
@Override
public String userName() {
return getService().userName();
}
@Override
public <T> T copyFileFromContainer(String fileName, ThrowingFunction<InputStream, T> function) {
return getService().copyFileFromContainer(fileName, function);
}
}
public static SmbService createService() {
return builder()
.addLocalMapping(SmbLocalContainerService::new)
.addRemoteMapping(SmbRemoteService::new)
.build();
}
public static
|
SingletonSmbService
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/XChangeEndpointBuilderFactory.java
|
{
"start": 7890,
"end": 9897
}
|
interface ____ {
/**
* XChange (camel-xchange)
* Access market data and trade on Bitcoin and Altcoin exchanges.
*
* Category: blockchain
* Since: 2.21
* Maven coordinates: org.apache.camel:camel-xchange
*
* @return the dsl builder for the headers' name.
*/
default XChangeHeaderNameBuilder xchange() {
return XChangeHeaderNameBuilder.INSTANCE;
}
/**
* XChange (camel-xchange)
* Access market data and trade on Bitcoin and Altcoin exchanges.
*
* Category: blockchain
* Since: 2.21
* Maven coordinates: org.apache.camel:camel-xchange
*
* Syntax: <code>xchange:name</code>
*
* Path parameter: name (required)
* The exchange to connect to
*
* @param path name
* @return the dsl builder
*/
default XChangeEndpointBuilder xchange(String path) {
return XChangeEndpointBuilderFactory.endpointBuilder("xchange", path);
}
/**
* XChange (camel-xchange)
* Access market data and trade on Bitcoin and Altcoin exchanges.
*
* Category: blockchain
* Since: 2.21
* Maven coordinates: org.apache.camel:camel-xchange
*
* Syntax: <code>xchange:name</code>
*
* Path parameter: name (required)
* The exchange to connect to
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path name
* @return the dsl builder
*/
default XChangeEndpointBuilder xchange(String componentName, String path) {
return XChangeEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the XChange component.
*/
public static
|
XChangeBuilders
|
java
|
quarkusio__quarkus
|
integration-tests/grpc-hibernate-reactive/src/main/java/com/example/reactive/Item.java
|
{
"start": 143,
"end": 204
}
|
class ____ extends PanacheEntity {
public String text;
}
|
Item
|
java
|
google__guice
|
core/test/com/google/inject/ScopesTest.java
|
{
"start": 35552,
"end": 35817
}
|
class ____ {
private S(int preventInjectionWithoutProvider) {}
}
/**
* Provides all the instances of S simultaneously using {@link CyclicBarrier} with {@code
* nThreads}. Intended to be used for threads synchronization during injection.
*/
static
|
S
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/cluster/coordination/ClusterFormationInfoAction.java
|
{
"start": 2763,
"end": 2837
}
|
class ____ equal
return 1;
}
}
public static
|
are
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsPatternSatisfying_Pattern_Test.java
|
{
"start": 1341,
"end": 2768
}
|
class ____ {
@Test
void should_pass_if_string_contains_given_pattern_and_first_match_satisfies_assertion() {
// GIVEN
Pattern pattern = Pattern.compile(".o(.o)");
// WHEN/THEN
then("Frodo").containsPatternSatisfying(pattern, matcher -> assertThat(matcher.group(1)).isEqualTo("do"));
}
@Test
void should_fail_if_string_does_not_contain_given_pattern() {
// GIVEN
Pattern pattern = Pattern.compile("(o.a)");
// WHEN
var assertionError = expectAssertionError(() -> assertThat("Frodo").containsPatternSatisfying(pattern,
matcher -> assertThat(true).isTrue()));
// THEN
then(assertionError).hasMessage(shouldContainPattern("Frodo", pattern.toString()).create());
}
@Test
void should_pass_if_string_contains_given_pattern_but_match_does_not_satisfy_assertion() {
// GIVEN
Pattern pattern = Pattern.compile(".(a.)");
// WHEN
var assertionError = expectAssertionError(() -> assertThat("bar").containsPatternSatisfying(pattern,
matcher -> assertThat(matcher.group(1)).contains("z")));
// THEN
then(assertionError).hasMessage(shouldContain("ar", "z", StandardComparisonStrategy.instance()).create());
}
}
|
CharSequenceAssert_containsPatternSatisfying_Pattern_Test
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.